"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
|
2014-06-11 13:47:54 +00:00
|
|
|
|
2014-07-29 13:48:48 +00:00
|
|
|
import re
|
2014-08-15 10:17:33 +00:00
|
|
|
import tempfile
|
2017-04-04 16:35:00 +00:00
|
|
|
import datetime
|
2017-04-07 16:03:02 +00:00
|
|
|
import uuid
|
2013-02-18 15:32:11 +00:00
|
|
|
from types import ListType
|
2014-08-15 10:17:33 +00:00
|
|
|
from shutil import rmtree
|
2017-02-01 22:24:39 +00:00
|
|
|
from os.path import join, exists, dirname, basename, abspath, normpath, splitext
|
2017-04-06 16:17:54 +00:00
|
|
|
from os.path import relpath
|
2017-02-01 22:24:39 +00:00
|
|
|
from os import linesep, remove, makedirs
|
2015-11-05 20:42:45 +00:00
|
|
|
from time import time
|
2017-02-01 22:24:39 +00:00
|
|
|
from intelhex import IntelHex
|
2017-04-06 16:17:54 +00:00
|
|
|
from json import load, dump
|
2013-02-18 15:32:11 +00:00
|
|
|
|
2016-08-12 16:27:39 +00:00
|
|
|
from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\
|
2017-02-01 22:24:39 +00:00
|
|
|
ToolException, InvalidReleaseTargetException, intelhex_offset
|
2016-10-03 23:45:09 +00:00
|
|
|
from tools.paths import MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,\
|
|
|
|
MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE,\
|
2016-10-01 18:34:57 +00:00
|
|
|
MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,\
|
|
|
|
BUILD_DIR
|
2016-06-09 20:34:53 +00:00
|
|
|
from tools.targets import TARGET_NAMES, TARGET_MAP
|
|
|
|
from tools.libraries import Library
|
|
|
|
from tools.toolchains import TOOLCHAIN_CLASSES
|
2015-03-31 22:56:00 +00:00
|
|
|
from jinja2 import FileSystemLoader
|
|
|
|
from jinja2.environment import Environment
|
2016-06-09 22:50:03 +00:00
|
|
|
from tools.config import Config
|
2013-06-10 14:44:08 +00:00
|
|
|
|
2016-07-27 01:40:51 +00:00
|
|
|
RELEASE_VERSIONS = ['2', '5']
|
|
|
|
|
2015-11-05 20:42:45 +00:00
|
|
|
def prep_report(report, target_name, toolchain_name, id_name):
    """Setup report keys

    Ensures report[target_name][toolchain_name][id_name] exists, creating
    each missing level ({} / {} / []) without clobbering existing entries.

    Positional arguments:
    report - the report to fill
    target_name - the target being used
    toolchain_name - the toolchain being used
    id_name - the name of the executable or library being built
    """
    # setdefault creates each nesting level only when it is missing,
    # replacing the three separate `not x in d` membership checks
    report.setdefault(target_name, {}) \
          .setdefault(toolchain_name, {}) \
          .setdefault(id_name, [])
|
|
|
|
|
2015-11-24 23:39:20 +00:00
|
|
|
def prep_properties(properties, target_name, toolchain_name, vendor_label):
    """Setup test properties

    Fills properties[target_name][toolchain_name] with the target, vendor
    and toolchain values, creating missing dict levels as needed.

    Positional arguments:
    properties - the dict to fill
    target_name - the target the test is targeting
    toolchain_name - the toolchain that will compile the test
    vendor_label - the vendor
    """
    # setdefault replaces the two separate `not x in d` membership checks
    tc_props = properties.setdefault(target_name, {}) \
                         .setdefault(toolchain_name, {})
    tc_props["target"] = target_name
    tc_props["vendor"] = vendor_label
    tc_props["toolchain"] = toolchain_name
|
|
|
|
|
|
|
|
def create_result(target_name, toolchain_name, id_name, description):
    """Create a result dictionary

    Positional arguments:
    target_name - the target being built for
    toolchain_name - the toolchain doing the building
    id_name - the name of the executable or library being built
    description - a human readable description of what's going on
    """
    # Seed every field up front so later reporting code can rely on the keys
    return {
        "target_name": target_name,
        "toolchain_name": toolchain_name,
        "id": id_name,
        "description": description,
        "elapsed_time": 0,
        "output": "",
    }
|
|
|
|
|
|
|
|
def add_result_to_report(report, result):
    """Add a single result to a report dictionary

    Positional arguments:
    report - the report to append to
    result - the result to append
    """
    # Stamp the result with the time it was recorded and a unique id
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())

    # File the result under its target/toolchain/id slot, wrapped in a
    # single-entry dict keyed by 0 (the format downstream consumers expect)
    target_name = result["target_name"]
    toolchain_name = result["toolchain_name"]
    report[target_name][toolchain_name][result["id"]].append({0: result})
|
|
|
|
|
2016-07-18 18:57:59 +00:00
|
|
|
def get_config(src_paths, target, toolchain_name):
    """Get the configuration object for a target-toolchain combination

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools

    Returns a (config_data, macros, features) tuple.
    """
    # Convert src_paths to a list if needed.
    # isinstance replaces the Python 2-only `type(...) != ListType` check and
    # also accepts list subclasses.
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)

    # Scan src_path for config files
    resources = toolchain.scan_resources(src_paths[0])
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path))

    # Update configuration files until added features create no changes:
    # enabling a feature can pull in new sources that carry their own
    # config files, so iterate until the feature set reaches a fixed point
    prev_features = set()
    while True:
        # Update the configuration with any .json files found while scanning
        toolchain.config.add_config_files(resources.json_files)

        # Add features while we find new ones
        features = set(toolchain.config.get_features())
        if features == prev_features:
            break

        for feature in features:
            if feature in resources.features:
                resources += resources.features[feature]

        prev_features = features
    toolchain.config.validate_config()
    # Force evaluation of the region list so that any errors in the region
    # definitions are raised here rather than later
    if toolchain.config.has_regions:
        _ = list(toolchain.config.regions)

    cfg, macros = toolchain.config.get_config_data()
    features = toolchain.config.get_features()
    return cfg, macros, features
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2016-07-26 15:22:02 +00:00
|
|
|
def is_official_target(target_name, version):
    """ Returns True, None if a target is part of the official release for the
    given version. Return False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name if the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the following toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = list(required_toolchains)
            required_toolchains_sorted.sort()
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = list(supported_toolchains)
            supported_toolchains_sorted.sort()

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the following toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains_sorted)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(supported_toolchains_sorted))

            # mbed OS 5 requires the standard C library build
            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            # BUG FIX: the original interpolated only `version` into a format
            # string containing two '%s' placeholders, which raised TypeError
            # instead of producing this message.
            reason = ("Target '%s' has set an invalid release version of '%s'"
                      % (target.name, version)) + \
                (linesep +
                 "Please choose from the following release versions: %s"
                 % ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif version not in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason
|
|
|
|
|
2016-07-27 01:40:51 +00:00
|
|
|
def transform_release_toolchains(toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    # mbed OS 5 releases are pinned to the three officially supported
    # toolchains; every other version passes the list through untouched
    return ['ARM', 'GCC_ARM', 'IAR'] if version == '5' else toolchains
|
|
|
|
|
2016-07-26 15:22:02 +00:00
|
|
|
|
|
|
|
def get_mbed_official_release(version):
    """ Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    # Collect every target that declares this release version, pairing each
    # target name with the toolchains supported for this release
    release_targets = []
    for target_name in TARGET_NAMES:
        target = TARGET_MAP[target_name]
        if hasattr(target, 'release_versions') and \
           version in target.release_versions:
            release_toolchains = transform_release_toolchains(
                target.supported_toolchains, version)
            release_targets.append(
                tuple([target.name, tuple(release_toolchains)]))

    mbed_official_release = tuple(release_targets)

    # Sanity-check each released target; a single invalid entry aborts the
    # whole release
    for target in mbed_official_release:
        is_official, reason = is_official_target(target[0], version)

        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release
|
2016-07-26 15:22:02 +00:00
|
|
|
|
2017-02-01 22:24:39 +00:00
|
|
|
def add_regions_to_profile(profile, config, toolchain_class):
    """Add regions to the build profile, if there are any.

    Positional Arguments:
    profile - the profile to update
    config - the configuration object that owns the region
    toolchain_class - the class of the toolchain being used
    """
    # Nothing to annotate when no profile was supplied
    if not profile:
        return
    regions = list(config.regions)

    # Every region is exported to the preprocessor as <NAME>_ADDR/<NAME>_SIZE
    for region in regions:
        region_prefix = region.name.upper()
        profile["common"].append(
            "-D%s=0x%x" % (region_prefix + "_ADDR", region.start))
        profile["common"].append(
            "-D%s=0x%x" % (region_prefix + "_SIZE", region.size))

    # The active region additionally drives the linker placement defines
    active_region = [r for r in regions if r.active][0]
    profile["ld"].append(
        toolchain_class.make_ld_define("MBED_APP_START", active_region.start))
    profile["ld"].append(
        toolchain_class.make_ld_define("MBED_APP_SIZE", active_region.size))

    print("Using regions in this build:")
    for region in regions:
        print("  Region %s size 0x%x, offset 0x%x"
              % (region.name, region.size, region.start))
|
|
|
|
|
2016-07-26 15:22:02 +00:00
|
|
|
|
2017-02-28 20:04:54 +00:00
|
|
|
def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, silent=False, verbose=False,
                      extra_verbose=False, config=None,
                      app_config=None, build_profile=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    build_dir - the directory where build output will be placed
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    silent - suppress printing of progress indicators
    verbose - Write the actual tools command lines used if True
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """
    # Drop duplicated source paths (keeping the first entry in place) to
    # avoid compiling and linking the same objects more than once
    deduplicated_tail = list(set(src_paths[1:]))
    src_paths = [src_paths[0]] + deduplicated_tail

    # Build a configuration object on demand when the caller did not pass one
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target

    # Resolve the toolchain class, translating an unknown name into a
    # friendlier error message
    if toolchain_name not in TOOLCHAIN_CLASSES:
        raise KeyError("Toolchain %s not supported" % toolchain_name)
    cur_tc = TOOLCHAIN_CLASSES[toolchain_name]

    # Export any configured memory regions into the build profile
    if config.has_regions:
        add_regions_to_profile(build_profile, config, cur_tc)

    # Instantiate and configure the toolchain
    toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
                       extra_verbose=extra_verbose,
                       build_profile=build_profile)
    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean
    toolchain.VERBOSE = verbose

    return toolchain
|
|
|
|
|
2017-02-01 22:24:39 +00:00
|
|
|
def merge_region_list(region_list, destination, padding=b'\xFF'):
    """Merge the region_list into a single binary image.

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    padding - bytes to fill gaps with

    Raises ToolException when the active region has no file or when a
    region's contents exceed its configured size.
    """
    merged = IntelHex()

    print("Merging Regions:")

    for region in region_list:
        # The active (application) region must have been built already
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if region.filename:
            print("  Filling region %s with %s" % (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part_size = (part.maxaddr() - part.minaddr()) + 1
            if part_size > region.size:
                raise ToolException("Contents of region %s does not fit"
                                    % region.name)
            merged.merge(part)
            pad_size = region.size - part_size
            # Pad out to the full region size so the next region starts at
            # its configured offset; the last region is left unpadded to
            # keep the output image small
            if pad_size > 0 and region != region_list[-1]:
                print("  Padding region %s with 0x%x bytes" % (region.name, pad_size))
                # NOTE(review): padding is appended at merged.maxaddr() + 1,
                # which assumes regions are processed in ascending address
                # order with no overlap -- confirm with the config's region
                # ordering guarantees
                merged.puts(merged.maxaddr() + 1, padding * pad_size)

    # Create the output directory on first use
    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    print("Space used after regions merged: 0x%x" %
          (merged.maxaddr() - merged.minaddr() + 1))
    with open(destination, "wb+") as output:
        merged.tofile(output, format='bin')
|
|
|
|
|
2016-08-12 16:27:39 +00:00
|
|
|
def scan_resources(src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None):
    """ Scan resources using initialized toolchain

    Positional arguments
    src_paths - the paths to source directories
    toolchain - valid toolchain object
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources

    Keyword arguments:
    base_path - base path passed through to the toolchain's resource scanner
    """

    # Scan src_path
    resources = toolchain.scan_resources(src_paths[0], base_path=base_path)
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path, base_path=base_path))

    # Scan dependency paths for include dirs
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            resources.inc_dirs.extend(lib_resources.inc_dirs)

    # Add additional include directories if passed.
    # isinstance replaces the Python 2-only `type(...) == ListType` check and
    # also accepts list subclasses.
    if inc_dirs:
        if isinstance(inc_dirs, list):
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # Load resources into the config system which might expand/modify resources
    # based on config data
    resources = toolchain.config.load_resources(resources)

    # Set the toolchain's configuration data
    toolchain.set_config_data(toolchain.config.get_config_data())

    return resources
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-18 18:57:59 +00:00
|
|
|
def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None,
                  macros=None, inc_dirs=None, jobs=1, silent=False,
                  report=None, properties=None, project_id=None,
                  project_description=None, extra_verbose=False, config=None,
                  app_config=None, build_profile=None):
    """ Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do it's job
    clean - Rebuild everything if True
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict filled with target/toolchain/vendor metadata
                 (see prep_properties)
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler

    Returns the path of the linked (and possibly region-merged) image.
    """

    # Convert src_path to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]
    # Extend src_paths wiht libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        # NOTE(review): this assumes inc_dirs is a list whenever
        # libraries_paths is given; a None inc_dirs would raise
        # AttributeError here -- confirm all callers pass inc_dirs
        # alongside libraries_paths
        inc_dirs.extend(map(dirname, libraries_paths))

    # A clean build starts from an empty build directory
    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, config=config, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    name = (name or toolchain.config.name or
            basename(normpath(abspath(src_paths[0]))))
    toolchain.info("Building project %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report != None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties != None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
        # Targets that do not declare mbed OS 5 support cannot build
        # projects that pull in the rtos configuration
        if (hasattr(toolchain.target, "release_versions") and
                "5" not in toolchain.target.release_versions and
                "rtos" in toolchain.config.lib_config_data):
            if "Cortex-A" in toolchain.target.core:
                raise NotSupportedException(
                    ("%s Will be supported in mbed OS 5.6. "
                     "To use the %s, please checkout the mbed OS 5.4 release branch. "
                     "See https://developer.mbed.org/platforms/Renesas-GR-PEACH/#important-notice "
                     "for more information") % (toolchain.target.name, toolchain.target.name))
            else:
                raise NotSupportedException("Target does not support mbed OS 5")

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        if toolchain.config.has_regions:
            # Link the application region separately, then merge all regions
            # into the final .bin image
            res, _ = toolchain.link_program(resources, build_path, name + "_application")
            region_list = list(toolchain.config.regions)
            region_list = [r._replace(filename=res) if r.active else r
                           for r in region_list]
            res = join(build_path, name) + ".bin"
            merge_region_list(region_list, res)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)

        # Memory-map reporting is optional; only some toolchains provide it
        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table')

            if not silent:
                # Python 2 print statement (this module is Python 2 only)
                print memap_table

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', map_csv)

        resources.detect_duplicates(toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output() + memap_table
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = toolchain.map_outputs
            cur_result["bin"] = res
            cur_result["elf"] = splitext(res)[0] + ".elf"
            cur_result.update(toolchain.report)

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report != None:
            end = time()

            # Distinguish "target can't do this" from a genuine failure
            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
|
2013-06-10 14:44:08 +00:00
|
|
|
|
2013-04-18 14:43:29 +00:00
|
|
|
def build_library(src_paths, build_path, target, toolchain_name,
                  dependencies_paths=None, name=None, clean=False,
                  archive=True, notify=None, verbose=False, macros=None,
                  inc_dirs=None, jobs=1, silent=False, report=None,
                  properties=None, extra_verbose=False, project_id=None,
                  remove_config_header_file=False, app_config=None,
                  build_profile=None):
    """ Build a library

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the library
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    dependencies_paths - The location of libraries to include when linking
    name - the name of the library
    clean - Rebuild everything if True
    archive - whether the library will create an archive file
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    project_id - the name that goes in the report
    remove_config_header_file - delete config header file when done building
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler

    Returns True on success; raises on failure (after recording the failure
    in `report` when one was supplied).
    """

    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Build path
    if archive:
        # Use temp path when building archive
        tmp_path = join(build_path, '.temp')
        mkdir(tmp_path)
    else:
        tmp_path = build_path

    # Clean the build directory
    if clean and exists(tmp_path):
        rmtree(tmp_path)
    mkdir(tmp_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent,
        verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))
    toolchain.info("Building library %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        cur_result['type'] = 'library'
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    # Fail early when any source folder is missing
    for src_path in src_paths:
        if not exists(src_path):
            # BUGFIX: was a tuple ("...%s", src_path); format the message
            error_msg = "The library source folder does not exist: %s" % src_path
            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain,
                                   dependencies_paths=dependencies_paths,
                                   inc_dirs=inc_dirs)

        # Copy headers, objects and static libraries - all files needed for
        # static lib
        toolchain.copy_files(resources.headers, build_path, resources=resources)
        toolchain.copy_files(resources.objects, build_path, resources=resources)
        toolchain.copy_files(resources.libraries, build_path,
                             resources=resources)
        toolchain.copy_files(resources.json_files, build_path,
                             resources=resources)
        if resources.linker_script:
            toolchain.copy_files(resources.linker_script, build_path,
                                 resources=resources)

        if resources.hex_files:
            toolchain.copy_files(resources.hex_files, build_path,
                                 resources=resources)

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if remove_config_header_file:
            config_header_path = toolchain.get_config_header()
            if config_header_path:
                remove(config_header_path)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report is not None:
            end = time()

            # Only these two outcomes are distinguished; any other exception
            # leaves "result" at its default from create_result()
            if isinstance(exc, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
|
2015-11-05 20:42:45 +00:00
|
|
|
|
2016-06-09 22:51:26 +00:00
|
|
|
######################
|
|
|
|
### Legacy methods ###
|
|
|
|
######################
|
|
|
|
|
2017-03-07 00:23:16 +00:00
|
|
|
def mbed2_obj_path(target_name, toolchain_name):
    """Return the legacy (mbed 2) object directory fragment for a
    target/toolchain pair: TARGET_<target>/TOOLCHAIN_<toolchain class name>.
    """
    toolchain_class = TOOLCHAIN_CLASSES[toolchain_name]
    return join("TARGET_%s" % target_name,
                "TOOLCHAIN_%s" % toolchain_class.__name__)
|
2017-03-07 00:23:16 +00:00
|
|
|
|
2016-09-27 18:15:22 +00:00
|
|
|
def build_lib(lib_id, target, toolchain_name, verbose=False,
              clean=False, macros=None, notify=None, jobs=1, silent=False,
              report=None, properties=None, extra_verbose=False,
              build_profile=None):
    """ Legacy method for building mbed libraries

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler

    Returns False when the library is not supported on this target/toolchain,
    True on success; raises on build failure.
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition. BUGFIX: build a new list instead of extending the caller's
    # `macros` list in place.
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros = list(macros) + lib_macros
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # BUGFIX: was a tuple ("...%s", src_path); format the message
            error_msg = "The library source folder does not exist: %s" % src_path

            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        toolchain.info("Building library %s (%s, %s)" %
                       (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library
        # This files usually are not in the same directory as source files so
        # previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(map(dirname,
                                                    lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource,
                                                     dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
|
2013-02-18 15:32:11 +00:00
|
|
|
|
2016-08-12 16:27:39 +00:00
|
|
|
# We do have unique legacy conventions about how we build and package the mbed
|
|
|
|
# library
|
2016-09-27 18:15:22 +00:00
|
|
|
def build_mbed_libs(target, toolchain_name, verbose=False,
                    clean=False, macros=None, notify=None, jobs=1, silent=False,
                    report=None, properties=None, extra_verbose=False,
                    build_profile=None):
    """ Function returns True if library was built and False if building was
    skipped

    Builds the legacy (mbed 2) SDK: CMSIS, the common mbed headers/sources and
    the target-specific HAL, placing results under MBED_LIBRARIES.

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    verbose - Write the actual tools command lines used if True
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """

    # Initialize reporting before anything else so even a "SKIP" is recorded
    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' %
              (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' %
              (target.name, supported_toolchains_text, 's'
               if len(target.supported_toolchains) > 1 else ''))

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        # Toolchain
        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # CMSIS
        toolchain.info("Building library %s (%s, %s)" %
                       ('CMSIS', target.name, toolchain_name))
        cmsis_src = MBED_CMSIS_PATH
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, build_target)
        toolchain.copy_files(resources.linker_script, build_toolchain)
        toolchain.copy_files(resources.bin_files, build_toolchain)

        objects = toolchain.compile_sources(resources, tmp_path)
        toolchain.copy_files(objects, build_toolchain)

        # mbed
        toolchain.info("Building library %s (%s, %s)" %
                       ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        # Copy the drivers/platform/hal headers into their legacy locations
        # and remember each destination as an include directory
        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        # Target specific sources
        hal_src = MBED_TARGETS_PATH
        hal_implementation = toolchain.scan_resources(hal_src)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(hal_implementation,
                                            library_incdirs + incdirs)
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources
        # NOTE(review): starting from None relies on the Resources class
        # accepting None in its add/radd implementation — confirm in
        # tools/toolchains before touching this.
        mbed_resources = None
        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(dir)

        objects = toolchain.compile_sources(mbed_resources,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as loose objects (as
        # opposed to archived) because of the way the linker searches for
        # symbols in archives. These are:
        # - mbed_retarget.o: to make sure that the C standard lib symbols get
        #   overridden
        # - mbed_board.o: mbed_die is weak
        # - mbed_overrides.o: this contains platform overrides of various
        #   weak SDK functions
        # - mbed_main.o: this contains main redirection
        separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o',
                                            'mbed_overrides.o', 'mbed_main.o'], []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        # Keep the special objects out of the archive; ship them alongside it
        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
|
2014-08-04 13:29:20 +00:00
|
|
|
|
2016-06-09 22:51:26 +00:00
|
|
|
|
2016-07-27 01:40:51 +00:00
|
|
|
def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    seen = []

    # Choose the source of per-target toolchain lists: either the supplied
    # release tuples (name, toolchains) or the full target database.
    if release_targets:
        toolchain_lists = (entry[1] for entry in release_targets)
    else:
        toolchain_lists = (TARGET_MAP[tgt].supported_toolchains
                           for tgt in TARGET_NAMES)

    # De-duplicate while preserving first-seen order
    for toolchains in toolchain_lists:
        for tc_name in toolchains:
            if tc_name not in seen:
                seen.append(tc_name)

    return seen
|
|
|
|
|
2017-03-22 18:06:57 +00:00
|
|
|
def mcu_toolchain_list(release_version='5'):
    """ Shows list of toolchains

    Keyword arguments:
    release_version - major mbed OS release version used to pick the official
                      release targets; any non-string value means all known
                      targets and toolchains
    """

    if isinstance(release_version, basestring):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    # Only the selected release's targets are needed; the previous
    # implementation queried every release version and built an unused
    # name map.
    if release_version in RELEASE_VERSIONS:
        release_targets = get_mbed_official_release(release_version)
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + \
        unique_supported_toolchains
    return "\n".join(columns)
|
|
|
|
|
|
|
|
|
|
|
|
def mcu_target_list(release_version='5'):
    """ Shows target list

    Keyword arguments:
    release_version - major mbed OS release version used to pick the official
                      release targets; any non-string value means all known
                      targets
    """

    if isinstance(release_version, basestring):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    # Only the selected release's targets are needed; the previous
    # implementation queried every release version and built an unused
    # name map.
    if release_version in RELEASE_VERSIONS:
        release_targets = get_mbed_official_release(release_version)
    else:
        release_targets = None

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    return "\n".join(target_names)
|
|
|
|
|
2014-06-09 15:10:47 +00:00
|
|
|
|
2016-08-12 16:27:39 +00:00
|
|
|
def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """ Shows target map using prettytable

    Renders one row per target with a "Supported"/"-" cell for each mbed OS
    release version and each known toolchain, followed by summary counters.

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - remove results that match the string
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable

    if isinstance(release_version, basestring):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'


    version_release_targets = {}
    version_release_target_names = {}

    # Collect the official targets for every release so each row can show
    # per-version support
    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[
                                                     version]]

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns)
    # Align table
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    target_names = []

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        # One cell per release version
        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        # One cell per toolchain; perm_counter counts supported combinations
        for unique_toolchain in unique_supported_toolchains:
            if unique_toolchain in TARGET_MAP[target].supported_toolchains:
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
        else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n"% (target_counter)
    # NOTE(review): the toolchain count is only appended when exactly one
    # target matched — looks intentional for single-platform queries, but
    # confirm before relying on it
    if target_counter == 1:
        result += "Supported toolchains: %d"% (perm_counter)
    return result
|
2014-06-10 14:29:15 +00:00
|
|
|
|
|
|
|
|
2014-07-10 09:56:14 +00:00
|
|
|
def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    # Unknown targets yield None rather than raising a KeyError
    if target in TARGET_MAP:
        return TARGET_MAP[target].supported_toolchains
    return None
|
2014-07-10 09:56:14 +00:00
|
|
|
|
|
|
|
|
2014-06-23 12:36:55 +00:00
|
|
|
def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for

    Returns an empty string when there are no results; otherwise the build
    name, one bulleted line per result, and a trailing newline.
    """
    result = ""
    if result_list:
        # Header line, then one " * <result>" line per entry
        bullets = "\n".join(" * %s" % entry for entry in result_list)
        result = build_name + "\n" + bullets + "\n"
    return result
|
2015-03-27 23:55:50 +00:00
|
|
|
|
2016-08-12 16:27:39 +00:00
|
|
|
def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results

    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable
    columns_text = ['name', 'target', 'toolchain']
    columns_int = ['static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
    table = PrettyTable(columns_text + columns_int)

    for col in columns_text:
        table.align[col] = 'l'

    for col in columns_int:
        table.align[col] = 'r'

    for target in report:
        for toolchain in report[target]:
            for name in report[target][toolchain]:
                for dlist in report[target][toolchain][name]:
                    # Iterate the records directly instead of indexing
                    # dlist by each of its keys
                    for record in dlist.values():
                        # Get 'memory_usage' record and build table with
                        # statistics
                        if 'memory_usage' in record and record['memory_usage']:
                            # Note that summary should be in the last record of
                            # 'memory_usage' section. This is why we are
                            # grabbing last "[-1]" record.
                            summary = record['memory_usage'][-1]['summary']
                            table.add_row([
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                summary['static_ram'],
                                summary['stack'],
                                summary['heap'],
                                summary['total_ram'],
                                summary['total_flash'],
                            ])

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result
|
|
|
|
|
2015-04-01 18:15:15 +00:00
|
|
|
def write_build_report(build_report, template_filename, filename):
    """Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of build
    report
    filename - the location on disk to write the file to
    """
    # Partition the reports: any report with a non-empty "failing" list
    # counts as failing, everything else as passing
    build_report_failing = [rep for rep in build_report
                            if len(rep["failing"]) > 0]
    build_report_passing = [rep for rep in build_report
                            if len(rep["failing"]) == 0]

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(
            failing_builds=build_report_failing,
            passing_builds=build_report_passing))
|
2017-04-06 16:17:54 +00:00
|
|
|
|
|
|
|
|
2017-04-07 17:38:49 +00:00
|
|
|
def merge_build_data(filename, toolchain_report, app_type):
    """Merge the builds of a toolchain report into a build-data JSON file

    Positional arguments:
    filename - the build data JSON file to read from and append to
    toolchain_report - mapping of target -> toolchain -> project -> builds,
                       where each build is a list whose first element is the
                       build record dict
    app_type - 'type' value recorded on each build that does not already
               carry one
    """
    path_to_file = dirname(abspath(filename))
    try:
        # Start from the previously saved build data, if it exists and
        # parses as JSON; otherwise begin a fresh record
        with open(filename) as prev_file:
            build_data = load(prev_file)
    except (IOError, ValueError):
        build_data = {'builds': []}
    for tgt in toolchain_report.values():
        for tc in tgt.values():
            for project in tc.values():
                for build in project:
                    try:
                        # Store artifact paths relative to the build data
                        # file so the report stays valid when relocated
                        build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
                        build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
                    except KeyError:
                        # Builds that produced no artifacts carry no paths
                        pass
                    if 'type' not in build[0]:
                        build[0]['type'] = app_type
                    build_data['builds'].append(build[0])
    # JSON is text, so open in text mode; the context managers also fix the
    # original's leaked file handles (open() without close on read and write)
    with open(filename, "w") as out_file:
        dump(build_data, out_file, indent=4, separators=(',', ': '))
|