"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import tempfile
import colorama

from copy import copy
from types import ListType
from shutil import rmtree
from os.path import join, exists, basename, abspath, normpath, relpath
from os import getcwd, walk
from time import time
import fnmatch

from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException
from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
from tools.targets import TARGET_NAMES, TARGET_MAP
from tools.libraries import Library
from tools.toolchains import TOOLCHAIN_CLASSES
from jinja2 import FileSystemLoader
from jinja2.environment import Environment
from tools.config import Config

def prep_report(report, target_name, toolchain_name, id_name):
    # Setup report keys
    if target_name not in report:
        report[target_name] = {}

    if toolchain_name not in report[target_name]:
        report[target_name][toolchain_name] = {}

    if id_name not in report[target_name][toolchain_name]:
        report[target_name][toolchain_name][id_name] = []

def prep_properties(properties, target_name, toolchain_name, vendor_label):
    # Setup test properties
    if target_name not in properties:
        properties[target_name] = {}

    if toolchain_name not in properties[target_name]:
        properties[target_name][toolchain_name] = {}

    properties[target_name][toolchain_name]["target"] = target_name
    properties[target_name][toolchain_name]["vendor"] = vendor_label
    properties[target_name][toolchain_name]["toolchain"] = toolchain_name

def create_result(target_name, toolchain_name, id_name, description):
    cur_result = {}
    cur_result["target_name"] = target_name
    cur_result["toolchain_name"] = toolchain_name
    cur_result["id"] = id_name
    cur_result["description"] = description
    cur_result["elapsed_time"] = 0
    cur_result["output"] = ""

    return cur_result

def add_result_to_report(report, result):
    target = result["target_name"]
    toolchain = result["toolchain_name"]
    id_name = result['id']
    result_wrap = { 0: result }
    report[target][toolchain][id_name].append(result_wrap)

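# Illustrative example (not executed anywhere in this module): the three
# helpers above cooperate to build a nested report dictionary keyed by
# target, then toolchain, then test id. The target/toolchain names below are
# hypothetical example values.
#
#   report = {}
#   prep_report(report, "K64F", "GCC_ARM", "MBED")
#   result = create_result("K64F", "GCC_ARM", "MBED", "mbed SDK")
#   result["result"] = "OK"
#   add_result_to_report(report, result)
#   # report == {"K64F": {"GCC_ARM": {"MBED": [{0: result}]}}}
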
def get_config(src_path, target, toolchain_name):
    # Convert src_path to a list if needed
    src_paths = [src_path] if type(src_path) != ListType else src_path

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # Create configuration object
    config = Config(target, src_paths)

    # If the 'target' argument is a string, convert it to a target instance
    if isinstance(target, str):
        try:
            target = TARGET_MAP[target]
        except KeyError:
            raise KeyError("Target '%s' not found" % target)

    # Toolchain instance
    try:
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options=None, notify=None, macros=None, silent=True, extra_verbose=False)
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    # Scan src_paths for config files
    resources = toolchain.scan_resources(src_paths[0])
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path))

    config.add_config_files(resources.json_files)
    return config.get_config_data()

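# Illustrative sketch: get_config() can be used to inspect the merged JSON
# configuration of a source tree without building it. The source path and
# target name below are hypothetical example values.
#
#   config_data = get_config("./source", "K64F", "GCC_ARM")
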
def build_project(src_path, build_path, target, toolchain_name,
                  libraries_paths=None, options=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
                  jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None,
                  extra_verbose=False, config=None):
    """ Build a single project (for example, one test / UT) """

    # Convert src_path to a list if needed
    src_paths = [src_path] if type(src_path) != ListType else src_path

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))
    first_src_path = src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd()
    abs_path = abspath(first_src_path)
    project_name = basename(normpath(abs_path))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths)

    # If the 'target' argument is a string, convert it to a target instance
    if isinstance(target, str):
        try:
            target = TARGET_MAP[target]
        except KeyError:
            raise KeyError("Target '%s' not found" % target)

    # Toolchain instance
    try:
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs
    toolchain.build_all = clean

    if name is None:
        # Use a default project name based on the project folder name
        name = project_name
        toolchain.info("Building project %s (%s, %s)" % (project_name, target.name, toolchain_name))
    else:
        # The user specified a custom name for the output binary
        toolchain.info("Building project %s to %s (%s, %s)" % (project_name, name, target.name, toolchain_name))

    if report is not None:
        start = time()

        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    try:
        # Scan src_path and libraries_paths for resources
        resources = toolchain.scan_resources(src_paths[0])
        for path in src_paths[1:]:
            resources.add(toolchain.scan_resources(path))
        if libraries_paths is not None:
            src_paths.extend(libraries_paths)
            for path in libraries_paths:
                resources.add(toolchain.scan_resources(path))

        if linker_script is not None:
            resources.linker_script = linker_script

        # Build Directory
        if clean:
            if exists(build_path):
                rmtree(build_path)
        mkdir(build_path)

        # Add any additional include directories if necessary
        if inc_dirs:
            if type(inc_dirs) == ListType:
                resources.inc_dirs.extend(inc_dirs)
            else:
                resources.inc_dirs.append(inc_dirs)

        # Update the configuration with any .json files found while scanning
        config.add_config_files(resources.json_files)

        # And add the configuration macros to the toolchain
        toolchain.add_macros(config.get_config_data_macros())

        # Compile Sources
        for path in src_paths:
            src = toolchain.scan_resources(path)
            objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
            resources.objects.extend(objects)

        # Link Program
        res, _ = toolchain.link_program(resources, build_path, name)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return res

    except Exception, e:
        if report is not None:
            end = time()

            if isinstance(e, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise e

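# Illustrative sketch of a typical build_project() call; the paths and target
# name below are hypothetical example values. The return value is whatever
# toolchain.link_program() returns first (the path of the linked image).
#
#   report = {}
#   image = build_project("./source", "./build", "K64F", "GCC_ARM",
#                         jobs=4, clean=True, report=report)
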
def build_library(src_paths, build_path, target, toolchain_name,
                  dependencies_paths=None, options=None, name=None, clean=False, archive=True,
                  notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None,
                  jobs=1, silent=False, report=None, properties=None, extra_verbose=False,
                  project_id=None):
    """ src_paths: the path (or list of paths) of the source directory
        build_path: the path of the build directory
        target: ['LPC1768', 'LPC11U24', 'LPC2368']
        toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
        dependencies_paths: list of paths to additional libraries
        clean: Rebuild everything if True
        notify: Notify function for logs
        verbose: Write the actual tool command lines if True
        inc_dirs: additional include directories which should be included in the build
        inc_dirs_ext: additional include directories which should be copied to the library directory
    """
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # The first path will give the name to the library
    project_name = basename(src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd())
    if name is None:
        # Use a default name based on the project folder name
        name = project_name

    if report is not None:
        start = time()

        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s" % src_path

            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        toolchain.info("Building library %s (%s, %s)" % (name, target.name, toolchain_name))

        # Scan Resources
        resources = None
        for path in src_paths:
            # Scan resources
            resource = toolchain.scan_resources(path)

            # Copy headers, objects and static libraries - all files needed for a static lib
            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
            toolchain.copy_files(resource.objects, build_path, rel_path=resource.base_path)
            toolchain.copy_files(resource.libraries, build_path, rel_path=resource.base_path)
            if resource.linker_script:
                toolchain.copy_files(resource.linker_script, build_path, rel_path=resource.base_path)

            # Extend the resources collection
            if not resources:
                resources = resource
            else:
                resources.add(resource)

        # Add any additional include directories if necessary
        if inc_dirs:
            if type(inc_dirs) == ListType:
                resources.inc_dirs.extend(inc_dirs)
            else:
                resources.inc_dirs.append(inc_dirs)

        # Add extra include directories / files which are required by the library.
        # These files are usually not in the same directory as the source files,
        # so the scan above will not pick them up.
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.add(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                resources.inc_dirs.extend(lib_resources.inc_dirs)

        if archive:
            # Use a temporary path when building an archive
            tmp_path = join(build_path, '.temp')
            mkdir(tmp_path)
        else:
            tmp_path = build_path

        # Handle configuration
        config = Config(target)

        # Update the configuration with any .json files found while scanning
        config.add_config_files(resources.json_files)

        # And add the configuration macros to the toolchain
        toolchain.add_macros(config.get_config_data_macros())

        # Compile Sources
        for path in src_paths:
            src = toolchain.scan_resources(path)
            objects = toolchain.compile_sources(src, abspath(tmp_path), resources.inc_dirs)
            resources.objects.extend(objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

    except Exception, e:
        if report is not None:
            end = time()

            if isinstance(e, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(e, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(e)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise e

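# Illustrative sketch of building a static library from a source tree; the
# paths below are hypothetical example values. Note that unlike
# build_project(), this function does not convert a target name string to a
# target instance, so a TARGET_MAP lookup is needed.
#
#   build_library(["./mylib"], "./build/mylib", TARGET_MAP["K64F"], "GCC_ARM",
#                 name="mylib", archive=True, jobs=4)
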
######################
### Legacy methods ###
######################

def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
    """ Legacy method for building mbed libraries
        Builds the library in the proper directory using all dependencies and macros defined by the user.
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain_name)
        return False

    # We need to combine macros from the parameter list with macros from the library definition
    MACROS = lib.macros if lib.macros else []
    if macros:
        macros.extend(MACROS)
    else:
        macros = MACROS

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s" % src_path

            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by the library.
        # These files are usually not in the same directory as the source files,
        # so the scan above will not pick them up.
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Create the desired build directory structure
        bin_path = join(build_path, toolchain.obj_path)
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', toolchain.obj_path)
        mkdir(tmp_path)

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
        dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

    except Exception, e:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(e)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise e

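# Illustrative sketch: build_lib() resolves sources, build directory,
# dependencies and macros from the library definition in tools.libraries, so
# a call only names the library id. "rtos" is a hypothetical example id.
#
#   build_lib("rtos", TARGET_MAP["K64F"], "GCC_ARM", jobs=4)
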
# We do have unique legacy conventions about how we build and package the mbed library
def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
    """ Returns True if the library was built, False if building was skipped """

    if report is not None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
        print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')

        if report is not None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Toolchain
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        # Source and Build Paths
        BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
        BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
        mkdir(BUILD_TOOLCHAIN)

        TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
        mkdir(TMP_PATH)

        # CMSIS
        toolchain.info("Building library %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
        cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, BUILD_TARGET)
        toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
        toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)

        objects = toolchain.compile_sources(resources, TMP_PATH)
        toolchain.copy_files(objects, BUILD_TOOLCHAIN)

        # mbed
        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

        # Target specific sources
        HAL_SRC = join(MBED_TARGETS_PATH, "hal")
        hal_implementation = toolchain.scan_resources(HAL_SRC)
        toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
        incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
        objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)

        # Common Sources
        mbed_resources = toolchain.scan_resources(MBED_COMMON)
        objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)

        # A number of compiled files need to be copied as objects as opposed to
        # being part of the mbed library, for reasons that have to do with the way
        # the linker searches for symbols in archives. These are:
        #   - retarget.o: to make sure that the C standard lib symbols get overridden
        #   - board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various weak SDK functions
        separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []

        for o in objects:
            for name in separate_names:
                if o.endswith(name):
                    separate_objects.append(o)

        for o in separate_objects:
            objects.remove(o)

        toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")

        for o in separate_objects:
            toolchain.copy_files(o, BUILD_TOOLCHAIN)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception, e:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(e)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise e

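# Illustrative sketch: building the core mbed SDK for one target/toolchain
# pair. The function returns False (and records "SKIP" in the report) when
# the toolchain is not supported by the target; the target below is a
# hypothetical example value.
#
#   built = build_mbed_libs(TARGET_MAP["K64F"], "GCC_ARM", jobs=4)
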
def get_unique_supported_toolchains():
    """ Get list of all unique toolchains supported by targets """
    unique_supported_toolchains = []
    for target in TARGET_NAMES:
        for toolchain in TARGET_MAP[target].supported_toolchains:
            if toolchain not in unique_supported_toolchains:
                unique_supported_toolchains.append(toolchain)
    return unique_supported_toolchains

def mcu_toolchain_matrix(verbose_html=False, platform_filter=None):
    """ Shows the target map using prettytable """
    unique_supported_toolchains = get_unique_supported_toolchains()
    from prettytable import PrettyTable # Only used in this function so building works without extra modules

    # All tests status table print
    columns = ["Platform"] + unique_supported_toolchains
    pt = PrettyTable(["Platform"] + unique_supported_toolchains)
    # Align table
    for col in columns:
        pt.align[col] = "c"
    pt.align["Platform"] = "l"

    perm_counter = 0
    target_counter = 0
    for target in sorted(TARGET_NAMES):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name
        default_toolchain = TARGET_MAP[target].default_toolchain
        for unique_toolchain in unique_supported_toolchains:
            text = "-"
            if default_toolchain == unique_toolchain:
                text = "Default"
                perm_counter += 1
            elif unique_toolchain in TARGET_MAP[target].supported_toolchains:
                text = "Supported"
                perm_counter += 1
            row.append(text)
        pt.add_row(row)

    result = pt.get_html_string() if verbose_html else pt.get_string()
    result += "\n"
    result += "*Default - default on-line compiler\n"
    result += "*Supported - supported off-line compiler\n"
    result += "\n"
    result += "Total platforms: %d\n" % (target_counter)
    result += "Total permutations: %d" % (perm_counter)
    return result

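# Illustrative usage: print the support matrix for all targets whose names
# match a regular expression.
#
#   print mcu_toolchain_matrix(platform_filter="LPC")
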
def get_target_supported_toolchains(target):
    """ Return the list of toolchains supported by a target """
    return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP else None

def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
    # Toolchain
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs
    toolchain.build_all = clean

    # Source and Build Paths
    BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
    BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
    mkdir(BUILD_TOOLCHAIN)

    TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
    mkdir(TMP_PATH)

    # CMSIS
    toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
    cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
    resources = toolchain.scan_resources(cmsis_src)

    # Copy files before analysis
    toolchain.copy_files(resources.headers, BUILD_TARGET)
    toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)

    # Gather include paths, c/cpp sources and macros to transfer to the cppcheck command line
    includes = ["-I%s" % i for i in resources.inc_dirs]
    includes.append("-I%s" % str(BUILD_TARGET))
    c_sources = " ".join(resources.c_sources)
    cpp_sources = " ".join(resources.cpp_sources)
    macros = ["-D%s" % s for s in toolchain.get_symbols() + toolchain.macros]

    includes = map(str.strip, includes)
    macros = map(str.strip, macros)

    check_cmd = CPPCHECK_CMD
    check_cmd += CPPCHECK_MSG_FORMAT
    check_cmd += includes
    check_cmd += macros

    # Pass the source file list via a file to avoid "command line too long" errors on some OSes
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    _stdout, _stderr, _rc = run_cmd(check_cmd)
    if verbose:
        print _stdout
        print _stderr

    # =========================================================================

    # MBED
    toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))

    # Common Headers
    toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

    # Target specific sources
    HAL_SRC = join(MBED_TARGETS_PATH, "hal")
    hal_implementation = toolchain.scan_resources(HAL_SRC)

    # Copy files before analysis
    toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files, BUILD_TARGET, HAL_SRC)
    incdirs = toolchain.scan_resources(BUILD_TARGET)

    target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
    target_includes.append("-I%s" % str(BUILD_TARGET))
    target_includes.append("-I%s" % str(HAL_SRC))
    target_c_sources = " ".join(incdirs.c_sources)
    target_cpp_sources = " ".join(incdirs.cpp_sources)
    target_macros = ["-D%s" % s for s in toolchain.get_symbols() + toolchain.macros]

    # Common Sources
    mbed_resources = toolchain.scan_resources(MBED_COMMON)

    # Gather include paths, c/cpp sources and macros to transfer to the cppcheck command line
    mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
    mbed_includes.append("-I%s" % str(BUILD_TARGET))
    mbed_includes.append("-I%s" % str(MBED_COMMON))
    mbed_includes.append("-I%s" % str(MBED_API))
    mbed_includes.append("-I%s" % str(MBED_HAL))
    mbed_c_sources = " ".join(mbed_resources.c_sources)
    mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)

    target_includes = map(str.strip, target_includes)
    mbed_includes = map(str.strip, mbed_includes)
    target_macros = map(str.strip, target_macros)

    check_cmd = CPPCHECK_CMD
    check_cmd += CPPCHECK_MSG_FORMAT
    check_cmd += target_includes
    check_cmd += mbed_includes
    check_cmd += target_macros

    # Pass the source file list via a file to avoid "command line too long" errors on some OSes
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in target_c_sources.split())
    tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr

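# Illustrative sketch of plausible cppcheck parameters for the static
# analysis entry points. The flags and template below are hypothetical
# examples, not necessarily the values used by the mbed test scripts.
#
#   CPPCHECK_CMD = ["cppcheck", "--enable=all"]
#   CPPCHECK_MSG_FORMAT = ["--template=[{severity}] {file}@{line}: {id}:{message}"]
#   static_analysis_scan(TARGET_MAP["K64F"], "GCC_ARM",
#                        CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, verbose=True)
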
def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
                             options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
    lib = Library(lib_id)
    if lib.is_supported(target, toolchain):
        static_analysis_scan_library(lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
                                     lib.dependencies, options,
                                     verbose=verbose, clean=clean, macros=macros, notify=notify, jobs=jobs, extra_verbose=extra_verbose)
    else:
        print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)

def static_analysis_scan_library(src_paths, build_path, target, toolchain_name, cppcheck_cmd, cppcheck_msg_format,
                                 dependencies_paths=None, options=None, name=None, clean=False,
                                 notify=None, verbose=False, macros=None, jobs=1, extra_verbose=False):
    """ Scans a library (or just some set of sources/headers) for statically detectable defects """
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    for src_path in src_paths:
        if not exists(src_path):
            raise Exception("The library source folder does not exist: %s" % src_path)

    # Toolchain instance
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs

    # The first path will give the name to the library
    name = basename(src_paths[0])
    toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))

    # Scan Resources
    resources = []
    for src_path in src_paths:
        resources.append(toolchain.scan_resources(src_path))

    # Dependencies Include Paths
    dependencies_include_dir = []
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            dependencies_include_dir.extend(lib_resources.inc_dirs)

    # Create the desired build directory structure
    bin_path = join(build_path, toolchain.obj_path)
    mkdir(bin_path)
    tmp_path = join(build_path, '.temp', toolchain.obj_path)
    mkdir(tmp_path)

    # Gather include paths, c/cpp sources and macros to transfer to the cppcheck command line
    includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
    c_sources = " "
    cpp_sources = " "
    macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]

    # Copy Headers
    for resource in resources:
        toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
        includes += ["-I%s" % i for i in resource.inc_dirs]
        c_sources += " ".join(resource.c_sources) + " "
        cpp_sources += " ".join(resource.cpp_sources) + " "

    dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)

    includes = map(str.strip, includes)
    macros = map(str.strip, macros)

    check_cmd = cppcheck_cmd
    check_cmd += cppcheck_msg_format
    check_cmd += includes
    check_cmd += macros

    # We need to pass some parameters via file to avoid "command line too long" errors on some OSes.
    # A temporary file is created to store e.g. the cppcheck list of files for the command line.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    # This lets us grab results from both stdout and stderr (so we can show them).
    # We assume the static code analysis tool outputs defects on stderr.
    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr

def print_build_results(result_list, build_name):
    """ Generate result string for build results """
    result = ""
    if len(result_list) > 0:
        result += build_name + "\n"
        result += "\n".join([" * %s" % f for f in result_list])
        result += "\n"
    return result

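# Illustrative example of the output format, assuming two failing builds:
#
#   print print_build_results(["K64F::GCC_ARM", "LPC1768::ARM"], "Build failures:")
#   # Build failures:
#   #  * K64F::GCC_ARM
#   #  * LPC1768::ARM
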
def write_build_report(build_report, template_filename, filename):
    build_report_failing = []
    build_report_passing = []

    for report in build_report:
        if len(report["failing"]) > 0:
            build_report_failing.append(report)
        else:
            build_report_passing.append(report)

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as f:
        f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))

def scan_for_source_paths(path, exclude_paths=None):
    ignorepatterns = []
    paths = []

    def is_ignored(file_path):
        for pattern in ignorepatterns:
            if fnmatch.fnmatch(file_path, pattern):
                return True
        return False

    """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
    When topdown is True, the caller can modify the dirnames list in-place
    (perhaps using del or slice assignment), and walk() will only recurse into
    the subdirectories whose names remain in dirnames; this can be used to prune
    the search, impose a specific order of visiting, or even to inform walk()
    about directories the caller creates or renames before it resumes walk()
    again. Modifying dirnames when topdown is False is ineffective, because in
    bottom-up mode the directories in dirnames are generated before dirpath
    itself is generated.
    """
    for root, dirs, files in walk(path, followlinks=True):
        # Remove ignored directories
        # Check if the folder contains a .mbedignore file
        if ".mbedignore" in files:
            with open(join(root, ".mbedignore"), "r") as f:
                lines = f.readlines()
                lines = [l.strip() for l in lines]                    # Strip whitespace
                lines = [l for l in lines if l != ""]                 # Strip empty lines
                lines = [l for l in lines if not re.match("^#", l)]   # Strip comment lines
                # Append the root path to the glob patterns
                # and append the patterns to ignorepatterns
                ignorepatterns.extend([join(root, line.strip()) for line in lines])

        for d in copy(dirs):
            dir_path = join(root, d)

            # Always ignore hidden directories
            if d.startswith('.'):
                dirs.remove(d)
                continue

            # Remove dirs that already match the ignorepatterns
            # to avoid traversing into them and to prevent them
            # from appearing in the include path. The continue guards
            # against removing the same entry twice.
            if is_ignored(join(dir_path, "")):
                dirs.remove(d)
                continue

            if exclude_paths:
                for exclude_path in exclude_paths:
                    rel_path = relpath(dir_path, exclude_path)
                    if not (rel_path.startswith('..')):
                        dirs.remove(d)
                        break

        # Add root to include paths
        paths.append(root)

    return paths

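# Illustrative example: a .mbedignore file placed inside a scanned tree
# prunes matching directories. Patterns are globs relative to the folder
# that contains the .mbedignore file, one per line, '#' starts a comment:
#
#   # ./mylib/.mbedignore
#   docs/*
#   tests/*
#
#   source_dirs = scan_for_source_paths("./mylib")   # skips ./mylib/docs, ./mylib/tests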