mirror of https://github.com/ARMmbed/mbed-os.git
Merge pull request #3852 from theotherjimmy/ignore-build-dir
Ignore build directory from scan resources
commit e9158f4782
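The diff below threads the build output directory through toolchain construction instead of handing it to compile_sources(): prepare_toolchain() gains a build_dir argument, mbedToolchain.__init__() stores it as self.build_dir, scan_resources() skips that directory so stale build output is never rescanned as source, and compile_sources() drops its old build_path parameter. A minimal sketch of the new calling convention, assuming the mbed-os tools package is importable and that "K64F" / "GCC_ARM" are a valid target and toolchain for this checkout (illustrative only, not a verbatim excerpt):

from tools.build_api import prepare_toolchain

src_paths = ["."]      # project sources (assumed layout)
build_dir = "BUILD"    # output directory; scan_resources() now ignores it

# The build directory is baked into the toolchain at construction time.
toolchain = prepare_toolchain(src_paths, build_dir, "K64F", "GCC_ARM", jobs=1)

# Resource scanning prunes build_dir, so previous build artifacts are not picked up.
resources = toolchain.scan_resources(src_paths[0])

# compile_sources() no longer takes a build path; it uses toolchain.build_dir.
objects = toolchain.compile_sources(resources, resources.inc_dirs)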
@@ -34,7 +34,6 @@ from tools.options import get_default_options_parser
 from tools.options import extract_profile
 from tools.build_api import build_library, build_mbed_libs, build_lib
 from tools.build_api import mcu_toolchain_matrix
-from tools.build_api import static_analysis_scan, static_analysis_scan_lib, static_analysis_scan_library
 from tools.build_api import print_build_results
 from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
 from utils import argparse_filestring_type, args_error
@@ -120,12 +119,6 @@ if __name__ == '__main__':
                         default=None,
                         help='For some commands you can use filter to filter out results')

-    parser.add_argument("--cppcheck",
-                        action="store_true",
-                        dest="cppcheck_validation",
-                        default=False,
-                        help="Forces 'cppcheck' static code analysis")
-
     parser.add_argument("-j", "--jobs", type=int, dest="jobs",
                         default=0, help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)")
     parser.add_argument("-N", "--artifact-name", dest="artifact_name",
@@ -212,34 +205,6 @@ if __name__ == '__main__':

     for toolchain in toolchains:
         for target in targets:
-            # CPPCHECK code validation
-            if options.cppcheck_validation:
-                try:
-                    mcu = TARGET_MAP[target]
-                    # CMSIS and MBED libs analysis
-                    profile = extract_profile(parser, options, toolchain)
-                    static_analysis_scan(
-                        mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
-                        verbose=options.verbose, jobs=options.jobs,
-                        build_profile=profile)
-                    for lib_id in libraries:
-                        # Static check for library
-                        static_analysis_scan_lib(
-                            lib_id, mcu, toolchain, CPPCHECK_CMD,
-                            CPPCHECK_MSG_FORMAT,
-                            extra_verbose=options.extra_verbose_notify,
-                            verbose=options.verbose, jobs=options.jobs,
-                            clean=options.clean, macros=options.macros,
-                            build_profile=profile)
-                        pass
-                except Exception, e:
-                    if options.verbose:
-                        import traceback
-                        traceback.print_exc(file=sys.stdout)
-                        sys.exit(1)
-                    print e
-            else:
-                # Build
             tt_id = "%s::%s" % (toolchain, target)
             if toolchain not in TARGET_MAP[target].supported_toolchains:
                 # Log this later
@@ -121,7 +121,7 @@ def get_config(src_paths, target, toolchain_name):
         src_paths = [src_paths]

     # Pass all params to the unified prepare_resources()
-    toolchain = prepare_toolchain(src_paths, target, toolchain_name)
+    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)

     # Scan src_path for config files
     resources = toolchain.scan_resources(src_paths[0])
@@ -299,7 +299,7 @@ def add_regions_to_profile(profile, config, toolchain_class):
               % (region.name, region.size, region.start))


-def prepare_toolchain(src_paths, target, toolchain_name,
+def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                       macros=None, clean=False, jobs=1,
                       notify=None, silent=False, verbose=False,
                       extra_verbose=False, config=None,
@@ -339,7 +339,7 @@ def prepare_toolchain(src_paths, target, toolchain_name,
         add_regions_to_profile(build_profile, config, cur_tc)

     # Toolchain instance
-    toolchain = cur_tc(target, notify, macros, silent,
+    toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
                        extra_verbose=extra_verbose, build_profile=build_profile)

     toolchain.config = config
@@ -475,8 +475,8 @@ def build_project(src_paths, build_path, target, toolchain_name,

     # Pass all params to the unified prepare_toolchain()
     toolchain = prepare_toolchain(
-        src_paths, target, toolchain_name, macros=macros, clean=clean,
-        jobs=jobs, notify=notify, silent=silent, verbose=verbose,
+        src_paths, build_path, target, toolchain_name, macros=macros,
+        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
         extra_verbose=extra_verbose, config=config, app_config=app_config,
         build_profile=build_profile)

@@ -509,8 +509,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
         resources.linker_script = linker_script

     # Compile Sources
-    objects = toolchain.compile_sources(resources, build_path,
-                                        resources.inc_dirs)
+    objects = toolchain.compile_sources(resources, resources.inc_dirs)
     resources.objects.extend(objects)

     # Link Program
@@ -629,9 +628,9 @@ def build_library(src_paths, build_path, target, toolchain_name,

     # Pass all params to the unified prepare_toolchain()
     toolchain = prepare_toolchain(
-        src_paths, target, toolchain_name, macros=macros, clean=clean,
-        jobs=jobs, notify=notify, silent=silent, verbose=verbose,
-        extra_verbose=extra_verbose, app_config=app_config,
+        src_paths, build_path, target, toolchain_name, macros=macros,
+        clean=clean, jobs=jobs, notify=notify, silent=silent,
+        verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
         build_profile=build_profile)

     # The first path will give the name to the library
@@ -687,8 +686,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
                              resources=resources)

         # Compile Sources
-        objects = toolchain.compile_sources(resources, abspath(tmp_path),
-                                            resources.inc_dirs)
+        objects = toolchain.compile_sources(resources, resources.inc_dirs)
         resources.objects.extend(objects)

         if archive:
@@ -733,6 +731,9 @@ def build_library(src_paths, build_path, target, toolchain_name,
 ### Legacy methods ###
 ######################

+def mbed2_obj_path(target_name, toolchain_name):
+    return join("TARGET_" + target_name, "TOOLCHAIN_" + toolchain_name)
+
 def build_lib(lib_id, target, toolchain_name, verbose=False,
               clean=False, macros=None, notify=None, jobs=1, silent=False,
               report=None, properties=None, extra_verbose=False,
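The mbed2_obj_path() helper added above centralizes the legacy mbed 2.0 output naming that build_lib() and build_mbed_libs() previously read from mbedToolchain.obj_path (removed further down). A small, self-contained illustration of the path it produces, with hypothetical target and toolchain names:

from os.path import join

def mbed2_obj_path(target_name, toolchain_name):
    return join("TARGET_" + target_name, "TOOLCHAIN_" + toolchain_name)

print(mbed2_obj_path("K64F", "GCC_ARM"))   # TARGET_K64F/TOOLCHAIN_GCC_ARM on POSIX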
@@ -809,19 +810,23 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,

     try:
         # Toolchain instance
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
-            target, macros=macros, notify=notify, silent=silent,
-            extra_verbose=extra_verbose, build_profile=build_profile)
-        toolchain.VERBOSE = verbose
-        toolchain.jobs = jobs
-        toolchain.build_all = clean
+        # Create the desired build directory structure
+        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
+        mkdir(bin_path)
+        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
+                                                            toolchain_name))
+        mkdir(tmp_path)

+        toolchain = prepare_toolchain(
+            src_paths, tmp_path, target, toolchain_name, macros=macros,
+            notify=notify, silent=silent, extra_verbose=extra_verbose,
+            build_profile=build_profile, jobs=jobs, clean=clean)
+
         toolchain.info("Building library %s (%s, %s)" %
                        (name.upper(), target.name, toolchain_name))

         # Take into account the library configuration (MBED_CONFIG_FILE)
-        config = Config(target)
-        toolchain.config = config
+        config = toolchain.config
         config.add_config_files([MBED_CONFIG_FILE])

         # Scan Resources
@@ -852,11 +857,6 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
         config.load_resources(res)
         toolchain.set_config_data(toolchain.config.get_config_data())

-        # Create the desired build directory structure
-        bin_path = join(build_path, toolchain.obj_path)
-        mkdir(bin_path)
-        tmp_path = join(build_path, '.temp', toolchain.obj_path)
-        mkdir(tmp_path)

         # Copy Headers
         for resource in resources:
@@ -869,8 +869,7 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
         # Compile Sources
         objects = []
         for resource in resources:
-            objects.extend(toolchain.compile_sources(resource, tmp_path,
-                                                     dependencies_include_dir))
+            objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))

         needed_update = toolchain.build_library(objects, bin_path, name)

@@ -954,28 +953,25 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
         return False

     try:
-        # Toolchain
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
-            target, macros=macros, notify=notify, silent=silent,
-            extra_verbose=extra_verbose, build_profile=build_profile)
-        toolchain.VERBOSE = verbose
-        toolchain.jobs = jobs
-        toolchain.build_all = clean
-
-        # Take into account the library configuration (MBED_CONFIG_FILE)
-        config = Config(target)
-        toolchain.config = config
-        config.add_config_files([MBED_CONFIG_FILE])
-        toolchain.set_config_data(toolchain.config.get_config_data())
-
         # Source and Build Paths
         build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
-        build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
+        build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain_name)
         mkdir(build_toolchain)

-        tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+        # Toolchain
+        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
         mkdir(tmp_path)

+        toolchain = prepare_toolchain(
+            [""], tmp_path, target, toolchain_name, macros=macros,
+            notify=notify, silent=silent, extra_verbose=extra_verbose,
+            build_profile=build_profile, jobs=jobs, clean=clean)
+
+        # Take into account the library configuration (MBED_CONFIG_FILE)
+        config = toolchain.config
+        config.add_config_files([MBED_CONFIG_FILE])
+        toolchain.set_config_data(toolchain.config.get_config_data())
+
         # CMSIS
         toolchain.info("Building library %s (%s, %s)" %
                        ('CMSIS', target.name, toolchain_name))
@@ -1015,7 +1011,7 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
         toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
         toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
         incdirs = toolchain.scan_resources(build_target).inc_dirs
-        objects = toolchain.compile_sources(hal_implementation, tmp_path,
+        objects = toolchain.compile_sources(hal_implementation,
                                             library_incdirs + incdirs)
         toolchain.copy_files(objects, build_toolchain)

@@ -1024,7 +1020,7 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
         for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
             mbed_resources += toolchain.scan_resources(dir)

-        objects = toolchain.compile_sources(mbed_resources, tmp_path,
+        objects = toolchain.compile_sources(mbed_resources,
                                             library_incdirs + incdirs)

         # A number of compiled files need to be copied as objects as opposed to
@@ -1203,283 +1199,6 @@ def get_target_supported_toolchains(target):
         else None


-def static_analysis_scan(target, toolchain_name, cppcheck_cmd,
-                         cppcheck_msg_format, verbose=False,
-                         clean=False, macros=None, notify=None, jobs=1,
-                         extra_verbose=False, build_profile=None):
-    """Perform static analysis on a target and toolchain combination
-
-    Positional arguments:
-    target - the target to fake the build for
-    toolchain_name - pretend you would compile with this toolchain
-    cppcheck_cmd - the command used to do static analysis
-    cppcheck_msg_format - the format of the check messages
-
-    Keyword arguments:
-    verbose - more printing!
-    clean - start from a clean slate
-    macros - extra macros to compile with
-    notify - the notification event handling function
-    jobs - number of commands to run at once
-    extra_verbose - even moar printing
-    build_profile - a dict of flags that will be passed to the compiler
-    """
-    # Toolchain
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
-                                                  notify=notify,
-                                                  extra_verbose=extra_verbose,
-                                                  build_profile=build_profile)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-    toolchain.build_all = clean
-
-    # Source and Build Paths
-    build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
-    build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
-    mkdir(build_toolchain)
-
-    tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
-    mkdir(tmp_path)
-
-    # CMSIS
-    toolchain.info("Static analysis for %s (%s, %s)" %
-                   ('CMSIS', target.name, toolchain_name))
-    cmsis_src = MBED_CMSIS_PATH
-    resources = toolchain.scan_resources(cmsis_src)
-
-    # Copy files before analysis
-    toolchain.copy_files(resources.headers, build_target)
-    toolchain.copy_files(resources.linker_script, build_toolchain)
-
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
-    # command line
-    includes = ["-I%s"% i for i in resources.inc_dirs]
-    includes.append("-I%s"% str(build_target))
-    c_sources = " ".join(resources.c_sources)
-    cpp_sources = " ".join(resources.cpp_sources)
-    macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
-
-    includes = [inc.strip() for inc in includes]
-    macros = [mac.strip() for mac in macros]
-
-    check_cmd = cppcheck_cmd
-    check_cmd += cppcheck_msg_format
-    check_cmd += includes
-    check_cmd += macros
-
-    # We need to pass some params via file to avoid "command line too long in
-    # some OSs"
-    tmp_file = tempfile.NamedTemporaryFile(delete=False)
-    tmp_file.writelines(line + '\n' for line in c_sources.split())
-    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
-    tmp_file.close()
-    check_cmd += ["--file-list=%s"% tmp_file.name]
-
-    _stdout, _stderr, _ = run_cmd(check_cmd)
-    if verbose:
-        print _stdout
-        print _stderr
-
-    # =========================================================================
-
-    # MBED
-    toolchain.info("Static analysis for %s (%s, %s)" %
-                   ('MBED', target.name, toolchain_name))
-
-    # Common Headers
-    toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_DRIVERS).headers,
-                         MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_PLATFORM).headers,
-                         MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
-                         MBED_LIBRARIES)
-
-    # Target specific sources
-    hal_src = join(MBED_TARGETS_PATH, "hal")
-    hal_implementation = toolchain.scan_resources(hal_src)
-
-    # Copy files before analysis
-    toolchain.copy_files(hal_implementation.headers +
-                         hal_implementation.hex_files, build_target,
-                         resources=hal_implementation)
-    incdirs = toolchain.scan_resources(build_target)
-
-    target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
-    target_includes.append("-I%s"% str(build_target))
-    target_includes.append("-I%s"% str(hal_src))
-    target_c_sources = " ".join(incdirs.c_sources)
-    target_cpp_sources = " ".join(incdirs.cpp_sources)
-    target_macros = ["-D%s"% s for s in
-                     toolchain.get_symbols() + toolchain.macros]
-
-    # Common Sources
-    mbed_resources = toolchain.scan_resources(MBED_COMMON)
-
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
-    # command line
-    mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
-    mbed_includes.append("-I%s"% str(build_target))
-    mbed_includes.append("-I%s"% str(MBED_DRIVERS))
-    mbed_includes.append("-I%s"% str(MBED_PLATFORM))
-    mbed_includes.append("-I%s"% str(MBED_HAL))
-    mbed_c_sources = " ".join(mbed_resources.c_sources)
-    mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
-
-    target_includes = [inc.strip() for inc in target_includes]
-    mbed_includes = [inc.strip() for inc in mbed_includes]
-    target_macros = [mac.strip() for mac in target_macros]
-
-    check_cmd = cppcheck_cmd
-    check_cmd += cppcheck_msg_format
-    check_cmd += target_includes
-    check_cmd += mbed_includes
-    check_cmd += target_macros
-
-    # We need to pass some parames via file to avoid "command line too long in
-    # some OSs"
-    tmp_file = tempfile.NamedTemporaryFile(delete=False)
-    tmp_file.writelines(line + '\n' for line in target_c_sources.split())
-    tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
-    tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
-    tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
-    tmp_file.close()
-    check_cmd += ["--file-list=%s"% tmp_file.name]
-
-    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
-    if verbose:
-        print _stdout
-        print _stderr
-
-
-def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd,
-                             cppcheck_msg_format, verbose=False,
-                             clean=False, macros=None, notify=None, jobs=1,
-                             extra_verbose=False, build_profile=None):
-    """Perform static analysis on a library as if it were to be compiled for a
-    particular target and toolchain combination
-    """
-    lib = Library(lib_id)
-    if lib.is_supported(target, toolchain):
-        static_analysis_scan_library(
-            lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd,
-            cppcheck_msg_format, lib.dependencies, verbose=verbose,
-            clean=clean, macros=macros, notify=notify, jobs=jobs,
-            extra_verbose=extra_verbose, build_profile=build_profile)
-    else:
-        print('Library "%s" is not yet supported on target %s with toolchain %s'
-              % (lib_id, target.name, toolchain))
-
-
-def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
-                                 cppcheck_cmd, cppcheck_msg_format,
-                                 dependencies_paths=None,
-                                 name=None, clean=False, notify=None,
-                                 verbose=False, macros=None, jobs=1,
-                                 extra_verbose=False, build_profile=None):
-    """ Function scans library for statically detectable defects
-
-    Positional arguments:
-    src_paths - the list of library paths to scan
-    build_path - the location directory of result files
-    target - the target to fake the build for
-    toolchain_name - pretend you would compile with this toolchain
-    cppcheck_cmd - the command used to do static analysis
-    cppcheck_msg_format - the format of the check messages
-
-    Keyword arguments:
-    dependencies_paths - the paths to sources that this library depends on
-    name - the name of this library
-    clean - start from a clean slate
-    notify - the notification event handling function
-    verbose - more printing!
-    macros - extra macros to compile with
-    jobs - number of commands to run at once
-    extra_verbose - even moar printing
-    build_profile - a dict of flags that will be passed to the compiler
-    """
-    if type(src_paths) != ListType:
-        src_paths = [src_paths]
-
-    for src_path in src_paths:
-        if not exists(src_path):
-            raise Exception("The library source folder does not exist: %s",
-                            src_path)
-
-    # Toolchain instance
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
-                                                  notify=notify,
-                                                  extra_verbose=extra_verbose,
-                                                  build_profile=build_profile)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-
-    # The first path will give the name to the library
-    name = basename(src_paths[0])
-    toolchain.info("Static analysis for library %s (%s, %s)" %
-                   (name.upper(), target.name, toolchain_name))
-
-    # Scan Resources
-    resources = []
-    for src_path in src_paths:
-        resources.append(toolchain.scan_resources(src_path))
-
-    # Dependencies Include Paths
-    dependencies_include_dir = []
-    if dependencies_paths is not None:
-        for path in dependencies_paths:
-            lib_resources = toolchain.scan_resources(path)
-            dependencies_include_dir.extend(lib_resources.inc_dirs)
-
-    # Create the desired build directory structure
-    bin_path = join(build_path, toolchain.obj_path)
-    mkdir(bin_path)
-    tmp_path = join(build_path, '.temp', toolchain.obj_path)
-    mkdir(tmp_path)
-
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
-    # command line
-    includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
-    c_sources = " "
-    cpp_sources = " "
-    macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]
-
-    # Copy Headers
-    for resource in resources:
-        toolchain.copy_files(resource.headers, build_path, resources=resource)
-        includes += ["-I%s" % i for i in resource.inc_dirs]
-        c_sources += " ".join(resource.c_sources) + " "
-        cpp_sources += " ".join(resource.cpp_sources) + " "
-
-    dependencies_include_dir.extend(
-        toolchain.scan_resources(build_path).inc_dirs)
-
-    includes = [inc.strip() for inc in includes]
-    macros = [mac.strip() for mac in macros]
-
-    check_cmd = cppcheck_cmd
-    check_cmd += cppcheck_msg_format
-    check_cmd += includes
-    check_cmd += macros
-
-    # We need to pass some parameters via file to avoid "command line too long
-    # in some OSs". A temporary file is created to store e.g. cppcheck list of
-    # files for command line
-    tmp_file = tempfile.NamedTemporaryFile(delete=False)
-    tmp_file.writelines(line + '\n' for line in c_sources.split())
-    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
-    tmp_file.close()
-    check_cmd += ["--file-list=%s"% tmp_file.name]
-
-    # This will allow us to grab result from both stdio and stderr outputs (so
-    # we can show them) We assume static code analysis tool is outputting
-    # defects on STDERR
-    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
-    if verbose:
-        print _stdout
-        print _stderr
-
-
 def print_build_results(result_list, build_name):
     """ Generate result string for build results

@@ -269,7 +269,7 @@ if __name__ == '__main__':

         try:
             bin_file = build_project(test.source_dir, build_dir, mcu, toolchain,
-                                     test.dependencies,
+                                     set(test.dependencies),
                                      linker_script=options.linker_script,
                                      clean=options.clean,
                                      verbose=options.verbose,
@@ -186,10 +186,10 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
     _, toolchain_name = get_exporter_toolchain(ide)

     # Pass all params to the unified prepare_resources()
-    toolchain = prepare_toolchain(paths, target, toolchain_name, macros=macros,
-                                  jobs=jobs, notify=notify, silent=silent,
-                                  verbose=verbose, extra_verbose=extra_verbose,
-                                  config=config, build_profile=build_profile)
+    toolchain = prepare_toolchain(
+        paths, export_path, target, toolchain_name, macros=macros, jobs=jobs,
+        notify=notify, silent=silent, verbose=verbose,
+        extra_verbose=extra_verbose, config=config, build_profile=build_profile)
     # The first path will give the name to the library
     if name is None:
         name = basename(normpath(abspath(src_paths[0])))
@@ -58,7 +58,7 @@ class BuildApiTests(unittest.TestCase):
     @patch('tools.utils.run_cmd', return_value=("", "", 0))
     def test_always_complete_build(self, *_):
         with MagicMock() as notify:
-            toolchain = prepare_toolchain(self.src_paths, self.target,
+            toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
                                           self.toolchain_name, notify=notify)

             res = scan_resources(self.src_paths, toolchain)
@@ -66,9 +66,8 @@ class BuildApiTests(unittest.TestCase):
             toolchain.RESPONSE_FILES=False
             toolchain.config_processed = True
             toolchain.config_file = "junk"
-            toolchain.compile_sources(res, self.build_path)
+            toolchain.compile_sources(res)

-            print notify.mock_calls
             assert any('percent' in msg[0] and msg[0]['percent'] == 100.0
                        for _, msg, _ in notify.mock_calls if msg)

@@ -90,7 +89,7 @@ class BuildApiTests(unittest.TestCase):
                                  mock_target,
                                  False)

-        prepare_toolchain(self.src_paths, self.target, self.toolchain_name,
+        prepare_toolchain(self.src_paths, None, self.target, self.toolchain_name,
                           app_config=app_config)

         mock_config_init.assert_called_once_with(self.target, self.src_paths,
@@ -112,7 +111,7 @@ class BuildApiTests(unittest.TestCase):
                                  mock_target,
                                  False)

-        prepare_toolchain(self.src_paths, self.target, self.toolchain_name)
+        prepare_toolchain(self.src_paths, None, self.target, self.toolchain_name)

         mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                  app_config=None)
@@ -2013,7 +2013,7 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None):
     tests = {}

     # Prepare the toolchain
-    toolchain = prepare_toolchain([base_dir], target_name, toolchain_name,
+    toolchain = prepare_toolchain([base_dir], None, target_name, toolchain_name,
                                   silent=True, app_config=app_config)

     # Scan the directory for paths to probe for 'TESTS' folders
@@ -256,7 +256,8 @@ class mbedToolchain:

     profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}

-    def __init__(self, target, notify=None, macros=None, silent=False, extra_verbose=False, build_profile=None):
+    def __init__(self, target, notify=None, macros=None, silent=False,
+                 extra_verbose=False, build_profile=None, build_dir=None):
         self.target = target
         self.name = self.__class__.__name__

@@ -295,12 +296,9 @@ class mbedToolchain:
         self.build_all = False

         # Build output dir
-        self.build_dir = None
+        self.build_dir = build_dir
         self.timestamp = time()

-        # Output build naming based on target+toolchain combo (mbed 2.0 builds)
-        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
-
         # Number of concurrent build jobs. 0 means auto (based on host system cores)
         self.jobs = 0

@@ -580,7 +578,8 @@ class mbedToolchain:
                 self.add_ignore_patterns(root, base_path, lines)

             # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
-            if self.is_ignored(join(relpath(root, base_path),"")):
+            if (self.is_ignored(join(relpath(root, base_path),"")) or
+                self.build_dir == join(relpath(root, base_path))):
                 dirs[:] = []
                 continue

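With the build directory known to the toolchain, the ignore check above prunes it during the os.walk()-based resource scan in the same way an .mbedignore rule would. A standalone sketch of that pruning logic, using a hypothetical iter_sources() helper and a stand-in is_ignored() predicate rather than the real mbedToolchain method:

import os
from os.path import join, relpath

def iter_sources(base_path, build_dir, is_ignored=lambda rel: False):
    """Yield files under base_path, pruning ignored folders and the build directory."""
    for root, dirs, files in os.walk(base_path):
        # Mirrors the new condition: an ignore-pattern match OR the build output dir.
        if (is_ignored(join(relpath(root, base_path), "")) or
                build_dir == join(relpath(root, base_path))):
            dirs[:] = []   # do not descend into this subtree
            continue
        for name in files:
            yield join(root, name)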
@@ -773,7 +772,7 @@ class mbedToolchain:

     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
-    def compile_sources(self, resources, build_path, inc_dirs=None):
+    def compile_sources(self, resources, inc_dirs=None):
         # Web IDE progress bar for project build
         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
         self.to_be_compiled = len(files_to_compile)
@@ -790,8 +789,6 @@ class mbedToolchain:
         inc_paths = sorted(set(inc_paths))
         # Unique id of all include paths
         self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
-        # Where to store response files
-        self.build_dir = build_path

         objects = []
         queue = []
@@ -804,7 +801,8 @@ class mbedToolchain:
         # Sort compile queue for consistency
         files_to_compile.sort()
         for source in files_to_compile:
-            object = self.relative_object_path(build_path, resources.file_basepath[source], source)
+            object = self.relative_object_path(
+                self.build_dir, resources.file_basepath[source], source)

             # Queue mode (multiprocessing)
             commands = self.compile_command(source, object, inc_paths)
@@ -40,8 +40,10 @@ class ARM(mbedToolchain):
         return mbedToolchain.generic_check_executable("ARM", 'armcc', 2, 'bin')

     def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None):
+                 silent=False, extra_verbose=False, build_profile=None,
+                 build_dir=None):
         mbedToolchain.__init__(self, target, notify, macros, silent,
+                               build_dir=build_dir,
                                extra_verbose=extra_verbose,
                                build_profile=build_profile)

@@ -29,10 +29,11 @@ class GCC(mbedToolchain):
     INDEX_PATTERN = re.compile('(?P<col>\s*)\^')

     def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None):
+                 silent=False, extra_verbose=False, build_profile=None,
+                 build_dir=None):
         mbedToolchain.__init__(self, target, notify, macros, silent,
                                extra_verbose=extra_verbose,
-                               build_profile=build_profile)
+                               build_profile=build_profile, build_dir=build_dir)

         tool_path=TOOLCHAIN_PATHS['GCC_ARM']
         # Add flags for current size setting
@@ -37,8 +37,10 @@ class IAR(mbedToolchain):
         return mbedToolchain.generic_check_executable("IAR", 'iccarm', 2, "bin")

     def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None):
+                 silent=False, extra_verbose=False, build_profile=None,
+                 build_dir=None):
         mbedToolchain.__init__(self, target, notify, macros, silent,
+                               build_dir=build_dir,
                                extra_verbose=extra_verbose,
                                build_profile=build_profile)
         if target.core == "Cortex-M7F" or target.core == "Cortex-M7FD":