mbed Online Build System support:

* added/improved global chroot support
* added RESPONSE_FILES flag to make response files optional (on Linux the command-line length limit is about 2 megabytes). Defaults to True; see the sketch after this list
* added unified handling for archive and link response files (similar to includes)
* added COMPILE_C_AS_CPP flag to support compiling C files as C++. Defaults to False
* added mbedToolchain.init() for post-__init__() hooks
* added caching to mbedToolchain.need_update() to reduce I/O hits
* added support for identifying the compiler warning/error column (ARMCC, GCC and IAR). Errors and warnings now report file@line,col
* added global TOOLCHAIN_PATHS, which allows overriding/changing the toolchain paths. Also simplified the ARM-related paths
* added targets.json to the mbed library release (by @0xc0170)
* migrated compile_worker() to utils.py for lightweight initialization of the worker processes
* improved run_cmd() performance by removing an unnecessary check of the command being executed (it should be checked once by the relevant toolchain instead)
* removed remnants of Goanna support (should be reimplemented as hooks to compile/link/archive instead)
* fixes for Python 2.7 compatibility (by @0xc0170)
* fixes for Exporters (by @0xc0170)
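
RESPONSE_FILES and COMPILE_C_AS_CPP are plain class attributes on mbedToolchain, so a build front end can flip them per toolchain subclass or per instance instead of threading extra arguments through every call. Below is a minimal, self-contained sketch of that pattern; it is not the mbed tools code itself, and the Toolchain class, write_response_file() helper and the example values are made up for illustration (the real switches live on mbedToolchain in the toolchains package, as shown in the diff below):

# Illustrative sketch only -- names here are hypothetical; the real flags are
# mbedToolchain.RESPONSE_FILES and mbedToolchain.COMPILE_C_AS_CPP.
class Toolchain(object):
    RESPONSE_FILES = True      # put long include/object lists into a response file
    COMPILE_C_AS_CPP = False   # build .c sources with the C compiler by default

    def write_response_file(self, includes):
        # One "-I<path>" per include directory, written to a side file
        path = ".includes_%d.txt" % id(self)
        with open(path, "w") as f:
            f.write(" ".join("-I%s" % i for i in includes))
        return path

    def get_compile_options(self, defines, includes):
        opts = ["-D%s" % d for d in defines]
        if self.RESPONSE_FILES:
            # Keeps the command line short; this matters most on Windows, where
            # the limit is far below the ~2 MB allowed on Linux
            opts.append("@%s" % self.write_response_file(includes))
        else:
            opts.extend("-I%s" % i for i in includes)
        return opts

if __name__ == "__main__":
    tc = Toolchain()
    tc.COMPILE_C_AS_CPP = True          # per-instance override
    print(tc.get_compile_options(["NDEBUG"], ["inc", "mbed/hal"]))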
Mihail Stoyanov 2016-07-19 11:14:42 +01:00
parent 8ab89c1131
commit 74b7f9e923
12 changed files with 299 additions and 283 deletions


@@ -69,7 +69,7 @@ class Exporter(object):
         for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
                        'objects', 'libraries', 'linker_script',
-                       'lib_builds', 'lib_refs', 'repo_files', 'hex_files', 'bin_files']:
+                       'lib_builds', 'lib_refs', 'hex_files', 'bin_files']:
             r = getattr(resources, r_type)
             if r:
                 self.toolchain.copy_files(r, trg_path, resources=resources)
@@ -149,16 +149,21 @@ class Exporter(object):
         # Copy only the file for the required target and toolchain
         lib_builds = []
         # Create the configuration object
+        if isinstance(prj_paths, basestring):
+            prj_paths = [prj_paths]
         config = Config(self.target, prj_paths)
         for src in ['lib', 'src']:
-            resources = reduce(add, [self.__scan_and_copy(join(path, src), trg_path) for path in prj_paths])
+            resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path)
+            for path in prj_paths[1:]:
+                resources.add(self.__scan_and_copy(join(path, src), trg_path))
             lib_builds.extend(resources.lib_builds)

             # The repository files
-            for repo_dir in resources.repo_dirs:
-                repo_files = self.__scan_all(repo_dir)
-                for path in proj_paths :
-                    self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))
+            #for repo_dir in resources.repo_dirs:
+            #    repo_files = self.__scan_all(repo_dir)
+            #    for path in prj_paths:
+            #        self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))

         # The libraries builds
         for bld in lib_builds:
@@ -186,19 +191,14 @@ class Exporter(object):
         # Loads the resources into the config system which might expand/modify resources based on config data
         self.resources = config.load_resources(resources)

         if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED :
             # Add the configuration file to the target directory
             self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
             config.get_config_data_header(join(trg_path, self.config_header))
             self.config_macros = []
-        else :
+        else:
             # And add the configuration macros to the toolchain
             self.config_macros = config.get_config_data_macros()
-        # Check the existence of a binary build of the mbed library for the desired target
-        # This prevents exporting the mbed libraries from source
-        # if not self.toolchain.mbed_libs:
-        #     raise OldLibrariesException()

     def gen_file(self, template_file, data, target_file):
         template_path = join(Exporter.TEMPLATE_DIR, template_file)


@@ -19,7 +19,6 @@ from project_generator_definitions.definitions import ProGenDef

 from tools.export.exporters import Exporter, ExporterTargetsProperty
 from tools.targets import TARGET_MAP, TARGET_NAMES
-from tools.settings import ARM_INC

 # If you wish to add a new target, add it to project_generator_definitions, and then
 # define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
@@ -79,8 +78,6 @@ class Uvision4(Exporter):
             project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set(self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags']))
             # not compatible with c99 flag set in the template
             project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99")
-            # ARM_INC is by default as system inclusion, not required for exported project
-            project_data['tool_specific']['uvision']['misc']['c_flags'].remove("-I \""+ARM_INC+"\"")
             # cpp is not required as it's implicit for cpp files
             project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--cpp")
             # we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it


@@ -19,7 +19,6 @@ from project_generator_definitions.definitions import ProGenDef

 from tools.export.exporters import Exporter, ExporterTargetsProperty
 from tools.targets import TARGET_MAP, TARGET_NAMES
-from tools.settings import ARM_INC

 # If you wish to add a new target, add it to project_generator_definitions, and then
 # define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
@@ -77,8 +76,6 @@ class Uvision5(Exporter):
             project_data['tool_specific']['uvision5']['misc']['asm_flags'] = list(set(self.progen_flags['asm_flags']))
             # cxx flags included, as uvision have them all in one tab
             project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags']))
-            # ARM_INC is by default as system inclusion, not required for exported project
-            project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("-I \""+ARM_INC+"\"")
             # not compatible with c99 flag set in the template
             project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99")
             # cpp is not required as it's implicit for cpp files


@@ -80,17 +80,6 @@ for _n in _ENV_PATHS:
         print "WARNING: MBED_%s set as environment variable but doesn't exist" % _n

-##############################################################################
-# ARM Compiler Paths
-##############################################################################
-ARM_BIN = join(ARM_PATH, "bin")
-ARM_INC = join(ARM_PATH, "include")
-ARM_LIB = join(ARM_PATH, "lib")
-ARM_CPPLIB = join(ARM_LIB, "cpplib")
-MY_ARM_CLIB = join(ARM_LIB, "lib", "microlib")
-
 ##############################################################################
 # Test System Settings
 ##############################################################################


@@ -122,7 +122,7 @@ def ignore_path(name, reg_exps):
 class MbedRepository:
     @staticmethod
     def run_and_print(command, cwd):
-        stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
+        stdout, _, _ = run_cmd(command, work_dir=cwd, redirect=True)
         print(stdout)

     def __init__(self, name, team = None):
@@ -147,7 +147,7 @@ class MbedRepository:
     def publish(self):
         # The maintainer has to evaluate the changes first and explicitly accept them
         self.run_and_print(['hg', 'addremove'], cwd=self.path)
-        stdout, _, _ = run_cmd(['hg', 'status'], wd=self.path)
+        stdout, _, _ = run_cmd(['hg', 'status'], work_dir=self.path)
         if stdout == '':
             print "No changes"
             return False


@@ -61,6 +61,9 @@ class Target:
     # need to be computed differently than regular attributes
     __cumulative_attributes = ['extra_labels', 'macros', 'device_has', 'features']

+    # List of targets that were added dynamically using "add_py_targets" (see below)
+    __py_targets = set()
+
     # Location of the 'targets.json' file
     __targets_json_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'hal', 'targets.json')


@@ -58,7 +58,7 @@ from tools.build_api import create_result
 from tools.build_api import add_result_to_report
 from tools.build_api import scan_for_source_paths
 from tools.libraries import LIBRARIES, LIBRARY_MAP
-from tools.toolchains import TOOLCHAIN_BIN_PATH
+from tools.toolchains import TOOLCHAIN_PATHS
 from tools.toolchains import TOOLCHAINS
 from tools.test_exporters import ReportExporter, ResultExporterType
 from tools.utils import argparse_filestring_type
@@ -1343,8 +1343,8 @@ def print_test_configuration_from_json(json_data, join_delim=", "):
                 if conflict:
                     cell_val += '*'
                 # Check for conflicts: toolchain vs toolchain path
-                if toolchain in TOOLCHAIN_BIN_PATH:
-                    toolchain_path = TOOLCHAIN_BIN_PATH[toolchain]
+                if toolchain in TOOLCHAIN_PATHS:
+                    toolchain_path = TOOLCHAIN_PATHS[toolchain]
                     if not os.path.isdir(toolchain_path):
                         conflict_path = True
                         if toolchain not in toolchain_path_conflicts:
@@ -1368,8 +1368,8 @@ def print_test_configuration_from_json(json_data, join_delim=", "):
     for toolchain in toolchain_path_conflicts:
         # Let's check toolchain configuration
-        if toolchain in TOOLCHAIN_BIN_PATH:
-            toolchain_path = TOOLCHAIN_BIN_PATH[toolchain]
+        if toolchain in TOOLCHAIN_PATHS:
+            toolchain_path = TOOLCHAIN_PATHS[toolchain]
             if not os.path.isdir(toolchain_path):
                 result += "\t# Toolchain %s path not found: %s\n"% (toolchain, toolchain_path)
     return result


@@ -28,7 +28,7 @@ from copy import deepcopy
 from tools.config import Config

 from multiprocessing import Pool, cpu_count
-from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
+from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker
 from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
 import tools.hooks as hooks
 from tools.memap import MemapParser
@@ -38,28 +38,7 @@ import fnmatch
 #Disables multiprocessing if set to higher number than the host machine CPUs
 CPU_COUNT_MIN = 1
+CPU_COEF = 1

-def compile_worker(job):
-    results = []
-    for command in job['commands']:
-        try:
-            _, _stderr, _rc = run_cmd(command, job['work_dir'])
-        except KeyboardInterrupt as e:
-            raise ToolException
-
-        results.append({
-            'code': _rc,
-            'output': _stderr,
-            'command': command
-        })
-
-    return {
-        'source': job['source'],
-        'object': job['object'],
-        'commands': job['commands'],
-        'results': results
-    }

 class Resources:
     def __init__(self, base_path=None):
@@ -198,7 +177,8 @@ class Resources:
 # had the knowledge of a list of these directories to be ignored.
 LEGACY_IGNORE_DIRS = set([
     'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
-    'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM'
+    'ARM', 'uARM', 'IAR',
+    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
 ])
 LEGACY_TOOLCHAIN_NAMES = {
     'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
@@ -209,23 +189,21 @@ LEGACY_TOOLCHAIN_NAMES = {

 class mbedToolchain:
     VERBOSE = True
+    COMPILE_C_AS_CPP = False
+    RESPONSE_FILES = True

     CORTEX_SYMBOLS = {
         "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
-        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
+        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
-        "Cortex-M4F": ["__CORTEX_M4", "__FPU_PRESENT=1", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
-        "Cortex-M7F" : ["__CORTEX_M7", "__FPU_PRESENT=1", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
-        "Cortex-M7FD" : ["__CORTEX_M7", "__FPU_PRESENT=1", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
     }

-    GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
-    GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')
-
     MBED_CONFIG_FILE_NAME="mbed_config.h"
@@ -270,7 +248,7 @@ class mbedToolchain:
         self.ignore_patterns = []

         # Pre-mbed 2.0 ignore dirs
-        self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
+        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

         # Output notify function
         # This function is passed all events, and expected to handle notification of the
@@ -307,6 +285,14 @@ class mbedToolchain:
         # uVisor spepcific rules
         if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
             self.target.core = re.sub(r"F$", '', self.target.core)

+        self.stat_cache = {}
+
+        self.init()
+
+    # This allows post __init__() hooks. Do not use
+    def init(self):
+        return True
+
     def get_output(self):
         return self.output
@@ -325,7 +311,7 @@ class mbedToolchain:
             elif event['type'] == 'cc':
                 event['severity'] = event['severity'].title()
                 event['file'] = basename(event['file'])
-                msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
+                msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event

             elif event['type'] == 'progress':
                 if not silent:
@@ -360,12 +346,6 @@ class mbedToolchain:
             event['toolchain'] = self
         return self.notify_fun(event, self.silent)

-    def goanna_parse_line(self, line):
-        if "analyze" in self.options:
-            return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
-        else:
-            return None
-
     def get_symbols(self):
         if self.symbols is None:
             # Target and Toolchain symbols
@@ -422,15 +402,17 @@ class mbedToolchain:
         target_mod_time = stat(target).st_mtime

         for d in dependencies:
             # Some objects are not provided with full path and here we do not have
             # information about the library paths. Safe option: assume an update
             if not d or not exists(d):
                 return True
-            if stat(d).st_mtime >= target_mod_time:
+
+            if not self.stat_cache.has_key(d):
+                self.stat_cache[d] = stat(d).st_mtime
+
+            if self.stat_cache[d] >= target_mod_time:
                 return True

         return False

     def is_ignored(self, file_path):
@@ -446,6 +428,8 @@ class mbedToolchain:
     # object and the parameter *exclude_paths* is used by the directory traversal to
     # exclude certain paths from the traversal.
     def scan_resources(self, path, exclude_paths=None, base_path=None):
+        self.progress("scan", path)
+
         resources = Resources(path)
         if not base_path:
             if isfile(path):
@@ -493,9 +477,8 @@ class mbedToolchain:
         for d in copy(dirs):
             dir_path = join(root, d)
             # Add internal repo folders/files. This is needed for exporters
-            if d == '.hg':
+            if d == '.hg' or d == '.git':
                 resources.repo_dirs.append(dir_path)
-                resources.repo_files.extend(self.scan_repository(dir_path))

             if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                 # Ignore targets that do not match the TARGET in extra_labels list
@@ -596,7 +579,6 @@ class mbedToolchain:
         return resources

     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
-        # Handle a single file
         if type(files_paths) != ListType: files_paths = [files_paths]
@@ -605,7 +587,7 @@ class mbedToolchain:
                 files_paths.remove(source)

         for source in files_paths:
-            if resources is not None:
+            if resources is not None and resources.file_basepath.has_key(source):
                 relative_path = relpath(source, resources.file_basepath[source])
             elif rel_path is not None:
                 relative_path = relpath(source, rel_path)
@@ -623,7 +605,9 @@ class mbedToolchain:
         source_dir, name, _ = split_path(source)

         obj_dir = join(build_path, relpath(source_dir, base_dir))
-        mkdir(obj_dir)
+        if obj_dir is not self.prev_dir:
+            self.prev_dir = obj_dir
+            mkdir(obj_dir)
         return join(obj_dir, name + '.o')

     def get_inc_file(self, includes):
@@ -633,17 +617,46 @@ class mbedToolchain:
             cmd_list = []
             for c in includes:
                 if c:
-                    cmd_list.append(('-I%s' % c).replace("\\", "/"))
+                    c = c.replace("\\", "/")
+                    if self.CHROOT:
+                        c = c.replace(self.CHROOT, '')
+                    cmd_list.append('-I%s' % c)
             string = " ".join(cmd_list)
             f.write(string)
         return include_file

+    def get_link_file(self, cmd):
+        link_file = join(self.build_dir, ".link_files.txt")
+        with open(link_file, "wb") as f:
+            cmd_list = []
+            for c in cmd:
+                if c:
+                    c = c.replace("\\", "/")
+                    if self.CHROOT:
+                        c = c.replace(self.CHROOT, '')
+                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
+            string = " ".join(cmd_list)
+            f.write(string)
+        return link_file
+
+    def get_arch_file(self, objects):
+        archive_file = join(self.build_dir, ".archive_files.txt")
+        with open(archive_file, "wb") as f:
+            o_list = []
+            for o in objects:
+                o_list.append('"%s"' % o)
+            string = " ".join(o_list).replace("\\", "/")
+            f.write(string)
+        return archive_file
+
     def compile_sources(self, resources, build_path, inc_dirs=None):
         # Web IDE progress bar for project build
         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
         self.to_be_compiled = len(files_to_compile)
         self.compiled = 0

+        self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
+
         inc_paths = resources.inc_dirs
         if inc_dirs is not None:
             inc_paths.extend(inc_dirs)
@@ -658,14 +671,12 @@ class mbedToolchain:
         objects = []
         queue = []
-        prev_dir = None
+        work_dir = getcwd()
+        self.prev_dir = None

         # Sort compile queue for consistency
         files_to_compile.sort()
-        work_dir = getcwd()
-
         for source in files_to_compile:
-            _, name, _ = split_path(source)
             object = self.relative_object_path(build_path, resources.file_basepath[source], source)

             # Queue mode (multiprocessing)
@@ -695,7 +706,7 @@ class mbedToolchain:
                 self.compiled += 1
                 self.progress("compile", item['source'], build_update=True)
                 for res in result['results']:
-                    self.debug("Command: %s" % ' '.join(res['command']))
+                    self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                     self.compile_output([
                         res['code'],
                         res['output'],
@@ -705,21 +716,23 @@ class mbedToolchain:
         return objects

     def compile_queue(self, queue, objects):
-        jobs_count = int(self.jobs if self.jobs else cpu_count())
+        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
         p = Pool(processes=jobs_count)

         results = []
         for i in range(len(queue)):
             results.append(p.apply_async(compile_worker, [queue[i]]))
+        p.close()

         itr = 0
-        while True:
+        while len(results):
             itr += 1
             if itr > 180000:
                 p.terminate()
                 p.join()
                 raise ToolException("Compile did not finish in 5 minutes")

+            sleep(0.01)
+
             pending = 0
             for r in results:
                 if r._ready is True:
@@ -730,7 +743,7 @@ class mbedToolchain:
                         self.compiled += 1
                         self.progress("compile", result['source'], build_update=True)
                         for res in result['results']:
-                            self.debug("Command: %s" % ' '.join(res['command']))
+                            self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                             self.compile_output([
                                 res['code'],
                                 res['output'],
@@ -743,17 +756,10 @@ class mbedToolchain:
                         raise ToolException(err)
                 else:
                     pending += 1
-                    if pending > jobs_count:
+                    if pending >= jobs_count:
                         break

-            if len(results) == 0:
-                break
-
-            sleep(0.01)
-
         results = None
-        p.terminate()
         p.join()

         return objects
@@ -768,10 +774,10 @@ class mbedToolchain:
         dep_path = base + '.d'

         deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
         if len(deps) == 0 or self.need_update(object, deps):
-            if ext == '.c':
-                return self.compile_c(source, object, includes)
-            else:
+            if ext == '.cpp' or self.COMPILE_C_AS_CPP:
                 return self.compile_cpp(source, object, includes)
+            else:
+                return self.compile_c(source, object, includes)
         elif ext == '.s':
             deps = [source]
             if self.need_update(object, deps):
@@ -795,12 +801,8 @@ class mbedToolchain:
         for error_line in _stderr.splitlines():
             self.debug("Output: %s"% error_line)

         # Check return code
         if _rc != 0:
-            for line in _stderr.splitlines():
-                self.tool_error(line)
-
             if self.is_not_supported_error(_stderr):
                 raise NotSupportedException(_stderr)
             else:
@@ -847,7 +849,6 @@ class mbedToolchain:
         if self.need_update(bin, [elf]):
             needed_update = True
             self.progress("elf2bin", name)
             self.binary(r, elf, bin)

         self.map_outputs = self.mem_stats(map)
@@ -858,9 +859,7 @@ class mbedToolchain:
         return bin, needed_update

     def default_cmd(self, command):
-        self.debug("Command: %s"% ' '.join(command))
-        _stdout, _stderr, _rc = run_cmd(command)
+        _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
         self.debug("Return: %s"% _rc)

         for output_line in _stdout.splitlines():
@@ -884,14 +883,13 @@ class mbedToolchain:
             message = "[DEBUG] " + message
         self.notify({'type': 'debug', 'message': message})

-    def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
-        self.notify({'type': 'cc',
-                     'severity': severity,
-                     'file': file,
-                     'line': line,
-                     'message': message,
-                     'target_name': target_name,
-                     'toolchain_name': toolchain_name})
+    def cc_info(self, info=None):
+        if info is not None:
+            info['type'] = 'cc'
+            self.notify(info)
+
+    def cc_verbose(self, message, file=""):
+        self.debug(message)

     def progress(self, action, file, build_update=False):
         msg = {'type': 'progress', 'action': action, 'file': file}
@@ -959,13 +957,14 @@ class mbedToolchain:
     def get_config_macros(self):
         return Config.config_to_macros(self.config_data) if self.config_data else []

-from tools.settings import ARM_BIN
+from tools.settings import ARM_PATH
 from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
 from tools.settings import IAR_PATH

-TOOLCHAIN_BIN_PATH = {
-    'ARM': ARM_BIN,
-    'uARM': ARM_BIN,
+TOOLCHAIN_PATHS = {
+    'ARM': ARM_PATH,
+    'uARM': ARM_PATH,
     'GCC_ARM': GCC_ARM_PATH,
     'GCC_CR': GCC_CR_PATH,
     'IAR': IAR_PATH


@@ -17,8 +17,7 @@ limitations under the License.
 import re
 from os.path import join, dirname, splitext, basename, exists

-from tools.toolchains import mbedToolchain
-from tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB, GOANNA_PATH
+from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
 from tools.hooks import hook_tool
 from tools.utils import mkdir
 import copy
@@ -29,13 +28,14 @@ class ARM(mbedToolchain):
     STD_LIB_NAME = "%s.ar"
     DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+)( \(column (?P<column>\d+)\)|): (?P<severity>Warning|Error): (?P<message>.+)')
+    INDEX_PATTERN = re.compile('(?P<col>\s*)\^')
     DEP_PATTERN = re.compile('\S+:\s(?P<file>.+)\n')

     DEFAULT_FLAGS = {
         'common': ["-c", "--gnu",
             "-Otime", "--split_sections", "--apcs=interwork",
-            "--brief_diagnostics", "--restrict", "--multibyte_chars", "-I \""+ARM_INC+"\""],
+            "--brief_diagnostics", "--restrict", "--multibyte_chars"],
         'asm': [],
         'c': ["--md", "--no_depend_system_headers", "--c99", "-D__ASSERT_MSG"],
         'cxx': ["--cpp", "--no_rtti", "--no_vla"],
@@ -56,6 +56,9 @@ class ARM(mbedToolchain):
         else:
             cpu = target.core

+        ARM_BIN = join(TOOLCHAIN_PATHS['ARM'], "bin")
+        ARM_INC = join(TOOLCHAIN_PATHS['ARM'], "include")
+
         main_cc = join(ARM_BIN, "armcc")

         self.flags['common'] += ["--cpu=%s" % cpu]
@@ -68,13 +71,9 @@ class ARM(mbedToolchain):
         else:
             self.flags['c'].append("-O3")

-        self.asm = [main_cc] + self.flags['common'] + self.flags['asm']
-        if not "analyze" in self.options:
-            self.cc = [main_cc] + self.flags['common'] + self.flags['c']
-            self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
-        else:
-            self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c']
-            self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
+        self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] + ["-I \""+ARM_INC+"\""]
+        self.cc = [main_cc] + self.flags['common'] + self.flags['c'] + ["-I \""+ARM_INC+"\""]
+        self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx'] + ["-I \""+ARM_INC+"\""]

         self.ld = [join(ARM_BIN, "armlink")]
         self.sys_libs = []
@@ -87,40 +86,54 @@ class ARM(mbedToolchain):
         for line in open(dep_path).readlines():
             match = ARM.DEP_PATTERN.match(line)
             if match is not None:
-                dependencies.append(match.group('file'))
+                #we need to append chroot, because when the .d files are generated the compiler is chrooted
+                dependencies.append((self.CHROOT if self.CHROOT else '') + match.group('file'))
         return dependencies

     def parse_output(self, output):
+        msg = None
         for line in output.splitlines():
             match = ARM.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
-                self.cc_info(
-                    match.group('severity').lower(),
-                    match.group('file'),
-                    match.group('line'),
-                    match.group('message'),
-                    target_name=self.target.name,
-                    toolchain_name=self.name
-                )
-            match = self.goanna_parse_line(line)
-            if match is not None:
-                self.cc_info(
-                    match.group('severity').lower(),
-                    match.group('file'),
-                    match.group('line'),
-                    match.group('message')
-                )
+                if msg is not None:
+                    self.cc_info(msg)
+                msg = {
+                    'severity': match.group('severity').lower(),
+                    'file': match.group('file'),
+                    'line': match.group('line'),
+                    'col': match.group('column') if match.group('column') else 0,
+                    'message': match.group('message'),
+                    'text': '',
+                    'target_name': self.target.name,
+                    'toolchain_name': self.name
+                }
+            elif msg is not None:
+                match = ARM.INDEX_PATTERN.match(line)
+                if match is not None:
+                    msg['col'] = len(match.group('col'))
+                    self.cc_info(msg)
+                    msg = None
+                else:
+                    msg['text'] += line+"\n"
+
+        if msg is not None:
+            self.cc_info(msg)

     def get_dep_option(self, object):
         base, _ = splitext(object)
         dep_path = base + '.d'
         return ["--depend", dep_path]

-    def get_config_option(self, config_header) :
+    def get_config_option(self, config_header):
         return ['--preinclude=' + config_header]

     def get_compile_options(self, defines, includes):
-        opts = ['-D%s' % d for d in defines] + ['--via', self.get_inc_file(includes)]
+        opts = ['-D%s' % d for d in defines]
+        if self.RESPONSE_FILES:
+            opts += ['--via', self.get_inc_file(includes)]
+        else:
+            opts += ["-I%s" % i for i in includes]
+
         config_header = self.get_config_header()
         if config_header is not None:
             opts = opts + self.get_config_option(config_header)
@@ -183,32 +196,25 @@ class ARM(mbedToolchain):
         # Call cmdline hook
         cmd = self.hook.get_cmdline_linker(cmd)

-        # Split link command to linker executable + response file
-        link_files = join(dirname(output), ".link_files.txt")
-        with open(link_files, "wb") as f:
+        if self.RESPONSE_FILES:
+            # Split link command to linker executable + response file
             cmd_linker = cmd[0]
-            cmd_list = []
-            for c in cmd[1:]:
-                if c:
-                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
-            string = " ".join(cmd_list).replace("\\", "/")
-            f.write(string)
+            link_files = self.get_link_file(cmd[1:])
+            cmd = [cmd_linker, '--via', link_files]

         # Exec command
-        self.default_cmd([cmd_linker, '--via', link_files])
+        self.cc_verbose("Link: %s" % ' '.join(cmd))
+        self.default_cmd(cmd)

     @hook_tool
     def archive(self, objects, lib_path):
-        archive_files = join(dirname(lib_path), ".archive_files.txt")
-        with open(archive_files, "wb") as f:
-            o_list = []
-            for o in objects:
-                o_list.append('"%s"' % o)
-            string = " ".join(o_list).replace("\\", "/")
-            f.write(string)
+        if self.RESPONSE_FILES:
+            param = ['--via', self.get_arch_file(objects)]
+        else:
+            param = objects

         # Exec command
-        self.default_cmd([self.ar, '-r', lib_path, '--via', archive_files])
+        self.default_cmd([self.ar, '-r', lib_path] + param)

     @hook_tool
     def binary(self, resources, elf, bin):
@@ -219,6 +225,7 @@ class ARM(mbedToolchain):
         cmd = self.hook.get_cmdline_binary(cmd)

         # Exec command
+        self.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)
@@ -227,7 +234,7 @@ class ARM_STD(ARM):
         ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

         # Run-time values
-        self.ld.extend(["--libpath \"%s\"" % ARM_LIB])
+        self.ld.extend(["--libpath", join(TOOLCHAIN_PATHS['ARM'], "lib")])

 class ARM_MICRO(ARM):
@@ -260,13 +267,13 @@ class ARM_MICRO(ARM):
             self.ld += ["--noscanlib"]

             # System Libraries
-            self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])
+            self.sys_libs.extend([join(TOOLCHAIN_PATHS['ARM'], "lib", "microlib", lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])

             if target.core == "Cortex-M3":
-                self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ws", "cpprt_w"]])
+                self.sys_libs.extend([join(TOOLCHAIN_PATHS['ARM'], "lib", "cpplib", lib+".l") for lib in ["cpp_ws", "cpprt_w"]])

             elif target.core in ["Cortex-M0", "Cortex-M0+"]:
-                self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
+                self.sys_libs.extend([join(TOOLCHAIN_PATHS['ARM'], "lib", "cpplib", lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
         else:
             # Run-time values
-            self.ld.extend(["--libpath \"%s\"" % ARM_LIB])
+            self.ld.extend(["--libpath", join(TOOLCHAIN_PATHS['ARM'], "lib")])


@@ -17,9 +17,7 @@ limitations under the License.
 import re
 from os.path import join, basename, splitext, dirname, exists

-from tools.toolchains import mbedToolchain
-from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
-from tools.settings import GOANNA_PATH
+from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
 from tools.hooks import hook_tool

 class GCC(mbedToolchain):
@@ -28,6 +26,7 @@ class GCC(mbedToolchain):
     STD_LIB_NAME = "lib%s.a"
     DIAGNOSTIC_PATTERN = re.compile('((?P<file>[^:]+):(?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')
+    INDEX_PATTERN = re.compile('(?P<col>\s*)\^')

     DEFAULT_FLAGS = {
         'common': ["-c", "-Wall", "-Wextra",
@@ -99,12 +98,8 @@ class GCC(mbedToolchain):
         main_cc = join(tool_path, "arm-none-eabi-gcc")
         main_cppc = join(tool_path, "arm-none-eabi-g++")
         self.asm = [main_cc] + self.flags['asm'] + self.flags["common"]
-        if not "analyze" in self.options:
-            self.cc = [main_cc]
-            self.cppc =[main_cppc]
-        else:
-            self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT]
-            self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cppc.replace('\\', '/'), "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT]
+        self.cc = [main_cc]
+        self.cppc =[main_cppc]
         self.cc += self.flags['c'] + self.flags['common']
         self.cppc += self.flags['cxx'] + self.flags['common']
@@ -131,9 +126,9 @@ class GCC(mbedToolchain):
                 # back later to a space char)
                 file = file.replace('\\ ', '\a')
                 if file.find(" ") == -1:
-                    dependencies.append(file.replace('\a', ' '))
+                    dependencies.append((self.CHROOT if self.CHROOT else '') + file.replace('\a', ' '))
                 else:
-                    dependencies = dependencies + [f.replace('\a', ' ') for f in file.split(" ")]
+                    dependencies = dependencies + [(self.CHROOT if self.CHROOT else '') + f.replace('\a', ' ') for f in file.split(" ")]
         return dependencies

     def is_not_supported_error(self, output):
@@ -141,32 +136,30 @@ class GCC(mbedToolchain):
     def parse_output(self, output):
         # The warning/error notification is multiline
-        WHERE, WHAT = 0, 1
-        state, file, message = WHERE, None, None
+        msg = None
         for line in output.splitlines():
-            match = self.goanna_parse_line(line)
-            if match is not None:
-                self.cc_info(
-                    match.group('severity').lower(),
-                    match.group('file'),
-                    match.group('line'),
-                    match.group('message'),
-                    target_name=self.target.name,
-                    toolchain_name=self.name
-                )
-                continue
-
             match = GCC.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
-                self.cc_info(
-                    match.group('severity').lower(),
-                    match.group('file'),
-                    match.group('line'),
-                    match.group('message'),
-                    target_name=self.target.name,
-                    toolchain_name=self.name
-                )
+                if msg is not None:
+                    self.cc_info(msg)
+                msg = {
+                    'severity': match.group('severity').lower(),
+                    'file': match.group('file'),
+                    'line': match.group('line'),
+                    'col': 0,
+                    'message': match.group('message'),
+                    'text': '',
+                    'target_name': self.target.name,
+                    'toolchain_name': self.name
+                }
+            elif msg is not None:
+                match = GCC.INDEX_PATTERN.match(line)
+                if match is not None:
+                    msg['col'] = len(match.group('col'))
+                    self.cc_info(msg)
+                    msg = None
+                else:
+                    msg['text'] += line+"\n"

     def get_dep_option(self, object):
         base, _ = splitext(object)
@@ -177,7 +170,12 @@ class GCC(mbedToolchain):
         return ['-include', config_header]

     def get_compile_options(self, defines, includes):
-        opts = ['-D%s' % d for d in defines] + ['@%s' % self.get_inc_file(includes)]
+        opts = ['-D%s' % d for d in defines]
+        if self.RESPONSE_FILES:
+            opts += ['@%s' % self.get_inc_file(includes)]
+        else:
+            opts += ["-I%s" % i for i in includes]
+
         config_header = self.get_config_header()
         if config_header is not None:
             opts = opts + self.get_config_option(config_header)
@@ -235,32 +233,25 @@ class GCC(mbedToolchain):
         # Call cmdline hook
         cmd = self.hook.get_cmdline_linker(cmd)

-        # Split link command to linker executable + response file
-        link_files = join(dirname(output), ".link_files.txt")
-        with open(link_files, "wb") as f:
+        if self.RESPONSE_FILES:
+            # Split link command to linker executable + response file
             cmd_linker = cmd[0]
-            cmd_list = []
-            for c in cmd[1:]:
-                if c:
-                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
-            string = " ".join(cmd_list).replace("\\", "/")
-            f.write(string)
+            link_files = self.get_link_file(cmd[1:])
+            cmd = [cmd_linker, "@%s" % link_files]

         # Exec command
-        self.default_cmd([cmd_linker, "@%s" % link_files])
+        self.cc_verbose("Link: %s" % ' '.join(cmd))
+        self.default_cmd(cmd)

     @hook_tool
     def archive(self, objects, lib_path):
-        archive_files = join(dirname(lib_path), ".archive_files.txt")
-        with open(archive_files, "wb") as f:
-            o_list = []
-            for o in objects:
-                o_list.append('"%s"' % o)
-            string = " ".join(o_list).replace("\\", "/")
-            f.write(string)
+        if self.RESPONSE_FILES:
+            param = ["@%s" % self.get_arch_file(objects)]
+        else:
+            param = objects

         # Exec command
-        self.default_cmd([self.ar, 'rcs', lib_path, "@%s" % archive_files])
+        self.default_cmd([self.ar, 'rcs', lib_path] + param)

     @hook_tool
     def binary(self, resources, elf, bin):
@@ -271,12 +262,13 @@ class GCC(mbedToolchain):
         cmd = self.hook.get_cmdline_binary(cmd)

         # Exec command
+        self.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

 class GCC_ARM(GCC):
     def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
-        GCC.__init__(self, target, options, notify, macros, silent, GCC_ARM_PATH, extra_verbose=extra_verbose)
+        GCC.__init__(self, target, options, notify, macros, silent, TOOLCHAIN_PATHS['GCC_ARM'], extra_verbose=extra_verbose)

         # Use latest gcc nanolib
         if "big-build" in self.options:
@@ -309,7 +301,7 @@ class GCC_ARM(GCC):

 class GCC_CR(GCC):
     def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
-        GCC.__init__(self, target, options, notify, macros, silent, GCC_CR_PATH, extra_verbose=extra_verbose)
+        GCC.__init__(self, target, options, notify, macros, silent, TOOLCHAIN_PATHS['GCC_CR'], extra_verbose=extra_verbose)

         additional_compiler_flags = [
             "-D__NEWLIB__", "-D__CODE_RED", "-D__USE_CMSIS", "-DCPP_USE_HEAP",


@@ -18,9 +18,7 @@ import re
 from os import remove
 from os.path import join, exists, dirname, splitext, exists

-from tools.toolchains import mbedToolchain
-from tools.settings import IAR_PATH
-from tools.settings import GOANNA_PATH
+from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
 from tools.hooks import hook_tool

 class IAR(mbedToolchain):
@@ -29,6 +27,7 @@ class IAR(mbedToolchain):
     STD_LIB_NAME = "%s.a"
     DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)",(?P<line>[\d]+)\s+(?P<severity>Warning|Error)(?P<message>.+)')
+    INDEX_PATTERN = re.compile('(?P<col>\s*)\^')

     DEFAULT_FLAGS = {
         'common': [
@@ -66,12 +65,12 @@ class IAR(mbedToolchain):
         if target.core == "Cortex-M4F":
             c_flags_cmd = [
                 "--cpu", "Cortex-M4F",
-                "--thumb", "--dlib_config", join(IAR_PATH, "inc", "c", "DLib_Config_Full.h")
+                "--thumb", "--dlib_config", join(TOOLCHAIN_PATHS['IAR'], "inc", "c", "DLib_Config_Full.h")
             ]
         else:
             c_flags_cmd = [
                 "--cpu", cpuchoice,
-                "--thumb", "--dlib_config", join(IAR_PATH, "inc", "c", "DLib_Config_Full.h")
+                "--thumb", "--dlib_config", join(TOOLCHAIN_PATHS['IAR'], "inc", "c", "DLib_Config_Full.h")
             ]
         # custom c++ cmd flags
         cxx_flags_cmd = [
@@ -90,16 +89,12 @@ class IAR(mbedToolchain):
         else:
             c_flags_cmd.append("-Oh")

-        IAR_BIN = join(IAR_PATH, "bin")
+        IAR_BIN = join(TOOLCHAIN_PATHS['IAR'], "bin")
         main_cc = join(IAR_BIN, "iccarm")

         self.asm = [join(IAR_BIN, "iasmarm")] + asm_flags_cmd + self.flags["asm"]
-        if not "analyze" in self.options:
-            self.cc = [main_cc]
-            self.cppc = [main_cc]
-        else:
-            self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT]
-            self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT]
+        self.cc = [main_cc]
+        self.cppc = [main_cc]
         self.cc += self.flags["common"] + c_flags_cmd + self.flags["c"]
         self.cppc += self.flags["common"] + c_flags_cmd + cxx_flags_cmd + self.flags["cxx"]
         self.ld = join(IAR_BIN, "ilinkarm")
@@ -107,29 +102,34 @@ class IAR(mbedToolchain):
         self.elf2bin = join(IAR_BIN, "ielftool")

     def parse_dependencies(self, dep_path):
-        return [path.strip() for path in open(dep_path).readlines()
+        return [(self.CHROOT if self.CHROOT else '')+path.strip() for path in open(dep_path).readlines()
                 if (path and not path.isspace())]

     def parse_output(self, output):
+        msg = None
         for line in output.splitlines():
             match = IAR.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
-                self.cc_info(
-                    match.group('severity').lower(),
-                    match.group('file'),
-                    match.group('line'),
-                    match.group('message'),
-                    target_name=self.target.name,
-                    toolchain_name=self.name
-                )
-            match = self.goanna_parse_line(line)
-            if match is not None:
-                self.cc_info(
-                    match.group('severity').lower(),
-                    match.group('file'),
-                    match.group('line'),
-                    match.group('message')
-                )
+                if msg is not None:
+                    self.cc_info(msg)
+                msg = {
+                    'severity': match.group('severity').lower(),
+                    'file': match.group('file'),
+                    'line': match.group('line'),
+                    'col': 0,
+                    'message': match.group('message'),
+                    'text': '',
+                    'target_name': self.target.name,
+                    'toolchain_name': self.name
+                }
+            elif msg is not None:
+                match = IAR.INDEX_PATTERN.match(line)
+                if match is not None:
+                    msg['col'] = len(match.group('col'))
+                    self.cc_info(msg)
+                    msg = None
+                else:
+                    msg['text'] += line+"\n"

     def get_dep_option(self, object):
         base, _ = splitext(object)
@@ -144,7 +144,12 @@ class IAR(mbedToolchain):
         return ['--preinclude=' + config_header]

     def get_compile_options(self, defines, includes, for_asm=False):
-        opts = ['-D%s' % d for d in defines] + ['-f', self.get_inc_file(includes)]
+        opts = ['-D%s' % d for d in defines]
+        if self.RESPONSE_FILES:
+            opts += ['-f', self.get_inc_file(includes)]
+        else:
+            opts += ["-I%s" % i for i in includes]
+
         config_header = self.get_config_header()
         if for_asm:
             # The assembler doesn't support '--preinclude', so we need to add
@@ -201,34 +206,27 @@ class IAR(mbedToolchain):
         # Call cmdline hook
         cmd = self.hook.get_cmdline_linker(cmd)

-        # Split link command to linker executable + response file
-        link_files = join(dirname(output), ".link_files.txt")
-        with open(link_files, "wb") as f:
+        if self.RESPONSE_FILES:
+            # Split link command to linker executable + response file
             cmd_linker = cmd[0]
-            cmd_list = []
-            for c in cmd[1:]:
-                if c:
-                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
-            string = " ".join(cmd_list).replace("\\", "/")
-            f.write(string)
+            link_files = self.get_link_file(cmd[1:])
+            cmd = [cmd_linker, '-f', link_files]

         # Exec command
-        self.default_cmd([cmd_linker, '-f', link_files])
+        self.cc_verbose("Link: %s" % ' '.join(cmd))
+        self.default_cmd(cmd)

     @hook_tool
     def archive(self, objects, lib_path):
-        archive_files = join(dirname(lib_path), ".archive_files.txt")
-        with open(archive_files, "wb") as f:
-            o_list = []
-            for o in objects:
-                o_list.append('"%s"' % o)
-            string = " ".join(o_list).replace("\\", "/")
-            f.write(string)
+        if self.RESPONSE_FILES:
+            param = ['-f', self.get_arch_file(objects)]
+        else:
+            param = objects

         if exists(lib_path):
             remove(lib_path)
-        self.default_cmd([self.ar, lib_path, '-f', archive_files])
+        self.default_cmd([self.ar, lib_path] + param)

     @hook_tool
     def binary(self, resources, elf, bin):
@@ -239,4 +237,5 @@ class IAR(mbedToolchain):
         cmd = self.hook.get_cmdline_binary(cmd)

         # Exec command
+        self.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)


@@ -25,6 +25,28 @@ from os.path import isdir, join, exists, split, relpath, splitext
 from subprocess import Popen, PIPE, STDOUT, call
 import json
 from collections import OrderedDict
+import logging
+
+def compile_worker(job):
+    results = []
+    for command in job['commands']:
+        try:
+            _, _stderr, _rc = run_cmd(command, work_dir=job['work_dir'], chroot=job['chroot'])
+        except KeyboardInterrupt as e:
+            raise ToolException
+
+        results.append({
+            'code': _rc,
+            'output': _stderr,
+            'command': command
+        })
+
+    return {
+        'source': job['source'],
+        'object': job['object'],
+        'commands': job['commands'],
+        'results': results
+    }

 def cmd(l, check=True, verbose=False, shell=False, cwd=None):
     text = l if shell else ' '.join(l)
@@ -35,10 +57,21 @@ def cmd(l, check=True, verbose=False, shell=False, cwd=None):
         raise Exception('ERROR %d: "%s"' % (rc, text))

-def run_cmd(command, wd=None, redirect=False):
-    assert is_cmd_valid(command[0])
+def run_cmd(command, work_dir=None, chroot=None, redirect=False):
+    if chroot:
+        # Conventions managed by the web team for the mbed.org build system
+        chroot_cmd = [
+            '/usr/sbin/chroot', '--userspec=33:33', chroot
+        ]
+        for c in command:
+            chroot_cmd += [c.replace(chroot, '')]
+
+        logging.debug("Running command %s"%' '.join(chroot_cmd))
+        command = chroot_cmd
+        work_dir = None
+
     try:
-        p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=wd)
+        p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=work_dir)
         _stdout, _stderr = p.communicate()
     except OSError as e:
         print "[OS ERROR] Command: "+(' '.join(command))