2013-08-06 13:38:00 +00:00
|
|
|
"""
|
|
|
|
mbed SDK
|
|
|
|
Copyright (c) 2011-2013 ARM Limited
|
|
|
|
|
|
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
you may not use this file except in compliance with the License.
|
|
|
|
You may obtain a copy of the License at
|
|
|
|
|
|
|
|
http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
|
|
Unless required by applicable law or agreed to in writing, software
|
|
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
See the License for the specific language governing permissions and
|
|
|
|
limitations under the License.
|
|
|
|
"""
|
2014-09-19 10:30:35 +00:00
|
|
|
|
|
|
|
import re
|
2014-12-03 09:24:19 +00:00
|
|
|
import sys
|
2016-06-09 22:50:03 +00:00
|
|
|
from os import stat, walk, getcwd, sep
|
2013-06-24 13:32:08 +00:00
|
|
|
from copy import copy
|
2014-09-19 10:30:35 +00:00
|
|
|
from time import time, sleep
|
2013-06-24 13:32:08 +00:00
|
|
|
from types import ListType
|
2014-09-19 10:30:35 +00:00
|
|
|
from shutil import copyfile
|
2016-06-09 22:50:03 +00:00
|
|
|
from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath
|
2013-06-24 13:32:08 +00:00
|
|
|
from inspect import getmro
|
2016-06-07 19:28:12 +00:00
|
|
|
from copy import deepcopy
|
2013-06-24 13:32:08 +00:00
|
|
|
|
2014-09-19 10:30:35 +00:00
|
|
|
from multiprocessing import Pool, cpu_count
|
2016-06-09 20:34:53 +00:00
|
|
|
from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
|
|
|
|
from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
|
|
|
|
import tools.hooks as hooks
|
2016-06-10 14:19:02 +00:00
|
|
|
from tools.memap import MemapParser
|
2016-06-09 22:50:03 +00:00
|
|
|
from hashlib import md5
|
|
|
|
import fnmatch
|
2014-09-19 10:30:35 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
|
2014-07-09 12:30:41 +00:00
|
|
|
# Lower bound for the build-job worker count.  Per the original note:
# multiprocessing is effectively disabled if this is set to a higher number
# than the host machine's CPUs.
CPU_COUNT_MIN = 1
|
2014-07-09 10:28:02 +00:00
|
|
|
|
2014-07-10 12:33:04 +00:00
|
|
|
def compile_worker(job):
    """Execute every command of one compile job and collect the outcomes.

    Runs inside a multiprocessing pool worker.  *job* is a dict carrying
    'source', 'object', 'commands' and 'work_dir' keys; the return value
    echoes the job description plus a per-command result list.
    """
    outcomes = []
    for cmd in job['commands']:
        _, stderr_text, return_code = run_cmd(cmd, job['work_dir'])
        outcomes.append({
            'code': return_code,
            'output': stderr_text,
            'command': cmd,
        })

    return {
        'source': job['source'],
        'object': job['object'],
        'commands': job['commands'],
        'results': outcomes,
    }
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
class Resources:
    """Collection of build inputs discovered while scanning a source tree.

    Holds include directories, sources grouped by language, libraries,
    the linker script, and repository/metadata files.  Instances support
    '+' (and sum() via __radd__) so scan results can be merged.
    """

    # Every path-list attribute rewritten by relative_to()/win_to_unix().
    # Factored out so the two methods can never drift apart (the original
    # duplicated this list in both).
    _PATH_FIELDS = ['inc_dirs', 'headers', 's_sources', 'c_sources',
                    'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                    'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                    'hex_files', 'bin_files', 'json_files']

    def __init__(self, base_path=None):
        # Root directory the scan started from (informational only).
        self.base_path = base_path

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

    def __add__(self, resources):
        # 'res + None' is a no-op so results can be summed safely.
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        # Mirror of __add__ so sum(...) with a 0/None start value works.
        if resources is None:
            return self
        else:
            return self.add(resources)

    def add(self, resources):
        """Merge *resources* into self (in place) and return self."""
        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last non-None linker script wins.
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        return self

    def relative_to(self, base, dot=False):
        """Rewrite every stored path relative to *base*.

        NOTE: like the original, this turns lib_dirs (a set) into a list.
        """
        for field in self._PATH_FIELDS:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)
        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path from Windows to Unix separators."""
        for field in self._PATH_FIELDS:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)
        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
            ('Include Directories', self.inc_dirs),
            ('Headers', self.headers),

            ('Assembly sources', self.s_sources),
            ('C sources', self.c_sources),
            ('C++ sources', self.cpp_sources),

            ('Library directories', self.lib_dirs),
            ('Objects', self.objects),
            ('Libraries', self.libraries),

            ('Hex files', self.hex_files),
            ('Bin files', self.bin_files),
        ):
            if resources:
                s.append('%s:\n ' % label + '\n '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
|
|
|
|
|
2013-07-23 16:22:57 +00:00
|
|
|
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = {
    # legacy per-target directories
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    # legacy per-toolchain directories
    'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM',
}

# Maps a toolchain class name to the directory label used by the legacy
# build layout.
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD': 'ARM',
    'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM',
    'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}
|
|
|
|
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
class mbedToolchain:
    # When False, tool errors are surfaced through print_notify instead of
    # the raw verbose output.
    VERBOSE = True
    # Glob patterns gathered from .mbedignore files by scan_resources().
    # NOTE(review): class attribute, so patterns are shared across all
    # toolchain instances — confirm that is intended.
    ignorepatterns = []

    # Preprocessor symbols defined for each supported Cortex core name.
    # NOTE(review): "Cortex-M1" maps to __CORTEX_M3 — looks deliberate
    # (M1 uses the M3 CMSIS core header), but verify.
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"]
    }

    # Output template and matching regex for Goanna static-analysis
    # diagnostics embedded in compiler output.
    GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
    GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')
|
|
|
|
|
2015-11-12 18:16:10 +00:00
|
|
|
def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
    """Create a toolchain instance bound to *target*.

    target        -- target object describing the MCU being built for
    options       -- list of build option strings (e.g. "debug-info")
    notify        -- optional callable(event, silent) replacing the default
                     command-line notifiers
    macros        -- extra preprocessor macros to define
    silent        -- suppress progress messages in the notifiers
    extra_verbose -- use the verbose notifier when no *notify* is given
    """
    self.target = target
    # Concrete subclass name ("ARM", "GCC_ARM", ...) doubles as the
    # toolchain identifier.
    self.name = self.__class__.__name__
    self.hook = hooks.Hook(target, self)
    self.silent = silent
    # Accumulated notifier output, exposed through get_output().
    self.output = ""

    # Ignore every legacy target/toolchain directory except our own.
    self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

    # Pick the notification callback: caller-supplied wins, then verbosity.
    if notify:
        self.notify_fun = notify
    elif extra_verbose:
        self.notify_fun = self.print_notify_verbose
    else:
        self.notify_fun = self.print_notify

    self.options = options if options is not None else []

    self.macros = macros or []
    # Global options from tools.settings are always appended.
    self.options.extend(BUILD_OPTIONS)
    if self.options:
        self.info("Build Options: %s" % (', '.join(self.options)))

    # Relative object directory, e.g. "TARGET_K64F/TOOLCHAIN_GCC_ARM".
    self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)

    # Lazily computed by get_symbols() / get_labels().
    self.symbols = None
    self.labels = None
    # Set by scan_resources() when an mbed_config.h is found.
    self.has_config = False

    self.build_all = False
    self.build_dir = None
    self.timestamp = time()
    self.jobs = 1

    self.CHROOT = None

    self.mp_pool = None

    # uVisor builds run the core without the FPU: strip the "F" suffix.
    if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
        self.target.core = re.sub(r"F$", '', self.target.core)

    # Per-instance copy so a subclass's DEFAULT_FLAGS are never mutated
    # globally.
    self.flags = deepcopy(self.DEFAULT_FLAGS)
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2015-11-05 23:21:21 +00:00
|
|
|
def get_output(self):
    """Return everything the notifiers have accumulated so far."""
    return self.output
|
|
|
|
|
|
|
|
def print_notify(self, event, silent=False):
    """Default command line notification.

    Formats *event* (a dict with at least a 'type' key) into a one-line
    message, prints it, and appends it to self.output.
    """
    msg = None

    # Tool errors are only shown here when verbose output is off.
    if not self.VERBOSE and event['type'] == 'tool_error':
        msg = event['message']

    elif event['type'] in ['info', 'debug']:
        msg = event['message']

    elif event['type'] == 'cc':
        # Compiler diagnostic: "[Severity] file@line: message"
        event['severity'] = event['severity'].title()
        event['file'] = basename(event['file'])
        msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event

    elif event['type'] == 'progress':
        if not silent:
            msg = '%s: %s' % (event['action'].title(), basename(event['file']))

    if msg:
        # Fix: use the parenthesized print form, which behaves identically
        # for a single argument on Python 2 and is valid Python 3 (the
        # original Python-2-only `print msg` statement breaks on Python 3).
        print(msg)
        self.output += msg + "\n"
|
|
|
|
|
|
|
|
def print_notify_verbose(self, event, silent=False):
    """Default command line notification with more verbose mode.

    'cc' events get the target and toolchain names prepended; other event
    types fall back to the standard print_notify handler.
    """
    if event['type'] in ['info', 'debug']:
        self.print_notify(event) # standard handle

    elif event['type'] == 'cc':
        event['severity'] = event['severity'].title()
        event['file'] = basename(event['file'])
        event['mcu_name'] = "None"
        event['toolchain'] = "None"
        # Fall back to "Unknown" when the event carries no names.
        event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
        event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
        msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
        # Fix: parenthesized print — identical for one argument on Python 2
        # and valid Python 3 (original used the Py2-only print statement).
        print(msg)
        self.output += msg + "\n"

    elif event['type'] == 'progress':
        self.print_notify(event) # standard handle
|
|
|
|
|
2014-12-03 09:24:19 +00:00
|
|
|
def notify(self, event):
    """Dispatch *event* to the configured notifier, honouring silent mode."""
    return self.notify_fun(event, self.silent)
|
|
|
|
|
2014-08-14 16:40:58 +00:00
|
|
|
def __exit__(self, exc_type=None, exc_value=None, traceback=None):
    """Tear down: terminate the multiprocessing pool if one was started.

    Fix: the original signature took no exception arguments, so the object
    could never actually be used in a `with` statement (the runtime passes
    three arguments to __exit__).  Defaults keep any existing direct
    self.__exit__() calls working unchanged.
    """
    if self.mp_pool is not None:
        self.mp_pool.terminate()
|
2013-10-14 14:32:41 +00:00
|
|
|
|
|
|
|
def goanna_parse_line(self, line):
    """Match *line* against the Goanna diagnostic regex.

    Returns a match object, or None when static analysis is not enabled
    (no "analyze" build option) or the line does not match.
    """
    if "analyze" not in self.options:
        return None
    return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def get_symbols(self):
    """Build (once) and return the unique preprocessor symbols for this
    target/toolchain combination.
    """
    if self.symbols is None:
        # Target and Toolchain symbols
        labels = self.get_labels()
        self.symbols = ["TARGET_%s" % label for label in labels['TARGET']]
        self.symbols.extend("TOOLCHAIN_%s" % label for label in labels['TOOLCHAIN'])

        # Config support
        if self.has_config:
            self.symbols.append('HAVE_MBED_CONFIG_H')

        # Cortex CPU symbols
        core_symbols = mbedToolchain.CORTEX_SYMBOLS.get(self.target.core)
        if core_symbols is not None:
            self.symbols.extend(core_symbols)

        # Symbols defined by the on-line build.system
        self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp,
                             'TARGET_LIKE_MBED', '__MBED__=1'])
        if MBED_ORG_USER:
            self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

        # Add target's symbols
        self.symbols += self.target.macros
        # Add target's hardware
        self.symbols += ["DEVICE_%s=1" % name for name in self.target.device_has]
        # Add target's features
        self.symbols += ["FEATURE_%s=1" % name for name in self.target.features]
        # Add extra symbols passed via 'macros' parameter
        self.symbols += self.macros

        # Form factor variables
        if hasattr(self.target, 'supported_form_factors'):
            self.symbols.extend("TARGET_FF_%s" % ff
                                for ff in self.target.supported_form_factors)

    return list(set(self.symbols)) # Return only unique symbols
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
# Extend the internal list of macros
def add_macros(self, new_macros):
    """Append *new_macros* to the extra preprocessor macro list."""
    self.macros.extend(new_macros)
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def get_labels(self):
    """Build (once) and return the TARGET/FEATURE/TOOLCHAIN label dict."""
    if self.labels is None:
        # Every class in the MRO except the common base contributes a
        # toolchain label.
        toolchain_labels = [klass.__name__ for klass in getmro(self.__class__)]
        toolchain_labels.remove('mbedToolchain')
        build_type = "DEBUG" if "debug-info" in self.options else "RELEASE"
        self.labels = {
            'TARGET': self.target.get_labels() + [build_type],
            'FEATURE': self.target.features,
            'TOOLCHAIN': toolchain_labels
        }
    return self.labels
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def need_update(self, target, dependencies):
    """Return True when *target* must be (re)built.

    True when a full rebuild was requested, when *target* is missing, when
    any dependency is missing or unresolvable, or when any dependency is
    at least as new as *target*.
    """
    if self.build_all:
        return True

    if not exists(target):
        return True

    target_mod_time = stat(target).st_mtime

    # Some objects are not provided with full path and here we do not have
    # information about the library paths. Safe option: assume an update
    # is needed.  Short-circuit `or` guards the stat() against missing deps.
    return any(not dep or not exists(dep) or stat(dep).st_mtime >= target_mod_time
               for dep in dependencies)
|
2014-08-20 09:59:28 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
def is_ignored(self, file_path):
    """Return True if *file_path* matches any .mbedignore glob pattern."""
    return any(fnmatch.fnmatch(file_path, pattern)
               for pattern in self.ignorepatterns)
|
|
|
|
|
|
|
|
def scan_resources(self, path, exclude_paths=None):
    """Walk *path* and classify every file and directory into a Resources
    object.

    path          -- root directory to scan
    exclude_paths -- optional list of directories to prune from the walk

    Honours .mbedignore glob patterns, prunes legacy/label-mismatched
    TARGET_/FEATURE_/TOOLCHAIN_ directories, and records Mercurial
    repository metadata separately.
    """
    labels = self.get_labels()
    resources = Resources(path)
    # Reset; set again below if an mbed_config.h is found.
    self.has_config = False

    """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
    When topdown is True, the caller can modify the dirnames list in-place
    (perhaps using del or slice assignment), and walk() will only recurse into
    the subdirectories whose names remain in dirnames; this can be used to prune
    the search, impose a specific order of visiting, or even to inform walk()
    about directories the caller creates or renames before it resumes walk()
    again. Modifying dirnames when topdown is False is ineffective, because in
    bottom-up mode the directories in dirnames are generated before dirpath
    itself is generated.
    """
    for root, dirs, files in walk(path, followlinks=True):
        # Remove ignored directories
        # Check if folder contains .mbedignore
        if ".mbedignore" in files :
            with open (join(root,".mbedignore"), "r") as f:
                lines=f.readlines()
                lines = [l.strip() for l in lines] # Strip whitespaces
                lines = [l for l in lines if l != ""] # Strip empty lines
                lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                # Append root path to glob patterns
                # and append patterns to ignorepatterns
                self.ignorepatterns.extend([join(root,line.strip()) for line in lines])

        # Iterate over a copy because we remove entries from `dirs`
        # in place to prune the walk (topdown mode).
        for d in copy(dirs):
            dir_path = join(root, d)
            # Mercurial metadata is recorded, not compiled.
            if d == '.hg':
                resources.repo_dirs.append(dir_path)
                resources.repo_files.extend(self.scan_repository(dir_path))

            # Prune hidden dirs, legacy dirs, test dirs, and any labelled
            # directory whose label does not match this build.
            if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                (d.startswith('FEATURE_') and d[8:] not in labels['FEATURE']) or
                (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                (d == 'TESTS')):
                dirs.remove(d)

            # Remove dirs that already match the ignorepatterns
            # to avoid travelling into them and to prevent them
            # on appearing in include path.
            # NOTE(review): if *d* was already removed by the label check
            # above, a second remove() here would raise ValueError —
            # confirm the two conditions can never overlap.
            if self.is_ignored(join(dir_path,"")):
                dirs.remove(d)

            if exclude_paths:
                for exclude_path in exclude_paths:
                    rel_path = relpath(dir_path, exclude_path)
                    # A relpath not starting with '..' means dir_path lies
                    # inside exclude_path, so prune it.
                    if not (rel_path.startswith('..')):
                        dirs.remove(d)
                        break

        # Add root to include paths
        resources.inc_dirs.append(root)

        for file in files:
            file_path = join(root, file)

            if self.is_ignored(file_path):
                continue

            # Classify by lower-cased extension.
            _, ext = splitext(file)
            ext = ext.lower()

            if ext == '.s':
                resources.s_sources.append(file_path)

            elif ext == '.c':
                resources.c_sources.append(file_path)

            elif ext == '.cpp':
                resources.cpp_sources.append(file_path)

            elif ext == '.h' or ext == '.hpp':
                # A project-level mbed_config.h enables HAVE_MBED_CONFIG_H.
                if basename(file_path) == "mbed_config.h":
                    self.has_config = True
                resources.headers.append(file_path)

            elif ext == '.o':
                resources.objects.append(file_path)

            elif ext == self.LIBRARY_EXT:
                resources.libraries.append(file_path)
                resources.lib_dirs.add(root)

            elif ext == self.LINKER_EXT:
                # Only one linker script is kept; warn and keep the latest.
                if resources.linker_script is not None:
                    self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
                resources.linker_script = file_path

            elif ext == '.lib':
                resources.lib_refs.append(file_path)

            elif ext == '.bld':
                resources.lib_builds.append(file_path)

            elif file == '.hgignore':
                resources.repo_files.append(file_path)

            elif ext == '.hex':
                resources.hex_files.append(file_path)

            elif ext == '.bin':
                resources.bin_files.append(file_path)

            elif ext == '.json':
                resources.json_files.append(file_path)

    return resources
|
2013-11-14 16:45:14 +00:00
|
|
|
|
|
|
|
def scan_repository(self, path):
    """Return a flat list of every file path found under *path*.

    Directory entries named '.' or '..' are pruned from the walk.
    """
    collected = []
    for root, dirs, files in walk(path):
        # Prune ignored directory entries in place so walk() skips them
        dirs[:] = [d for d in dirs if d not in ('.', '..')]

        for fname in files:
            collected.append(join(root, fname))

    return collected
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def copy_files(self, files_paths, trg_path, rel_path=None):
    """Copy one file or a list of files into *trg_path*.

    When *rel_path* is given, each source keeps its path relative to
    *rel_path* under the target; otherwise only the basename is used.
    Files are copied only when self.need_update() says the target is stale.
    """
    # Handle a single file
    if type(files_paths) != ListType: files_paths = [files_paths]

    # Drop None entries without mutating the caller's list.
    # (The original removed items from files_paths while iterating it,
    # which skips the element following each removal.)
    files_paths = [source for source in files_paths if source is not None]

    for source in files_paths:
        if rel_path is not None:
            relative_path = relpath(source, rel_path)
        else:
            _, relative_path = split(source)

        target = join(trg_path, relative_path)

        if (target != source) and (self.need_update(target, [source])):
            self.progress("copy", relative_path)
            mkdir(dirname(target))
            copyfile(source, target)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def relative_object_path(self, build_path, base_dir, source):
    """Map *source* to its '.o' path under *build_path*, mirroring the
    directory layout relative to *base_dir*; the directory is created."""
    src_dir, src_name, _ = split_path(source)

    target_dir = join(build_path, relpath(src_dir, base_dir))
    mkdir(target_dir)
    return join(target_dir, src_name + '.o')
|
2014-08-20 09:59:28 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
def get_inc_file(self, includes):
    """Return the path of a response file holding '-I' flags for *includes*.

    The file is keyed by self.inc_md5 and written only once per build dir.
    Backslashes are normalised to forward slashes for the compiler.
    """
    include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
    if not exists(include_file):
        flags = ['-I%s' % inc for inc in includes if inc]
        with open(include_file, "wb") as handle:
            handle.write(" ".join(flags).replace("\\", "/"))
    return include_file
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def compile_sources(self, resources, build_path, inc_dirs=None):
    """Compile all assembly/C/C++ sources from *resources* into *build_path*.

    Out-of-date sources are queued; the queue is drained either in
    parallel (compile_queue) or sequentially (compile_seq) depending on
    the job count.  Returns the list of object file paths.
    """
    # Web IDE progress bar for project build
    files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
    self.to_be_compiled = len(files_to_compile)
    self.compiled = 0

    # Copy so extending with extra dirs does not mutate resources.inc_dirs
    inc_paths = resources.inc_dirs[:]
    if inc_dirs is not None:
        inc_paths.extend(inc_dirs)

    # De-duplicate and sort include paths for consistency
    # (was: set() followed by a second, redundant sorted(set()))
    inc_paths = sorted(set(inc_paths))

    # Unique id of all include paths
    self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()

    # Where to store response files
    self.build_dir = build_path

    objects = []
    queue = []

    # The dependency checking for C/C++ is delegated to the compiler
    base_path = resources.base_path

    # Sort compile queue for consistency
    files_to_compile.sort()
    work_dir = getcwd()

    for source in files_to_compile:
        object = self.relative_object_path(build_path, base_path, source)

        # Queue mode (multiprocessing): commands is None when up to date
        commands = self.compile_command(source, object, inc_paths)
        if commands is not None:
            queue.append({
                'source': source,
                'object': object,
                'commands': commands,
                'work_dir': work_dir,
                'chroot': self.CHROOT
            })
        else:
            objects.append(object)

    # Use queues/multiprocessing if cpu count is higher than setting
    jobs = self.jobs if self.jobs else cpu_count()
    if jobs > CPU_COUNT_MIN and len(queue) > jobs:
        return self.compile_queue(queue, objects)
    else:
        return self.compile_seq(queue, objects)
|
|
|
|
|
|
|
|
def compile_seq(self, queue, objects):
    """Run the queued compile jobs one by one in the current process.

    Appends each produced object to *objects* and returns it.
    """
    for item in queue:
        result = compile_worker(item)

        self.compiled += 1
        self.progress("compile", item['source'], build_update=True)

        for res in result['results']:
            self.debug("Command: %s" % ' '.join(res['command']))
            self.compile_output([res['code'], res['output'], res['command']])

        objects.append(result['object'])

    return objects
|
2014-07-09 10:28:02 +00:00
|
|
|
|
|
|
|
def compile_queue(self, queue, objects):
    """Drain the compile *queue* through a multiprocessing Pool.

    Appends each produced object to *objects* and returns it.
    Raises ToolException on a failed compile or when the pool does not
    finish within the 5 minute deadline.
    """
    jobs_count = int(self.jobs if self.jobs else cpu_count())
    p = Pool(processes=jobs_count)

    results = []
    for i in range(len(queue)):
        results.append(p.apply_async(compile_worker, [queue[i]]))

    # Wall-clock deadline; the original counted loop iterations, which did
    # not actually correspond to the 5 minutes claimed by the error message.
    deadline = time() + 300
    while True:
        if time() > deadline:
            p.terminate()
            p.join()
            raise ToolException("Compile did not finish in 5 minutes")

        pending = 0
        # Iterate a snapshot: results is mutated (remove) inside the loop
        for r in list(results):
            # Use the public AsyncResult.ready() API (was private r._ready)
            if r.ready():
                try:
                    result = r.get()
                    results.remove(r)

                    self.compiled += 1
                    self.progress("compile", result['source'], build_update=True)
                    for res in result['results']:
                        self.debug("Command: %s" % ' '.join(res['command']))
                        self.compile_output([
                            res['code'],
                            res['output'],
                            res['command']
                        ])
                    objects.append(result['object'])
                except ToolException as err:
                    p.terminate()
                    p.join()
                    raise ToolException(err)
            else:
                pending += 1
                if pending > jobs_count:
                    break

        if len(results) == 0:
            break

        sleep(0.01)

    results = None
    p.terminate()
    p.join()

    return objects
|
2014-08-05 15:22:50 +00:00
|
|
|
|
2014-08-07 16:01:39 +00:00
|
|
|
def compile_command(self, source, object, includes):
    """Return the command(s) that build *object* from *source*.

    Returns None when a C/C++ object is already up to date (or the
    extension is unhandled), and False for an up-to-date assembly object.
    """
    # Check dependencies
    _, ext = splitext(source)
    ext = ext.lower()

    if ext in ('.c', '.cpp'):
        base, _ = splitext(object)
        dep_path = base + '.d'
        deps = self.parse_dependencies(dep_path) if exists(dep_path) else []
        if not deps or self.need_update(object, deps):
            if ext == '.c':
                return self.compile_c(source, object, includes)
            return self.compile_cpp(source, object, includes)
    elif ext == '.s':
        if self.need_update(object, [source]):
            return self.assemble(source, object, includes)
        return False

    return None
|
2014-08-20 09:59:28 +00:00
|
|
|
|
2016-02-25 22:29:26 +00:00
|
|
|
def is_not_supported_error(self, output):
    """True when *output* carries the [NOT_SUPPORTED] #error marker."""
    return output.find("#error directive: [NOT_SUPPORTED]") >= 0
|
|
|
|
|
2014-08-07 16:01:39 +00:00
|
|
|
def compile_output(self, output=[]):
    """Log one compiler invocation's [return code, stderr, command] and
    raise (ToolException, or NotSupportedException for [NOT_SUPPORTED])
    when the return code is non-zero."""
    _rc, _stderr, command = output[0], output[1], output[2]

    # Parse output for Warnings and Errors
    self.parse_output(_stderr)
    self.debug("Return: %s" % _rc)
    for error_line in _stderr.splitlines():
        self.debug("Output: %s" % error_line)

    # Check return code
    if _rc != 0:
        for line in _stderr.splitlines():
            self.tool_error(line)

        if self.is_not_supported_error(_stderr):
            raise NotSupportedException(_stderr)
        raise ToolException(_stderr)
|
2014-07-09 11:38:18 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def build_library(self, objects, dir, name):
    """Archive *objects* into the standard-named library under *dir*.

    Returns True when the archive was (re)built, False when up to date.
    """
    lib = self.STD_LIB_NAME % name
    fout = join(dir, lib)

    if not self.need_update(fout, objects):
        return False

    self.info("Library: %s" % lib)
    self.archive(objects, fout)
    return True
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-07-24 16:29:11 +00:00
|
|
|
def link_program(self, r, tmp_path, name):
    # Link objects/libraries from resources *r* into an ELF under *tmp_path*,
    # convert it to the target's binary format, and emit memory statistics.
    # Returns (binary_path, needed_update) where needed_update reports
    # whether any link/convert step actually ran.
    needed_update = False
    ext = 'bin'
    if hasattr(self.target, 'OUTPUT_EXT'):
        ext = self.target.OUTPUT_EXT

    if hasattr(self.target, 'OUTPUT_NAMING'):
        self.var("binary_naming", self.target.OUTPUT_NAMING)
        if self.target.OUTPUT_NAMING == "8.3":
            # DOS-style 8.3 naming: truncate the basename and extension
            name = name[0:8]
            ext = ext[0:3]

    # Create destination directory (name may carry a subdirectory component)
    head, tail = split(name)
    new_path = join(tmp_path, head)
    mkdir(new_path)

    filename = name+'.'+ext
    elf = join(tmp_path, name + '.elf')
    bin = join(tmp_path, filename)
    map = join(tmp_path, name + '.map')

    # Relink only when any object, library, or the linker script changed
    if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
        needed_update = True
        self.progress("link", name)
        self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

    # Regenerate the binary only when the ELF is newer
    if self.need_update(bin, [elf]):
        needed_update = True
        self.progress("elf2bin", name)

        self.binary(r, elf, bin)

    # Memory usage statistics, derived from the linker map file
    self.mem_stats(map)

    self.var("compile_succeded", True)
    self.var("binary", filename)

    return bin, needed_update
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def default_cmd(self, command):
    """Run *command*, log stdout/stderr at debug level, and raise
    ToolException when the command exits non-zero."""
    self.debug("Command: %s" % ' '.join(command))
    _stdout, _stderr, _rc = run_cmd(command)

    self.debug("Return: %s" % _rc)

    for line_out in _stdout.splitlines():
        self.debug("Output: %s" % line_out)
    for line_err in _stderr.splitlines():
        self.debug("Errors: %s" % line_err)

    if _rc != 0:
        for line in _stderr.splitlines():
            self.tool_error(line)
        raise ToolException(_stderr)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
### NOTIFICATIONS ###
|
|
|
|
def info(self, message):
    """Send an informational notification."""
    event = {'type': 'info', 'message': message}
    self.notify(event)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def debug(self, message):
    """Send a '[DEBUG]'-prefixed notification; no-op unless verbose."""
    if not self.VERBOSE:
        return
    # Lists of command tokens are flattened into one line
    if type(message) is ListType:
        message = ' '.join(message)
    self.notify({'type': 'debug', 'message': "[DEBUG] " + message})
|
2014-06-02 14:44:45 +00:00
|
|
|
|
|
|
|
def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
    """Send a structured compiler-diagnostic notification."""
    event = {'type': 'cc'}
    event['severity'] = severity
    event['file'] = file
    event['line'] = line
    event['message'] = message
    event['target_name'] = target_name
    event['toolchain_name'] = toolchain_name
    self.notify(event)
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def progress(self, action, file, build_update=False):
    """Send a progress notification; with build_update, include the
    percentage of sources compiled so far."""
    event = {'type': 'progress', 'action': action, 'file': file}
    if build_update:
        event['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
    self.notify(event)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def tool_error(self, message):
    """Send a tool-error notification."""
    event = {'type': 'tool_error', 'message': message}
    self.notify(event)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def var(self, key, value):
    """Send a key/value notification."""
    event = {'type': 'var', 'key': key, 'val': value}
    self.notify(event)
|
2013-06-24 13:32:08 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
def mem_stats(self, map):
    """Parse the linker map file *map* and emit memory-usage reports
    (table to stdout, JSON and CSV files next to the map file)."""
    # The parser keys its decoding off the toolchain class name
    toolchain = self.__class__.__name__

    memap = MemapParser()

    # Parse and decode the map file; bail out on an unknown toolchain
    if memap.parse(abspath(map), toolchain) is False:
        self.info("Unknown toolchain for memory statistics %s" % toolchain)
        return

    # Pretty table on stdout
    memap.generate_output('table')

    # JSON report
    memap.generate_output('json', splitext(map)[0] + "_map.json")

    # CSV report for the CI
    memap.generate_output('csv-ci', splitext(map)[0] + "_map.csv")
|
|
|
|
|
|
|
|
|
2016-06-09 20:34:53 +00:00
|
|
|
# Per-toolchain compiler installation paths, taken from the user settings.
from tools.settings import ARM_BIN
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from tools.settings import IAR_PATH

# Toolchain name -> configured binary directory of that compiler suite.
TOOLCHAIN_BIN_PATH = {
    'ARM': ARM_BIN,
    'uARM': ARM_BIN,
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}

# Concrete toolchain implementations.
from tools.toolchains.arm import ARM_STD, ARM_MICRO
from tools.toolchains.gcc import GCC_ARM, GCC_CR
from tools.toolchains.iar import IAR

# Toolchain name -> toolchain class, keyed consistently with
# TOOLCHAIN_BIN_PATH above.
TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}

# Set of all supported toolchain names.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
|