# mbed-os/tools/toolchains/__init__.py
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import sys
from os import stat, walk, getcwd, sep
from copy import copy
from time import time, sleep
from types import ListType
from shutil import copyfile
from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath
from inspect import getmro
from copy import deepcopy
from tools.config import Config
from multiprocessing import Pool, cpu_count
from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
import tools.hooks as hooks
from tools.memap import MemapParser
from hashlib import md5
import fnmatch
# Builds fall back to sequential compilation unless the host reports
# more CPUs than this minimum.
CPU_COUNT_MIN = 1

def compile_worker(job):
    """Run every compile command of a single job and collect the outcomes.

    *job* is a dict with 'commands' (list of command lines), 'work_dir',
    'source' and 'object' entries.  Returns the job identity together
    with a per-command list of {'code', 'output', 'command'} results.
    Raises ToolException when the user interrupts the build.
    """
    outcomes = []
    for cmd in job['commands']:
        try:
            _, cmd_stderr, cmd_rc = run_cmd(cmd, job['work_dir'])
        except KeyboardInterrupt:
            raise ToolException

        outcomes.append({
            'code': cmd_rc,
            'output': cmd_stderr,
            'command': cmd
        })

    return {
        'source': job['source'],
        'object': job['object'],
        'commands': job['commands'],
        'results': outcomes
    }
class Resources:
    """Collection of build inputs discovered by scanning a source tree.

    Holds categorized path lists (sources, headers, objects, libraries,
    linker script, ...) plus mbed-specific metadata (repo files, feature
    sub-resources).  Instances can be merged with '+' or add().
    """

    # Path-collection attributes rewritten uniformly by relative_to()
    # and win_to_unix().  Previously this list was duplicated in both
    # methods; keep it in one place so they cannot drift apart.
    _PATH_FIELDS = ['inc_dirs', 'headers', 's_sources', 'c_sources',
                    'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                    'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                    'hex_files', 'bin_files', 'json_files']

    def __init__(self, base_path=None):
        self.base_path = base_path
        # Maps each discovered file to the base path it was found under
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features
        self.features = {}

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def add(self, resources):
        """Merge another Resources object into this one (in place);
        returns self."""
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last linker script wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)

        return self

    def relative_to(self, base, dot=False):
        """Rewrite every stored path relative to *base*."""
        for field in self._PATH_FIELDS:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        for f in self.features:
            self.features[f] = rel_path(self.features[f], base, dot)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path from Windows to Unix separators."""
        for field in self._PATH_FIELDS:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        for f in self.features:
            self.features[f] = self.features[f].replace('\\', '/')

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
            ('Include Directories', self.inc_dirs),
            ('Headers', self.headers),
            ('Assembly sources', self.s_sources),
            ('C sources', self.c_sources),
            ('C++ sources', self.cpp_sources),
            ('Library directories', self.lib_dirs),
            ('Objects', self.objects),
            ('Libraries', self.libraries),
            ('Hex files', self.hex_files),
            ('Bin files', self.bin_files),
            ('Features', self.features),
        ):
            if resources:
                s.append('%s:\n ' % label + '\n '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM'
])

# Maps each toolchain class name to the legacy directory name used by
# the pre-labels build convention above.
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}
class mbedToolchain:
    # When True, debug notifications are emitted; when False only
    # tool_error messages reach the default notifier.
    VERBOSE = True

    # Preprocessor symbols injected for each supported Cortex core.
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
    }

    # Template and matcher for Goanna static-analysis diagnostics.
    GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
    GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')

    # Name of the generated configuration header.
    MBED_CONFIG_FILE_NAME = "mbed_config.h"
def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
    """Construct a toolchain for *target*.

    target        -- target description object (name, core, macros, features, ...)
    options       -- list of build options (-o flag); copied, never mutated
    notify        -- optional callable(event, silent) used for build output
    macros        -- extra preprocessor macros to define
    silent        -- suppress console output when True
    extra_verbose -- use the verbose notifier when no *notify* is given
    """
    self.target = target
    self.name = self.__class__.__name__

    # compile/assemble/link/binary hooks
    self.hook = hooks.Hook(target, self)

    # Toolchain flags
    self.flags = deepcopy(self.DEFAULT_FLAGS)

    # User-defined macros
    self.macros = macros or []

    # Macros generated from toolchain and target rules/features
    self.symbols = None

    # Labels generated from toolchain and target rules/features (used for selective build)
    self.labels = None

    # This will hold the configuration data (as returned by Config.get_config_data())
    self.config_data = None

    # Non-incremental compile
    self.build_all = False

    # Build output dir
    self.build_dir = None
    self.timestamp = time()

    # Output build naming based on target+toolchain combo (mbed 2.0 builds)
    self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)

    # Number of concurrent build jobs. 0 means auto (based on host system cores)
    self.jobs = 0

    self.CHROOT = None

    # Ignore patterns from .mbedignore files
    self.ignore_patterns = []

    # Pre-mbed 2.0 ignore dirs
    self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

    # Output notify function
    if notify:
        self.notify_fun = notify
    elif extra_verbose:
        self.notify_fun = self.print_notify_verbose
    else:
        self.notify_fun = self.print_notify

    # Silent builds (no output)
    self.silent = silent

    # Print output buffer
    self.output = ""

    # Build options passed by -o flag.  BUG FIX: copy the caller's list —
    # the original aliased it, so extending with BUILD_OPTIONS below
    # mutated the argument.
    self.options = list(options) if options is not None else []

    # Build options passed by settings.py or mbed_settings.py
    self.options.extend(BUILD_OPTIONS)
    if self.options:
        self.info("Build Options: %s" % (', '.join(self.options)))

    # uVisor specific rules
    if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
        self.target.core = re.sub(r"F$", '', self.target.core)
def get_output(self):
    """Return the accumulated console output buffer of this build."""
    return self.output
def print_notify(self, event, silent=False):
""" Default command line notification
"""
msg = None
if not self.VERBOSE and event['type'] == 'tool_error':
2015-11-05 23:21:21 +00:00
msg = event['message']
elif event['type'] in ['info', 'debug']:
msg = event['message']
2015-11-05 23:21:21 +00:00
elif event['type'] == 'cc':
event['severity'] = event['severity'].title()
event['file'] = basename(event['file'])
msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
elif event['type'] == 'progress':
if not silent:
msg = '%s: %s' % (event['action'].title(), basename(event['file']))
if msg:
print msg
self.output += msg + "\n"
def print_notify_verbose(self, event, silent=False):
""" Default command line notification with more verbose mode
"""
if event['type'] in ['info', 'debug']:
self.print_notify(event) # standard handle
elif event['type'] == 'cc':
event['severity'] = event['severity'].title()
event['file'] = basename(event['file'])
event['mcu_name'] = "None"
event['toolchain'] = "None"
event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
print msg
self.output += msg + "\n"
elif event['type'] == 'progress':
self.print_notify(event) # standard handle
def notify(self, event):
    """Dispatch *event* to the configured notifier, honouring silent mode."""
    return self.notify_fun(event, self.silent)
def goanna_parse_line(self, line):
    """Match *line* against the Goanna diagnostic pattern when static
    analysis ('analyze') is enabled; otherwise return None."""
    if "analyze" not in self.options:
        return None
    return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
def get_symbols(self):
    """Return the unique preprocessor symbols for this build.

    The symbol list is computed once and cached on the instance.
    """
    if self.symbols is None:
        # Target and Toolchain symbols
        labels = self.get_labels()
        self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
        self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

        # Cortex CPU symbols
        if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
            self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

        # Symbols defined by the on-line build.system
        self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
        if MBED_ORG_USER:
            self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

        # Target's own macros, hardware capabilities and features
        self.symbols += self.target.macros
        self.symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
        self.symbols += ["FEATURE_" + data + "=1" for data in self.target.features]

        # Extra symbols passed via the 'macros' parameter
        self.symbols += self.macros

        # Form factor variables
        if hasattr(self.target, 'supported_form_factors'):
            self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

    return list(set(self.symbols))  # Return only unique symbols
# Extend the internal list of macros
def add_macros(self, new_macros):
    """Append *new_macros* to the user-defined macro list."""
    self.macros.extend(new_macros)
def get_labels(self):
    """Return (and cache) the TARGET/FEATURE/TOOLCHAIN label sets used
    for selective builds."""
    if self.labels is None:
        # Every class in the MRO except the base contributes a toolchain label
        toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
        toolchain_labels.remove('mbedToolchain')

        build_type = "DEBUG" if "debug-info" in self.options else "RELEASE"
        self.labels = {
            'TARGET': self.target.get_labels() + [build_type],
            'FEATURE': self.target.features,
            'TOOLCHAIN': toolchain_labels,
        }
    return self.labels
def need_update(self, target, dependencies):
    """Return True when *target* must be (re)built.

    A rebuild is needed for forced full builds, a missing target, or
    any dependency that is missing, unresolvable or newer than target.
    """
    if self.build_all or not exists(target):
        return True

    target_mtime = stat(target).st_mtime
    # Some objects are not provided with full path and here we do not
    # have information about the library paths. Safe option: assume an
    # update is needed.
    return any(not dep or not exists(dep) or stat(dep).st_mtime >= target_mtime
               for dep in dependencies)
def is_ignored(self, file_path):
    """Return True when *file_path* matches any .mbedignore glob pattern."""
    return any(fnmatch.fnmatch(file_path, pattern)
               for pattern in self.ignore_patterns)
def scan_resources(self, path, exclude_paths=None, base_path=None):
    """Walk *path* and classify every file into a Resources object.

    Directories are pruned in place during the walk: .mbedignore glob
    patterns, legacy ignore names, non-matching TARGET_/TOOLCHAIN_
    labels, TESTS and excluded paths are skipped; FEATURE_ directories
    are scanned recursively into resources.features instead of the main
    scan.  Every visited directory becomes an include path.
    """
    labels = self.get_labels()
    resources = Resources(path)
    if not base_path:
        base_path = path
    resources.base_path = base_path

    """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
    When topdown is True, the caller can modify the dirnames list in-place
    (perhaps using del or slice assignment), and walk() will only recurse into
    the subdirectories whose names remain in dirnames; this can be used to prune
    the search, impose a specific order of visiting, or even to inform walk()
    about directories the caller creates or renames before it resumes walk()
    again. Modifying dirnames when topdown is False is ineffective, because in
    bottom-up mode the directories in dirnames are generated before dirpath
    itself is generated.
    """
    for root, dirs, files in walk(path, followlinks=True):
        # Check if folder contains .mbedignore
        if ".mbedignore" in files:
            with open (join(root,".mbedignore"), "r") as f:
                lines=f.readlines()
                lines = [l.strip() for l in lines] # Strip whitespaces
                lines = [l for l in lines if l != ""] # Strip empty lines
                lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                # Append root path to glob patterns and append patterns to ignore_patterns
                self.ignore_patterns.extend([join(root,line.strip()) for line in lines])

        # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
        if self.is_ignored(join(root,"")):
            continue

        # Iterate a copy so dirs can be pruned in place (see os.walk note)
        for d in copy(dirs):
            dir_path = join(root, d)
            # Add internal repo folders/files. This is needed for exporters
            if d == '.hg':
                resources.repo_dirs.append(dir_path)
                resources.repo_files.extend(self.scan_repository(dir_path))

            if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                # Ignore targets that do not match the TARGET in extra_labels list
                (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                # Ignore toolchain that do not match the current TOOLCHAIN
                (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                # Ignore .mbedignore files
                self.is_ignored(join(dir_path,"")) or
                # Ignore TESTS dir
                (d == 'TESTS')):
                dirs.remove(d)
            elif d.startswith('FEATURE_'):
                # Recursively scan features but ignore them in the current scan.
                # These are dynamically added by the config system if the conditions are matched
                resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
                dirs.remove(d)
            elif exclude_paths:
                # Prune any directory that lies inside one of exclude_paths
                for exclude_path in exclude_paths:
                    rel_path = relpath(dir_path, exclude_path)
                    if not (rel_path.startswith('..')):
                        dirs.remove(d)
                        break

        # Add root to include paths
        resources.inc_dirs.append(root)

        for file in files:
            file_path = join(root, file)

            resources.file_basepath[file_path] = base_path

            if self.is_ignored(file_path):
                continue

            # Classify the file by its (lower-cased) extension
            _, ext = splitext(file)
            ext = ext.lower()

            if ext == '.s':
                resources.s_sources.append(file_path)

            elif ext == '.c':
                resources.c_sources.append(file_path)

            elif ext == '.cpp':
                resources.cpp_sources.append(file_path)

            elif ext == '.h' or ext == '.hpp':
                resources.headers.append(file_path)

            elif ext == '.o':
                resources.objects.append(file_path)

            elif ext == self.LIBRARY_EXT:
                resources.libraries.append(file_path)
                resources.lib_dirs.add(root)

            elif ext == self.LINKER_EXT:
                if resources.linker_script is not None:
                    self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
                resources.linker_script = file_path

            elif ext == '.lib':
                resources.lib_refs.append(file_path)

            elif ext == '.bld':
                resources.lib_builds.append(file_path)

            elif file == '.hgignore':
                resources.repo_files.append(file_path)

            elif ext == '.hex':
                resources.hex_files.append(file_path)

            elif ext == '.bin':
                resources.bin_files.append(file_path)

            elif ext == '.json':
                resources.json_files.append(file_path)

    return resources
def scan_repository(self, path):
    """Collect every file path under *path*, pruning '.'/'..' entries."""
    collected = []

    for root, dirs, files in walk(path):
        # Remove ignored directories in place so walk() skips them
        for d in copy(dirs):
            if d in ('.', '..'):
                dirs.remove(d)

        collected.extend(join(root, name) for name in files)

    return collected
def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
    """Copy *files_paths* into *trg_path*, preserving relative layout.

    files_paths -- a path or list of paths; None entries are skipped
    resources   -- when given, layout is relative to each file's
                   recorded base path in resources.file_basepath
    rel_path    -- alternative base used to compute the relative layout
    Only files whose target is missing or out of date are copied.
    """
    # Handle a single file
    if type(files_paths) != ListType: files_paths = [files_paths]

    # BUG FIX: drop None entries with a filtered copy.  The original
    # removed items while iterating the same list, which skips elements
    # and mutates the caller's list.
    files_paths = [f for f in files_paths if f is not None]

    for source in files_paths:
        if resources is not None:
            relative_path = relpath(source, resources.file_basepath[source])
        elif rel_path is not None:
            relative_path = relpath(source, rel_path)
        else:
            _, relative_path = split(source)

        target = join(trg_path, relative_path)

        if (target != source) and (self.need_update(target, [source])):
            self.progress("copy", relative_path)
            mkdir(dirname(target))
            copyfile(source, target)
def relative_object_path(self, build_path, base_dir, source):
    """Map *source* to its '.o' path under *build_path*, mirroring the
    source layout relative to *base_dir*; creates the directory."""
    src_dir, src_name, _ = split_path(source)
    out_dir = join(build_path, relpath(src_dir, base_dir))
    mkdir(out_dir)
    return join(out_dir, src_name + '.o')
def get_inc_file(self, includes):
    """Write (at most once per include set) and return the response file
    containing all -I directives for this build."""
    include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
    if not exists(include_file):
        # Empty entries are skipped; backslashes are normalized for the tools
        flags = [('-I%s' % c).replace("\\", "/") for c in includes if c]
        with open(include_file, "wb") as f:
            f.write(" ".join(flags))
    return include_file
def compile_sources(self, resources, build_path, inc_dirs=None):
    """Compile all assembly/C/C++ sources in *resources*.

    Returns the list of object files; delegates to compile_queue
    (process pool) or compile_seq depending on job/CPU counts.
    """
    # Web IDE progress bar for project build
    files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
    self.to_be_compiled = len(files_to_compile)
    self.compiled = 0

    # BUG FIX: copy the include list — the original aliased
    # resources.inc_dirs, so extending it below mutated the caller's
    # Resources object.
    inc_paths = list(resources.inc_dirs)
    if inc_dirs is not None:
        inc_paths.extend(inc_dirs)
    # De-duplicate and sort include paths for consistency
    inc_paths = sorted(set(inc_paths))
    # Unique id of all include paths
    self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()

    # Where to store response files
    self.build_dir = build_path

    objects = []
    queue = []
    work_dir = getcwd()

    # Sort compile queue for consistency
    files_to_compile.sort()
    for source in files_to_compile:
        object = self.relative_object_path(build_path, resources.file_basepath[source], source)

        # Queue mode (multiprocessing); None means the object is up to date
        commands = self.compile_command(source, object, inc_paths)
        if commands is not None:
            queue.append({
                'source': source,
                'object': object,
                'commands': commands,
                'work_dir': work_dir,
                'chroot': self.CHROOT
            })
        else:
            objects.append(object)

    # Use queues/multiprocessing if cpu count is higher than setting
    jobs = self.jobs if self.jobs else cpu_count()
    if jobs > CPU_COUNT_MIN and len(queue) > jobs:
        return self.compile_queue(queue, objects)
    else:
        return self.compile_seq(queue, objects)
def compile_seq(self, queue, objects):
    """Compile queued jobs sequentially in this process; returns the
    extended object list."""
    for job in queue:
        outcome = compile_worker(job)

        self.compiled += 1
        self.progress("compile", job['source'], build_update=True)

        for res in outcome['results']:
            self.debug("Command: %s" % ' '.join(res['command']))
            self.compile_output([
                res['code'],
                res['output'],
                res['command']
            ])

        objects.append(outcome['object'])

    return objects
def compile_queue(self, queue, objects):
    """Compile queued jobs in a multiprocessing pool; returns objects.

    Raises ToolException on a compile error or when the build exceeds
    the timeout.
    """
    jobs_count = int(self.jobs if self.jobs else cpu_count())
    p = Pool(processes=jobs_count)

    results = []
    for i in range(len(queue)):
        results.append(p.apply_async(compile_worker, [queue[i]]))

    # BUG FIX: enforce the documented 5-minute limit with a wall-clock
    # deadline; the original counted 180000 iterations of 0.01s sleeps,
    # which actually allowed ~30 minutes.
    deadline = time() + 300
    while True:
        if time() > deadline:
            p.terminate()
            p.join()
            raise ToolException("Compile did not finish in 5 minutes")

        pending = 0
        # Iterate a snapshot: results is mutated (remove) inside the loop
        for r in list(results):
            if r.ready():  # public AsyncResult API instead of private _ready
                try:
                    result = r.get()
                    results.remove(r)

                    self.compiled += 1
                    self.progress("compile", result['source'], build_update=True)
                    for res in result['results']:
                        self.debug("Command: %s" % ' '.join(res['command']))
                        self.compile_output([
                            res['code'],
                            res['output'],
                            res['command']
                        ])

                    objects.append(result['object'])
                except ToolException as err:
                    p.terminate()
                    p.join()
                    raise ToolException(err)
            else:
                pending += 1
                if pending > jobs_count:
                    break

        if len(results) == 0:
            break

        sleep(0.01)

    results = None

    p.terminate()
    p.join()

    return objects
def compile_command(self, source, object, includes):
    """Return the command list needed to (re)build *object* from
    *source*, or None when the object is already up to date."""
    # Check dependencies
    _, ext = splitext(source)
    ext = ext.lower()

    if ext == '.c' or ext == '.cpp':
        base, _ = splitext(object)
        dep_path = base + '.d'
        deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
        if len(deps) == 0 or self.need_update(object, deps):
            if ext == '.c':
                return self.compile_c(source, object, includes)
            else:
                return self.compile_cpp(source, object, includes)
    elif ext == '.s':
        deps = [source]
        if self.need_update(object, deps):
            return self.assemble(source, object, includes)
        # BUG FIX: the original returned False here, which compile_sources
        # treated as a command list ('False is not None') and queued,
        # making compile_worker crash iterating it.  An up-to-date
        # assembly object needs no commands at all.
        return None

    return None
def is_not_supported_error(self, output):
    """Detect the NOT_SUPPORTED compiler marker in *output*."""
    marker = "#error directive: [NOT_SUPPORTED]"
    return marker in output
def compile_output(self, output=None):
    """Process one compile result triple [rc, stderr, command].

    Logs the output, then raises NotSupportedException (for the
    NOT_SUPPORTED marker) or ToolException when rc is non-zero.
    Fixes the mutable default argument ([]) of the original.
    """
    _rc, _stderr, command = output

    # Parse output for Warnings and Errors
    self.parse_output(_stderr)
    self.debug("Return: %s"% _rc)
    for error_line in _stderr.splitlines():
        self.debug("Output: %s"% error_line)

    # Check return code
    if _rc != 0:
        for line in _stderr.splitlines():
            self.tool_error(line)

        if self.is_not_supported_error(_stderr):
            raise NotSupportedException(_stderr)
        else:
            raise ToolException(_stderr)
def build_library(self, objects, dir, name):
    """Archive *objects* into the standard-named library inside *dir*;
    returns True when the archive was (re)built."""
    lib = self.STD_LIB_NAME % name
    fout = join(dir, lib)

    if not self.need_update(fout, objects):
        return False

    self.info("Library: %s" % lib)
    self.archive(objects, fout)
    return True
def link_program(self, r, tmp_path, name):
needed_update = False
ext = 'bin'
if hasattr(self.target, 'OUTPUT_EXT'):
ext = self.target.OUTPUT_EXT
if hasattr(self.target, 'OUTPUT_NAMING'):
self.var("binary_naming", self.target.OUTPUT_NAMING)
if self.target.OUTPUT_NAMING == "8.3":
name = name[0:8]
ext = ext[0:3]
# Create destination directory
head, tail = split(name)
new_path = join(tmp_path, head)
mkdir(new_path)
filename = name+'.'+ext
elf = join(tmp_path, name + '.elf')
bin = join(tmp_path, filename)
map = join(tmp_path, name + '.map')
if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
needed_update = True
2015-11-05 23:21:21 +00:00
self.progress("link", name)
self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
if self.need_update(bin, [elf]):
needed_update = True
2015-11-05 23:21:21 +00:00
self.progress("elf2bin", name)
self.binary(r, elf, bin)
self.mem_stats(map)
self.var("compile_succeded", True)
self.var("binary", filename)
2015-11-05 23:21:21 +00:00
return bin, needed_update
def default_cmd(self, command):
self.debug("Command: %s"% ' '.join(command))
_stdout, _stderr, _rc = run_cmd(command)
self.debug("Return: %s"% _rc)
for output_line in _stdout.splitlines():
self.debug("Output: %s"% output_line)
for error_line in _stderr.splitlines():
self.debug("Errors: %s"% error_line)
2014-08-20 09:59:28 +00:00
if _rc != 0:
for line in _stderr.splitlines():
self.tool_error(line)
raise ToolException(_stderr)
### NOTIFICATIONS ###
def info(self, message):
2015-11-06 20:44:44 +00:00
self.notify({'type': 'info', 'message': message})
def debug(self, message):
if self.VERBOSE:
if type(message) is ListType:
message = ' '.join(message)
message = "[DEBUG] " + message
2015-11-05 23:21:21 +00:00
self.notify({'type': 'debug', 'message': message})
def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
2015-11-06 20:44:44 +00:00
self.notify({'type': 'cc',
'severity': severity,
'file': file,
'line': line,
'message': message,
'target_name': target_name,
'toolchain_name': toolchain_name})
def progress(self, action, file, build_update=False):
msg = {'type': 'progress', 'action': action, 'file': file}
if build_update:
msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
2015-11-06 20:44:44 +00:00
self.notify(msg)
def tool_error(self, message):
2015-11-06 20:44:44 +00:00
self.notify({'type': 'tool_error', 'message': message})
def var(self, key, value):
2015-11-06 20:44:44 +00:00
self.notify({'type': 'var', 'key': key, 'val': value})
def mem_stats(self, map):
# Creates parser object
toolchain = self.__class__.__name__
2016-06-10 14:19:02 +00:00
# Create memap object
memap = MemapParser()
# Parse and decode a map file
if memap.parse(abspath(map), toolchain) is False:
self.info("Unknown toolchain for memory statistics %s" % toolchain)
return
2016-06-10 14:19:02 +00:00
# Write output to stdout in text (pretty table) format
memap.generate_output('table')
# Write output to file in JSON format
map_out = splitext(map)[0] + "_map.json"
memap.generate_output('json', map_out)
# Write output to file in CSV format for the CI
map_csv = splitext(map)[0] + "_map.csv"
memap.generate_output('csv-ci', map_csv)
    # Set the configuration data
    def set_config_data(self, config_data):
        """Store the configuration data later used by get_config_header()
        and get_config_macros()."""
        self.config_data = config_data
# Return the location of the config header. This function will create the config
# header first if needed. The header will be written in a file called "mbed_conf.h"
# located in the project's build directory.
# If config headers are not used (self.config_header_content is None), the function
# returns None
def get_config_header(self):
if self.config_data is None:
return None
config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
if not exists(config_file):
with open(config_file, "wt") as f:
f.write(Config.config_to_header(self.config_data))
return config_file
# Return the list of macros geenrated by the build system
def get_config_macros(self):
return Config.config_to_macros(self.config_data) if self.config_data else []
2016-06-10 14:19:02 +00:00
# NOTE(review): these imports sit at the bottom of the module rather than
# the top — presumably because tools.toolchains.arm/gcc/iar import from this
# module and a top-of-file import would be circular; confirm before moving.
from tools.settings import ARM_BIN
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from tools.settings import IAR_PATH
# Toolchain identifier -> local toolchain binary path (from user settings).
TOOLCHAIN_BIN_PATH = {
    'ARM': ARM_BIN,
    'uARM': ARM_BIN,
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}
from tools.toolchains.arm import ARM_STD, ARM_MICRO
from tools.toolchains.gcc import GCC_ARM, GCC_CR
from tools.toolchains.iar import IAR
# Toolchain identifier -> concrete toolchain class implementing it.
TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}
# The set of all supported toolchain identifiers.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())