"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
|
2014-09-19 10:30:35 +00:00
|
|
|
|
|
|
|
import re
|
2014-12-03 09:24:19 +00:00
|
|
|
import sys
|
2016-07-15 13:37:46 +00:00
|
|
|
from os import stat, walk, getcwd, sep, remove
|
2013-06-24 13:32:08 +00:00
|
|
|
from copy import copy
|
2014-09-19 10:30:35 +00:00
|
|
|
from time import time, sleep
|
2013-06-24 13:32:08 +00:00
|
|
|
from types import ListType
|
2014-09-19 10:30:35 +00:00
|
|
|
from shutil import copyfile
|
2016-06-28 19:55:23 +00:00
|
|
|
from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir
|
2013-06-24 13:32:08 +00:00
|
|
|
from inspect import getmro
|
2016-06-07 19:28:12 +00:00
|
|
|
from copy import deepcopy
|
2016-06-27 12:17:03 +00:00
|
|
|
from tools.config import Config
|
2016-07-05 22:58:43 +00:00
|
|
|
from abc import ABCMeta, abstractmethod
|
2016-09-13 18:38:58 +00:00
|
|
|
from distutils.spawn import find_executable
|
2013-06-24 13:32:08 +00:00
|
|
|
|
2014-09-19 10:30:35 +00:00
|
|
|
from multiprocessing import Pool, cpu_count
|
2016-07-19 10:14:42 +00:00
|
|
|
from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker
|
2016-09-27 18:15:22 +00:00
|
|
|
from tools.settings import MBED_ORG_USER
|
2016-06-09 20:34:53 +00:00
|
|
|
import tools.hooks as hooks
|
2016-06-10 14:19:02 +00:00
|
|
|
from tools.memap import MemapParser
|
2016-06-09 22:50:03 +00:00
|
|
|
from hashlib import md5
|
|
|
|
import fnmatch
|
2014-09-19 10:30:35 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
|
2014-07-09 12:30:41 +00:00
|
|
|
# Disables multiprocessing if set to higher number than the host machine CPUs
CPU_COUNT_MIN = 1
# NOTE(review): presumably a multiplier applied to the detected CPU count when
# sizing the parallel-compile worker pool -- its use is not visible in this
# chunk; confirm against the job-dispatch code.
CPU_COEF = 1
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
class Resources:
    """Collection of build inputs discovered by a resource scan.

    Instances are produced by mbedToolchain.scan_resources() and aggregate
    every file the build cares about (sources, headers, libraries, linker
    script, repo metadata, ...) plus the per-feature sub-resources scanned
    from FEATURE_* directories.
    """

    def __init__(self, base_path=None):
        # Root of the scan that produced this object (or None)
        self.base_path = base_path

        # Maps each discovered file/dir to the base path of its scan; used
        # to recreate relative layouts when copying/exporting
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        # Version-control metadata (.hg/.git dirs and their ignore files)
        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features: maps a feature name (e.g. "IPV4") to the Resources
        # scanned from the corresponding FEATURE_<name> directory
        self.features = {}

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        # Allows sum()-style accumulation that starts from None
        if resources is None:
            return self
        else:
            return self.add(resources)

    def add(self, resources):
        """Merge *resources* into self (in place) and return self."""
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last non-None linker script wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate, recursively through features, the set of source files
        per object basename and the set of header locations per header name.
        """
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # BUG FIX: record the header's full path, not its basename
            # (was `set([headername])`). The old code collapsed every
            # duplicate to one entry, so detect_duplicates() could never
            # report where the clashing headers actually live.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of ambiguities found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"\
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %\
                    (headername, " ".join(locations)))
        return count

    def relative_to(self, base, dot=False):
        """Rewrite every stored path relative to *base*, in place."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        # BUG FIX: relative_to() mutates in place and returns None, so the
        # old dict comprehension ({k: f.relative_to(...)}) replaced every
        # feature value with None. Keep the (mutated) objects instead.
        self.features = {k: f for k, f in self.features.items() if f}
        for f in self.features.values():
            f.relative_to(base, dot)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path to forward-slash form, in place."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        # BUG FIX: same issue as relative_to() -- win_to_unix() returns None,
        # so recurse in place instead of rebuilding the dict from the
        # (None) return values.
        self.features = {k: f for k, f in self.features.items() if f}
        for f in self.features.values():
            f.win_to_unix()

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
            ('Include Directories', self.inc_dirs),
            ('Headers', self.headers),

            ('Assembly sources', self.s_sources),
            ('C sources', self.c_sources),
            ('C++ sources', self.cpp_sources),

            ('Library directories', self.lib_dirs),
            ('Objects', self.objects),
            ('Libraries', self.libraries),

            ('Hex files', self.hex_files),
            ('Bin files', self.bin_files),

            ('Features', self.features),
        ):
            if resources:
                s.append('%s:\n ' % label + '\n '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
|
|
|
|
|
2013-07-23 16:22:57 +00:00
|
|
|
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    # Bare target-name directories from the old layout
    'LPC11U24', 'LPC1768', 'LPC4088', 'LPC812', 'KL25Z',
    # Bare toolchain-name directories from the old layout
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
])
# Maps a toolchain class name to the legacy directory label it corresponds
# to (e.g. sources for 'ARM_MICRO' live under 'uARM' in old trees).
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}
|
|
|
|
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
class mbedToolchain:
    """Abstract base class for all mbed toolchains (ARM, GCC_ARM, IAR, ...).

    Concrete subclasses supply the compiler/assembler/linker specifics;
    this class provides resource scanning, dependency tracking, build
    notifications and the shared build machinery.
    """

    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols defined per Cortex core (see get_symbols()).
    # NOTE(review): "Cortex-M1" reuses the M3 symbols -- kept as upstream
    # defines it; confirm before "fixing".
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
    }

    # Name of the generated configuration header
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Base name of the per-build profile dump file
    PROFILE_FILE_NAME = ".profile"

    # Python 2 idiom for declaring this class abstract
    __metaclass__ = ABCMeta

    # Shape of an empty build profile: one flag list per tool
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
|
|
|
|
|
2017-02-28 20:04:54 +00:00
|
|
|
    def __init__(self, target, notify=None, macros=None, silent=False,
                 extra_verbose=False, build_profile=None, build_dir=None):
        """Construct the toolchain.

        Positional arguments:
        target - the Target object this toolchain builds for

        Keyword arguments:
        notify - callback invoked as notify(event, silent) for every build
                 event; defaults to print_notify / print_notify_verbose
        macros - extra preprocessor symbols to define
        silent - when True, notifications are buffered but not printed
        extra_verbose - select print_notify_verbose as the default notifier
        build_profile - dict of per-tool flag lists (see profile_template)
        build_dir - build output directory (excluded from resource scans)
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep-copied so per-instance mutation never leaks
        # into the shared profile_template or the caller's build_profile)
        self.flags = deepcopy(build_profile or self.profile_template)

        # System libraries provided by the toolchain
        self.sys_libs = []

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        # (lazily computed caches; see get_symbols())
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = build_dir
        self.timestamp = time()

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files
        self.ignore_patterns = []

        # Pre-mbed 2.0 ignore dirs: everything legacy except this target's
        # own name and this toolchain's legacy label
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function.
        # This function is passed all events, and expected to handle
        # notification of the user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter:
        #   def notify(Event, Silent)
        # where *Event* is a dict representing the toolchain event that was
        # generated (e.g. a compile succeeded, a compiler warning, a link)
        # and *Silent* is a boolean.
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()
        self.map_outputs = list()   # Place to store memmap scan results in JSON like data structures

        # uVisor specific rules: drop the FPU suffix from the core name
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Used for post __init__() hooks
|
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
|
|
|
|
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
|
2016-07-19 10:14:42 +00:00
|
|
|
    def init(self):
        """Post-construction hook, called at the end of mbedToolchain.__init__.

        THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM;
        the signature and the True return value must not change.
        """
        return True
|
2016-06-16 13:13:50 +00:00
|
|
|
|
2015-11-05 23:21:21 +00:00
|
|
|
    def get_output(self):
        """Return the accumulated notification output buffer (a str)."""
        return self.output
|
|
|
|
|
|
|
|
    def print_notify(self, event, silent=False):
        """ Default command line notification.

        Formats *event* (a dict with at least a 'type' key) into a one-line
        message, prints it unless *silent*, and always appends it to
        self.output.
        """
        msg = None

        # Tool errors are reported even when verbose logging is off
        if not self.VERBOSE and event['type'] == 'tool_error':
            msg = event['message']

        elif event['type'] in ['info', 'debug']:
            msg = event['message']

        elif event['type'] == 'cc':
            # Compiler diagnostic: "[Severity] file@line,col: message"
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event

        elif event['type'] == 'progress':
            # Build progress; the percentage is optional
            if 'percent' in event:
                msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
                                                  event['percent'],
                                                  basename(event['file']))
            else:
                msg = '{}: {}'.format(event['action'].title(),
                                      basename(event['file']))

        if msg:
            if not silent:
                print msg
            # The buffer accumulates even in silent mode (see get_output())
            self.output += msg + "\n"
|
|
|
|
|
|
|
|
    def print_notify_verbose(self, event, silent=False):
        """ Default command line notification with more verbose mode.

        Like print_notify(), but 'cc' events are prefixed with the target
        and toolchain names.
        """
        if event['type'] in ['info', 'debug']:
            self.print_notify(event, silent=silent) # standard handle

        elif event['type'] == 'cc':
            # Compiler diagnostic with target/toolchain context
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            event['mcu_name'] = "None"
            event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
            event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
            msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
            if not silent:
                print msg
            self.output += msg + "\n"

        elif event['type'] == 'progress':
            # NOTE(review): unlike the branches above, `silent` is not
            # forwarded here, so progress lines always use print_notify's
            # default (silent=False) -- confirm whether that is intended.
            self.print_notify(event) # standard handle
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
|
|
|
|
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
|
2014-12-03 09:24:19 +00:00
|
|
|
def notify(self, event):
|
|
|
|
""" Little closure for notify functions
|
|
|
|
"""
|
2016-07-07 19:57:48 +00:00
|
|
|
event['toolchain'] = self
|
2014-12-03 09:24:19 +00:00
|
|
|
return self.notify_fun(event, self.silent)
|
|
|
|
|
2016-08-04 17:19:08 +00:00
|
|
|
    def get_symbols(self, for_asm=False):
        """Return the unique preprocessor symbols for this build.

        Keyword arguments:
        for_asm - when True, return the (smaller) symbol set passed to the
                  assembler; otherwise the full C/C++ set

        The computed lists are cached in self.asm_symbols / self.cxx_symbols.
        Note the de-duplication via set() makes the returned order
        unspecified.
        """
        if for_asm:
            if self.asm_symbols is None:
                self.asm_symbols = []

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Add target's symbols
                self.asm_symbols += self.target.macros
                # Add extra symbols passed via 'macros' parameter
                self.asm_symbols += self.macros
            return list(set(self.asm_symbols))  # Return only unique symbols
        else:
            if self.cxx_symbols is None:
                # Target and Toolchain symbols
                labels = self.get_labels()
                self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
                self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Symbols defined by the on-line build.system
                self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
                if MBED_ORG_USER:
                    self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

                # Add target's symbols
                self.cxx_symbols += self.target.macros
                # Add target's hardware
                self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
                # Add target's features
                self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
                # Add extra symbols passed via 'macros' parameter
                self.cxx_symbols += self.macros

                # Form factor variables
                if hasattr(self.target, 'supported_form_factors'):
                    self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

            return list(set(self.cxx_symbols))  # Return only unique symbols
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
# Extend the internal list of macros
|
|
|
|
def add_macros(self, new_macros):
|
|
|
|
self.macros.extend(new_macros)
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
    def get_labels(self):
        """Return (and cache) the TARGET / FEATURE / TOOLCHAIN label sets
        used for selective resource scanning and symbol generation.

        The TOOLCHAIN labels are derived from this class's MRO (so e.g. a
        uARM toolchain also carries the ARM label), minus the common
        'mbedToolchain' base.
        """
        if self.labels is None:
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            self.labels = {
                'TARGET': self.target.labels,
                'FEATURE': self.target.features,
                'TOOLCHAIN': toolchain_labels
            }

            # This is a policy decision and it should /really/ be in the config system
            # ATM it's here for backward compatibility: debug-style flag
            # combinations mark the build DEBUG, everything else RELEASE
            if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
                 "-O0" in self.flags['common']) or
                ("-r" in self.flags['common'] and
                 "-On" in self.flags['common'])):
                self.labels['TARGET'].append("DEBUG")
            else:
                self.labels['TARGET'].append("RELEASE")
        return self.labels
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
|
|
|
|
# Determine whether a source file needs updating/compiling
|
2013-06-24 13:32:08 +00:00
|
|
|
def need_update(self, target, dependencies):
|
|
|
|
if self.build_all:
|
|
|
|
return True
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
if not exists(target):
|
|
|
|
return True
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
target_mod_time = stat(target).st_mtime
|
2014-07-09 11:38:18 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
for d in dependencies:
|
|
|
|
# Some objects are not provided with full path and here we do not have
|
|
|
|
# information about the library paths. Safe option: assume an update
|
2014-07-09 11:38:18 +00:00
|
|
|
if not d or not exists(d):
|
2013-06-24 13:32:08 +00:00
|
|
|
return True
|
2016-07-19 10:14:42 +00:00
|
|
|
|
|
|
|
if not self.stat_cache.has_key(d):
|
|
|
|
self.stat_cache[d] = stat(d).st_mtime
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-19 10:14:42 +00:00
|
|
|
if self.stat_cache[d] >= target_mod_time:
|
2014-07-09 11:38:18 +00:00
|
|
|
return True
|
2016-07-19 10:14:42 +00:00
|
|
|
|
2014-07-09 11:38:18 +00:00
|
|
|
return False
|
2014-08-20 09:59:28 +00:00
|
|
|
|
2016-06-09 22:50:03 +00:00
|
|
|
def is_ignored(self, file_path):
|
2017-01-10 20:36:15 +00:00
|
|
|
"""Check if file path is ignored by any .mbedignore thus far"""
|
2016-06-17 22:27:50 +00:00
|
|
|
for pattern in self.ignore_patterns:
|
2016-06-09 22:50:03 +00:00
|
|
|
if fnmatch.fnmatch(file_path, pattern):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2017-01-04 21:51:53 +00:00
|
|
|
def add_ignore_patterns(self, root, base_path, patterns):
|
2017-01-10 20:36:15 +00:00
|
|
|
"""Add a series of patterns to the ignored paths
|
|
|
|
|
|
|
|
Positional arguments:
|
|
|
|
root - the directory containing the ignore file
|
|
|
|
base_path - the location that the scan started from
|
|
|
|
patterns - the list of patterns we will ignore in the future
|
|
|
|
"""
|
2017-01-04 21:51:53 +00:00
|
|
|
real_base = relpath(root, base_path)
|
|
|
|
if real_base == ".":
|
|
|
|
self.ignore_patterns.extend(patterns)
|
|
|
|
else:
|
|
|
|
self.ignore_patterns.extend(join(real_base, pat) for pat in patterns)
|
|
|
|
|
2016-06-29 16:20:34 +00:00
|
|
|
# Create a Resources object from the path pointed to by *path* by either traversing a
|
|
|
|
# a directory structure, when *path* is a directory, or adding *path* to the resources,
|
|
|
|
# when *path* is a file.
|
|
|
|
# The parameter *base_path* is used to set the base_path attribute of the Resources
|
|
|
|
# object and the parameter *exclude_paths* is used by the directory traversal to
|
|
|
|
# exclude certain paths from the traversal.
|
2016-06-15 21:07:49 +00:00
|
|
|
def scan_resources(self, path, exclude_paths=None, base_path=None):
|
2016-07-19 10:14:42 +00:00
|
|
|
self.progress("scan", path)
|
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
resources = Resources(path)
|
2016-06-15 21:07:49 +00:00
|
|
|
if not base_path:
|
2016-06-28 19:55:23 +00:00
|
|
|
if isfile(path):
|
|
|
|
base_path = dirname(path)
|
|
|
|
else:
|
|
|
|
base_path = path
|
2016-06-15 21:07:49 +00:00
|
|
|
resources.base_path = base_path
|
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
if isfile(path):
|
2016-06-29 16:11:54 +00:00
|
|
|
self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
|
2016-06-28 19:55:23 +00:00
|
|
|
else:
|
2016-06-29 16:11:54 +00:00
|
|
|
self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
|
2016-06-28 19:55:23 +00:00
|
|
|
return resources
|
|
|
|
|
2016-06-29 16:20:34 +00:00
|
|
|
# A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
|
|
|
|
# directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
|
|
|
|
# on every file it considers adding to the resources object.
|
2016-06-29 16:11:54 +00:00
|
|
|
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """Walk *path* top-down and add everything found to *resources*.

        Honours .mbedignore files as they are encountered, prunes hidden /
        legacy / non-matching TARGET_ and TOOLCHAIN_ / TESTS directories,
        recursively scans FEATURE_* directories into resources.features,
        and delegates each file to _add_file().

        os.walk note: the walk is top-down, so removing entries from the
        `dirs` list in place prevents recursion into those directories.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.add_ignore_patterns(root, base_path, lines)

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*',
            # and never descend into the build output directory itself
            if (self.is_ignored(join(relpath(root, base_path),"")) or
                self.build_dir == join(relpath(root, base_path))):
                dirs[:] = []
                continue

            # Iterate over a copy since we mutate `dirs` while deciding
            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(relpath(root, base_path), d,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                    dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
                    dirs.remove(d)
                elif exclude_paths:
                    # Prune any directory that lies inside an excluded path
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            dirs.remove(d)
                            break

            # Add root to include paths
            root = root.rstrip("/")
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2016-06-29 16:20:34 +00:00
|
|
|
# A helper function for both scan_resources and _add_dir. _add_file adds one file
|
|
|
|
# (*file_path*) to the resources object based on the file type.
|
2016-06-29 16:11:54 +00:00
|
|
|
def _add_file(self, file_path, resources, base_path, exclude_paths=None):
|
2016-06-28 19:55:23 +00:00
|
|
|
resources.file_basepath[file_path] = base_path
|
2016-06-15 21:07:49 +00:00
|
|
|
|
2017-01-04 21:51:53 +00:00
|
|
|
if self.is_ignored(relpath(file_path, base_path)):
|
2016-06-28 19:55:23 +00:00
|
|
|
return
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
_, ext = splitext(file_path)
|
|
|
|
ext = ext.lower()
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
if ext == '.s':
|
|
|
|
resources.s_sources.append(file_path)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.c':
|
|
|
|
resources.c_sources.append(file_path)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.cpp':
|
|
|
|
resources.cpp_sources.append(file_path)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.h' or ext == '.hpp':
|
|
|
|
resources.headers.append(file_path)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.o':
|
|
|
|
resources.objects.append(file_path)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == self.LIBRARY_EXT:
|
|
|
|
resources.libraries.append(file_path)
|
|
|
|
resources.lib_dirs.add(dirname(file_path))
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == self.LINKER_EXT:
|
|
|
|
if resources.linker_script is not None:
|
|
|
|
self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
|
|
|
|
resources.linker_script = file_path
|
2014-01-14 17:15:31 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.lib':
|
|
|
|
resources.lib_refs.append(file_path)
|
2014-01-14 17:15:31 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.bld':
|
|
|
|
resources.lib_builds.append(file_path)
|
2014-01-14 17:15:31 +00:00
|
|
|
|
2016-09-12 21:19:55 +00:00
|
|
|
elif basename(file_path) == '.hgignore':
|
|
|
|
resources.repo_files.append(file_path)
|
|
|
|
|
|
|
|
elif basename(file_path) == '.gitignore':
|
2016-06-28 19:55:23 +00:00
|
|
|
resources.repo_files.append(file_path)
|
2014-01-14 17:15:31 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.hex':
|
|
|
|
resources.hex_files.append(file_path)
|
2015-11-05 20:42:45 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.bin':
|
|
|
|
resources.bin_files.append(file_path)
|
2014-01-14 17:15:31 +00:00
|
|
|
|
2016-06-28 19:55:23 +00:00
|
|
|
elif ext == '.json':
|
|
|
|
resources.json_files.append(file_path)
|
2016-06-09 22:50:03 +00:00
|
|
|
|
2013-11-14 16:45:14 +00:00
|
|
|
|
|
|
|
def scan_repository(self, path):
    """Return a flat list of every file path found under *path*.

    Directory entries named '.' or '..' are pruned defensively before
    descending (os.walk does not normally yield them).
    """
    collected = []

    for root, dirs, files in walk(path):
        # In-place slice assignment so os.walk honours the pruning.
        dirs[:] = [d for d in dirs if d not in ('.', '..')]

        collected.extend(join(root, name) for name in files)

    return collected
|
|
|
|
|
2016-06-15 23:31:25 +00:00
|
|
|
def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
    """Copy *files_paths* (one path or a list) into *trg_path*.

    The destination sub-path of each file is derived from, in order of
    preference: its recorded base path in *resources*, the explicit
    *rel_path*, or just its basename.  Files are only copied when
    self.need_update says the target is stale.
    """
    # Handle a single file
    if type(files_paths) != ListType:
        files_paths = [files_paths]

    # BUGFIX: the original removed None entries from the list while
    # iterating it, which skips the element following each removal.
    # Filtering into a new list also avoids mutating the caller's list.
    files_paths = [source for source in files_paths if source is not None]

    for source in files_paths:
        # `in` instead of the deprecated dict.has_key()
        if resources is not None and source in resources.file_basepath:
            relative_path = relpath(source, resources.file_basepath[source])
        elif rel_path is not None:
            relative_path = relpath(source, rel_path)
        else:
            _, relative_path = split(source)

        target = join(trg_path, relative_path)

        if (target != source) and (self.need_update(target, [source])):
            self.progress("copy", relative_path)
            mkdir(dirname(target))
            copyfile(source, target)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def relative_object_path(self, build_path, base_dir, source):
    """Map *source* to its object-file path under *build_path*.

    Creates the object directory on first sight; self.prev_dir caches the
    last directory so mkdir is not re-attempted for consecutive sources in
    the same directory.
    """
    source_dir, name, _ = split_path(source)

    obj_dir = join(build_path, relpath(source_dir, base_dir))
    # BUGFIX: was `obj_dir is not self.prev_dir` -- an identity comparison
    # of strings, whose outcome depends on interning. Compare by value.
    if obj_dir != self.prev_dir:
        self.prev_dir = obj_dir
        mkdir(obj_dir)
    return join(obj_dir, name + '.o')
|
2014-08-20 09:59:28 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Generate response file for all includes.
# ARM, GCC, IAR cross compatible
def get_inc_file(self, includes):
    """Write (once) and return a response file listing include directories.

    The file name embeds self.inc_md5, so a given include-path set is only
    written the first time.  Backslashes are normalised to forward slashes
    and the CHROOT prefix is stripped when configured.
    """
    include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
    if not exists(include_file):
        # BUGFIX: was open(..., "wb") with str payload, which fails on
        # Python 3.  The payload is a newline-free command fragment, so
        # text mode is byte-identical on Python 2 as well.
        with open(include_file, "w") as f:
            cmd_list = []
            for c in includes:
                if c:
                    c = c.replace("\\", "/")
                    if self.CHROOT:
                        c = c.replace(self.CHROOT, '')
                    cmd_list.append('-I%s' % c)
            f.write(" ".join(cmd_list))
    return include_file
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Generate response file for all objects when linking.
# ARM, GCC, IAR cross compatible
def get_link_file(self, cmd):
    """Write and return a response file for the link step.

    Each non-flag argument (not starting with '-') is double-quoted;
    backslashes are normalised and the CHROOT prefix stripped when set.
    Rewritten on every call (link inputs change between builds).
    """
    link_file = join(self.build_dir, ".link_files.txt")
    # BUGFIX: was open(..., "wb") with str payload -- breaks on Python 3;
    # the content is a newline-free command fragment, so text mode is safe.
    with open(link_file, "w") as f:
        cmd_list = []
        for c in cmd:
            if c:
                c = c.replace("\\", "/")
                if self.CHROOT:
                    c = c.replace(self.CHROOT, '')
                cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
        f.write(" ".join(cmd_list))
    return link_file
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Generate response file for all objects when archiving.
# ARM, GCC, IAR cross compatible
def get_arch_file(self, objects):
    """Write and return a response file listing the objects to archive.

    Every object path is double-quoted and backslashes are normalised to
    forward slashes.
    """
    archive_file = join(self.build_dir, ".archive_files.txt")
    # BUGFIX: was open(..., "wb") with str payload -- breaks on Python 3;
    # newline-free text, so text mode is byte-identical on Python 2.
    with open(archive_file, "w") as f:
        quoted = ['"%s"' % o for o in objects]
        f.write(" ".join(quoted).replace("\\", "/"))
    return archive_file
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def compile_sources(self, resources, inc_dirs=None):
    """Compile every assembly/C/C++ source in *resources*.

    Builds a job queue of sources whose objects are stale and dispatches
    it either sequentially or through a worker pool, depending on the
    configured job count.  Returns the list of object files.
    """
    # Web IDE progress bar for project build
    files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
    self.to_be_compiled = len(files_to_compile)
    self.compiled = 0

    self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))

    # BUGFIX: the original extended resources.inc_dirs in place, mutating
    # the caller's resources object; work on a copy instead.
    inc_paths = list(resources.inc_dirs)
    if inc_dirs is not None:
        inc_paths.extend(inc_dirs)
    # De-duplicate and sort the include paths in one pass for consistency
    # (the original built the set twice).
    inc_paths = sorted(set(inc_paths))
    # Unique id of all include paths
    self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()

    objects = []
    queue = []
    work_dir = getcwd()
    self.prev_dir = None

    # Generate configuration header (this will update self.build_all if needed)
    self.get_config_header()
    self.dump_build_profile()

    # Sort compile queue for consistency
    files_to_compile.sort()
    for source in files_to_compile:
        # `obj` rather than shadowing the builtin `object`
        obj = self.relative_object_path(
            self.build_dir, resources.file_basepath[source], source)

        # Queue mode (multiprocessing)
        commands = self.compile_command(source, obj, inc_paths)
        if commands is not None:
            queue.append({
                'source': source,
                'object': obj,
                'commands': commands,
                'work_dir': work_dir,
                'chroot': self.CHROOT
            })
        else:
            # Object already up to date
            self.compiled += 1
            objects.append(obj)

    # Use queues/multiprocessing if cpu count is higher than setting
    jobs = self.jobs if self.jobs else cpu_count()
    if jobs > CPU_COUNT_MIN and len(queue) > jobs:
        return self.compile_queue(queue, objects)
    else:
        return self.compile_seq(queue, objects)
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Compile source files queue in sequential order
def compile_seq(self, queue, objects):
    """Run each queued compile job in-process, one after another.

    Appends the produced object files to *objects* and returns it.
    """
    for job in queue:
        outcome = compile_worker(job)

        self.compiled += 1
        self.progress("compile", job['source'], build_update=True)
        for res in outcome['results']:
            self.cc_verbose("Compile: %s" % ' '.join(res['command']), outcome['source'])
            self.compile_output([
                res['code'],
                res['output'],
                res['command']
            ])
        objects.append(outcome['object'])
    return objects
|
2014-07-09 10:28:02 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Compile source files queue in parallel by creating pool of worker threads
def compile_queue(self, queue, objects):
    """Run the queued compile jobs on a multiprocessing worker pool.

    Polls the async results, reporting progress as jobs finish; on a
    ToolException the pool is torn down and the exception re-raised.
    Returns *objects* extended with the produced object files.
    """
    jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
    p = Pool(processes=jobs_count)

    results = []
    for i in range(len(queue)):
        results.append(p.apply_async(compile_worker, [queue[i]]))
    p.close()

    itr = 0
    while len(results):
        itr += 1
        # NOTE(review): 180000 iterations of a 10ms sleep is nominally 30
        # minutes, not the 5 the message claims; the message is kept
        # verbatim for compatibility with log consumers.
        if itr > 180000:
            p.terminate()
            p.join()
            raise ToolException("Compile did not finish in 5 minutes")

        sleep(0.01)
        pending = 0
        # BUGFIX: iterate over a snapshot -- the body removes finished
        # entries from `results`, and removing from the list being
        # iterated skips the element that follows each removal.
        for r in list(results):
            # NOTE: _ready is a private AsyncResult attribute (py2-era);
            # kept as-is to avoid behaviour change.
            if r._ready is True:
                try:
                    result = r.get()
                    results.remove(r)

                    self.compiled += 1
                    self.progress("compile", result['source'], build_update=True)
                    for res in result['results']:
                        self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                        self.compile_output([
                            res['code'],
                            res['output'],
                            res['command']
                        ])
                    objects.append(result['object'])
                # BUGFIX: `except ToolException, err` is Python-2-only
                # syntax; `as` works on 2.6+ and 3.x.
                except ToolException as err:
                    if p._taskqueue.queue:
                        p._taskqueue.queue.clear()
                        sleep(0.5)
                    p.terminate()
                    p.join()
                    raise ToolException(err)
            else:
                pending += 1
                if pending >= jobs_count:
                    break

    results = None
    p.join()

    return objects
|
2014-08-05 15:22:50 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# Determine the compile command based on type of source file
def compile_command(self, source, object, includes):
    """Return the command needed to build *source* into *object*.

    Return value:
    - the compiler/assembler command list when the object is stale,
    - None when the object is up to date,
    - False for unrecognised source extensions.
    """
    # Check dependencies
    _, ext = splitext(source)
    ext = ext.lower()

    if ext == '.c' or ext == '.cpp':
        base, _ = splitext(object)
        dep_path = base + '.d'
        try:
            deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
        # BUGFIX: `except IOError, IndexError:` bound the IndexError class
        # as the exception variable and did NOT catch IndexError at all;
        # the tuple form catches both.
        except (IOError, IndexError):
            deps = []
        config_file = ([self.config.app_config_location]
                       if self.config.app_config_location else [])
        deps.extend(config_file)
        # The build-profile fingerprint participates in staleness checks
        if ext == '.cpp' or self.COMPILE_C_AS_CPP:
            deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx"))
        else:
            deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c"))
        if len(deps) == 0 or self.need_update(object, deps):
            if ext == '.cpp' or self.COMPILE_C_AS_CPP:
                return self.compile_cpp(source, object, includes)
            else:
                return self.compile_c(source, object, includes)
    elif ext == '.s':
        deps = [source]
        deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm"))
        if self.need_update(object, deps):
            return self.assemble(source, object, includes)
    else:
        return False

    return None
|
2014-08-20 09:59:28 +00:00
|
|
|
|
2016-07-05 22:58:43 +00:00
|
|
|
@abstractmethod
def parse_dependencies(self, dep_path):
    """Parse the dependency information generated by the compiler.

    Positional arguments:
    dep_path -- the path to a file generated by a previous run of the compiler

    Return value:
    A list of all source files that the dependency file indicated were dependencies

    Side effects:
    None
    """
    # BUGFIX: `raise NotImplemented` raises the NotImplemented singleton,
    # which is not an exception (Python turns it into a TypeError).
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
2016-02-25 22:29:26 +00:00
|
|
|
def is_not_supported_error(self, output):
    """Return True when the compiler output marks the build NOT_SUPPORTED."""
    marker = "#error directive: [NOT_SUPPORTED]"
    return marker in output
|
|
|
|
|
2016-07-05 22:58:43 +00:00
|
|
|
@abstractmethod
def parse_output(self, output):
    """Take in compiler output and extract single line warnings and errors from it.

    Positional arguments:
    output -- a string of all the messages emitted by a run of the compiler

    Return value:
    None

    Side effects:
    call self.cc_info or self.notify with a description of the event generated by the compiler
    """
    # BUGFIX: `raise NotImplemented` raises a non-exception singleton;
    # NotImplementedError is the correct abstract-stub exception.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
2014-08-07 16:01:39 +00:00
|
|
|
def compile_output(self, output=[]):
    """Report one compile job's outcome and raise on a non-zero return code.

    output -- [return_code, stderr_bytes, command] triple from a worker.
    Raises NotSupportedException when stderr carries the NOT_SUPPORTED
    marker, ToolException for any other failure.
    """
    return_code = output[0]
    stderr_text = output[1].decode("utf-8")
    command = output[2]

    # Surface warnings/errors through the notification machinery first.
    self.parse_output(stderr_text)
    self.debug("Return: %s"% return_code)
    for error_line in stderr_text.splitlines():
        self.debug("Output: %s"% error_line)

    # A non-zero return code is always fatal for the build.
    if return_code != 0:
        if self.is_not_supported_error(stderr_text):
            raise NotSupportedException(stderr_text)
        else:
            raise ToolException(stderr_text)
|
2014-07-09 11:38:18 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def build_library(self, objects, dir, name):
    """Archive *objects* into the standard-named library under *dir*.

    Returns True when the archive was actually (re)built, False when it
    was already up to date.
    """
    lib = self.STD_LIB_NAME % name
    fout = join(dir, lib)

    # Nothing stale: report no update without touching the archive.
    if not self.need_update(fout, objects):
        return False

    self.info("Library: %s" % lib)
    self.archive(objects, fout)
    return True
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-07-24 16:29:11 +00:00
|
|
|
def link_program(self, r, tmp_path, name):
    """Link the resources in *r* into an ELF and convert it to the
    target's output format under *tmp_path*.

    Returns (binary_path_or_None, needed_update); the binary path is None
    when the target's output format is the ELF itself.
    """
    needed_update = False
    ext = 'bin'
    if hasattr(self.target, 'OUTPUT_EXT'):
        ext = self.target.OUTPUT_EXT

    if hasattr(self.target, 'OUTPUT_NAMING'):
        self.var("binary_naming", self.target.OUTPUT_NAMING)
        # Truncate to DOS 8.3 names when the target demands it
        if self.target.OUTPUT_NAMING == "8.3":
            name = name[0:8]
            ext = ext[0:3]

    # Create destination directory
    head, tail = split(name)
    new_path = join(tmp_path, head)
    mkdir(new_path)

    filename = name+'.'+ext
    elf = join(tmp_path, name + '.elf')
    # BUGFIX: was `ext is 'elf'` -- identity comparison against a string
    # literal, which only holds by CPython interning accident.
    bin = None if ext == 'elf' else join(tmp_path, filename)
    map = join(tmp_path, name + '.map')

    r.objects = sorted(set(r.objects))
    config_file = ([self.config.app_config_location]
                   if self.config.app_config_location else [])
    dependencies = r.objects + r.libraries + [r.linker_script] + config_file
    # Linker build-profile fingerprint participates in staleness checks
    dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
    if self.need_update(elf, dependencies):
        needed_update = True
        self.progress("link", name)
        self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

    if bin and self.need_update(bin, [elf]):
        needed_update = True
        self.progress("elf2bin", name)
        self.binary(r, elf, bin)

    # Memory usage summary from the linker map
    self.map_outputs = self.mem_stats(map)

    self.var("compile_succeded", True)
    self.var("binary", filename)

    return bin, needed_update
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def default_cmd(self, command):
    """Run *command*, stream its output through debug, raise on failure."""
    _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
    self.debug("Return: %s"% _rc)

    # Echo both streams line by line at debug level.
    for label, text in (("Output", _stdout), ("Errors", _stderr)):
        for line in text.splitlines():
            self.debug("%s: %s" % (label, line))

    # Non-zero exit: surface stderr as tool errors, then abort the build.
    if _rc != 0:
        for line in _stderr.splitlines():
            self.tool_error(line)
        raise ToolException(_stderr)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
### NOTIFICATIONS ###
def info(self, message):
    """Emit an informational notification event."""
    event = {'type': 'info', 'message': message}
    self.notify(event)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def debug(self, message):
    """Emit a debug notification when VERBOSE is enabled.

    message -- a string, or a list of tokens joined with spaces.
    """
    if not self.VERBOSE:
        return
    # ListType from the types module is simply `list`
    if type(message) is list:
        message = ' '.join(message)
    self.notify({'type': 'debug', 'message': "[DEBUG] " + message})
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def cc_info(self, info=None):
    """Forward a compiler diagnostic event, tagging it as type 'cc'."""
    if info is None:
        return
    info['type'] = 'cc'
    self.notify(info)
|
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def cc_verbose(self, message, file=""):
    """Verbose compiler chatter; routed to debug (the *file* argument is
    accepted for interface compatibility and unused here)."""
    self.debug(message)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def progress(self, action, file, build_update=False):
    """Emit a progress notification; attach a completion percentage when
    *build_update* is set (uses self.compiled / self.to_be_compiled)."""
    payload = {'type': 'progress', 'action': action, 'file': file}
    if build_update:
        payload['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
    self.notify(payload)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def tool_error(self, message):
    """Emit a tool_error notification."""
    event = {'type': 'tool_error', 'message': message}
    self.notify(event)
|
2014-06-02 14:44:45 +00:00
|
|
|
|
2013-06-24 13:32:08 +00:00
|
|
|
def var(self, key, value):
    """Emit a key/value variable notification."""
    event = {'type': 'var', 'key': key, 'val': value}
    self.notify(event)
|
2013-06-24 13:32:08 +00:00
|
|
|
|
2016-07-19 10:16:05 +00:00
|
|
|
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def mem_stats(self, map):
    """! Creates parser object
    @param map Path to linker map file to parse and decode
    @return Memory summary structure with memory usage statistics
            None if map file can't be opened and processed
    """
    # The concrete toolchain's class name selects the map-file dialect
    toolchain = self.__class__.__name__

    # Create memap object
    memap = MemapParser()

    # Parse and decode a map file
    if memap.parse(abspath(map), toolchain) is False:
        self.info("Unknown toolchain for memory statistics %s" % toolchain)
        return None

    # Store the memap instance for later use
    self.memap_instance = memap

    # Here we return memory statistics structure (constructed after
    # call to generate_output) which contains raw data in bytes
    # about sections + summary
    return memap.mem_report
|
2016-06-28 15:34:28 +00:00
|
|
|
|
2016-06-27 12:17:03 +00:00
|
|
|
# Set the configuration data
def set_config_data(self, config_data):
    # Stored for later use by get_config_header(), which turns this data
    # into mbed_config.h.
    self.config_data = config_data
|
2016-06-16 13:13:50 +00:00
|
|
|
|
2016-07-15 13:37:46 +00:00
|
|
|
# Creates the configuration header if needed:
# - if there is no configuration data, "mbed_config.h" is not created (or deleted if it exists).
# - if there is configuration data and "mbed_config.h" does not exist, it is created.
# - if there is configuration data similar to the previous configuration data,
#   "mbed_config.h" is left untouched.
# - if there is new configuration data, "mbed_config.h" is overriden.
# The function needs to be called exactly once for the lifetime of this toolchain instance.
# The "config_processed" variable (below) ensures this behaviour.
# The function returns the location of the configuration file, or None if there is no
# configuration data available (and thus no configuration file)
def get_config_header(self):
    """Synchronise mbed_config.h in the build dir with self.config_data.

    Sets self.build_all when the configuration changed; memoised via
    self.config_processed so subsequent calls only return the path.
    """
    # Memoised: later calls must not touch the file again.
    if self.config_processed:
        return self.config_file

    # The config file lives in the build directory.
    self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)

    # Previous header content, if any.
    if exists(self.config_file):
        with open(self.config_file, "rt") as handle:
            previous = handle.read()
    else:
        previous = None

    # Freshly rendered header content, if there is configuration data.
    current = Config.config_to_header(self.config_data) if self.config_data else None

    changed = False
    if previous is not None:
        if current is None:
            # Data disappeared: drop the stale header.
            remove(self.config_file)
            self.config_file = None  # "config file not present"
            changed = True
        elif current != previous:
            with open(self.config_file, "wt") as handle:
                handle.write(current)
            changed = True
    elif current is not None:
        # No previous header but data exists: create it.
        with open(self.config_file, "wt") as handle:
            handle.write(current)
        changed = True
    else:
        self.config_file = None  # "config file not present"

    # Any configuration change forces a full rebuild.
    self.build_all = changed
    self.config_processed = True
    return self.config_file
|
2016-06-16 13:13:50 +00:00
|
|
|
|
2017-05-16 20:31:10 +00:00
|
|
|
def dump_build_profile(self):
    """Dump the current build profile and macros into the per-tool
    `.profile-*` fingerprint files in the build directory.

    The fingerprint is str(flags) + str(sorted macros), with the common
    flags appended for the two C-family tools; files are only rewritten
    when the content differs, so they double as dependency timestamps.
    """
    for tool in ["cxx", "c", "asm", "ld"]:
        payload = (str(self.flags[tool]) + str(sorted(self.macros)))
        if tool in ["cxx", "c"]:
            payload += str(self.flags['common'])
        target_file = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + tool)
        self._overwrite_when_not_equal(target_file, payload)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _overwrite_when_not_equal(filename, content):
|
|
|
|
if not exists(filename) or content != open(filename).read():
|
|
|
|
with open(filename, "wb") as out:
|
|
|
|
out.write(content)
|
2017-05-16 20:31:10 +00:00
|
|
|
|
2016-09-13 18:38:58 +00:00
|
|
|
@staticmethod
def generic_check_executable(tool_key, executable_name, levels_up,
                             nested_dir=None):
    """
    Positional args:
    tool_key: the key to index TOOLCHAIN_PATHS
    executable_name: the toolchain's named executable (ex. armcc)
    levels_up: each toolchain joins the toolchain_path, some
        variable directories (bin, include), and the executable name,
        so the TOOLCHAIN_PATH value must be appropriately distanced

    Keyword args:
    nested_dir: the directory within TOOLCHAIN_PATHS where the executable
        is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
        that will be used by toolchain's compile)

    Returns True if the executable location specified by the user
    exists and is valid OR the executable can be found on the PATH.
    Returns False otherwise.
    """
    # Fall back to searching PATH when no explicit location is configured
    # or the configured location does not exist.
    configured = TOOLCHAIN_PATHS[tool_key]
    if not configured or not exists(configured):
        exe = find_executable(executable_name)
        if not exe:
            return False
        # Walk up the requested number of directories to the tool root.
        for _ in range(levels_up):
            exe = dirname(exe)
        TOOLCHAIN_PATHS[tool_key] = exe

    if nested_dir:
        subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
                      executable_name)
    else:
        subdir = join(TOOLCHAIN_PATHS[tool_key], executable_name)
    # User could have specified a path that exists but does not contain exe
    return exists(subdir) or exists(subdir + '.exe')
|
|
|
|
|
2016-09-13 21:26:58 +00:00
|
|
|
@abstractmethod
def check_executable(self):
    """Returns True if the executable (armcc) location specified by the
    user exists OR the executable can be found on the PATH.
    Returns False otherwise."""
    # BUGFIX: `raise NotImplemented` raises a non-exception singleton;
    # NotImplementedError is the correct abstract-stub exception.
    raise NotImplementedError
|
|
|
|
|
2016-07-05 22:58:43 +00:00
|
|
|
@abstractmethod
def get_config_option(self, config_header):
    """Generate the compiler option that forces the inclusion of the configuration
    header file.

    Positional arguments:
    config_header -- The configuration header that will be included within all source files

    Return value:
    A list of the command line arguments that will force the inclusion the specified header

    Side effects:
    None
    """
    # BUGFIX: `raise NotImplemented` raises a non-exception singleton;
    # NotImplementedError is the correct abstract-stub exception.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
|
|
|
@abstractmethod
def assemble(self, source, object, includes):
    """Generate the command line that assembles.

    Positional arguments:
    source -- a file path that is the file to assemble
    object -- a file path that is the destination object
    includes -- a list of all directories where header files may be found

    Return value:
    The complete command line, as a list, that would invoke the assembler
    on the source file, include all the include paths, and generate
    the specified object file.

    Side effects:
    None

    Note:
    This method should be decorated with @hook_tool.
    """
    # BUGFIX: `raise NotImplemented` raises a non-exception singleton;
    # NotImplementedError is the correct abstract-stub exception.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
|
|
|
@abstractmethod
def compile_c(self, source, object, includes):
    """Generate the command line that compiles a C source file.

    Positional arguments:
    source -- the C source file to compile
    object -- the destination object file
    includes -- a list of all the directories where header files may be found

    Return value:
    The complete command line, as a list, that would invoke the C compiler
    on the source file, include all the include paths, and generate the
    specified object file.

    Side effects:
    None

    Note:
    This method should be decorated with @hook_tool.
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
|
|
|
@abstractmethod
def compile_cpp(self, source, object, includes):
    """Generate the command line that compiles a C++ source file.

    Positional arguments:
    source -- the C++ source file to compile
    object -- the destination object file
    includes -- a list of all the directories where header files may be found

    Return value:
    The complete command line, as a list, that would invoke the C++ compiler
    on the source file, include all the include paths, and generate the
    specified object file.

    Side effects:
    None

    Note:
    This method should be decorated with @hook_tool.
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
|
|
|
@abstractmethod
def link(self, output, objects, libraries, lib_dirs, mem_map):
    """Run the linker to create an executable and memory map.

    Positional arguments:
    output -- the file name to place the executable in
    objects -- all of the object files to link
    libraries -- all of the required libraries
    lib_dirs -- where the required libraries are located
    mem_map -- the location where the memory map file should be stored

    Return value:
    None

    Side effect:
    Runs the linker to produce the executable.

    Note:
    This method should be decorated with @hook_tool.
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
|
|
|
@abstractmethod
def archive(self, objects, lib_path):
    """Run the command line that creates an archive.

    Positional arguments:
    objects -- a list of all the object files that should be archived
    lib_path -- the file name of the resulting library file

    Return value:
    None

    Side effect:
    Runs the archiving tool to produce the library file.

    Note:
    This method should be decorated with @hook_tool.
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
|
|
|
@abstractmethod
def binary(self, resources, elf, bin):
    """Run the command line that will extract a simplified binary file.

    Positional arguments:
    resources -- A resources object (Is not used in any of the toolchains)
    elf -- the executable file that is to be converted
    bin -- the file name of the to-be-created simplified binary file

    Return value:
    None

    Side effect:
    Runs the elf2bin tool to produce the simplified binary file.

    Note:
    This method should be decorated with @hook_tool.
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
2016-07-05 22:58:43 +00:00
|
|
|
|
2017-02-01 22:22:06 +00:00
|
|
|
@staticmethod
@abstractmethod
def name_mangle(name):
    """Mangle a name based on the conventional name mangling of this toolchain.

    Positional arguments:
    name -- the name to mangle

    Return:
    the mangled name as a string
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
|
|
|
|
|
|
|
@staticmethod
@abstractmethod
def make_ld_define(name, value):
    """Create an argument to the linker that would define a symbol.

    Positional arguments:
    name -- the symbol to define
    value -- the value to give the symbol

    Return:
    The linker flag as a string
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
|
|
|
|
|
|
|
@staticmethod
@abstractmethod
def redirect_symbol(source, sync, build_dir):
    """Redirect a symbol at link time to point at somewhere else.

    Positional arguments:
    source -- the symbol doing the pointing
    sync -- the symbol being pointed to
    build_dir -- the directory to put "response files" if needed by the toolchain

    Side Effects:
    Possibly create a file in the build directory

    Return:
    The linker flag to redirect the symbol, as a string
    """
    # Bug fix: raise NotImplementedError (an exception class), not the
    # NotImplemented sentinel value, which is not raisable.
    raise NotImplementedError
|
|
|
|
|
2016-06-27 12:17:03 +00:00
|
|
|
def get_config_macros(self):
    """Return the list of macros generated by the build system.

    An empty list is returned when no configuration data is attached
    to this toolchain instance.
    """
    if self.config_data:
        return Config.config_to_macros(self.config_data)
    return []
|
2016-06-10 14:19:02 +00:00
|
|
|
|
2017-04-04 16:35:00 +00:00
|
|
|
@property
def report(self):
    """Describe this toolchain's configuration for build reporting.

    Returns a dict with one entry per tool ('c_compiler',
    'cxx_compiler', 'assembler', 'linker') holding a copy of that
    tool's flags (and, for compilers and assembler, the preprocessor
    symbols), merged with the configuration's own report entries.
    """
    # get_symbols() is called separately per tool so each entry holds
    # its own result object, exactly as before.
    summary = {
        'c_compiler': {'flags': copy(self.flags['c']),
                       'symbols': self.get_symbols()},
        'cxx_compiler': {'flags': copy(self.flags['cxx']),
                         'symbols': self.get_symbols()},
        'assembler': {'flags': copy(self.flags['asm']),
                      'symbols': self.get_symbols(True)},
        'linker': {'flags': copy(self.flags['ld'])},
    }
    summary.update(self.config.report)
    return summary
|
|
|
|
|
2016-07-19 10:14:42 +00:00
|
|
|
from tools.settings import ARM_PATH
|
2017-02-16 19:50:32 +00:00
|
|
|
from tools.settings import GCC_ARM_PATH
|
2016-06-09 20:34:53 +00:00
|
|
|
from tools.settings import IAR_PATH
|
2014-09-05 12:26:51 +00:00
|
|
|
|
2016-07-19 10:14:42 +00:00
|
|
|
# Map each toolchain name to its user-configurable installation path
# (taken from tools.settings; may be overridden at runtime).
TOOLCHAIN_PATHS = dict(
    ARM=ARM_PATH,
    uARM=ARM_PATH,
    GCC_ARM=GCC_ARM_PATH,
    IAR=IAR_PATH,
)
|
|
|
|
|
2016-06-09 20:34:53 +00:00
|
|
|
from tools.toolchains.arm import ARM_STD, ARM_MICRO
|
2017-02-16 19:50:32 +00:00
|
|
|
from tools.toolchains.gcc import GCC_ARM
|
2016-06-09 20:34:53 +00:00
|
|
|
from tools.toolchains.iar import IAR
|
2013-06-24 13:32:08 +00:00
|
|
|
|
|
|
|
# Map each supported toolchain name to the class implementing it.
TOOLCHAIN_CLASSES = dict(
    ARM=ARM_STD,
    uARM=ARM_MICRO,
    GCC_ARM=GCC_ARM,
    IAR=IAR,
)

# The set of all supported toolchain names (iterating a dict yields its
# keys, so set(d) is equivalent to set(d.keys())).
TOOLCHAINS = set(TOOLCHAIN_CLASSES)
|