mirror of https://github.com/ARMmbed/mbed-os.git
Add config system, memap, updates to build_api and toolchains
parent 773dab514e
commit c2e3001739

tools/build_api.py:

@@ -19,20 +19,22 @@ import re
 import tempfile
 import colorama

 from copy import copy
 from types import ListType
 from shutil import rmtree
-from os.path import join, exists, basename
+from os.path import join, exists, basename, abspath, normpath, relpath
 from os import getcwd, walk
 from time import time
+import fnmatch

-from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException
+from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException
 from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
 from tools.targets import TARGET_NAMES, TARGET_MAP
 from tools.libraries import Library
 from tools.toolchains import TOOLCHAIN_CLASSES
 from jinja2 import FileSystemLoader
 from jinja2.environment import Environment

+from tools.config import Config

 def prep_report(report, target_name, toolchain_name, id_name):
     # Setup report keys
@@ -75,37 +77,90 @@ def add_result_to_report(report, result):
     result_wrap = { 0: result }
     report[target][toolchain][id_name].append(result_wrap)

+def get_config(src_path, target, toolchain_name):
+    # Convert src_path to a list if needed
+    src_paths = [src_path] if type(src_path) != ListType else src_path
+    # We need to remove all paths which are repeated to avoid
+    # multiple compilations and linking with the same objects
+    src_paths = [src_paths[0]] + list(set(src_paths[1:]))
+
+    # Create configuration object
+    config = Config(target, src_paths)
+
+    # If the 'target' argument is a string, convert it to a target instance
+    if isinstance(target, str):
+        try:
+            target = TARGET_MAP[target]
+        except KeyError:
+            raise KeyError("Target '%s' not found" % target)
+
+    # Toolchain instance
+    try:
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options=None, notify=None, macros=None, silent=True, extra_verbose=False)
+    except KeyError as e:
+        raise KeyError("Toolchain %s not supported" % toolchain_name)
+
+    # Scan src_path for config files
+    resources = toolchain.scan_resources(src_paths[0])
+    for path in src_paths[1:]:
+        resources.add(toolchain.scan_resources(path))
+
+    config.add_config_files(resources.json_files)
+    return config.get_config_data()
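
A minimal sketch of how a caller might use get_config; the source directory, target
name and toolchain name below are made-up values, and the call returns the
(parameters, macros) pair produced by Config.get_config_data():

    params, macros = get_config("source", "K64F", "GCC_ARM")
    for param in params.values():
        print param   # e.g. app.greeting = Hello (macro name: "MBED_CONF_APP_GREETING")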

 def build_project(src_path, build_path, target, toolchain_name,
         libraries_paths=None, options=None, linker_script=None,
         clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
-        jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None, extra_verbose=False):
+        jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None,
+        extra_verbose=False, config=None):
     """ This function builds a project. The project can be, for example, one test / UT
     """
-    # Toolchain instance
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-    toolchain.build_all = clean
-
     # Convert src_path to a list if needed
     src_paths = [src_path] if type(src_path) != ListType else src_path

     # We need to remove all paths which are repeated to avoid
     # multiple compilations and linking with the same objects
     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
-    PROJECT_BASENAME = basename(src_paths[0])
+    first_src_path = src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd()
+    abs_path = abspath(first_src_path)
+    project_name = basename(normpath(abs_path))
+
+    # If the configuration object was not yet created, create it now
+    config = config or Config(target, src_paths)
+
+    # If the 'target' argument is a string, convert it to a target instance
+    if isinstance(target, str):
+        try:
+            target = TARGET_MAP[target]
+        except KeyError:
+            raise KeyError("Target '%s' not found" % target)
+
+    # Toolchain instance
+    try:
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
+    except KeyError as e:
+        raise KeyError("Toolchain %s not supported" % toolchain_name)
+
+    toolchain.VERBOSE = verbose
+    toolchain.jobs = jobs
+    toolchain.build_all = clean

     if name is None:
         # We will use a default project name based on the project folder name
-        name = PROJECT_BASENAME
-        toolchain.info("Building project %s (%s, %s)" % (PROJECT_BASENAME.upper(), target.name, toolchain_name))
+        name = project_name
+        toolchain.info("Building project %s (%s, %s)" % (project_name, target.name, toolchain_name))
     else:
         # User used a custom global project name to have the same name for the project
-        toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name))
+        toolchain.info("Building project %s to %s (%s, %s)" % (project_name, name, target.name, toolchain_name))

     if report != None:
         start = time()
-        id_name = project_id.upper()
-        description = project_description
+
+        # If project_id is specified, use that over the default name
+        id_name = project_id.upper() if project_id else name.upper()
+        description = project_description if project_description else name
         vendor_label = target.extra_labels[0]
         cur_result = None
         prep_report(report, target.name, toolchain_name, id_name)

@@ -139,13 +194,18 @@ def build_project(src_path, build_path, target, toolchain_name,
             resources.inc_dirs.extend(inc_dirs)
         else:
             resources.inc_dirs.append(inc_dirs)

+    # Update the configuration with any .json files found while scanning
+    config.add_config_files(resources.json_files)
+    # And add the configuration macros to the toolchain
+    toolchain.add_macros(config.get_config_data_macros())
+
     # Compile Sources
     for path in src_paths:
         src = toolchain.scan_resources(path)
         objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
         resources.objects.extend(objects)

     # Link Program
     res, needed_update = toolchain.link_program(resources, build_path, name)
@@ -181,11 +241,11 @@ def build_project(src_path, build_path, target, toolchain_name,
             # Let Exception propagate
             raise e

 def build_library(src_paths, build_path, target, toolchain_name,
-        dependencies_paths=None, options=None, name=None, clean=False,
+        dependencies_paths=None, options=None, name=None, clean=False, archive=True,
         notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None,
-        jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+        jobs=1, silent=False, report=None, properties=None, extra_verbose=False,
+        project_id=None):
     """ src_path: the path of the source directory
     build_path: the path of the build directory
     target: ['LPC1768', 'LPC11U24', 'LPC2368']

@@ -201,11 +261,16 @@ def build_library(src_paths, build_path, target, toolchain_name,
         src_paths = [src_paths]

     # The first path will give the name to the library
-    name = basename(src_paths[0])
+    project_name = basename(src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd())
+    if name is None:
+        # We will use a default project name based on the project folder name
+        name = project_name

     if report != None:
         start = time()
-        id_name = name.upper()
+
+        # If project_id is specified, use that over the default name
+        id_name = project_id.upper() if project_id else name.upper()
         description = name
         vendor_label = target.extra_labels[0]
         cur_result = None
@ -233,47 +298,71 @@ def build_library(src_paths, build_path, target, toolchain_name,
|
|||
toolchain.jobs = jobs
|
||||
toolchain.build_all = clean
|
||||
|
||||
toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
|
||||
toolchain.info("Building library %s (%s, %s)" % (name, target.name, toolchain_name))
|
||||
|
||||
# Scan Resources
|
||||
resources = []
|
||||
for src_path in src_paths:
|
||||
resources.append(toolchain.scan_resources(src_path))
|
||||
resources = None
|
||||
for path in src_paths:
|
||||
# Scan resources
|
||||
resource = toolchain.scan_resources(path)
|
||||
|
||||
# Copy headers, objects and static libraries - all files needed for static lib
|
||||
toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
|
||||
toolchain.copy_files(resource.objects, build_path, rel_path=resource.base_path)
|
||||
toolchain.copy_files(resource.libraries, build_path, rel_path=resource.base_path)
|
||||
if resource.linker_script:
|
||||
toolchain.copy_files(resource.linker_script, build_path, rel_path=resource.base_path)
|
||||
|
||||
# Extend resources collection
|
||||
if not resources:
|
||||
resources = resource
|
||||
else:
|
||||
resources.add(resource)
|
||||
|
||||
# We need to add if necessary additional include directories
|
||||
if inc_dirs:
|
||||
if type(inc_dirs) == ListType:
|
||||
resources.inc_dirs.extend(inc_dirs)
|
||||
else:
|
||||
resources.inc_dirs.append(inc_dirs)
|
||||
|
||||
# Add extra include directories / files which are required by library
|
||||
# This files usually are not in the same directory as source files so
|
||||
# previous scan will not include them
|
||||
if inc_dirs_ext is not None:
|
||||
for inc_ext in inc_dirs_ext:
|
||||
resources.append(toolchain.scan_resources(inc_ext))
|
||||
resources.add(toolchain.scan_resources(inc_ext))
|
||||
|
||||
# Dependencies Include Paths
|
||||
dependencies_include_dir = []
|
||||
if dependencies_paths is not None:
|
||||
for path in dependencies_paths:
|
||||
lib_resources = toolchain.scan_resources(path)
|
||||
dependencies_include_dir.extend(lib_resources.inc_dirs)
|
||||
resources.inc_dirs.extend(lib_resources.inc_dirs)
|
||||
|
||||
if inc_dirs:
|
||||
dependencies_include_dir.extend(inc_dirs)
|
||||
if archive:
|
||||
# Use temp path when building archive
|
||||
tmp_path = join(build_path, '.temp')
|
||||
mkdir(tmp_path)
|
||||
else:
|
||||
tmp_path = build_path
|
||||
|
||||
# Create the desired build directory structure
|
||||
bin_path = join(build_path, toolchain.obj_path)
|
||||
mkdir(bin_path)
|
||||
tmp_path = join(build_path, '.temp', toolchain.obj_path)
|
||||
mkdir(tmp_path)
|
||||
|
||||
# Copy Headers
|
||||
for resource in resources:
|
||||
toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
|
||||
dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
|
||||
# Handle configuration
|
||||
config = Config(target)
|
||||
# Update the configuration with any .json files found while scanning
|
||||
config.add_config_files(resources.json_files)
|
||||
# And add the configuration macros to the toolchain
|
||||
toolchain.add_macros(config.get_config_data_macros())
|
||||
|
||||
# Compile Sources
|
||||
objects = []
|
||||
for resource in resources:
|
||||
objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
|
||||
for path in src_paths:
|
||||
src = toolchain.scan_resources(path)
|
||||
objects = toolchain.compile_sources(src, abspath(tmp_path), resources.inc_dirs)
|
||||
resources.objects.extend(objects)
|
||||
|
||||
needed_update = toolchain.build_library(objects, bin_path, name)
|
||||
if archive:
|
||||
needed_update = toolchain.build_library(resources.objects, build_path, name)
|
||||
else:
|
||||
needed_update = True
|
||||
|
||||
if report != None and needed_update:
|
||||
end = time()
|
||||
|

@@ -286,7 +375,12 @@ def build_library(src_paths, build_path, target, toolchain_name,
     except Exception, e:
         if report != None:
             end = time()
-            cur_result["result"] = "FAIL"
+
+            if isinstance(e, ToolException):
+                cur_result["result"] = "FAIL"
+            elif isinstance(e, NotSupportedException):
+                cur_result["result"] = "NOT_SUPPORTED"
+
             cur_result["elapsed_time"] = end - start

             toolchain_output = toolchain.get_output()

@@ -734,3 +828,63 @@ def write_build_report(build_report, template_filename, filename):

     with open(filename, 'w+') as f:
         f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))

+def scan_for_source_paths(path, exclude_paths=None):
+    ignorepatterns = []
+    paths = []
+
+    def is_ignored(file_path):
+        for pattern in ignorepatterns:
+            if fnmatch.fnmatch(file_path, pattern):
+                return True
+        return False
+
+    """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
+    When topdown is True, the caller can modify the dirnames list in-place
+    (perhaps using del or slice assignment), and walk() will only recurse into
+    the subdirectories whose names remain in dirnames; this can be used to prune
+    the search, impose a specific order of visiting, or even to inform walk()
+    about directories the caller creates or renames before it resumes walk()
+    again. Modifying dirnames when topdown is False is ineffective, because in
+    bottom-up mode the directories in dirnames are generated before dirpath
+    itself is generated.
+    """
+    for root, dirs, files in walk(path, followlinks=True):
+        # Remove ignored directories.
+        # Check if the folder contains a .mbedignore file
+        if ".mbedignore" in files:
+            with open(join(root, ".mbedignore"), "r") as f:
+                lines = f.readlines()
+            lines = [l.strip() for l in lines]                   # Strip whitespace
+            lines = [l for l in lines if l != ""]                # Strip empty lines
+            lines = [l for l in lines if not re.match("^#", l)]  # Strip comment lines
+            # Append the root path to the glob patterns and add them to ignorepatterns
+            ignorepatterns.extend([join(root, line.strip()) for line in lines])
+
+        for d in copy(dirs):
+            dir_path = join(root, d)
+
+            # Always ignore hidden directories
+            if d.startswith('.'):
+                dirs.remove(d)
+                continue
+
+            # Remove dirs that already match the ignorepatterns,
+            # to avoid walking into them and to keep them out of the include path
+            if is_ignored(join(dir_path, "")):
+                dirs.remove(d)
+                continue
+
+            if exclude_paths:
+                for exclude_path in exclude_paths:
+                    rel_path = relpath(dir_path, exclude_path)
+                    if not rel_path.startswith('..'):
+                        dirs.remove(d)
+                        break
+
+        # Add root to include paths
+        paths.append(root)
+
+    return paths
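
For reference, a hypothetical .mbedignore file that this parser would accept; each
non-comment, non-empty line is a glob pattern, matched with fnmatch after the
containing directory's path has been prepended:

    # Build and documentation trees are not part of the source scan
    docs/*
    BUILD/*
    */host_tests/*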

New file: tools/config.py

@@ -0,0 +1,325 @@
"""
mbed SDK
Copyright (c) 2016 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

# Implementation of the mbed configuration mechanism
from copy import deepcopy
from collections import OrderedDict
from tools.utils import json_file_to_dict, ToolException
from tools.targets import Target
import os

# Base class for all configuration exceptions
class ConfigException(Exception):
    pass

# This class keeps information about a single configuration parameter
class ConfigParameter:
    # name: the name of the configuration parameter
    # data: the data associated with the configuration parameter
    # unit_name: the unit (target/library/application) that defines this parameter
    # unit_kind: the kind of the unit ("target", "library" or "application")
    def __init__(self, name, data, unit_name, unit_kind):
        self.name = self.get_full_name(name, unit_name, unit_kind, allow_prefix = False)
        self.defined_by = self.get_display_name(unit_name, unit_kind)
        self.set_by = self.defined_by
        self.help_text = data.get("help", None)
        self.value = data.get("value", None)
        self.required = data.get("required", False)
        self.macro_name = data.get("macro_name", "MBED_CONF_%s" % self.sanitize(self.name.upper()))

    # Return the full (prefixed) name of a parameter.
    # If the parameter already has a prefix, check if it is valid.
    # name: the simple (unqualified) name of the parameter
    # unit_name: the unit (target/library/application) that defines this parameter
    # unit_kind: the kind of the unit ("target", "library" or "application")
    # label: the name of the label in the 'target_overrides' section (optional)
    # allow_prefix: True to allow the original name to have a prefix, False otherwise
    @staticmethod
    def get_full_name(name, unit_name, unit_kind, label = None, allow_prefix = True):
        if name.find('.') == -1: # the name is not prefixed
            if unit_kind == "target":
                prefix = "target."
            elif unit_kind == "application":
                prefix = "app."
            else:
                prefix = unit_name + '.'
            return prefix + name
        # The name has a prefix, so check if it is valid
        if not allow_prefix:
            raise ConfigException("Invalid parameter name '%s' in '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
        temp = name.split(".")
        # Check if the parameter syntax is correct (must be unit_name.parameter_name)
        if len(temp) != 2:
            raise ConfigException("Invalid parameter name '%s' in '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
        prefix = temp[0]
        # Check if the given parameter prefix matches the expected prefix
        if (unit_kind == "library" and prefix != unit_name) or (unit_kind == "target" and prefix != "target"):
            raise ConfigException("Invalid prefix '%s' for parameter name '%s' in '%s'" % (prefix, name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
        return name
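
To illustrate the prefixing rules (the library name "my-lib" and its parameter are
invented for this example):

    ConfigParameter.get_full_name("buffer-size", "my-lib", "library")            # -> "my-lib.buffer-size"
    ConfigParameter.get_full_name("my-lib.buffer-size", "my-lib", "library")     # -> "my-lib.buffer-size" (valid prefix)
    ConfigParameter.get_full_name("other-lib.buffer-size", "my-lib", "library")  # raises ConfigException (invalid prefix)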

    # Return the name displayed for a unit when interrogating the origin
    # and the last-set place of a parameter
    # unit_name: the unit (target/library/application) that defines this parameter
    # unit_kind: the kind of the unit ("target", "library" or "application")
    # label: the name of the label in the 'target_overrides' section (optional)
    @staticmethod
    def get_display_name(unit_name, unit_kind, label = None):
        if unit_kind == "target":
            return "target:" + unit_name
        elif unit_kind == "application":
            return "application%s" % ("[%s]" % label if label else "")
        else: # library
            return "library:%s%s" % (unit_name, "[%s]" % label if label else "")

    # "Sanitize" a name so that it is a valid C macro name.
    # Currently it simply replaces '.' and '-' with '_'
    # name: the un-sanitized name
    @staticmethod
    def sanitize(name):
        return name.replace('.', '_').replace('-', '_')

    # Set a value for this parameter and remember the place where it was set
    # value: the value of the parameter
    # unit_name: the unit (target/library/application) that defines this parameter
    # unit_kind: the kind of the unit ("target", "library" or "application")
    # label: the name of the label in the 'target_overrides' section (optional)
    def set_value(self, value, unit_name, unit_kind, label = None):
        self.value = value
        self.set_by = self.get_display_name(unit_name, unit_kind, label)

    # Return the string representation of this configuration parameter
    def __str__(self):
        if self.value is not None:
            return '%s = %s (macro name: "%s")' % (self.name, self.value, self.macro_name)
        else:
            return '%s has no value' % self.name

    # Return a verbose description of this configuration parameter as a string
    def get_verbose_description(self):
        desc = "Name: %s%s\n" % (self.name, " (required parameter)" if self.required else "")
        if self.help_text:
            desc += "    Description: %s\n" % self.help_text
        desc += "    Defined by: %s\n" % self.defined_by
        if not self.value:
            return desc + "    No value set"
        desc += "    Macro name: %s\n" % self.macro_name
        desc += "    Value: %s (set by %s)" % (self.value, self.set_by)
        return desc

# A representation of a configuration macro. It handles both macros without a value (MACRO)
# and with a value (MACRO=VALUE)
class ConfigMacro:
    def __init__(self, name, unit_name, unit_kind):
        self.name = name
        self.defined_by = ConfigParameter.get_display_name(unit_name, unit_kind)
        if name.find("=") != -1:
            tmp = name.split("=")
            if len(tmp) != 2:
                raise ValueError("Invalid macro definition '%s' in '%s'" % (name, self.defined_by))
            self.macro_name = tmp[0]
        else:
            self.macro_name = name
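
For example (the unit names here are illustrative):

    m1 = ConfigMacro("MY_FEATURE_ENABLED=1", "my-lib", "library")  # m1.macro_name == "MY_FEATURE_ENABLED"
    m2 = ConfigMacro("NDEBUG", "app", "application")               # m2.macro_name == "NDEBUG"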

# 'Config' implements the mbed configuration mechanism
class Config:
    # Libraries and applications have different names for their configuration files
    __mbed_app_config_name = "mbed_app.json"
    __mbed_lib_config_name = "mbed_lib.json"

    # Allowed keys in configuration dictionaries
    # (targets can have any kind of keys, so this validation is not applicable to them)
    __allowed_keys = {
        "library": set(["name", "config", "target_overrides", "macros", "__config_path"]),
        "application": set(["config", "custom_targets", "target_overrides", "macros", "__config_path"])
    }

    # The initialization arguments for Config are:
    # target: the name of the mbed target used for this configuration instance
    # top_level_dirs: a list of top level source directories (where mbed_app.json could be found).
    # __init__ will look for the application configuration file in top_level_dirs.
    # If found once, it'll parse it and check if it has a 'custom_targets' key.
    # If it does, it'll update the list of targets as needed.
    # If found more than once, an exception is raised.
    # top_level_dirs can be None (in this case, mbed_app.json will not be searched for)
    def __init__(self, target, top_level_dirs = []):
        app_config_location = None
        for s in (top_level_dirs or []):
            full_path = os.path.join(s, self.__mbed_app_config_name)
            if os.path.isfile(full_path):
                if app_config_location is not None:
                    raise ConfigException("Duplicate '%s' file in '%s' and '%s'" % (self.__mbed_app_config_name, app_config_location, full_path))
                else:
                    app_config_location = full_path
        self.app_config_data = json_file_to_dict(app_config_location) if app_config_location else {}
        # Check the keys in the application configuration data
        unknown_keys = set(self.app_config_data.keys()) - self.__allowed_keys["application"]
        if unknown_keys:
            raise ConfigException("Unknown key(s) '%s' in %s" % (",".join(unknown_keys), self.__mbed_app_config_name))
        # Update the list of targets with the ones defined in the application config, if applicable
        Target.add_py_targets(self.app_config_data.get("custom_targets", {}))
        self.lib_config_data = {}
        # Make sure that each config is processed only once
        self.processed_configs = {}
        self.target = target if isinstance(target, str) else target.name
        self.target_labels = Target.get_target(self.target).get_labels()

    # Add one or more configuration files
    def add_config_files(self, flist):
        for f in flist:
            if not f.endswith(self.__mbed_lib_config_name):
                continue
            full_path = os.path.normpath(os.path.abspath(f))
            # Check that we didn't already process this file
            if self.processed_configs.has_key(full_path):
                continue
            self.processed_configs[full_path] = True
            # Read the library configuration and add a "__config_path" attribute to it
            cfg = json_file_to_dict(f)
            cfg["__config_path"] = full_path
            # If there's already a configuration for a module with the same name, exit with an error
            if self.lib_config_data.has_key(cfg["name"]):
                raise ConfigException("Library name '%s' is not unique (defined in '%s' and '%s')" % (cfg["name"], full_path, self.lib_config_data[cfg["name"]]["__config_path"]))
            self.lib_config_data[cfg["name"]] = cfg
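
A minimal, hypothetical mbed_lib.json that passes the checks above ("my-lib" and
its parameter are invented for illustration; only the keys listed in
__allowed_keys["library"] are accepted):

    {
        "name": "my-lib",
        "config": {
            "buffer-size": {
                "help": "Size of the internal buffer, in bytes",
                "value": 256
            }
        },
        "macros": ["MY_FEATURE_ENABLED=1"],
        "target_overrides": {
            "*": {
                "my-lib.buffer-size": 512
            }
        }
    }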

    # Helper function: process a "config" section in either a target, a library or the application
    # data: a dictionary with the configuration parameters
    # params: storage for the discovered configuration parameters
    # unit_name: the unit (target/library/application) that defines this parameter
    # unit_kind: the kind of the unit ("target", "library" or "application")
    def _process_config_parameters(self, data, params, unit_name, unit_kind):
        for name, v in data.items():
            full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind)
            # If the parameter was already defined, raise an error
            if full_name in params:
                raise ConfigException("Parameter name '%s' defined in both '%s' and '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind), params[full_name].defined_by))
            # Otherwise add it to the list of known parameters.
            # If "v" is not a dictionary, this is a shortcut definition, otherwise it is a full definition
            params[full_name] = ConfigParameter(name, v if isinstance(v, dict) else {"value": v}, unit_name, unit_kind)
        return params
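
Both definition styles accepted above, in a hypothetical "config" section;
"timeout" uses the shortcut form (value only) while "buffer-size" is a full
definition:

    "config": {
        "timeout": 10,
        "buffer-size": { "help": "Buffer size, in bytes", "value": 256 }
    }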

    # Helper function: process "config" and "target_overrides" in a given dictionary
    # data: the configuration data of the library/application
    # params: storage for the discovered configuration parameters
    # unit_name: the unit (library/application) that defines this parameter
    # unit_kind: the kind of the unit ("library" or "application")
    def _process_config_and_overrides(self, data, params, unit_name, unit_kind):
        self._process_config_parameters(data.get("config", {}), params, unit_name, unit_kind)
        for label, overrides in data.get("target_overrides", {}).items():
            # If the label is defined by the target or it has the special value "*", process the overrides
            if (label == '*') or (label in self.target_labels):
                for name, v in overrides.items():
                    # Get the full name of the parameter
                    full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind, label)
                    # If an attempt is made to override a parameter that isn't defined, raise an error
                    if not full_name in params:
                        raise ConfigException("Attempt to override undefined parameter '%s' in '%s'" % (full_name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
                    params[full_name].set_value(v, unit_name, unit_kind, label)
        return params

    # Read and interpret configuration data defined by targets
    def get_target_config_data(self):
        # We consider the resolution order for our target and sort it by level, reversed,
        # so that we first look at the top level target (the parent), then its direct children,
        # then the children's children and so on, until we reach self.target.
        # TODO: this might not work so well in some multiple inheritance scenarios.
        # At each step, look at two keys of the target data:
        #   - config: used to define new configuration parameters
        #   - overrides: used to override already defined configuration parameters
        params, json_data = {}, Target.get_json_target_data()
        resolution_order = [e[0] for e in sorted(Target.get_target(self.target).resolution_order, key = lambda e: e[1], reverse = True)]
        for tname in resolution_order:
            # Read the target data directly from its description
            t = json_data[tname]
            # Process definitions first
            self._process_config_parameters(t.get("config", {}), params, tname, "target")
            # Then process overrides
            for name, v in t.get("overrides", {}).items():
                full_name = ConfigParameter.get_full_name(name, tname, "target")
                # If the parameter name is not defined, or if there isn't a path from this target to the
                # target where the parameter was defined in the target inheritance tree, raise an error.
                # We need to use 'defined_by[7:]' to remove the "target:" prefix from defined_by
                if (not full_name in params) or (not params[full_name].defined_by[7:] in Target.get_target(tname).resolution_order_names):
                    raise ConfigException("Attempt to override undefined parameter '%s' in '%s'" % (name, ConfigParameter.get_display_name(tname, "target")))
                # Otherwise update the value of the parameter
                params[full_name].set_value(v, tname, "target")
        return params
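
A sketch of what this walks over, using two invented entries from the targets
JSON; "Base" defines a parameter and "DERIVED" overrides it:

    "Base": {
        "config": {
            "stack-size": { "help": "Main thread stack size", "value": 4096 }
        }
    },
    "DERIVED": {
        "inherits": ["Base"],
        "overrides": { "stack-size": 8192 }
    }

Building for DERIVED would yield a parameter "target.stack-size" with value 8192,
defined by target:Base and set by target:DERIVED.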

    # Helper function: process a macro definition, checking for incompatible duplicate definitions
    # mlist: list of macro names to process
    # macros: dictionary with currently discovered macros
    # unit_name: the unit (library/application) that defines this macro
    # unit_kind: the kind of the unit ("library" or "application")
    def _process_macros(self, mlist, macros, unit_name, unit_kind):
        for mname in mlist:
            m = ConfigMacro(mname, unit_name, unit_kind)
            if (m.macro_name in macros) and (macros[m.macro_name].name != mname):
                # Found an incompatible definition of the macro in another module, so raise an error
                full_unit_name = ConfigParameter.get_display_name(unit_name, unit_kind)
                raise ConfigException("Macro '%s' defined in both '%s' and '%s' with incompatible values" % (m.macro_name, macros[m.macro_name].defined_by, full_unit_name))
            macros[m.macro_name] = m

    # Read and interpret configuration data defined by libraries.
    # It is assumed that "add_config_files" above was already called and the library configuration data
    # exists in self.lib_config_data
    def get_lib_config_data(self):
        all_params, macros = {}, {}
        for lib_name, lib_data in self.lib_config_data.items():
            unknown_keys = set(lib_data.keys()) - self.__allowed_keys["library"]
            if unknown_keys:
                raise ConfigException("Unknown key(s) '%s' in %s" % (",".join(unknown_keys), lib_name))
            all_params.update(self._process_config_and_overrides(lib_data, {}, lib_name, "library"))
            self._process_macros(lib_data.get("macros", []), macros, lib_name, "library")
        return all_params, macros

    # Read and interpret the configuration data defined by the application.
    # The application can override any configuration parameter, as well as define its own configuration data.
    # params: the dictionary with configuration parameters found so far (in the target and in libraries)
    # macros: the list of macros defined in the configuration
    def get_app_config_data(self, params, macros):
        app_cfg = self.app_config_data
        # The application can have a "config" and a "target_overrides" section just like a library
        self._process_config_and_overrides(app_cfg, params, "app", "application")
        # The application can also define macros
        self._process_macros(app_cfg.get("macros", []), macros, "app", "application")

    # Return the configuration data in two parts:
    #   - params: a dictionary with (name, ConfigParameter) entries
    #   - macros: the list of macros defined with "macros" in libraries and in the application
    def get_config_data(self):
        all_params = self.get_target_config_data()
        lib_params, macros = self.get_lib_config_data()
        all_params.update(lib_params)
        self.get_app_config_data(all_params, macros)
        return all_params, [m.name for m in macros.values()]

    # Helper: verify if there are any required parameters without a value in 'params'
    def _check_required_parameters(self, params):
        for p in params.values():
            if p.required and (p.value is None):
                raise ConfigException("Required parameter '%s' defined by '%s' doesn't have a value" % (p.name, p.defined_by))

    # Return the macro definitions generated for a dictionary of configuration parameters
    # params: a dictionary of (name, ConfigParameter instance) mappings
    @staticmethod
    def parameters_to_macros(params):
        return ['%s=%s' % (m.macro_name, m.value) for m in params.values() if m.value is not None]

    # Return the configuration data converted to a list of C macros
    def get_config_data_macros(self):
        params, macros = self.get_config_data()
        self._check_required_parameters(params)
        return macros + self.parameters_to_macros(params)
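
Continuing the hypothetical my-lib example above, get_config_data_macros() would
produce a list along the lines of:

    ["MY_FEATURE_ENABLED=1", "MBED_CONF_MY_LIB_BUFFER_SIZE=512"]

The first entry comes straight from the "macros" list; the second is the
buffer-size parameter converted via its sanitized macro name, after the "*"
override set it to 512.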
@@ -247,9 +247,8 @@ if __name__ == '__main__':
     if options.build_dir is not None:
         build_dir = options.build_dir

-    target = TARGET_MAP[mcu]
     try:
-        bin_file = build_project(test.source_dir, build_dir, target, toolchain, test.dependencies, options.options,
+        bin_file = build_project(test.source_dir, build_dir, mcu, toolchain, test.dependencies, options.options,
                                  linker_script=options.linker_script,
                                  clean=options.clean,
                                  verbose=options.verbose,

New file: tools/memap.py

@@ -0,0 +1,508 @@
#! /usr/bin/env python

# Memory Map File Analyser for ARM mbed OS

import argparse
import sys
import string
import os
import re
import json
import time
import StringIO
from prettytable import PrettyTable

debug = False

class MemmapParser(object):

    def __init__(self):
        """
        General initialization
        """

        # list of all modules and their sections
        self.modules = dict()

        self.misc_flash_sections = ('.interrupts', '.flash_config')

        self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab', '.ARM.exidx', '.ARM.attributes', \
                               '.eh_frame', '.init_array', '.fini_array', '.jcr', '.stab', '.stabstr', \
                               '.ARM.exidx', '.ARM')

        # sections to print info for (generic for all toolchains)
        self.sections = ('.text', '.data', '.bss', '.heap', '.stack',)

        # sections need to be merged in this order
        self.all_sections = self.sections + self.other_sections + \
                            self.misc_flash_sections + ('unknown', 'OUTPUT')

        self.print_sections = ('.text', '.data', '.bss')

        # list of all object files and mapping to module names
        self.object_to_module = dict()

    def generate_output(self, file, json_mode):
        """
        Generates a summary of the memory map data

        Parameters
            file: descriptor (either stdout or a file)
            json_mode: generate output in JSON format (True/False)
        """

        buf = StringIO.StringIO()

        # Calculate misc flash sections
        misc_flash_mem = 0
        for i in self.modules:
            for k in self.misc_flash_sections:
                if self.modules[i][k]:
                    misc_flash_mem += self.modules[i][k]

        # Create table
        columns = ['Module']
        for i in list(self.print_sections):
            columns.append(i)

        table = PrettyTable(columns)
        table.align["Module"] = "l"

        subtotal = dict()
        for k in self.sections:
            subtotal[k] = 0

        json_obj = []
        for i in sorted(self.modules):

            row = []
            row.append(i)

            for k in self.sections:
                subtotal[k] += self.modules[i][k]

            for k in self.print_sections:
                row.append(self.modules[i][k])

            json_obj.append({ "module":i, "size":{k:self.modules[i][k] for k in self.print_sections}})
            table.add_row(row)

        subtotal_row = ['Subtotals']
        for k in self.print_sections:
            subtotal_row.append(subtotal[k])

        table.add_row(subtotal_row)

        if json_mode:
            json_obj.append({ "summary":{'static_ram':(subtotal['.data']+subtotal['.bss']),
                                         'heap':(subtotal['.heap']),
                                         'stack':(subtotal['.stack']),
                                         'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),
                                         'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),}})

            file.write(json.dumps(json_obj, indent=4))
            file.write('\n')
        else:
            file.write(table.get_string())
            file.write('\n')
            file.write("Static RAM memory (data + bss): %s\n" % (str(subtotal['.data']+subtotal['.bss'])))
            file.write("Heap: %s\n" % str(subtotal['.heap']))
            file.write("Stack: %s\n" % str(subtotal['.stack']))
            file.write("Total RAM memory (data + bss + heap + stack): %s\n" % (str(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack'])))
            file.write("Total Flash memory (text + data + misc): %s\n" % (str(subtotal['.text']+subtotal['.data']+misc_flash_mem)))
        return
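
In table mode the output might look like the following (module names and sizes
are invented, but the layout follows the code above):

    +----------------+-------+-------+------+
    | Module         | .text | .data | .bss |
    +----------------+-------+-------+------+
    | Misc           | 18264 |  2188 | 2308 |
    | core/mbed-rtos |  6786 |     4 |  628 |
    | hal/targets    | 12809 |    12 |  204 |
    | Subtotals      | 37859 |  2204 | 3140 |
    +----------------+-------+-------+------+
    Static RAM memory (data + bss): 5344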

    def module_add(self, module_name, size, section):
        """
        Adds a module / section to the list
        """

        if module_name in self.modules:
            self.modules[module_name][section] += size
        else:
            temp_dic = dict()
            for x in self.all_sections:
                temp_dic[x] = 0
            temp_dic[section] = size
            self.modules[module_name] = temp_dic

    def find_start_gcc(self, line):
        """
        Checks the location in a gcc map file from which to start parsing
        """
        if line.startswith('Linker script and memory map'):
            return True
        else:
            return False

    def find_start_armcc(self, line):
        """
        Checks the location in an armcc map file from which to start parsing
        """
        if line.startswith(' Base Addr Size'):
            return True
        else:
            return False

    def find_start_iar(self, line):
        """
        Checks the location in an IAR map file from which to start parsing
        """
        if line.startswith(' Section '):
            return True
        else:
            return False

    def check_new_section_gcc(self, line):
        """
        Check whether a new section in a map file has been detected (only applies to gcc)
        """

        for i in self.all_sections:
            if line.startswith(i):
                return i # return the name of the section (assuming it's a known one)

        if line.startswith('.'):
            return 'unknown' # all others are classified as unknown
        else:
            return False # everything else means no change in section

    def path_object_to_module_name(self, txt):
        """
        Parses the path to an object file and extracts the module / object data
        """

        txt = txt.replace('\\', '/')
        rex_mbed_os_name = r'^.+mbed-os\/(.+)\/(.+\.o)$'
        test_rex_mbed_os_name = re.match(rex_mbed_os_name, txt)

        if test_rex_mbed_os_name:

            object_name = test_rex_mbed_os_name.group(2)
            data = test_rex_mbed_os_name.group(1).split('/')
            ndata = len(data)

            if ndata == 1:
                module_name = data[0]
            else:
                module_name = data[0] + '/' + data[1]

            return [module_name, object_name]
        else:
            return ['Misc', ""]
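
For instance, a GCC object path maps like this (the path is shortened for
illustration):

    path_object_to_module_name('./.build/K64F/GCC_ARM/mbed-os/hal/targets/hal/spi_api.o')
    # -> ['hal/targets', 'spi_api.o']

Anything that does not match the mbed-os pattern is lumped into the 'Misc' module.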

    def parse_section_gcc(self, line):
        """
        Parse data from a section of a gcc map file
        """
        # examples:
        #                0x00004308      0x7c ./.build/K64F/GCC_ARM/mbed-os/hal/targets/hal/TARGET_Freescale/TARGET_KPSDK_MCUS/spi_api.o
        # .text          0x00000608     0x198 ./.build/K64F/GCC_ARM/mbed-os/core/mbed-rtos/rtx/TARGET_CORTEX_M/TARGET_RTOS_M4_M7/TOOLCHAIN_GCC/HAL_CM4.o
        rex_address_len_name = r'^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$'

        test_address_len_name = re.match(rex_address_len_name, line)

        if test_address_len_name:

            if int(test_address_len_name.group(2), 16) == 0: # size == 0
                return ["", 0] # no valid entry
            else:
                m_name, m_object = self.path_object_to_module_name(test_address_len_name.group(3))
                m_size = int(test_address_len_name.group(2), 16)
                return [m_name, m_size]

        else: # special corner case for *fill* sections
            # example:
            # *fill*         0x0000abe4       0x4
            rex_address_len = r'^\s+\*fill\*\s+0x(\w{8,16})\s+0x(\w+).*$'
            test_address_len = re.match(rex_address_len, line)

            if test_address_len:
                if int(test_address_len.group(2), 16) == 0: # size == 0
                    return ["", 0] # no valid entry
                else:
                    m_name = 'Misc'
                    m_size = int(test_address_len.group(2), 16)
                    return [m_name, m_size]
            else:
                return ["", 0] # no valid entry

    def parse_map_file_gcc(self, file):
        """
        Main logic to decode gcc map files
        """

        current_section = 'unknown'

        with file as infile:

            # Search for the area to parse
            for line in infile:
                if self.find_start_gcc(line) == True:
                    current_section = "unknown"
                    break

            # Start decoding the map file
            for line in infile:

                change_section = self.check_new_section_gcc(line)

                if change_section == "OUTPUT": # finished parsing the file: exit
                    break
                elif change_section != False:
                    current_section = change_section

                [module_name, module_size] = self.parse_section_gcc(line)

                if module_size == 0 or module_name == "":
                    pass
                else:
                    self.module_add(module_name, module_size, current_section)

                if debug:
                    print "Line: %s" % line,
                    print "Module: %s\tSection: %s\tSize: %s" % (module_name, current_section, module_size)
                    raw_input("----------")

    def parse_section_armcc(self, line):
        """
        Parse data from an armcc map file
        """
        # Examples of armcc map file:
        #   Base_Addr    Size         Type   Attr   Idx    E Section Name   Object
        #   0x00000000   0x00000400   Data   RO     11222    RESET          startup_MK64F12.o
        #   0x00000410   0x00000008   Code   RO     49364  * !!!main        c_w.l(__main.o)
        rex_armcc = r'^\s+0x(\w{8})\s+0x(\w{8})\s+(\w+)\s+(\w+)\s+(\d+)\s+[*]?.+\s+(.+)$'

        test_rex_armcc = re.match(rex_armcc, line)

        if test_rex_armcc:

            size = int(test_rex_armcc.group(2), 16)

            if test_rex_armcc.group(4) == 'RO':
                section = '.text'
            else:
                if test_rex_armcc.group(3) == 'Data':
                    section = '.data'
                elif test_rex_armcc.group(3) == 'Zero':
                    section = '.bss'
                else:
                    print "BUG armcc map parser"
                    raw_input()

            # look up the object in the dictionary and return the module name
            object_name = test_rex_armcc.group(6)
            if object_name in self.object_to_module:
                module_name = self.object_to_module[object_name]
            else:
                module_name = 'Misc'

            return [module_name, size, section]

        else:
            return ["", 0, ""] # no valid entry

    def parse_section_iar(self, line):
        """
        Parse data from an IAR map file
        """
        # Examples of IAR map file:
        #   Section           Kind      Address     Size   Object
        #   .intvec           ro code   0x00000000  0x198  startup_MK64F12.o [15]
        #   .rodata           const     0x00000198  0x0    zero_init3.o [133]
        #   .iar.init_table   const     0x00008384  0x2c   - Linker created -
        #   Initializer bytes const     0x00000198  0xb2   <for P3 s0>
        #   .data             inited    0x20000000  0xd4   driverAtmelRFInterface.o [70]
        #   .bss              zero      0x20000598  0x318  RTX_Conf_CM.o [4]
        #   .iar.dynexit      uninit    0x20001448  0x204  <Block tail>
        #   HEAP              uninit    0x20001650  0x10000 <Block tail>
        rex_iar = r'^\s+(.+)\s+(zero|const|ro code|inited|uninit)\s+0x(\w{8})\s+0x(\w+)\s+(.+)\s.+$'

        test_rex_iar = re.match(rex_iar, line)

        if test_rex_iar:

            size = int(test_rex_iar.group(4), 16)

            if test_rex_iar.group(2) == 'const' or test_rex_iar.group(2) == 'ro code':
                section = '.text'
            elif test_rex_iar.group(2) == 'zero' or test_rex_iar.group(2) == 'uninit':

                if test_rex_iar.group(1)[0:4] == 'HEAP':
                    section = '.heap'
                elif test_rex_iar.group(1)[0:6] == 'CSTACK':
                    section = '.stack'
                else:
                    section = '.bss' # default section

            elif test_rex_iar.group(2) == 'inited':
                section = '.data'
            else:
                print "BUG IAR map parser"
                raw_input()

            # look up the object in the dictionary and return the module name
            object_name = test_rex_iar.group(5)
            if object_name in self.object_to_module:
                module_name = self.object_to_module[object_name]
            else:
                module_name = 'Misc'

            return [module_name, size, section]

        else:
            return ["", 0, ""] # no valid entry

    def parse_map_file_armcc(self, file):
        """
        Main logic to decode armcc map files
        """

        with file as infile:

            # Search for the area to parse
            for line in infile:
                if self.find_start_armcc(line) == True:
                    break

            # Start decoding the map file
            for line in infile:

                [name, size, section] = self.parse_section_armcc(line)

                if size == 0 or name == "" or section == "":
                    pass
                else:
                    self.module_add(name, size, section)

    def parse_map_file_iar(self, file):
        """
        Main logic to decode IAR map files
        """

        with file as infile:

            # Search for the area to parse
            for line in infile:
                if self.find_start_iar(line) == True:
                    break

            # Start decoding the map file
            for line in infile:

                [name, size, section] = self.parse_section_iar(line)

                if size == 0 or name == "" or section == "":
                    pass
                else:
                    self.module_add(name, size, section)

    def search_objects(self, path, toolchain):
        """
        Check whether the specified map file matches the toolchain.
        Searches for object files and creates the mapping: object --> module
        """

        path = path.replace('\\', '/')

        # check the location of the map file
        rex = r'^(.+\/)' + re.escape(toolchain) + r'\/(.+\.map)$'
        test_rex = re.match(rex, path)

        if test_rex:
            search_path = test_rex.group(1) + toolchain + '/mbed-os/'
        else:
            # It looks like this is not an mbed OS project, so the
            # object-to-module mapping cannot be generated
            print "Warning: specified toolchain doesn't match with path to the memory map file."
            return

        for root, dirs, files in os.walk(search_path):
            for file in files:
                if file.endswith(".o"):
                    module_name, object_name = self.path_object_to_module_name(os.path.join(root, file))

                    if object_name in self.object_to_module:
                        print "WARNING: multiple usages of object file: %s" % object_name
                        print "    Current: %s" % self.object_to_module[object_name]
                        print "    New:     %s" % module_name
                        print " "
                    else:
                        self.object_to_module.update({object_name:module_name})

def main():

    version = '0.3.7'
    time_start = time.clock()

    # Parser handling
    parser = argparse.ArgumentParser(description="Memory Map File Analyser for ARM mbed OS\nversion %s" % version)

    parser.add_argument('file', help='memory map file')

    parser.add_argument('-t', '--toolchain', dest='toolchain', help='select a toolchain that corresponds to the memory map file (ARM, GCC_ARM, IAR)',
                        required=True)

    parser.add_argument('-o', '--output', help='output file name', required=False)

    parser.add_argument('-j', '--json', dest='json', required=False, action="store_true",
                        help='output in JSON formatted list')

    parser.add_argument('-v', '--version', action='version', version=version)

    # Parse/run command
    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit(1)

    args, remainder = parser.parse_known_args()

    try:
        file_input = open(args.file, 'rt')
    except IOError as e:
        print "I/O error({0}): {1}".format(e.errno, e.strerror)
        sys.exit(0)

    # Create the parser object
    t = MemmapParser()

    # Decode the map file depending on the toolchain
    if args.toolchain == "ARM":
        t.search_objects(os.path.abspath(args.file), args.toolchain)
        t.parse_map_file_armcc(file_input)
    elif args.toolchain == "GCC_ARM":
        t.parse_map_file_gcc(file_input)
    elif args.toolchain == "IAR":
        print "WARNING: IAR Compiler not fully supported (yet)"
        print " "
        t.search_objects(os.path.abspath(args.file), args.toolchain)
        t.parse_map_file_iar(file_input)
    else:
        print "Invalid toolchain. Options are: ARM, GCC_ARM, IAR"
        sys.exit(0)

    # Write output to a file
    if args.output != None:
        try:
            file_output = open(args.output, 'w')
            t.generate_output(file_output, args.json)
            file_output.close()
        except IOError as e:
            print "I/O error({0}): {1}".format(e.errno, e.strerror)
            sys.exit(0)
    else: # Write output to the screen
        t.generate_output(sys.stdout, args.json)

    file_input.close()

    print "Elapsed time: %s ms" % int(round((time.clock() - time_start) * 1000))

    sys.exit(0)

if __name__ == "__main__":
    main()
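
Typical invocations, matching the argparse options above (the map file paths are
made-up examples):

    python tools/memap.py .build/K64F/GCC_ARM/project.map -t GCC_ARM
    python tools/memap.py .build/K64F/ARM/project.map -t ARM -j -o memory_usage.json

The first prints the PrettyTable summary to stdout; the second writes the JSON
form to a file.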
tools/toolchains/__init__.py:

@@ -17,18 +17,21 @@ limitations under the License.

 import re
 import sys
-from os import stat, walk
+from os import stat, walk, getcwd, sep
 from copy import copy
 from time import time, sleep
 from types import ListType
 from shutil import copyfile
-from os.path import join, splitext, exists, relpath, dirname, basename, split
+from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath
 from inspect import getmro

 from multiprocessing import Pool, cpu_count
 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
 from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
 import tools.hooks as hooks
+from tools.memap import MemmapParser
 from hashlib import md5
+import fnmatch
+

 # Disables multiprocessing if set to a higher number than the host machine CPUs

@@ -51,6 +54,7 @@ def compile_worker(job):
         'results': results
     }

+
 class Resources:
     def __init__(self, base_path=None):
         self.base_path = base_path
@@ -78,6 +82,19 @@ class Resources:
         # Other files
         self.hex_files = []
         self.bin_files = []
+        self.json_files = []
+
+    def __add__(self, resources):
+        if resources is None:
+            return self
+        else:
+            return self.add(resources)
+
+    def __radd__(self, resources):
+        if resources is None:
+            return self
+        else:
+            return self.add(resources)
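
The None checks let scan results be folded together without special-casing the
first iteration; a sketch (the scan paths are made up):

    merged = None
    for path in ["source", "lib/drivers"]:
        merged = merged + toolchain.scan_resources(path)  # None + Resources works via __radd__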

     def add(self, resources):
         self.inc_dirs += resources.inc_dirs
@@ -102,11 +119,15 @@ class Resources:

         self.hex_files += resources.hex_files
         self.bin_files += resources.bin_files
+        self.json_files += resources.json_files

         return self

     def relative_to(self, base, dot=False):
         for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                       'cpp_sources', 'lib_dirs', 'objects', 'libraries',
-                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
+                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
+                      'hex_files', 'bin_files', 'json_files']:
             v = [rel_path(f, base, dot) for f in getattr(self, field)]
             setattr(self, field, v)
         if self.linker_script is not None:
@@ -115,7 +136,8 @@ class Resources:
     def win_to_unix(self):
         for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                       'cpp_sources', 'lib_dirs', 'objects', 'libraries',
-                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
+                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
+                      'hex_files', 'bin_files', 'json_files']:
             v = [f.replace('\\', '/') for f in getattr(self, field)]
             setattr(self, field, v)
         if self.linker_script is not None:
@@ -147,7 +169,6 @@ class Resources:

         return '\n'.join(s)

-
 # Support legacy build conventions: the original mbed build system did not have
 # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
 # had the knowledge of a list of these directories to be ignored.
@@ -164,16 +185,17 @@ LEGACY_TOOLCHAIN_NAMES = {

 class mbedToolchain:
     VERBOSE = True
+    ignorepatterns = []

     CORTEX_SYMBOLS = {
-        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0"],
-        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS"],
+        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
-        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3"],
-        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4"],
-        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1"],
-        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7"],
-        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1"],
+        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
     }
@@ -210,6 +232,7 @@ class mbedToolchain:
         self.has_config = False

         self.build_all = False
+        self.build_dir = None
         self.timestamp = time()
         self.jobs = 1
@@ -217,6 +240,9 @@ class mbedToolchain:

         self.mp_pool = None

+        if 'UVISOR_PRESENT=1' in self.macros:
+            self.target.core = re.sub(r"F$", '', self.target.core)
+
     def get_output(self):
         return self.output
@@ -299,7 +325,10 @@ class mbedToolchain:

             # Add target's symbols
             self.symbols += self.target.macros
+            # Add target's hardware
+            self.symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
+            # Add target's features
+            self.symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
             # Add extra symbols passed via the 'macros' parameter
             self.symbols += self.macros
@@ -309,12 +338,16 @@ class mbedToolchain:

        return list(set(self.symbols))  # Return only unique symbols

    # Extend the internal list of macros
    def add_macros(self, new_macros):
        self.macros.extend(new_macros)

    def get_labels(self):
        if self.labels is None:
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            self.labels = {
                'TARGET': self.target.get_labels(),
                'TARGET': self.target.get_labels() + ["DEBUG" if "debug-info" in self.options else "RELEASE"],
                'FEATURE': self.target.features,
                'TOOLCHAIN': toolchain_labels
            }
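
# Illustrative sketch (not part of this commit): how the labels computed above
# drive directory filtering in scan_resources(). The label values are examples.
labels_example = {'TARGET': ['K64F', 'RELEASE'], 'FEATURE': ['IPV4'],
                  'TOOLCHAIN': ['GCC_ARM', 'GCC']}

def keep_dir(d):
    if d.startswith('TARGET_'):
        return d[7:] in labels_example['TARGET']
    if d.startswith('FEATURE_'):
        return d[8:] in labels_example['FEATURE']
    if d.startswith('TOOLCHAIN_'):
        return d[10:] in labels_example['TOOLCHAIN']
    return not d.startswith('.')

print(keep_dir('TARGET_K64F'), keep_dir('TOOLCHAIN_IAR'))  # True False
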
@@ -341,7 +374,13 @@ class mbedToolchain:

        return False

    def scan_resources(self, path):
    def is_ignored(self, file_path):
        for pattern in self.ignorepatterns:
            if fnmatch.fnmatch(file_path, pattern):
                return True
        return False

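# Illustrative sketch (not part of this commit): is_ignored() relies on plain
# glob matching, so a pattern rooted at a directory covers everything below it.
import fnmatch
print(fnmatch.fnmatch('/src/lib/docs/readme.md', '/src/lib/docs/*'))  # True
print(fnmatch.fnmatch('/src/lib/src/main.c', '/src/lib/docs/*'))      # False
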
    def scan_resources(self, path, exclude_paths=None):
        labels = self.get_labels()
        resources = Resources(path)
        self.has_config = False
@@ -356,25 +395,55 @@ class mbedToolchain:

        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        for root, dirs, files in walk(path):
        for root, dirs, files in walk(path, followlinks=True):
            # Remove ignored directories
            # Check if the folder contains a .mbedignore file
            if ".mbedignore" in files:
                with open(join(root, ".mbedignore"), "r") as f:
                    lines = f.readlines()
                    lines = [l.strip() for l in lines]                    # Strip whitespace
                    lines = [l for l in lines if l != ""]                 # Strip empty lines
                    lines = [l for l in lines if not re.match("^#", l)]   # Strip comment lines
                    # Append the root path to the glob patterns
                    # and add the patterns to ignorepatterns
                    self.ignorepatterns.extend([join(root, line.strip()) for line in lines])
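
# Illustrative sketch (not part of this commit): given a hypothetical
# /src/lib/.mbedignore containing a comment line plus "docs/*" and
# "tests/host/*", the loop above roots each pattern at the folder that
# holds the file before adding it to ignorepatterns.
from os.path import join
root_example = "/src/lib"
lines_example = ["docs/*", "tests/host/*"]
print([join(root_example, l) for l in lines_example])
# ['/src/lib/docs/*', '/src/lib/tests/host/*']
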
            for d in copy(dirs):
                dir_path = join(root, d)
                if d == '.hg':
                    dir_path = join(root, d)
                    resources.repo_dirs.append(dir_path)
                    resources.repo_files.extend(self.scan_repository(dir_path))

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    (d.startswith('FEATURE_') and d[8:] not in labels['FEATURE']) or
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])):
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    (d == 'TESTS')):
                    dirs.remove(d)

                # Remove dirs that already match the ignorepatterns
                # to avoid travelling into them and to prevent them
                # from appearing in the include path.
                if self.is_ignored(join(dir_path, "")):
                    dirs.remove(d)

                if exclude_paths:
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            dirs.remove(d)
                            break
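
# Illustrative sketch (not part of this commit): relpath() only starts with
# '..' when dir_path lies outside exclude_path, so the check above prunes
# exactly the directories nested under an excluded root.
from os.path import relpath
print(relpath('/src/app/tests', '/src/app'))  # 'tests'   -> pruned
print(relpath('/src/lib', '/src/app'))        # '../lib'  -> kept
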
            # Add root to include paths
            resources.inc_dirs.append(root)

            for file in files:
                file_path = join(root, file)

                if self.is_ignored(file_path):
                    continue

                _, ext = splitext(file)
                ext = ext.lower()
@@ -419,6 +488,9 @@ class mbedToolchain:

                elif ext == '.bin':
                    resources.bin_files.append(file_path)

                elif ext == '.json':
                    resources.json_files.append(file_path)

        return resources

    def scan_repository(self, path):
|
@ -460,23 +532,40 @@ class mbedToolchain:
|
|||
|
||||
def relative_object_path(self, build_path, base_dir, source):
|
||||
source_dir, name, _ = split_path(source)
|
||||
|
||||
obj_dir = join(build_path, relpath(source_dir, base_dir))
|
||||
mkdir(obj_dir)
|
||||
return join(obj_dir, name + '.o')
|
||||
|
||||
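# Illustrative sketch (not part of this commit): objects mirror the source
# tree under build_path, e.g. /src/drivers/uart.c -> /build/drivers/uart.o.
from os.path import join, relpath, dirname, splitext, basename

def rel_obj(build_path, base_dir, source):
    # Same path arithmetic as relative_object_path(), without the mkdir()
    name = splitext(basename(source))[0]
    return join(build_path, relpath(dirname(source), base_dir), name + '.o')

print(rel_obj('/build', '/src', '/src/drivers/uart.c'))  # /build/drivers/uart.o
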
    def get_inc_file(self, includes):
        include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
        if not exists(include_file):
            with open(include_file, "wb") as f:
                cmd_list = []
                for c in includes:
                    if c:
                        cmd_list.append(('-I%s' % c).replace("\\", "/"))
                string = " ".join(cmd_list)
                f.write(string)
        return include_file

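# Illustrative sketch (not part of this commit): very long include lists can
# exceed OS command-line length limits, so get_inc_file() writes them once per
# unique set (named by the md5 computed in compile_sources below) and each
# toolchain references the file with its own response-file syntax:
# '--via <file>' for armcc/armlink, '@<file>' for GCC, '-f <file>' for IAR.
from hashlib import md5

def write_inc_file(build_dir, inc_paths):
    digest = md5(' '.join(sorted(inc_paths)).encode()).hexdigest()
    path = "%s/.includes_%s.txt" % (build_dir, digest)
    with open(path, "w") as f:
        f.write(" ".join(('-I%s' % i).replace("\\", "/") for i in inc_paths if i))
    return path
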
    def compile_sources(self, resources, build_path, inc_dirs=None):
        # Web IDE progress bar for project build
        files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
        self.to_be_compiled = len(files_to_compile)
        self.compiled = 0

        #for i in self.build_params:
        #    self.debug(i)
        #    self.debug("%s" % self.build_params[i])

        inc_paths = resources.inc_dirs
        if inc_dirs is not None:
            inc_paths.extend(inc_dirs)
        # De-duplicate include paths
        inc_paths = set(inc_paths)
        # Sort include paths for consistency
        inc_paths = sorted(set(inc_paths))
        # Unique id of all include paths
        self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
        # Where to store response files
        self.build_dir = build_path

        objects = []
        queue = []
@@ -484,17 +573,14 @@ class mbedToolchain:

        # The dependency checking for C/C++ is delegated to the compiler
        base_path = resources.base_path
        # Sort compile queue for consistency
        files_to_compile.sort()
        work_dir = getcwd()

        for source in files_to_compile:
            _, name, _ = split_path(source)
            object = self.relative_object_path(build_path, base_path, source)

            # Avoid multiple mkdir() calls on same work directory
            work_dir = dirname(object)
            if work_dir is not prev_dir:
                prev_dir = work_dir
                mkdir(work_dir)

            # Queue mode (multiprocessing)
            commands = self.compile_command(source, object, inc_paths)
            if commands is not None:
@@ -542,7 +628,7 @@ class mbedToolchain:

        itr = 0
        while True:
            itr += 1
            if itr > 30000:
            if itr > 180000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")
@@ -633,28 +719,6 @@ class mbedToolchain:

        else:
            raise ToolException(_stderr)

    def compile(self, cc, source, object, includes):
        _, ext = splitext(source)
        ext = ext.lower()

        command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source]

        if hasattr(self, "get_dep_opt"):
            base, _ = splitext(object)
            dep_path = base + '.d'
            command.extend(self.get_dep_opt(dep_path))

        if hasattr(self, "cc_extra"):
            command.extend(self.cc_extra(base))

        return [command]

    def compile_c(self, source, object, includes):
        return self.compile(self.cc, source, object, includes)

    def compile_cpp(self, source, object, includes):
        return self.compile(self.cppc, source, object, includes)

    def build_library(self, objects, dir, name):
        needed_update = False
        lib = self.STD_LIB_NAME % name
@@ -677,10 +741,16 @@ class mbedToolchain:

        if self.target.OUTPUT_NAMING == "8.3":
            name = name[0:8]
            ext = ext[0:3]

        # Create destination directory
        head, tail = split(name)
        new_path = join(tmp_path, head)
        mkdir(new_path)

        filename = name+'.'+ext
        elf = join(tmp_path, name + '.elf')
        bin = join(tmp_path, filename)
        map = join(tmp_path, name + '.map')

        if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
            needed_update = True
@@ -693,18 +763,20 @@ class mbedToolchain:

        self.binary(r, elf, bin)

        self.mem_stats(map)

        self.var("compile_succeded", True)
        self.var("binary", filename)

        return bin, needed_update

    def default_cmd(self, command):
        self.debug("Command: %s"% ' '.join(command))
        _stdout, _stderr, _rc = run_cmd(command)
        # Print all warnings / errors from stderr to console output
        for error_line in _stderr.splitlines():
            print error_line

        self.debug("Command: %s"% ' '.join(command))
        self.debug("Return: %s"% _rc)

        for output_line in _stdout.splitlines():
@@ -749,6 +821,35 @@ class mbedToolchain:

    def var(self, key, value):
        self.notify({'type': 'var', 'key': key, 'val': value})

    def mem_stats(self, map):
        # Create the parser object
        toolchain = self.__class__.__name__
        t = MemmapParser()

        try:
            with open(map, 'rt') as f:
                # Decode the map file depending on the toolchain
                if toolchain == "ARM_STD" or toolchain == "ARM_MICRO":
                    t.search_objects(abspath(map), "ARM")
                    t.parse_map_file_armcc(f)
                elif toolchain == "GCC_ARM":
                    t.parse_map_file_gcc(f)
                elif toolchain == "IAR":
                    self.info("[WARNING] IAR Compiler not fully supported (yet)")
                    t.search_objects(abspath(map), toolchain)
                    t.parse_map_file_iar(f)
                else:
                    self.info("Unknown toolchain for memory statistics %s" % toolchain)
                    return

            t.generate_output(sys.stdout, False)
            map_out = splitext(map)[0] + "_map.json"
            with open(map_out, 'w') as fo:
                t.generate_output(fo, True)
        except OSError:
            return
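
# Illustrative sketch (not part of this commit): the if/elif chain above is a
# per-toolchain dispatch; a table form makes the supported map formats explicit.
# The stub below stands in for memap's MemmapParser used by mem_stats().
class MemmapParserStub(object):
    def parse_map_file_armcc(self, f): pass
    def parse_map_file_gcc(self, f): pass
    def parse_map_file_iar(self, f): pass

def pick_parser(toolchain, parser):
    table = {"ARM_STD": parser.parse_map_file_armcc,
             "ARM_MICRO": parser.parse_map_file_armcc,
             "GCC_ARM": parser.parse_map_file_gcc,
             "IAR": parser.parse_map_file_iar}
    return table.get(toolchain)  # None -> the "unknown toolchain" path

print(pick_parser("GCC_ARM", MemmapParserStub()))  # bound method of the stub
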

from tools.settings import ARM_BIN
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from tools.settings import IAR_PATH


@@ -9,19 +9,18 @@ You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" tools.,
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
from os.path import join
import copy
from os.path import join, dirname, splitext, basename, exists

from tools.toolchains import mbedToolchain
from tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB
from tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB, GOANNA_PATH
from tools.hooks import hook_tool
from tools.settings import GOANNA_PATH
from tools.utils import mkdir

class ARM(mbedToolchain):
    LINKER_EXT = '.sct'

@@ -31,16 +30,6 @@ class ARM(mbedToolchain):

    DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+)( \(column (?P<column>\d+)\)|): (?P<severity>Warning|Error): (?P<message>.+)')
    DEP_PATTERN = re.compile('\S+:\s(?P<file>.+)\n')

    DEFAULT_FLAGS = {
        'common': ["--apcs=interwork",
                   "--brief_diagnostics"],
        'asm': ['-I"%s"' % ARM_INC],
        'c': ["-c", "--gnu", "-Otime", "--restrict", "--multibyte_chars", "--split_sections", "--md", "--no_depend_system_headers", '-I"%s"' % ARM_INC,
              "--c99", "-D__ASSERT_MSG" ],
        'cxx': ["--cpp", "--no_rtti", "-D__ASSERT_MSG"],
        'ld': [],
    }

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

|
@ -54,25 +43,34 @@ class ARM(mbedToolchain):
|
|||
cpu = target.core
|
||||
|
||||
main_cc = join(ARM_BIN, "armcc")
|
||||
common = ["-c",
|
||||
"--cpu=%s" % cpu, "--gnu",
|
||||
"-Otime", "--split_sections", "--apcs=interwork",
|
||||
"--brief_diagnostics", "--restrict", "--multibyte_chars"
|
||||
]
|
||||
|
||||
self.flags = copy.deepcopy(self.DEFAULT_FLAGS)
|
||||
self.flags['common'] += ["--cpu=%s" % cpu]
|
||||
if "save-asm" in self.options:
|
||||
self.flags['common'].extend(["--asm", "--interleave"])
|
||||
common.extend(["--asm", "--interleave"])
|
||||
|
||||
if "debug-info" in self.options:
|
||||
self.flags['common'].append("-g")
|
||||
self.flags['c'].append("-O0")
|
||||
common.append("-O0")
|
||||
else:
|
||||
self.flags['c'].append("-O3")
|
||||
common.append("-O3")
|
||||
# add debug symbols for all builds
|
||||
common.append("-g")
|
||||
|
||||
self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] + self.flags['c']
|
||||
common_c = [
|
||||
"--md", "--no_depend_system_headers",
|
||||
'-I%s' % ARM_INC
|
||||
]
|
||||
|
||||
self.asm = [main_cc] + common + ['-I%s' % ARM_INC]
|
||||
if not "analyze" in self.options:
|
||||
self.cc = [main_cc] + self.flags['common'] + self.flags['c']
|
||||
self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
|
||||
self.cc = [main_cc] + common + common_c + ["--c99"]
|
||||
self.cppc = [main_cc] + common + common_c + ["--cpp", "--no_rtti"]
|
||||
else:
|
||||
self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c']
|
||||
self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
|
||||
self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + common + common_c + ["--c99"]
|
||||
self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + common + common_c + ["--cpp", "--no_rtti"]
|
||||
|
||||
self.ld = [join(ARM_BIN, "armlink")]
|
||||
self.sys_libs = []
|
||||
|
@@ -80,19 +78,6 @@ class ARM(mbedToolchain):

        self.ar = join(ARM_BIN, "armar")
        self.elf2bin = join(ARM_BIN, "fromelf")

    def remove_option(self, option):
        for tool in [self.asm, self.cc, self.cppc]:
            if option in tool:
                tool.remove(option)

    def assemble(self, source, object, includes):
        # Preprocess first, then assemble
        tempfile = object + '.E.s'
        return [
            self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source],
            self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile])
        ]

    def parse_dependencies(self, dep_path):
        dependencies = []
        for line in open(dep_path).readlines():
@@ -122,39 +107,116 @@ class ARM(mbedToolchain):

                match.group('message')
            )

    def get_dep_opt(self, dep_path):
    def get_dep_option(self, object):
        base, _ = splitext(object)
        dep_path = base + '.d'
        return ["--depend", dep_path]

    def archive(self, objects, lib_path):
        self.default_cmd([self.ar, '-r', lib_path] + objects)

    def get_compile_options(self, defines, includes):
        return ['-D%s' % d for d in defines] + ['--via', self.get_inc_file(includes)]

    @hook_tool
    def assemble(self, source, object, includes):
        # Preprocess first, then assemble
        dir = join(dirname(object), '.temp')
        mkdir(dir)
        tempfile = join(dir, basename(object) + '.E.s')

        # Build preprocess assemble command
        cmd_pre = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-E", "-o", tempfile, source]

        # Build main assemble command
        cmd = self.asm + ["-o", object, tempfile]

        # Call cmdline hook
        cmd_pre = self.hook.get_cmdline_assembler(cmd_pre)
        cmd = self.hook.get_cmdline_assembler(cmd)

        # Return command array, don't execute
        return [cmd_pre, cmd]

    @hook_tool
    def compile(self, cc, source, object, includes):
        # Build compile command
        cmd = cc + self.get_compile_options(self.get_symbols(), includes)

        cmd.extend(self.get_dep_option(object))

        cmd.extend(["-o", object, source])

        # Call cmdline hook
        cmd = self.hook.get_cmdline_compiler(cmd)

        return [cmd]

    def compile_c(self, source, object, includes):
        return self.compile(self.cc, source, object, includes)

    def compile_cpp(self, source, object, includes):
        return self.compile(self.cppc, source, object, includes)

    @hook_tool
    def link(self, output, objects, libraries, lib_dirs, mem_map):
        map_file = splitext(output)[0] + ".map"
        if len(lib_dirs):
            args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--list=.link_totals.txt"]
            args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--map", "--list=%s" % map_file]
        else:
            args = ["-o", output, "--info=totals", "--list=.link_totals.txt"]
            args = ["-o", output, "--info=totals", "--map", "--list=%s" % map_file]

        if mem_map:
            args.extend(["--scatter", mem_map])

        if hasattr(self.target, "link_cmdline_hook"):
            args = self.target.link_cmdline_hook(self.__class__.__name__, args)

        # Build linker command
        cmd = self.ld + args + objects + libraries + self.sys_libs

        self.default_cmd(self.ld + args + objects + libraries + self.sys_libs)

        # Call cmdline hook
        cmd = self.hook.get_cmdline_linker(cmd)

        # Split link command to linker executable + response file
        link_files = join(dirname(output), ".link_files.txt")
        with open(link_files, "wb") as f:
            cmd_linker = cmd[0]
            cmd_list = []
            for c in cmd[1:]:
                if c:
                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
            string = " ".join(cmd_list).replace("\\", "/")
            f.write(string)

        # Exec command
        self.default_cmd([cmd_linker, '--via', link_files])
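
# Illustrative sketch (not part of this commit): the response-file writer used
# by link() and archive() above quotes every non-flag token (paths may contain
# spaces) and normalizes separators so the file is valid on Windows hosts.
def to_response_line(args):
    quoted = [('"%s"' % a) if not a.startswith('-') else a for a in args if a]
    return " ".join(quoted).replace("\\", "/")

print(to_response_line(["-o", "C:\\out\\app.elf", "--map"]))
# -o "C:/out/app.elf" --map
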

    @hook_tool
    def archive(self, objects, lib_path):
        archive_files = join(dirname(lib_path), ".archive_files.txt")
        with open(archive_files, "wb") as f:
            o_list = []
            for o in objects:
                o_list.append('"%s"' % o)
            string = " ".join(o_list).replace("\\", "/")
            f.write(string)

        # Exec command
        self.default_cmd([self.ar, '-r', lib_path, '--via', archive_files])

    @hook_tool
    def binary(self, resources, elf, bin):
        args = [self.elf2bin, '--bin', '-o', bin, elf]
        # Build binary command
        cmd = [self.elf2bin, '--bin', '-o', bin, elf]

        if hasattr(self.target, "binary_cmdline_hook"):
            args = self.target.binary_cmdline_hook(self.__class__.__name__, args)
        # Call cmdline hook
        cmd = self.hook.get_cmdline_binary(cmd)

        # Exec command
        self.default_cmd(cmd)

        self.default_cmd(args)

class ARM_STD(ARM):
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
        self.ld.append("--libpath=%s" % ARM_LIB)
        self.cc += ["-D__ASSERT_MSG"]
        self.cppc += ["-D__ASSERT_MSG"]
        self.ld.extend(["--libpath", ARM_LIB])

class ARM_MICRO(ARM):

@@ -163,20 +225,18 @@ class ARM_MICRO(ARM):

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # add microlib to the command line flags
        # Compiler
        self.asm += ["-D__MICROLIB"]
        self.cc += ["--library_type=microlib", "-D__MICROLIB"]
        self.cppc += ["--library_type=microlib", "-D__MICROLIB"]
        self.cc += ["--library_type=microlib", "-D__MICROLIB", "-D__ASSERT_MSG"]
        self.cppc += ["--library_type=microlib", "-D__MICROLIB", "-D__ASSERT_MSG"]

        # the exporter uses --library_type flag to set microlib
        self.flags['c'] += ["--library_type=microlib"]
        self.flags['cxx'] += ["--library_type=microlib"]
        self.flags['ld'].append("--library_type=microlib")
        # Linker
        self.ld.append("--library_type=microlib")

        # We had to patch microlib to add C++ support
        # In later releases this patch should have entered mainline
        if ARM_MICRO.PATCHED_LIBRARY:
            self.flags['ld'].append("--noscanlib")
            self.ld.append("--noscanlib")

        # System Libraries
        self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])

@@ -187,4 +247,4 @@ class ARM_MICRO(ARM):

        elif target.core in ["Cortex-M0", "Cortex-M0+"]:
            self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
        else:
            self.ld.append("--libpath=%s" % ARM_LIB)
            self.ld.extend(["--libpath", ARM_LIB])

@@ -15,7 +15,7 @@ See the License for the specific language governing permissions and

limitations under the License.
"""
import re
from os.path import join, basename, splitext
from os.path import join, basename, splitext, dirname, exists

from tools.toolchains import mbedToolchain
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
@@ -67,17 +67,18 @@ class GCC(mbedToolchain):

                        "-Wno-unused-parameter", "-Wno-missing-field-initializers",
                        "-fmessage-length=0", "-fno-exceptions", "-fno-builtin",
                        "-ffunction-sections", "-fdata-sections",
                        "-MMD", "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
                        "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
                        ] + self.cpu

        if "save-asm" in self.options:
            common_flags.append("-save-temps")

        if "debug-info" in self.options:
            common_flags.append("-g")
            common_flags.append("-O0")
        else:
            common_flags.append("-O2")
        # add debug symbols for all builds
        common_flags.append("-g")

        main_cc = join(tool_path, "arm-none-eabi-gcc")
        main_cppc = join(tool_path, "arm-none-eabi-g++")
@@ -95,12 +96,11 @@ class GCC(mbedToolchain):

        self.ar = join(tool_path, "arm-none-eabi-ar")
        self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")

    def assemble(self, source, object, includes):
        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]

    def parse_dependencies(self, dep_path):
        dependencies = []
        for line in open(dep_path).readlines()[1:]:
        buff = open(dep_path).readlines()
        buff[0] = re.sub('^(.*?)\: ', '', buff[0])
        for line in buff:
            file = line.replace('\\\n', '').strip()
            if file:
                # GCC might list more than one dependency on a single line, in this case
@@ -160,22 +160,103 @@ class GCC(mbedToolchain):

                    message + match.group('message')
                )

    def archive(self, objects, lib_path):
        self.default_cmd([self.ar, "rcs", lib_path] + objects)

    def get_dep_option(self, object):
        base, _ = splitext(object)
        dep_path = base + '.d'
        return ["-MD", "-MF", dep_path]

    def get_compile_options(self, defines, includes):
        return ['-D%s' % d for d in defines] + ['@%s' % self.get_inc_file(includes)]

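# Illustrative sketch (not part of this commit): "-MD -MF <base>.d" makes GCC
# emit a Make-style rule ("app.o: app.c mbed.h \ ..."); parse_dependencies()
# strips the target and the line continuations. A minimal equivalent under
# those assumptions:
import re

def parse_d_text(text):
    lines = text.splitlines()
    lines[0] = re.sub(r'^(.*?)\: ', '', lines[0])
    deps = []
    for line in lines:
        part = line.replace('\\', '').strip()
        if part:
            deps.extend(part.split())
    return deps

print(parse_d_text("app.o: app.c mbed.h \\\n more.h"))
# ['app.c', 'mbed.h', 'more.h']
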
    @hook_tool
    def assemble(self, source, object, includes):
        # Build assemble command
        cmd = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-o", object, source]

        # Call cmdline hook
        cmd = self.hook.get_cmdline_assembler(cmd)

        # Return command array, don't execute
        return [cmd]

    @hook_tool
    def compile(self, cc, source, object, includes):
        # Build compile command
        cmd = cc + self.get_compile_options(self.get_symbols(), includes)

        cmd.extend(self.get_dep_option(object))

        cmd.extend(["-o", object, source])

        # Call cmdline hook
        cmd = self.hook.get_cmdline_compiler(cmd)

        return [cmd]

    def compile_c(self, source, object, includes):
        return self.compile(self.cc, source, object, includes)

    def compile_cpp(self, source, object, includes):
        return self.compile(self.cppc, source, object, includes)

    @hook_tool
    def link(self, output, objects, libraries, lib_dirs, mem_map):
        libs = []
        for l in libraries:
            name, _ = splitext(basename(l))
            libs.append("-l%s" % name[3:])
        libs.extend(["-l%s" % l for l in self.sys_libs])

        # Build linker command
        map_file = splitext(output)[0] + ".map"
        cmd = self.ld + ["-o", output, "-Wl,-Map=%s" % map_file] + objects + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]
        if mem_map:
            cmd.extend(['-T', mem_map])

        for L in lib_dirs:
            cmd.extend(['-L', L])
        cmd.extend(libs)

        self.default_cmd(self.hook.get_cmdline_linker(self.ld + ["-T%s" % mem_map, "-o", output] +
            objects + ["-L%s" % L for L in lib_dirs] + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]))
        # Call cmdline hook
        cmd = self.hook.get_cmdline_linker(cmd)

        # Split link command to linker executable + response file
        link_files = join(dirname(output), ".link_files.txt")
        with open(link_files, "wb") as f:
            cmd_linker = cmd[0]
            cmd_list = []
            for c in cmd[1:]:
                if c:
                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
            string = " ".join(cmd_list).replace("\\", "/")
            f.write(string)

        # Exec command
        self.default_cmd([cmd_linker, "@%s" % link_files])

    @hook_tool
    def archive(self, objects, lib_path):
        archive_files = join(dirname(lib_path), ".archive_files.txt")
        with open(archive_files, "wb") as f:
            o_list = []
            for o in objects:
                o_list.append('"%s"' % o)
            string = " ".join(o_list).replace("\\", "/")
            f.write(string)

        # Exec command
        self.default_cmd([self.ar, 'rcs', lib_path, "@%s" % archive_files])

    @hook_tool
    def binary(self, resources, elf, bin):
        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, "-O", "binary", elf, bin]))
        # Build binary command
        cmd = [self.elf2bin, "-O", "binary", elf, bin]

        # Call cmdline hook
        cmd = self.hook.get_cmdline_binary(cmd)

        # Exec command
        self.default_cmd(cmd)


class GCC_ARM(GCC):


@@ -16,7 +16,7 @@ limitations under the License.

"""
import re
from os import remove
from os.path import join, exists
from os.path import join, exists, dirname, splitext, exists

from tools.toolchains import mbedToolchain
from tools.settings import IAR_PATH
@@ -53,27 +53,32 @@ class IAR(mbedToolchain):

        if "debug-info" in self.options:
            c_flags.append("-r")
            c_flags.append("-On")
        else:
            c_flags.append("-Oh")
        # add debug symbols for all builds
        c_flags.append("-r")

        IAR_BIN = join(IAR_PATH, "bin")
        main_cc = join(IAR_BIN, "iccarm")
        if target.core == "Cortex-M7F":
        if target.core == "Cortex-M7F":
            self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", cpuchoice] + ["--fpu", "VFPv5_sp"]
        else:
            self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", cpuchoice]
        if not "analyze" in self.options:
            self.cc = [main_cc] + c_flags
            self.cppc = [main_cc, "--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags
            self.cc = [main_cc, "--vla"] + c_flags
            self.cppc = [main_cc, "--c++", "--no_rtti", "--no_exceptions"] + c_flags
        else:
            self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + c_flags
            self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags
            self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT, "--vla"] + c_flags
            self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions"] + c_flags
        self.ld = join(IAR_BIN, "ilinkarm")
        self.ar = join(IAR_BIN, "iarchive")
        self.elf2bin = join(IAR_BIN, "ielftool")

    def parse_dependencies(self, dep_path):
        return [path.strip() for path in open(dep_path).readlines()
                if (path and not path.isspace())]

    def parse_output(self, output):
        for line in output.splitlines():
            match = IAR.DIAGNOSTIC_PATTERN.match(line)
@@ -95,28 +100,99 @@ class IAR(mbedToolchain):

                match.group('message')
            )

    def get_dep_opt(self, dep_path):
    def get_dep_option(self, object):
        base, _ = splitext(object)
        dep_path = base + '.d'
        return ["--dependencies", dep_path]

    def cc_extra(self, base):
        return ["-l", base + '.s']
    def cc_extra(self, object):
        base, _ = splitext(object)
        return ["-l", base + '.s.txt']

    def parse_dependencies(self, dep_path):
        return [path.strip() for path in open(dep_path).readlines()
                if (path and not path.isspace())]

    def get_compile_options(self, defines, includes):
        return ['-D%s' % d for d in defines] + ['-f', self.get_inc_file(includes)]

    @hook_tool
    def assemble(self, source, object, includes):
        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
        # Build assemble command
        cmd = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-o", object, source]

        # Call cmdline hook
        cmd = self.hook.get_cmdline_assembler(cmd)

        # Return command array, don't execute
        return [cmd]

    @hook_tool
    def compile(self, cc, source, object, includes):
        # Build compile command
        cmd = cc + self.get_compile_options(self.get_symbols(), includes)

        cmd.extend(self.get_dep_option(object))

        cmd.extend(self.cc_extra(object))

        cmd.extend(["-o", object, source])

        # Call cmdline hook
        cmd = self.hook.get_cmdline_compiler(cmd)

        return [cmd]

    def compile_c(self, source, object, includes):
        return self.compile(self.cc, source, object, includes)

    def compile_cpp(self, source, object, includes):
        return self.compile(self.cppc, source, object, includes)

    @hook_tool
    def link(self, output, objects, libraries, lib_dirs, mem_map):
        # Build linker command
        map_file = splitext(output)[0] + ".map"
        cmd = [self.ld, "-o", output, "--skip_dynamic_initialization", "--map=%s" % map_file] + objects + libraries

        if mem_map:
            cmd.extend(["--config", mem_map])

        # Call cmdline hook
        cmd = self.hook.get_cmdline_linker(cmd)

        # Split link command to linker executable + response file
        link_files = join(dirname(output), ".link_files.txt")
        with open(link_files, "wb") as f:
            cmd_linker = cmd[0]
            cmd_list = []
            for c in cmd[1:]:
                if c:
                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
            string = " ".join(cmd_list).replace("\\", "/")
            f.write(string)

        # Exec command
        self.default_cmd([cmd_linker, '-f', link_files])

    @hook_tool
    def archive(self, objects, lib_path):
        archive_files = join(dirname(lib_path), ".archive_files.txt")
        with open(archive_files, "wb") as f:
            o_list = []
            for o in objects:
                o_list.append('"%s"' % o)
            string = " ".join(o_list).replace("\\", "/")
            f.write(string)

        if exists(lib_path):
            remove(lib_path)
        self.default_cmd([self.ar, lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        args = [self.ld, "-o", output, "--config", mem_map, "--skip_dynamic_initialization", "--threaded_lib"]
        self.default_cmd(self.hook.get_cmdline_linker(args + objects + libraries))
        self.default_cmd([self.ar, lib_path, '-f', archive_files])

    @hook_tool
    def binary(self, resources, elf, bin):
        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, '--bin', elf, bin]))
        # Build binary command
        cmd = [self.elf2bin, "--bin", elf, bin]

        # Call cmdline hook
        cmd = self.hook.get_cmdline_binary(cmd)

        # Exec command
        self.default_cmd(cmd)