mirror of https://github.com/ARMmbed/mbed-os.git
Merge pull request #9738 from theotherjimmy/reorder-bl-hooks
Tools: Reorder post-build hooks and bootloader merging (branch: pull/9956/head)
commit 9d72a5cf79
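In short: tools/hooks.py (the pre/replace/post hook registry and its command-line hooks) is deleted, the region-merging helpers move out of tools/build_api.py into a new tools/regions.py, and Target.init_hooks(), which used to register a "post binary" hook on a Hook object, becomes Target.get_post_build_hook(), which returns the hook so the caller can run it after bootloader/region merging. The sketch below illustrates the post-link ordering this enables; it is not code from this commit (the real call site's diff is suppressed further down), and the hook signature and helper names are assumptions.

    # Illustrative sketch only, not part of this commit.
    from tools.regions import merge_region_list

    def post_link(toolchain, resources, elf, merged_image, notify):
        # 1. Merge bootloader/application regions into one image first.
        if toolchain.config.has_regions:
            merge_region_list(list(toolchain.config.regions),
                              merged_image, notify, toolchain.config)
        # 2. Only then look up and run the target's "post binary" hook.
        hook = toolchain.target.get_post_build_hook(toolchain)  # argument as in the targets.py hunk
        if hook:
            hook(toolchain, resources, elf, merged_image)       # assumed hook signature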
tools/build_api.py

@@ -27,7 +27,6 @@ from os.path import join, exists, dirname, basename, abspath, normpath, splitext
 from os.path import relpath
 from os import linesep, remove, makedirs
 from time import time
-from intelhex import IntelHex
 from json import load, dump
 from jinja2 import FileSystemLoader
 from jinja2.environment import Environment

@@ -35,7 +34,7 @@ from jinja2.environment import Environment
 from .arm_pack_manager import Cache
 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
                     ToolException, InvalidReleaseTargetException,
-                    intelhex_offset, integer, generate_update_filename, copy_when_different)
+                    copy_when_different)
 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
@@ -393,124 +392,6 @@ def prepare_toolchain(src_paths, build_dir, target, toolchain_name,

     return toolchain

-
-def _printihex(ihex):
-    import pprint
-    pprint.PrettyPrinter().pprint(ihex.todict())
-
-
-def _real_region_size(region):
-    try:
-        part = intelhex_offset(region.filename, offset=region.start)
-        return (part.maxaddr() - part.minaddr()) + 1
-    except AttributeError:
-        return region.size
-
-
-def _fill_header(region_list, current_region):
-    """Fill an application header region
-
-    This is done in three steps:
-       * Fill the whole region with zeros
-       * Fill const, timestamp and size entries with their data
-       * Fill the digests using this header as the header region
-    """
-    region_dict = {r.name: r for r in region_list}
-    header = IntelHex()
-    header.puts(current_region.start, b'\x00' * current_region.size)
-    start = current_region.start
-    for member in current_region.filename:
-        _, type, subtype, data = member
-        member_size = Config.header_member_size(member)
-        if type == "const":
-            fmt = {
-                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
-                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
-            }[subtype]
-            header.puts(start, struct.pack(fmt, integer(data, 0)))
-        elif type == "timestamp":
-            fmt = {"32le": "<L", "64le": "<Q",
-                   "32be": ">L", "64be": ">Q"}[subtype]
-            header.puts(start, struct.pack(fmt, int(time())))
-        elif type == "size":
-            fmt = {"32le": "<L", "64le": "<Q",
-                   "32be": ">L", "64be": ">Q"}[subtype]
-            size = sum(_real_region_size(region_dict[r]) for r in data)
-            header.puts(start, struct.pack(fmt, size))
-        elif type == "digest":
-            if data == "header":
-                ih = header[:start]
-            else:
-                ih = intelhex_offset(region_dict[data].filename, offset=region_dict[data].start)
-            if subtype.startswith("CRCITT32"):
-                fmt = {"CRCITT32be": ">L", "CRCITT32le": "<L"}[subtype]
-                crc_val = zlib.crc32(ih.tobinarray()) & 0xffffffff
-                header.puts(start, struct.pack(fmt, crc_val))
-            elif subtype.startswith("SHA"):
-                if subtype == "SHA256":
-                    hash = hashlib.sha256()
-                elif subtype == "SHA512":
-                    hash = hashlib.sha512()
-                hash.update(ih.tobinarray())
-                header.puts(start, hash.digest())
-        start += Config.header_member_size(member)
-    return header
-
-
-def merge_region_list(region_list, destination, notify, config, padding=b'\xFF'):
-    """Merge the region_list into a single image
-
-    Positional Arguments:
-    region_list - list of regions, which should contain filenames
-    destination - file name to write all regions to
-    padding - bytes to fill gaps with
-    """
-    merged = IntelHex()
-    _, format = splitext(destination)
-    notify.info("Merging Regions")
-    # Merged file list: Keep track of binary/hex files that we have already
-    # merged. e.g. In some cases, bootloader may be split into multiple parts, but
-    # all internally referring to the same bootloader file.
-    merged_list = []
-
-    for region in region_list:
-        if region.active and not region.filename:
-            raise ToolException("Active region has no contents: No file found.")
-        if isinstance(region.filename, list):
-            header_basename, _ = splitext(destination)
-            header_filename = header_basename + "_header.hex"
-            _fill_header(region_list, region).tofile(header_filename, format='hex')
-            region = region._replace(filename=header_filename)
-        if region.filename and (region.filename not in merged_list):
-            notify.info(" Filling region %s with %s" % (region.name, region.filename))
-            part = intelhex_offset(region.filename, offset=region.start)
-            part.start_addr = None
-            # Normally, we assume that part.maxaddr() can be beyond
-            # end of rom. However, if the size is restricted with config, do check.
-            if config.target.restrict_size is not None:
-                part_size = (part.maxaddr() - part.minaddr()) + 1
-                if part_size > region.size:
-                    raise ToolException("Contents of region %s does not fit"
-                                        % region.name)
-            merged_list.append(region.filename)
-            merged.merge(part)
-        elif region.filename in merged_list:
-            notify.info(" Skipping %s as it is merged previously" % (region.name))
-
-    # Hex file can have gaps, so no padding needed. While other formats may
-    # need padding. Iterate through segments and pad the gaps.
-    if format != ".hex":
-        # begin patching from the end of the first segment
-        _, begin = merged.segments()[0]
-        for start, stop in merged.segments()[1:]:
-            pad_size = start - begin
-            merged.puts(begin, padding * pad_size)
-            begin = stop + 1
-
-    if not exists(dirname(destination)):
-        makedirs(dirname(destination))
-    notify.info("Space used after regions merged: 0x%x" %
-                (merged.maxaddr() - merged.minaddr() + 1))
-    merged.tofile(destination, format=format.strip("."))

 UPDATE_WHITELIST = (
     "application",
@@ -605,27 +486,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
     objects = toolchain.compile_sources(resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
     resources.add_files_to_type(FileType.OBJECT, objects)

-    # Link Program
-    if toolchain.config.has_regions:
-        binary, _ = toolchain.link_program(resources, build_path, name + "_application")
-        region_list = list(toolchain.config.regions)
-        region_list = [r._replace(filename=binary) if r.active else r
-                       for r in region_list]
-        res = "%s.%s" % (join(build_path, name),
-                         getattr(toolchain.target, "OUTPUT_EXT", "bin"))
-        merge_region_list(region_list, res, notify, toolchain.config)
-        update_regions = [
-            r for r in region_list if r.name in UPDATE_WHITELIST
-        ]
-        if update_regions:
-            update_res = join(build_path, generate_update_filename(name, toolchain.target))
-            merge_region_list(update_regions, update_res, notify, toolchain.config)
-            res = (res, update_res)
-        else:
-            res = (res, None)
-    else:
-        res, _ = toolchain.link_program(resources, build_path, name)
-        res = (res, None)
+    res = toolchain.link_program(resources, build_path, name)

     into_dir, extra_artifacts = toolchain.config.deliver_into()
     if into_dir:
tools/hooks.py (214 lines, file deleted)

@@ -1,214 +0,0 @@
-""" Configurable hooks in the build system. Can be used by various platforms
-to customize the build process.
-"""
-
-################################################################################
-# Hooks for the various parts of the build process
-
-# Internal mapping of hooks per tool
-_HOOKS = {}
-
-# Internal mapping of running hooks
-_RUNNING_HOOKS = {}
-
-# Available hook types
-_HOOK_TYPES = ["binary", "compile", "link", "assemble"]
-
-# Available hook steps
-_HOOK_STEPS = ["pre", "replace", "post"]
-
-# Hook the given function. Use this function as a decorator
-def hook_tool(function):
-    """Decorate a function as a tool that may be hooked"""
-    tool = function.__name__
-    tool_flag = "_" + tool + "_done"
-    def wrapper(t_self, *args, **kwargs):
-        """The hooked function itself"""
-        # if a hook for this tool is already running, it's most likely
-        # coming from a derived class, so don't hook the super class version
-        if _RUNNING_HOOKS.get(tool, False):
-            return function(t_self, *args, **kwargs)
-        _RUNNING_HOOKS[tool] = True
-        # If this tool isn't hooked, return original function
-        if tool not in _HOOKS:
-            res = function(t_self, *args, **kwargs)
-            _RUNNING_HOOKS[tool] = False
-            return res
-        tooldesc = _HOOKS[tool]
-        setattr(t_self, tool_flag, False)
-        # If there is a replace hook, execute the replacement instead
-        if "replace" in tooldesc:
-            res = tooldesc["replace"](t_self, *args, **kwargs)
-            # If the replacement has set the "done" flag, exit now
-            # Otherwise continue as usual
-            if getattr(t_self, tool_flag, False):
-                _RUNNING_HOOKS[tool] = False
-                return res
-        # Execute pre-function before main function if specified
-        if "pre" in tooldesc:
-            tooldesc["pre"](t_self, *args, **kwargs)
-        # Execute the main function now
-        res = function(t_self, *args, **kwargs)
-        # Execute post-function after main function if specified
-        if "post" in tooldesc:
-            post_res = tooldesc["post"](t_self, *args, **kwargs)
-            _RUNNING_HOOKS[tool] = False
-            return post_res or res
-        else:
-            _RUNNING_HOOKS[tool] = False
-            return res
-    return wrapper
-
-class Hook(object):
-    """A compiler class that may be hooked"""
-    def __init__(self, target, toolchain):
-        _HOOKS.clear()
-        self._cmdline_hooks = {}
-        self.toolchain = toolchain
-        target.init_hooks(self, toolchain)
-
-    # Hook various functions directly
-    @staticmethod
-    def _hook_add(hook_type, hook_step, function):
-        """Add a hook to a compile function
-
-        Positional arguments:
-        hook_type - one of the _HOOK_TYPES
-        hook_step - one of the _HOOK_STEPS
-        function - the function to add to the list of hooks
-        """
-        if hook_type not in _HOOK_TYPES or hook_step not in _HOOK_STEPS:
-            return False
-        if hook_type not in _HOOKS:
-            _HOOKS[hook_type] = {}
-        _HOOKS[hook_type][hook_step] = function
-        return True
-
-    def hook_add_compiler(self, hook_step, function):
-        """Add a hook to the compiler
-
-        Positional Arguments:
-        hook_step - one of the _HOOK_STEPS
-        function - the function to add to the list of hooks
-        """
-        return self._hook_add("compile", hook_step, function)
-
-    def hook_add_linker(self, hook_step, function):
-        """Add a hook to the linker
-
-        Positional Arguments:
-        hook_step - one of the _HOOK_STEPS
-        function - the function to add to the list of hooks
-        """
-        return self._hook_add("link", hook_step, function)
-
-    def hook_add_assembler(self, hook_step, function):
-        """Add a hook to the assembler
-
-        Positional Arguments:
-        hook_step - one of the _HOOK_STEPS
-        function - the function to add to the list of hooks
-        """
-        return self._hook_add("assemble", hook_step, function)
-
-    def hook_add_binary(self, hook_step, function):
-        """Add a hook to the elf to binary tool
-
-        Positional Arguments:
-        hook_step - one of the _HOOK_STEPS
-        function - the function to add to the list of hooks
-        """
-        return self._hook_add("binary", hook_step, function)
-
-    # Hook command lines
-    def _hook_cmdline(self, hook_type, function):
-        """Add a hook to a command line function
-
-        Positional arguments:
-        hook_type - one of the _HOOK_TYPES
-        function - the function to add to the list of hooks
-        """
-        if hook_type not in _HOOK_TYPES:
-            return False
-        self._cmdline_hooks[hook_type] = function
-        return True
-
-    def hook_cmdline_compiler(self, function):
-        """Add a hook to the compiler command line
-
-        Positional arguments:
-        function - the function to call
-        """
-        return self._hook_cmdline("compile", function)
-
-    def hook_cmdline_linker(self, function):
-        """Add a hook to the linker command line
-
-        Positional arguments:
-        function - the function to call
-        """
-        return self._hook_cmdline("link", function)
-
-    def hook_cmdline_assembler(self, function):
-        """Add a hook to the assembler command line
-
-        Positional arguments:
-        function - the function to call
-        """
-        return self._hook_cmdline("assemble", function)
-
-    def hook_cmdline_binary(self, function):
-        """Add a hook to the elf to bin tool command line
-
-        Positional arguments:
-        function - the function to call
-        """
-        return self._hook_cmdline("binary", function)
-
-    # Return the command line after applying the hook
-    def _get_cmdline(self, hook_type, cmdline):
-        """Get the command line after running all hooks
-
-        Positional arguments:
-        hook_type - one of the _HOOK_TYPES
-        cmdline - the initial command line
-        """
-        if hook_type in self._cmdline_hooks:
-            cmdline = self._cmdline_hooks[hook_type](
-                self.toolchain.__class__.__name__, cmdline)
-        return cmdline
-
-    def get_cmdline_compiler(self, cmdline):
-        """Get the compiler command line after running all hooks
-
-        Positional arguments:
-        cmdline - the initial command line
-        """
-        return self._get_cmdline("compile", cmdline)
-
-    def get_cmdline_linker(self, cmdline):
-        """Get the linker command line after running all hooks
-
-        Positional arguments:
-        cmdline - the initial command line
-        """
-        return self._get_cmdline("link", cmdline)
-
-    def get_cmdline_assembler(self, cmdline):
-        """Get the assembler command line after running all hooks
-
-        Positional arguments:
-        cmdline - the initial command line
-        """
-        return self._get_cmdline("assemble", cmdline)
-
-    def get_cmdline_binary(self, cmdline):
-        """Get the binary command line after running all hooks
-
-        Positional arguments:
-        cmdline - the initial command line
-        """
-        return self._get_cmdline("binary", cmdline)
-
-################################################################################
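For reference, the pattern that the deleted module supported, and that the toolchain hunks further down unwind, looked like the sketch below. It paraphrases the arm.py code in this same commit purely to illustrate the decorator and command-line hooks; it is not new API, and the class name is made up.

    # Illustration of the removed mechanism (old-world imports).
    from tools.toolchains import mbedToolchain
    from tools.hooks import hook_tool

    class SomeToolchain(mbedToolchain):
        @hook_tool                        # ran pre/replace/post hooks around binary()
        def binary(self, resources, elf, bin):
            cmd = [self.elf2bin, "--bin", "-o", bin, elf]
            cmd = self.hook.get_cmdline_binary(cmd)   # command-line hook, also removed
            self.default_cmd(cmd)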
tools/regions.py (new file, 175 lines)

@@ -0,0 +1,175 @@
+# mbed SDK
+# SPDX-License-Identifier: Apache-2.0
+# Copyright (c) 2011-2013 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Utilities for working with region lists.
+"""
+
+import hashlib
+import struct
+import zlib
+from time import time
+from os.path import splitext, exists, dirname
+from os import makedirs
+from .config import Config
+from .utils import (
+    ToolException,
+    intelhex_offset,
+    integer
+)
+from intelhex import IntelHex
+
+UPDATE_WHITELIST = (
+    "application"
+)
+
+
+def _printihex(ihex):
+    import pprint
+    pprint.PrettyPrinter().pprint(ihex.todict())
+
+
+def _real_region_size(region):
+    try:
+        part = intelhex_offset(region.filename, offset=region.start)
+        return (part.maxaddr() - part.minaddr()) + 1
+    except AttributeError:
+        return region.size
+
+
+def _fill_header(region_list, current_region):
+    """Fill an application header region
+
+    This is done in three steps:
+       * Fill the whole region with zeros
+       * Fill const, timestamp and size entries with their data
+       * Fill the digests using this header as the header region
+    """
+    region_dict = {r.name: r for r in region_list}
+    header = IntelHex()
+    header.puts(current_region.start, b'\x00' * current_region.size)
+    start = current_region.start
+    for member in current_region.filename:
+        _, type, subtype, data = member
+        if type == "const":
+            fmt = {
+                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
+                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
+            }[subtype]
+            header.puts(start, struct.pack(fmt, integer(data, 0)))
+        elif type == "timestamp":
+            fmt = {"32le": "<L", "64le": "<Q",
+                   "32be": ">L", "64be": ">Q"}[subtype]
+            header.puts(start, struct.pack(fmt, int(time())))
+        elif type == "size":
+            fmt = {"32le": "<L", "64le": "<Q",
+                   "32be": ">L", "64be": ">Q"}[subtype]
+            size = sum(_real_region_size(region_dict[r]) for r in data)
+            header.puts(start, struct.pack(fmt, size))
+        elif type == "digest":
+            if data == "header":
+                ih = header[:start]
+            else:
+                ih = intelhex_offset(
+                    region_dict[data].filename,
+                    offset=region_dict[data].start
+                )
+            if subtype.startswith("CRCITT32"):
+                fmt = {"CRCITT32be": ">L", "CRCITT32le": "<L"}[subtype]
+                crc_val = zlib.crc32(ih.tobinarray()) & 0xffffffff
+                header.puts(start, struct.pack(fmt, crc_val))
+            elif subtype.startswith("SHA"):
+                if subtype == "SHA256":
+                    hash = hashlib.sha256()
+                elif subtype == "SHA512":
+                    hash = hashlib.sha512()
+                hash.update(ih.tobinarray())
+                header.puts(start, hash.digest())
+        start += Config.header_member_size(member)
+    return header
+
+
+def merge_region_list(
+    region_list,
+    destination,
+    notify,
+    config,
+    padding=b'\xFF'
+):
+    """Merge the region_list into a single image
+
+    Positional Arguments:
+    region_list - list of regions, which should contain filenames
+    destination - file name to write all regions to
+    padding - bytes to fill gaps with
+    """
+    merged = IntelHex()
+    _, format = splitext(destination)
+    notify.info("Merging Regions")
+    # Merged file list: Keep track of binary/hex files that we have already
+    # merged. e.g. In some cases, bootloader may be split into multiple parts,
+    # but all internally referring to the same bootloader file.
+    merged_list = []
+
+    for region in region_list:
+        if region.active and not region.filename:
+            raise ToolException(
+                "Active region has no contents: No file found."
+            )
+        if isinstance(region.filename, list):
+            header_basename, _ = splitext(destination)
+            header_filename = header_basename + "_header.hex"
+            _fill_header(region_list, region).tofile(
+                header_filename, format='hex'
+            )
+            region = region._replace(filename=header_filename)
+        if region.filename and (region.filename not in merged_list):
+            notify.info(" Filling region %s with %s" % (
+                region.name, region.filename
+            ))
+            part = intelhex_offset(region.filename, offset=region.start)
+            part.start_addr = None
+            # Normally, we assume that part.maxaddr() can be beyond
+            # end of rom. If the size is restricted with config, don't
+            # allow this.
+            if config.target.restrict_size is not None:
+                part_size = (part.maxaddr() - part.minaddr()) + 1
+                if part_size > region.size:
+                    raise ToolException(
+                        "Contents of region %s does not fit" % region.name
+                    )
+            merged_list.append(region.filename)
+            merged.merge(part)
+        elif region.filename in merged_list:
+            notify.info(
+                " Skipping %s as it is merged previously" % (region.name)
+            )
+
+    # Hex file can have gaps, so no padding needed. While other formats may
+    # need padding. Iterate through segments and pad the gaps.
+    if format != ".hex":
+        # begin patching from the end of the first segment
+        _, begin = merged.segments()[0]
+        for start, stop in merged.segments()[1:]:
+            pad_size = start - begin
+            merged.puts(begin, padding * pad_size)
+            begin = stop + 1
+
+    if not exists(dirname(destination)):
+        makedirs(dirname(destination))
+    notify.info("Space used after regions merged: 0x%x" %
+                (merged.maxaddr() - merged.minaddr() + 1))
+    merged.tofile(destination, format=format.strip("."))
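A minimal usage sketch of the relocated helper follows. The Region, FakeConfig and FakeTarget stand-ins below carry only the fields merge_region_list() reads (real region lists come from toolchain.config.regions), and the region layout and file names are made up for illustration.

    from collections import namedtuple
    from tools.notifier.mock import MockNotifier
    from tools.regions import merge_region_list

    Region = namedtuple("Region", "name start size active filename")
    FakeConfig = namedtuple("FakeConfig", "target")          # stand-in for tools.config.Config
    FakeTarget = namedtuple("FakeTarget", "restrict_size")   # only field the size check reads

    regions = [
        Region("bootloader", 0x00000, 0x08000, False, "bootloader.hex"),   # made-up input
        Region("application", 0x08000, 0x78000, True, "application.bin"),  # made-up input
    ]
    # Pads the gap between regions with 0xFF and writes one combined image.
    merge_region_list(regions, "BUILD/combined.bin", MockNotifier(),
                      FakeConfig(FakeTarget(restrict_size=None)))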
tools/targets.py

@@ -355,7 +355,7 @@ class Target(namedtuple("Target", "name json_data resolution_order resolution_or
     def is_PSA_non_secure_target(self):
         return 'NSPE_Target' in self.labels

-    def init_hooks(self, hook, toolchain):
+    def get_post_build_hook(self, toolchain):
         """Initialize the post-build hooks for a toolchain. For now, this
         function only allows "post binary" hooks (hooks that are executed
         after the binary image is extracted from the executable file)

@@ -404,8 +404,7 @@ class Target(namedtuple("Target", "name json_data resolution_order resolution_or
         if toolchain_restrictions and \
            not toolchain_labels.intersection(toolchain_restrictions):
             return
-        # Finally, hook the requested function
-        hook.hook_add_binary("post", getattr(cls, function_name))
+        return getattr(cls, function_name)

 ################################################################################
 # Target specific code goes in this section
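Consumer side of the renamed method, as a hedged sketch (the hook's own signature is not shown in this commit and is assumed here):

    def run_post_build_hook(toolchain, resources, elf, binf):
        # get_post_build_hook() returns the hook function, or falls through
        # (None) when the toolchain is restricted or the target defines no hook.
        hook = toolchain.target.get_post_build_hook(toolchain)
        if hook:
            hook(toolchain, resources, elf, binf)   # assumed signature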
build_api tests (under tools/test)

@@ -18,7 +18,8 @@ limitations under the License.
 import unittest
 from collections import namedtuple
 from mock import patch, MagicMock
-from tools.build_api import prepare_toolchain, build_project, build_library, merge_region_list
+from tools.build_api import prepare_toolchain, build_project, build_library
+from tools.regions import merge_region_list
 from tools.resources import Resources
 from tools.toolchains import TOOLCHAINS
 from tools.notifier.mock import MockNotifier

@@ -30,7 +31,7 @@ from intelhex import IntelHex
 Tests for build_api.py
 """
 make_mock_target = namedtuple(
-    "Target", "init_hooks name features core supported_toolchains build_tools_metadata")
+    "Target", "get_post_build_hook name features core supported_toolchains build_tools_metadata")
 #Add ARMC5 to the supported_toolchains list as ARMC5 actually refers ARM Compiler 5 and is needed by ARM/ARM_STD classes when it checks for supported toolchains
 TOOLCHAINS.add("ARMC5")
 #Make a mock build_tools_metadata

@@ -65,7 +66,6 @@ class BuildApiTests(unittest.TestCase):
     @patch('tools.toolchains.mbedToolchain.need_update',
            side_effect=[i % 2 for i in range(3000)])
     @patch('os.mkdir')
-    @patch('tools.toolchains.exists', return_value=True)
     @patch('tools.toolchains.mbedToolchain.dump_build_profile')
     @patch('tools.utils.run_cmd', return_value=(b'', b'', 0))
     def test_always_complete_build(self, *_):

@@ -94,7 +94,7 @@ class BuildApiTests(unittest.TestCase):
         :return:
         """
         app_config = "app_config"
-        mock_target = make_mock_target(lambda _, __ : None,
+        mock_target = make_mock_target(lambda _ : None,
                                        "Junk", [], "Cortex-M3", TOOLCHAINS, mock_build_tools_metadata)
         mock_config_init.return_value = namedtuple(
             "Config", "target has_regions name")(mock_target, False, None)

@@ -113,7 +113,7 @@ class BuildApiTests(unittest.TestCase):
         :param mock_config_init: mock of Config __init__
         :return:
         """
-        mock_target = make_mock_target(lambda _, __ : None,
+        mock_target = make_mock_target(lambda _ : None,
                                        "Junk", [], "Cortex-M3", TOOLCHAINS, mock_build_tools_metadata)
         mock_config_init.return_value = namedtuple(
             "Config", "target has_regions name")(mock_target, False, None)

@@ -246,7 +246,7 @@ class BuildApiTests(unittest.TestCase):
         self.assertEqual(args[1]['app_config'], None,
                          "prepare_toolchain was called with an incorrect app_config")

-    @patch('tools.build_api.intelhex_offset')
+    @patch('tools.regions.intelhex_offset')
     @patch('tools.config')
     def test_merge_region_no_fit(self, mock_config, mock_intelhex_offset):
         """

(One file's diff is suppressed because it is too large.)
tools/toolchains/arm.py

@@ -15,32 +15,34 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 from __future__ import print_function, absolute_import
-from builtins import str
+from builtins import str  # noqa: F401

 import re
 from copy import copy
-from os.path import join, dirname, splitext, basename, exists, relpath, isfile
-from os import makedirs, write, curdir, remove
+from os.path import join, dirname, splitext, basename, exists, isfile
+from os import makedirs, write, remove
 from tempfile import mkstemp
 from shutil import rmtree
 from distutils.version import LooseVersion

 from tools.targets import CORE_ARCH
-from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
-from tools.hooks import hook_tool
+from tools.toolchains.mbed_toolchain import mbedToolchain, TOOLCHAIN_PATHS
 from tools.utils import mkdir, NotSupportedException, run_cmd


 class ARM(mbedToolchain):
     LINKER_EXT = '.sct'
     LIBRARY_EXT = '.ar'

     STD_LIB_NAME = "%s.ar"
     DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+)( \(column (?P<column>\d+)\)|): (?P<severity>Warning|Error|Fatal error): (?P<message>.+)')
     INDEX_PATTERN = re.compile('(?P<col>\s*)\^')
     DEP_PATTERN = re.compile('\S+:\s(?P<file>.+)\n')
     SHEBANG = "#! armcc -E"
-    SUPPORTED_CORES = ["Cortex-M0", "Cortex-M0+", "Cortex-M3", "Cortex-M4",
-                       "Cortex-M4F", "Cortex-M7", "Cortex-M7F", "Cortex-M7FD", "Cortex-A9"]
+    SUPPORTED_CORES = [
+        "Cortex-M0", "Cortex-M0+", "Cortex-M3", "Cortex-M4", "Cortex-M4F",
+        "Cortex-M7", "Cortex-M7F", "Cortex-M7FD", "Cortex-A9"
+    ]
     ARMCC_RANGE = (LooseVersion("5.06"), LooseVersion("5.07"))
     ARMCC_VERSION_RE = re.compile(b"Component: ARM Compiler (\d+\.\d+)")

@@ -82,7 +84,6 @@ class ARM(mbedToolchain):
         cpu = target.core

         ARM_BIN = join(TOOLCHAIN_PATHS['ARM'], "bin")
-        ARM_INC = join(TOOLCHAIN_PATHS['ARM'], "include")

         main_cc = join(ARM_BIN, "armcc")

@@ -90,7 +91,10 @@ class ARM(mbedToolchain):

         self.asm = [main_cc] + self.flags['common'] + self.flags['asm']
         self.cc = [main_cc] + self.flags['common'] + self.flags['c']
-        self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
+        self.cppc = (
+            [main_cc] + self.flags['common'] +
+            self.flags['c'] + self.flags['cxx']
+        )

         self.ld = [join(ARM_BIN, "armlink")] + self.flags['ld']

@@ -104,9 +108,13 @@ class ARM(mbedToolchain):
         msg = None
         min_ver, max_ver = self.ARMCC_RANGE
         match = self.ARMCC_VERSION_RE.search(stdout.encode("utf-8"))
-        found_version = LooseVersion(match.group(1).decode("utf-8")) if match else None
+        if match:
+            found_version = LooseVersion(match.group(1).decode("utf-8"))
+        else:
+            found_version = None
         min_ver, max_ver = self.ARMCC_RANGE
-        if found_version and (found_version < min_ver or found_version >= max_ver):
+        if found_version and (found_version < min_ver
+                              or found_version >= max_ver):
             msg = ("Compiler version mismatch: Have {}; "
                    "expected version >= {} and < {}"
                    .format(found_version, min_ver, max_ver))

@@ -135,8 +143,11 @@ class ARM(mbedToolchain):
         for line in open(dep_path).readlines():
             match = ARM.DEP_PATTERN.match(line)
             if match is not None:
-                #we need to append chroot, because when the .d files are generated the compiler is chrooted
-                dependencies.append((self.CHROOT if self.CHROOT else '') + match.group('file'))
+                # we need to append chroot, because when the .d files are
+                # generated the compiler is chrooted
+                dependencies.append(
+                    (self.CHROOT if self.CHROOT else '') + match.group('file')
+                )
         return dependencies

     def parse_output(self, output):

@@ -151,14 +162,18 @@ class ARM(mbedToolchain):
                     'severity': match.group('severity').lower(),
                     'file': match.group('file'),
                     'line': match.group('line'),
-                    'col': match.group('column') if match.group('column') else 0,
                     'message': match.group('message'),
                     'text': '',
                     'target_name': self.target.name,
                     'toolchain_name': self.name
                 }
+                if match.group('column'):
+                    msg['col'] = match.group('column')
+                else:
+                    msg['col'] = 0
             elif msg is not None:
-                # Determine the warning/error column by calculating the ^ position
+                # Determine the warning/error column by calculating the '^'
+                # position
                 match = ARM.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))

@@ -192,7 +207,6 @@ class ARM(mbedToolchain):

         return opts

-    @hook_tool
     def assemble(self, source, object, includes):
         # Preprocess first, then assemble
         dir = join(dirname(object), '.temp')

@@ -208,14 +222,9 @@ class ARM(mbedToolchain):
         # Build main assemble command
         cmd = self.asm + ["-o", object, tempfile]

-        # Call cmdline hook
-        cmd_pre = self.hook.get_cmdline_assembler(cmd_pre)
-        cmd = self.hook.get_cmdline_assembler(cmd)
-
         # Return command array, don't execute
         return [cmd_pre, cmd]

-    @hook_tool
     def compile(self, cc, source, object, includes):
         # Build compile command
         cmd = cc + self.get_compile_options(self.get_symbols(), includes)

@@ -224,9 +233,6 @@ class ARM(mbedToolchain):

         cmd.extend(["-o", object, source])

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_compiler(cmd)
-
         return [cmd]

     def compile_c(self, source, object, includes):

@@ -254,7 +260,7 @@ class ARM(mbedToolchain):
         with open(scatter_file, "r") as input:
             lines = input.readlines()
             if (lines[0].startswith(self.SHEBANG) or
                     not lines[0].startswith("#!")):
                 return scatter_file
             else:
                 new_scatter = join(self.build_dir, ".link_script.sct")

@@ -269,7 +275,6 @@ class ARM(mbedToolchain):

         return new_scatter

-    @hook_tool
     def link(self, output, objects, libraries, lib_dirs, scatter_file):
         base, _ = splitext(output)
         map_file = base + ".map"

@@ -282,8 +287,7 @@ class ARM(mbedToolchain):
             new_scatter = self.correct_scatter_shebang(scatter_file)
             args.extend(["--scatter", new_scatter])

-        cmd_pre = self.ld + args
-        cmd = self.hook.get_cmdline_linker(cmd_pre)
+        cmd = self.ld + args

         if self.RESPONSE_FILES:
             cmd_linker = cmd[0]

@@ -293,7 +297,6 @@ class ARM(mbedToolchain):
         self.notify.cc_verbose("Link: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

-    @hook_tool
     def archive(self, objects, lib_path):
         if self.RESPONSE_FILES:
             param = ['--via', self.get_arch_file(objects)]

@@ -301,13 +304,12 @@ class ARM(mbedToolchain):
             param = objects
         self.default_cmd([self.ar, '-r', lib_path] + param)

-    @hook_tool
     def binary(self, resources, elf, bin):
         _, fmt = splitext(bin)
-        # On .hex format, combine multiple .hex files (for multiple load regions) into one
+        # On .hex format, combine multiple .hex files (for multiple load
+        # regions) into one
         bin_arg = {".bin": "--bin", ".hex": "--i32combined"}[fmt]
         cmd = [self.elf2bin, bin_arg, '-o', bin, elf]
-        cmd = self.hook.get_cmdline_binary(cmd)

         # remove target binary file/path
         if exists(bin):

@@ -337,46 +339,95 @@ class ARM(mbedToolchain):


 class ARM_STD(ARM):

     OFFICIALLY_SUPPORTED = True
-    def __init__(self, target, notify=None, macros=None,
-                 build_profile=None, build_dir=None):
-        ARM.__init__(self, target, notify, macros, build_dir=build_dir,
-                     build_profile=build_profile)
+    def __init__(
+        self,
+        target,
+        notify=None,
+        macros=None,
+        build_profile=None,
+        build_dir=None
+    ):
+        ARM.__init__(
+            self,
+            target,
+            notify,
+            macros,
+            build_dir=build_dir,
+            build_profile=build_profile
+        )
         if int(target.build_tools_metadata["version"]) > 0:
-            #check only for ARMC5 because ARM_STD means using ARMC5, and thus supported_toolchains must include ARMC5
+            # check only for ARMC5 because ARM_STD means using ARMC5, and thus
+            # supported_toolchains must include ARMC5
             if "ARMC5" not in target.supported_toolchains:
-                raise NotSupportedException("ARM compiler 5 support is required for ARM build")
+                raise NotSupportedException(
+                    "ARM compiler 5 support is required for ARM build"
+                )
         else:
-            if not set(("ARM", "uARM")).intersection(set(target.supported_toolchains)):
-                raise NotSupportedException("ARM/uARM compiler support is required for ARM build")
+            if not set(("ARM", "uARM")).intersection(set(
+                    target.supported_toolchains
+            )):
+                raise NotSupportedException(
+                    "ARM/uARM compiler support is required for ARM build"
+                )

 class ARM_MICRO(ARM):
-    PATCHED_LIBRARY = False
-    OFFICIALLY_SUPPORTED = True
-    def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None,
-                 build_dir=None):
-        target.default_toolchain = "uARM"
+
+    PATCHED_LIBRARY = False
+
+    OFFICIALLY_SUPPORTED = True
+
+    def __init__(
+        self,
+        target,
+        notify=None,
+        macros=None,
+        silent=False,
+        extra_verbose=False,
+        build_profile=None,
+        build_dir=None
+    ):
+        target.default_toolchain = "uARM"
         if int(target.build_tools_metadata["version"]) > 0:
-            #At this point we already know that we want to use ARMC5+Microlib, so check for if they are supported
-            #For, AC6+Microlib we still use ARMC6 class
-            if not set(("ARMC5","uARM")).issubset(set(target.supported_toolchains)):
-                raise NotSupportedException("ARM/uARM compiler support is required for ARM build")
+            # At this point we already know that we want to use ARMC5+Microlib,
+            # so check if they are supported. For AC6+Microlib we still
+            # use the ARMC6 class
+            if not set(("ARMC5","uARM")).issubset(set(
+                    target.supported_toolchains
+            )):
+                raise NotSupportedException(
+                    "ARM/uARM compiler support is required for ARM build"
+                )
         else:
-            if not set(("ARM", "uARM")).intersection(set(target.supported_toolchains)):
-                raise NotSupportedException("ARM/uARM compiler support is required for ARM build")
-        ARM.__init__(self, target, notify, macros, build_dir=build_dir,
-                     build_profile=build_profile)
+            if not set(("ARM", "uARM")).intersection(set(
+                    target.supported_toolchains
+            )):
+                raise NotSupportedException(
+                    "ARM/uARM compiler support is required for ARM build"
+                )
+        ARM.__init__(
+            self,
+            target,
+            notify,
+            macros,
+            build_dir=build_dir,
+            build_profile=build_profile
+        )


 class ARMC6(ARM_STD):
-    OFFICIALLY_SUPPORTED = True
+    OFFICIALLY_SUPPORTED = False
     SHEBANG = "#! armclang -E --target=arm-arm-none-eabi -x c"
-    SUPPORTED_CORES = ["Cortex-M0", "Cortex-M0+", "Cortex-M3", "Cortex-M4",
-                       "Cortex-M4F", "Cortex-M7", "Cortex-M7F", "Cortex-M7FD",
-                       "Cortex-M23", "Cortex-M23-NS", "Cortex-M33", "Cortex-M33F",
-                       "Cortex-M33-NS", "Cortex-M33F-NS", "Cortex-M33FE-NS", "Cortex-M33FE",
-                       "Cortex-A9"]
+    SUPPORTED_CORES = [
+        "Cortex-M0", "Cortex-M0+", "Cortex-M3", "Cortex-M4",
+        "Cortex-M4F", "Cortex-M7", "Cortex-M7F", "Cortex-M7FD",
+        "Cortex-M23", "Cortex-M23-NS", "Cortex-M33", "Cortex-M33F",
+        "Cortex-M33-NS", "Cortex-M33F-NS", "Cortex-M33FE-NS", "Cortex-M33FE",
+        "Cortex-A9"
+    ]
     ARMCC_RANGE = (LooseVersion("6.10"), LooseVersion("7.0"))

     @staticmethod

@@ -390,12 +441,20 @@ class ARMC6(ARM_STD):
                 "this compiler does not support the core %s" % target.core)

         if int(target.build_tools_metadata["version"]) > 0:
-            if not set(("ARM", "ARMC6", "uARM")).intersection(set(target.supported_toolchains)):
-                raise NotSupportedException("ARM/ARMC6 compiler support is required for ARMC6 build")
+            if not set(("ARM", "ARMC6", "uARM")).intersection(set(
+                    target.supported_toolchains
+            )):
+                raise NotSupportedException(
+                    "ARM/ARMC6 compiler support is required for ARMC6 build"
+                )
         else:
-            if not set(("ARM", "ARMC6")).intersection(set(target.supported_toolchains)):
-                raise NotSupportedException("ARM/ARMC6 compiler support is required for ARMC6 build")
+            if not set(("ARM", "ARMC6")).intersection(set(
+                    target.supported_toolchains
+            )):
+                raise NotSupportedException(
+                    "ARM/ARMC6 compiler support is required for ARMC6 build"
+                )

         if getattr(target, "default_toolchain", "ARMC6") == "uARM":
             if "-DMBED_RTOS_SINGLE_THREAD" not in self.flags['common']:
                 self.flags['common'].append("-DMBED_RTOS_SINGLE_THREAD")

@@ -404,15 +463,16 @@ class ARMC6(ARM_STD):
             if "--library_type=microlib" not in self.flags['ld']:
                 self.flags['ld'].append("--library_type=microlib")
             if "-Wl,--library_type=microlib" not in self.flags['c']:
                 self.flags['c'].append("-Wl,--library_type=microlib")
             if "-Wl,--library_type=microlib" not in self.flags['cxx']:
                 self.flags['cxx'].append("-Wl,--library_type=microlib")
             if "--library_type=microlib" not in self.flags['asm']:
                 self.flags['asm'].append("--library_type=microlib")

         core = target.core
         if CORE_ARCH[target.core] == 8:
-            if (not target.core.endswith("-NS")) and kwargs.get('build_dir', False):
+            if ((not target.core.endswith("-NS")) and
+                    kwargs.get('build_dir', False)):
                 # Create Secure library
                 build_dir = kwargs['build_dir']
                 secure_file = join(build_dir, "cmse_lib.o")

@@ -479,8 +539,10 @@ class ARMC6(ARM_STD):
                    self.flags['common'] + self.flags['c'])
         self.cppc = ([join(TOOLCHAIN_PATHS["ARMC6"], "armclang")] +
                      self.flags['common'] + self.flags['cxx'])
-        self.asm = [join(TOOLCHAIN_PATHS["ARMC6"], "armasm")] + self.flags['asm']
-        self.ld = [join(TOOLCHAIN_PATHS["ARMC6"], "armlink")] + self.flags['ld']
+        self.asm = [join(TOOLCHAIN_PATHS["ARMC6"], "armasm")]
+        self.asm += self.flags['asm']
+        self.ld = [join(TOOLCHAIN_PATHS["ARMC6"], "armlink")]
+        self.ld += self.flags['ld']
         self.ar = join(TOOLCHAIN_PATHS["ARMC6"], "armar")
         self.elf2bin = join(TOOLCHAIN_PATHS["ARMC6"], "fromelf")

@@ -514,22 +576,21 @@ class ARMC6(ARM_STD):
         if config_header:
             opts.extend(self.get_config_option(config_header))
         if for_asm:
-            return ["--cpreproc",
-                    "--cpreproc_opts=%s" % ",".join(self.flags['common'] + opts)]
+            return [
+                "--cpreproc",
+                "--cpreproc_opts=%s" % ",".join(self.flags['common'] + opts)
+            ]
         return opts

-    @hook_tool
     def assemble(self, source, object, includes):
         cmd_pre = copy(self.asm)
         cmd_pre.extend(self.get_compile_options(
             self.get_symbols(True), includes, for_asm=True))
         cmd_pre.extend(["-o", object, source])
-        return [self.hook.get_cmdline_assembler(cmd_pre)]
+        return [cmd_pre]

-    @hook_tool
     def compile(self, cc, source, object, includes):
         cmd = copy(cc)
         cmd.extend(self.get_compile_options(self.get_symbols(), includes))
         cmd.extend(["-o", object, source])
-        cmd = self.hook.get_cmdline_compiler(cmd)
         return [cmd]
@ -21,9 +21,9 @@ from distutils.spawn import find_executable
|
||||||
from distutils.version import LooseVersion
|
 from distutils.version import LooseVersion

 from tools.targets import CORE_ARCH
-from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
-from tools.hooks import hook_tool
-from tools.utils import run_cmd, NotSupportedException
+from tools.toolchains.mbed_toolchain import mbedToolchain, TOOLCHAIN_PATHS
+from tools.utils import run_cmd

 class GCC(mbedToolchain):
     OFFICIALLY_SUPPORTED = True
@@ -38,15 +38,21 @@ class GCC(mbedToolchain):

     def __init__(self, target, notify=None, macros=None, build_profile=None,
                  build_dir=None):
-        mbedToolchain.__init__(self, target, notify, macros,
-                               build_profile=build_profile, build_dir=build_dir)
+        mbedToolchain.__init__(
+            self,
+            target,
+            notify,
+            macros,
+            build_profile=build_profile,
+            build_dir=build_dir
+        )

-        tool_path=TOOLCHAIN_PATHS['GCC_ARM']
+        tool_path = TOOLCHAIN_PATHS['GCC_ARM']
         # Add flags for current size setting
         default_lib = "std"
         if hasattr(target, "default_lib"):
             default_lib = target.default_lib
-        elif hasattr(target, "default_build"): # Legacy
+        elif hasattr(target, "default_build"):
             default_lib = target.default_build

         if default_lib == "small":
@@ -110,8 +116,8 @@ class GCC(mbedToolchain):
         main_cc = join(tool_path, "arm-none-eabi-gcc")
         main_cppc = join(tool_path, "arm-none-eabi-g++")
         self.asm = [main_cc] + self.flags['asm'] + self.flags["common"]
         self.cc = [main_cc]
-        self.cppc =[main_cppc]
+        self.cppc = [main_cppc]
         self.cc += self.flags['c'] + self.flags['common']
         self.cppc += self.flags['cxx'] + self.flags['common']

@@ -130,9 +136,13 @@ class GCC(mbedToolchain):
         stdout, _, retcode = run_cmd([self.cc[0], "--version"], redirect=True)
         msg = None
         match = self.GCC_VERSION_RE.search(stdout.encode("utf-8"))
-        found_version = LooseVersion(match.group(0).decode('utf-8')) if match else None
+        if match:
+            found_version = LooseVersion(match.group(0).decode('utf-8'))
+        else:
+            found_version = None
         min_ver, max_ver = self.GCC_RANGE
-        if found_version and (found_version < min_ver or found_version >= max_ver):
+        if found_version and (found_version < min_ver
+                              or found_version >= max_ver):
             msg = ("Compiler version mismatch: Have {}; "
                    "expected version >= {} and < {}"
                    .format(found_version, min_ver, max_ver))
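The reshaped version check keeps the original semantics: the accepted range is inclusive of min_ver and exclusive of max_ver, and a missing match simply skips the warning. A standalone sketch of that comparison with distutils' LooseVersion, using illustrative bounds rather than the real GCC_RANGE values (which are defined elsewhere in the class and not shown in this hunk):

from distutils.version import LooseVersion

# Illustrative bounds only; the class attribute GCC_RANGE holds the real ones.
min_ver, max_ver = LooseVersion("6.0.0"), LooseVersion("10.0.0")

for candidate in ("5.4.1", "9.2.1", "10.2.1"):
    found_version = LooseVersion(candidate)
    # Same predicate as the hunk above: below the minimum or at/above the maximum.
    mismatch = found_version < min_ver or found_version >= max_ver
    print(candidate, "mismatch" if mismatch else "ok")
# prints: 5.4.1 mismatch, 9.2.1 ok, 10.2.1 mismatch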
@@ -195,18 +205,15 @@ class GCC(mbedToolchain):
         opts = opts + self.get_config_option(config_header)
         return opts

-    @hook_tool
     def assemble(self, source, object, includes):
         # Build assemble command
-        cmd = self.asm + self.get_compile_options(self.get_symbols(True), includes) + ["-o", object, source]
-
-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_assembler(cmd)
+        cmd = self.asm + self.get_compile_options(
+            self.get_symbols(True), includes
+        ) + ["-o", object, source]

         # Return command array, don't execute
         return [cmd]

-    @hook_tool
     def compile(self, cc, source, object, includes):
         # Build compile command
         cmd = cc + self.get_compile_options(self.get_symbols(), includes)
@@ -215,8 +222,6 @@ class GCC(mbedToolchain):

         cmd.extend(["-o", object, source])

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_compiler(cmd)
         if self.use_distcc:
             cmd = ["distcc"] + cmd

@@ -228,7 +233,6 @@ class GCC(mbedToolchain):
     def compile_cpp(self, source, object, includes):
         return self.compile(self.cppc, source, object, includes)

-    @hook_tool
     def link(self, output, objects, libraries, lib_dirs, mem_map):
         libs = []
         for l in libraries:
@@ -239,15 +243,23 @@ class GCC(mbedToolchain):
         # Preprocess
         if mem_map:
             preproc_output = join(dirname(output), ".link_script.ld")
-            cmd = (self.preproc + [mem_map] + self.ld[1:] +
-                   [ "-o", preproc_output])
+            cmd = (
+                self.preproc + [mem_map] + self.ld[1:] + ["-o", preproc_output]
+            )
             self.notify.cc_verbose("Preproc: %s" % ' '.join(cmd))
             self.default_cmd(cmd)
             mem_map = preproc_output

         # Build linker command
         map_file = splitext(output)[0] + ".map"
-        cmd = self.ld + ["-o", output, "-Wl,-Map=%s" % map_file] + objects + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]
+        cmd = (
+            self.ld +
+            ["-o", output, "-Wl,-Map=%s" % map_file] +
+            objects +
+            ["-Wl,--start-group"] +
+            libs +
+            ["-Wl,--end-group"]
+        )

         if mem_map:
             cmd.extend(['-T', mem_map])
@@ -256,9 +268,6 @@ class GCC(mbedToolchain):
             cmd.extend(['-L', L])
         cmd.extend(libs)

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_linker(cmd)
-
         if self.RESPONSE_FILES:
             # Split link command to linker executable + response file
             cmd_linker = cmd[0]
@@ -269,7 +278,6 @@ class GCC(mbedToolchain):
         self.notify.cc_verbose("Link: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

-    @hook_tool
     def archive(self, objects, lib_path):
         if self.RESPONSE_FILES:
             param = ["@%s" % self.get_arch_file(objects)]
@@ -279,16 +287,12 @@ class GCC(mbedToolchain):
         # Exec command
         self.default_cmd([self.ar, 'rcs', lib_path] + param)

-    @hook_tool
     def binary(self, resources, elf, bin):
         # Build binary command
         _, fmt = splitext(bin)
         bin_arg = {'.bin': 'binary', '.hex': 'ihex'}[fmt]
         cmd = [self.elf2bin, "-O", bin_arg, elf, bin]

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_binary(cmd)
-
         # Exec command
         self.notify.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)
@@ -308,9 +312,12 @@ class GCC(mbedToolchain):
     @staticmethod
     def check_executable():
         """Returns True if the executable (arm-none-eabi-gcc) location
-        specified by the user exists OR the executable can be found on the PATH.
-        Returns False otherwise."""
-        if not TOOLCHAIN_PATHS['GCC_ARM'] or not exists(TOOLCHAIN_PATHS['GCC_ARM']):
+        specified by the user exists OR the executable can be found on the
+        PATH. Returns False otherwise."""
+        if (
+            not TOOLCHAIN_PATHS['GCC_ARM'] or
+            not exists(TOOLCHAIN_PATHS['GCC_ARM'])
+        ):
             if find_executable('arm-none-eabi-gcc'):
                 TOOLCHAIN_PATHS['GCC_ARM'] = ''
                 return True
@@ -320,5 +327,6 @@ class GCC(mbedToolchain):
         exec_name = join(TOOLCHAIN_PATHS['GCC_ARM'], 'arm-none-eabi-gcc')
         return exists(exec_name) or exists(exec_name + '.exe')

+
 class GCC_ARM(GCC):
     pass
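Across the GCC hunks the pattern is consistent: the @hook_tool decorators and the self.hook.get_cmdline_* calls are dropped, and the long command expressions are reflowed without changing what they build. A quick self-contained check of that last point for the linker command, with stand-in values for self.ld, objects and libs (the real ones come from the toolchain instance and the build, not from this diff):

ld = ["arm-none-eabi-gcc", "-Wl,--gc-sections"]    # stand-in for self.ld
objects = ["BUILD/main.o", "BUILD/startup.o"]      # stand-in object list
libs = ["-lmbed-os"]                               # stand-in library list
output, map_file = "BUILD/app.elf", "BUILD/app.map"

old_cmd = ld + ["-o", output, "-Wl,-Map=%s" % map_file] + objects + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]

new_cmd = (
    ld +
    ["-o", output, "-Wl,-Map=%s" % map_file] +
    objects +
    ["-Wl,--start-group"] +
    libs +
    ["-Wl,--end-group"]
)

assert old_cmd == new_cmd  # the reflow is purely cosmetic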
@@ -20,9 +20,9 @@ from os.path import join, splitext, exists
 from distutils.version import LooseVersion

 from tools.targets import CORE_ARCH
-from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
-from tools.hooks import hook_tool
-from tools.utils import run_cmd, NotSupportedException
+from tools.toolchains.mbed_toolchain import mbedToolchain, TOOLCHAIN_PATHS
+from tools.utils import run_cmd

 class IAR(mbedToolchain):
     OFFICIALLY_SUPPORTED = True
@@ -31,20 +31,29 @@ class IAR(mbedToolchain):
     STD_LIB_NAME = "%s.a"

     DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)",(?P<line>[\d]+)\s+(?P<severity>Warning|Error|Fatal error)(?P<message>.+)')
     INDEX_PATTERN = re.compile('(?P<col>\s*)\^')
     IAR_VERSION_RE = re.compile(b"IAR ANSI C/C\+\+ Compiler V(\d+\.\d+)")
     IAR_VERSION = LooseVersion("8.32")

     @staticmethod
     def check_executable():
         """Returns True if the executable (arm-none-eabi-gcc) location
-        specified by the user exists OR the executable can be found on the PATH.
-        Returns False otherwise."""
-        return mbedToolchain.generic_check_executable("IAR", 'iccarm', 2, "bin")
+        specified by the user exists OR the executable can be found on the
+        PATH. Returns False otherwise."""
+        return mbedToolchain.generic_check_executable(
+            "IAR", 'iccarm', 2, "bin"
+        )

     def __init__(self, target, notify=None, macros=None, build_profile=None,
                  build_dir=None):
-        mbedToolchain.__init__(self, target, notify, macros, build_dir=build_dir, build_profile=build_profile)
+        mbedToolchain.__init__(
+            self,
+            target,
+            notify,
+            macros,
+            build_dir=build_dir,
+            build_profile=build_profile
+        )
         core = target.core
         if CORE_ARCH[target.core] == 8:
             # Add linking time preprocessor macro DOMAIN_NS
@@ -66,8 +75,8 @@ class IAR(mbedToolchain):
                "Cortex-M33F": "Cortex-M33.fp.no_dsp",
                "Cortex-M33FE": "Cortex-M33.fp"}.get(core, core)

-        # flags_cmd are used only by our scripts, the project files have them already defined,
-        # using this flags results in the errors (duplication)
+        # flags_cmd are used only by our scripts, the project files have them
+        # already defined, using this flags results in the errors (duplication)
         # asm accepts --cpu Core or --fpu FPU, not like c/c++ --cpu=Core
         asm_flags_cmd = ["--cpu", cpu]
         # custom c flags
@@ -84,13 +93,22 @@ class IAR(mbedToolchain):
         IAR_BIN = join(TOOLCHAIN_PATHS['IAR'], "bin")
         main_cc = join(IAR_BIN, "iccarm")

-        self.asm = [join(IAR_BIN, "iasmarm")] + asm_flags_cmd + self.flags["asm"]
-        self.cc = [main_cc]
+        self.asm = [join(IAR_BIN, "iasmarm")]
+        self.asm += asm_flags_cmd
+        self.asm += self.flags["asm"]
+
+        self.cc = [main_cc]
+        self.cc += self.flags["common"]
+        self.cc += c_flags_cmd
+        self.cc += self.flags["c"]
+
         self.cppc = [main_cc]
-        self.cc += self.flags["common"] + c_flags_cmd + self.flags["c"]
-        self.cppc += self.flags["common"] + c_flags_cmd + cxx_flags_cmd + self.flags["cxx"]
-        self.ld = [join(IAR_BIN, "ilinkarm")] + self.flags['ld']
+        self.cppc += self.flags["common"]
+        self.cppc += c_flags_cmd
+        self.cppc += cxx_flags_cmd
+        self.cppc += self.flags["cxx"]
+
+        self.ld = [join(IAR_BIN, "ilinkarm")] + self.flags['ld']
         self.ar = join(IAR_BIN, "iarchive")
         self.elf2bin = join(IAR_BIN, "ielftool")

@@ -114,10 +132,16 @@ class IAR(mbedToolchain):
                     "severity": "Warning",
                 })

+    def _inner_parse_deps(self, dep_path):
+        for path in open(dep_path).readlines():
+            if path and not path.isspace():
+                if self.CHROOT:
+                    yield self.CHROOT + path.strip()
+                else:
+                    yield path.strip()
+
     def parse_dependencies(self, dep_path):
-        return [(self.CHROOT if self.CHROOT else '')+path.strip() for path in open(dep_path).readlines()
-                if (path and not path.isspace())]
+        return list(self._inner_parse_deps(dep_path))

     def parse_output(self, output):
         msg = None
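parse_dependencies in the IAR class trades its two-line list comprehension for a small generator with the same behaviour. A standalone re-telling of that pattern, with the CHROOT attribute reduced to an optional prefix argument and a made-up .d file as input (the function names here are illustrative, not the ones in the class):

import tempfile

def iter_deps(dep_path, chroot=None):
    # Yield one dependency path per non-blank line, optionally prefixed.
    for path in open(dep_path).readlines():
        if path and not path.isspace():
            yield (chroot + path.strip()) if chroot else path.strip()

def parse_dependencies(dep_path, chroot=None):
    return list(iter_deps(dep_path, chroot))

with tempfile.NamedTemporaryFile("w", suffix=".d", delete=False) as handle:
    handle.write("mbed-os/platform/mbed_wait_api.h\n\n  main.cpp  \n")
    dep_file = handle.name

print(parse_dependencies(dep_file))              # stripped paths, blank line skipped
print(parse_dependencies(dep_file, "/chroot/"))  # same paths with the prefix applied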
@@ -138,7 +162,8 @@ class IAR(mbedToolchain):
                     'toolchain_name': self.name
                 }
             elif msg is not None:
-                # Determine the warning/error column by calculating the ^ position
+                # Determine the warning/error column by calculating the '^'
+                # position
                 match = IAR.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))
@@ -166,7 +191,7 @@ class IAR(mbedToolchain):
         opts = ['-D%s' % d for d in defines]
         if for_asm:
             config_macros = self.config.get_config_data_macros()
-            macros_cmd = ['"-D%s"' % d for d in config_macros if not '"' in d]
+            macros_cmd = ['"-D%s"' % d for d in config_macros if '"' not in d]
             if self.RESPONSE_FILES:
                 via_file = self.make_option_file(
                     macros_cmd, "asm_macros_{}.xcl")
@@ -185,31 +210,21 @@ class IAR(mbedToolchain):

         return opts

-    @hook_tool
     def assemble(self, source, object, includes):
         # Build assemble command
-        cmd = self.asm + self.get_compile_options(self.get_symbols(True), includes, True) + ["-o", object, source]
-
-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_assembler(cmd)
+        cmd = self.asm + self.get_compile_options(
+            self.get_symbols(True), includes, True
+        ) + ["-o", object, source]

         # Return command array, don't execute
         return [cmd]

-    @hook_tool
     def compile(self, cc, source, object, includes):
         # Build compile command
         cmd = cc + self.get_compile_options(self.get_symbols(), includes)

         cmd.extend(self.get_dep_option(object))

         cmd.extend(self.cc_extra(object))

         cmd.extend(["-o", object, source])

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_compiler(cmd)
-
         return [cmd]

     def compile_c(self, source, object, includes):
@@ -218,18 +233,16 @@ class IAR(mbedToolchain):
     def compile_cpp(self, source, object, includes):
         return self.compile(self.cppc, source, object, includes)

-    @hook_tool
     def link(self, output, objects, libraries, lib_dirs, mem_map):
         # Build linker command
         map_file = splitext(output)[0] + ".map"
-        cmd = self.ld + [ "-o", output, "--map=%s" % map_file] + objects + libraries
+        cmd = self.ld + ["-o", output, "--map=%s" % map_file]
+        cmd += objects
+        cmd += libraries

         if mem_map:
             cmd.extend(["--config", mem_map])

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_linker(cmd)
-
         if self.RESPONSE_FILES:
             # Split link command to linker executable + response file
             cmd_linker = cmd[0]
@@ -240,7 +253,6 @@ class IAR(mbedToolchain):
         self.notify.cc_verbose("Link: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

-    @hook_tool
     def archive(self, objects, lib_path):
         if self.RESPONSE_FILES:
             param = ['-f', self.get_arch_file(objects)]
@@ -252,16 +264,12 @@ class IAR(mbedToolchain):

         self.default_cmd([self.ar, lib_path] + param)

-    @hook_tool
     def binary(self, resources, elf, bin):
         _, fmt = splitext(bin)
         bin_arg = {".bin": "--bin", ".hex": "--ihex"}[fmt]
         # Build binary command
         cmd = [self.elf2bin, bin_arg, elf, bin]

-        # Call cmdline hook
-        cmd = self.hook.get_cmdline_binary(cmd)
-
         # Exec command
         self.notify.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)
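As in the GCC class, IAR's binary() step now goes straight from building the ielftool command to running it, with the output format still chosen from the file extension. A tiny runnable illustration of that lookup, using made-up paths (the real elf2bin path comes from the IAR installation directory on the build machine):

from os.path import splitext

elf2bin = "/opt/iar/bin/ielftool"        # illustrative path only
elf, bin_path = "BUILD/app.elf", "BUILD/app.hex"

_, fmt = splitext(bin_path)
bin_arg = {".bin": "--bin", ".hex": "--ihex"}[fmt]
cmd = [elf2bin, bin_arg, elf, bin_path]
print(" ".join(cmd))   # /opt/iar/bin/ielftool --ihex BUILD/app.elf BUILD/app.hex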
File diff suppressed because it is too large