mirror of https://github.com/ARMmbed/mbed-os.git
Merge pull request #6781 from theotherjimmy/refactor-notify
tools: Refactor notification API
commit 53d3c4344f
@ -39,9 +39,10 @@ from tools.build_api import build_library, build_mbed_libs, build_lib
|
|||
from tools.build_api import mcu_toolchain_matrix
|
||||
from tools.build_api import print_build_results
|
||||
from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
|
||||
from utils import argparse_filestring_type, args_error
|
||||
from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, CLI_COLOR_MAP
|
||||
from utils import argparse_filestring_type, argparse_dir_not_parent
|
||||
from tools.notifier.term import TerminalNotifier
|
||||
from tools.utils import argparse_filestring_type, args_error
|
||||
from tools.utils import argparse_filestring_type, argparse_dir_not_parent
|
||||
|
||||
if __name__ == '__main__':
|
||||
start = time()
|
||||
|
@ -145,16 +146,6 @@ if __name__ == '__main__':
|
|||
if options.source_dir and not options.build_dir:
|
||||
args_error(parser, "argument --build is required by argument --source")
|
||||
|
||||
if options.color:
|
||||
# This import happens late to prevent initializing colorization when we don't need it
|
||||
import colorize
|
||||
if options.verbose:
|
||||
notify = mbedToolchain.print_notify_verbose
|
||||
else:
|
||||
notify = mbedToolchain.print_notify
|
||||
notify = colorize.print_in_color_notifier(CLI_COLOR_MAP, notify)
|
||||
else:
|
||||
notify = None
|
||||
|
||||
# Get libraries list
|
||||
libraries = []
|
||||
|
@ -190,6 +181,7 @@ if __name__ == '__main__':
|
|||
skipped.append(tt_id)
|
||||
else:
|
||||
try:
|
||||
notify = TerminalNotifier(options.verbose, options.silent)
|
||||
mcu = TARGET_MAP[target]
|
||||
profile = extract_profile(parser, options, toolchain)
|
||||
if options.source_dir:
|
||||
|
|
|
@ -300,9 +300,8 @@ def target_supports_toolchain(target, toolchain_name):
|
|||
|
||||
def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
|
||||
macros=None, clean=False, jobs=1,
|
||||
notify=None, silent=False, verbose=False,
|
||||
extra_verbose=False, config=None,
|
||||
app_config=None, build_profile=None):
|
||||
notify=None, config=None, app_config=None,
|
||||
build_profile=None):
|
||||
""" Prepares resource related objects - toolchain, target, config
|
||||
|
||||
Positional arguments:
|
||||
|
@ -315,9 +314,6 @@ def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
|
|||
clean - Rebuild everything if True
|
||||
jobs - how many compilers we can run at once
|
||||
notify - Notify function for logs
|
||||
silent - suppress printing of progress indicators
|
||||
verbose - Write the actual tools command lines used if True
|
||||
extra_verbose - even more output!
|
||||
config - a Config object to use instead of creating one
|
||||
app_config - location of a chosen mbed_app.json file
|
||||
build_profile - a list of mergeable build profiles
|
||||
|
@ -345,13 +341,12 @@ def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
|
|||
for key in profile:
|
||||
profile[key].extend(contents[toolchain_name].get(key, []))
|
||||
|
||||
toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
|
||||
extra_verbose=extra_verbose, build_profile=profile)
|
||||
toolchain = cur_tc(
|
||||
target, notify, macros, build_dir=build_dir, build_profile=profile)
|
||||
|
||||
toolchain.config = config
|
||||
toolchain.jobs = jobs
|
||||
toolchain.build_all = clean
|
||||
toolchain.VERBOSE = verbose
|
||||
|
||||
return toolchain
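The refactored prepare_toolchain drops the silent/verbose/extra_verbose flags in favour of a single notify object. A minimal sketch of a call under the new signature (the paths and the target/toolchain names below are placeholders, not part of this change):

    from tools.build_api import prepare_toolchain
    from tools.notifier.term import TerminalNotifier

    # Illustrative values only; the keyword set matches the new signature above.
    notify = TerminalNotifier(verbose=True, silent=False)
    toolchain = prepare_toolchain(
        ["."], "BUILD", "K64F", "GCC_ARM",
        macros=None, clean=False, jobs=1,
        notify=notify, app_config=None, build_profile=None)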
|
||||
|
||||
|
@ -415,7 +410,7 @@ def _fill_header(region_list, current_region):
|
|||
start += Config.header_member_size(member)
|
||||
return header
|
||||
|
||||
def merge_region_list(region_list, destination, padding=b'\xFF'):
|
||||
def merge_region_list(region_list, destination, notify, padding=b'\xFF'):
|
||||
"""Merge the region_list into a single image
|
||||
|
||||
Positional Arguments:
|
||||
|
@ -426,7 +421,7 @@ def merge_region_list(region_list, destination, padding=b'\xFF'):
|
|||
merged = IntelHex()
|
||||
_, format = splitext(destination)
|
||||
|
||||
print("Merging Regions:")
|
||||
notify.info("Merging Regions")
|
||||
|
||||
for region in region_list:
|
||||
if region.active and not region.filename:
|
||||
|
@ -437,7 +432,7 @@ def merge_region_list(region_list, destination, padding=b'\xFF'):
|
|||
_fill_header(region_list, region).tofile(header_filename, format='hex')
|
||||
region = region._replace(filename=header_filename)
|
||||
if region.filename:
|
||||
print(" Filling region %s with %s" % (region.name, region.filename))
|
||||
notify.info(" Filling region %s with %s" % (region.name, region.filename))
|
||||
part = intelhex_offset(region.filename, offset=region.start)
|
||||
part_size = (part.maxaddr() - part.minaddr()) + 1
|
||||
if part_size > region.size:
|
||||
|
@ -446,7 +441,8 @@ def merge_region_list(region_list, destination, padding=b'\xFF'):
|
|||
merged.merge(part)
|
||||
pad_size = region.size - part_size
|
||||
if pad_size > 0 and region != region_list[-1]:
|
||||
print(" Padding region %s with 0x%x bytes" % (region.name, pad_size))
|
||||
notify.info(" Padding region %s with 0x%x bytes" %
|
||||
(region.name, pad_size))
|
||||
if format is ".hex":
|
||||
"""The offset will be in the hex file generated when we're done,
|
||||
so we can skip padding here"""
|
||||
|
@ -455,8 +451,8 @@ def merge_region_list(region_list, destination, padding=b'\xFF'):
|
|||
|
||||
if not exists(dirname(destination)):
|
||||
makedirs(dirname(destination))
|
||||
print("Space used after regions merged: 0x%x" %
|
||||
(merged.maxaddr() - merged.minaddr() + 1))
|
||||
notify.info("Space used after regions merged: 0x%x" %
|
||||
(merged.maxaddr() - merged.minaddr() + 1))
|
||||
with open(destination, "wb+") as output:
|
||||
merged.tofile(output, format=format.strip("."))
|
||||
|
||||
|
@ -502,11 +498,10 @@ def scan_resources(src_paths, toolchain, dependencies_paths=None,
|
|||
return resources
|
||||
|
||||
def build_project(src_paths, build_path, target, toolchain_name,
|
||||
libraries_paths=None, linker_script=None,
|
||||
clean=False, notify=None, verbose=False, name=None,
|
||||
macros=None, inc_dirs=None, jobs=1, silent=False,
|
||||
libraries_paths=None, linker_script=None, clean=False,
|
||||
notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
|
||||
report=None, properties=None, project_id=None,
|
||||
project_description=None, extra_verbose=False, config=None,
|
||||
project_description=None, config=None,
|
||||
app_config=None, build_profile=None, stats_depth=None):
|
||||
""" Build a project. A project may be a test or a user program.
|
||||
|
||||
|
@ -522,17 +517,14 @@ def build_project(src_paths, build_path, target, toolchain_name,
|
|||
linker_script - the file that drives the linker to do it's job
|
||||
clean - Rebuild everything if True
|
||||
notify - Notify function for logs
|
||||
verbose - Write the actual tools command lines used if True
|
||||
name - the name of the project
|
||||
macros - additional macros
|
||||
inc_dirs - additional directories where include files may be found
|
||||
jobs - how many compilers we can run at once
|
||||
silent - suppress printing of progress indicators
|
||||
report - a dict where a result may be appended
|
||||
properties - UUUUHHHHH beats me
|
||||
project_id - the name put in the report
|
||||
project_description - the human-readable version of what this thing does
|
||||
extra_verbose - even more output!
|
||||
config - a Config object to use instead of creating one
|
||||
app_config - location of a chosen mbed_app.json file
|
||||
build_profile - a dict of flags that will be passed to the compiler
|
||||
|
@ -553,15 +545,14 @@ def build_project(src_paths, build_path, target, toolchain_name,
|
|||
|
||||
toolchain = prepare_toolchain(
|
||||
src_paths, build_path, target, toolchain_name, macros=macros,
|
||||
clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
|
||||
extra_verbose=extra_verbose, config=config, app_config=app_config,
|
||||
build_profile=build_profile)
|
||||
clean=clean, jobs=jobs, notify=notify, config=config,
|
||||
app_config=app_config, build_profile=build_profile)
|
||||
|
||||
# The first path will give the name to the library
|
||||
name = (name or toolchain.config.name or
|
||||
basename(normpath(abspath(src_paths[0]))))
|
||||
toolchain.info("Building project %s (%s, %s)" %
|
||||
(name, toolchain.target.name, toolchain_name))
|
||||
notify.info("Building project %s (%s, %s)" %
|
||||
(name, toolchain.target.name, toolchain_name))
|
||||
|
||||
# Initialize reporting
|
||||
if report != None:
|
||||
|
@ -597,7 +588,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
|
|||
for r in region_list]
|
||||
res = "%s.%s" % (join(build_path, name),
|
||||
getattr(toolchain.target, "OUTPUT_EXT", "bin"))
|
||||
merge_region_list(region_list, res)
|
||||
merge_region_list(region_list, res, notify)
|
||||
else:
|
||||
res, _ = toolchain.link_program(resources, build_path, name)
|
||||
|
||||
|
@ -606,9 +597,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
|
|||
if memap_instance:
|
||||
# Write output to stdout in text (pretty table) format
|
||||
memap_table = memap_instance.generate_output('table', stats_depth)
|
||||
|
||||
if not silent:
|
||||
print(memap_table)
|
||||
notify.info(memap_table)
|
||||
|
||||
# Write output to file in JSON format
|
||||
map_out = join(build_path, name + "_map.json")
|
||||
|
@ -623,7 +612,6 @@ def build_project(src_paths, build_path, target, toolchain_name,
|
|||
if report != None:
|
||||
end = time()
|
||||
cur_result["elapsed_time"] = end - start
|
||||
cur_result["output"] = toolchain.get_output() + memap_table
|
||||
cur_result["result"] = "OK"
|
||||
cur_result["memory_usage"] = (memap_instance.mem_report
|
||||
if memap_instance is not None else None)
|
||||
|
@ -646,20 +634,14 @@ def build_project(src_paths, build_path, target, toolchain_name,
|
|||
|
||||
cur_result["elapsed_time"] = end - start
|
||||
|
||||
toolchain_output = toolchain.get_output()
|
||||
if toolchain_output:
|
||||
cur_result["output"] += toolchain_output
|
||||
|
||||
add_result_to_report(report, cur_result)
|
||||
|
||||
# Let Exception propagate
|
||||
raise
|
||||
|
||||
def build_library(src_paths, build_path, target, toolchain_name,
|
||||
dependencies_paths=None, name=None, clean=False,
|
||||
archive=True, notify=None, verbose=False, macros=None,
|
||||
inc_dirs=None, jobs=1, silent=False, report=None,
|
||||
properties=None, extra_verbose=False, project_id=None,
|
||||
archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
|
||||
report=None, properties=None, project_id=None,
|
||||
remove_config_header_file=False, app_config=None,
|
||||
build_profile=None):
|
||||
""" Build a library
|
||||
|
@ -677,14 +659,11 @@ def build_library(src_paths, build_path, target, toolchain_name,
|
|||
clean - Rebuild everything if True
|
||||
archive - whether the library will create an archive file
|
||||
notify - Notify function for logs
|
||||
verbose - Write the actual tools command lines used if True
|
||||
macros - additional macros
|
||||
inc_dirs - additional directories where include files may be found
|
||||
jobs - how many compilers we can run at once
|
||||
silent - suppress printing of progress indicators
|
||||
report - a dict where a result may be appended
|
||||
properties - UUUUHHHHH beats me
|
||||
extra_verbose - even more output!
|
||||
project_id - the name that goes in the report
|
||||
remove_config_header_file - delete config header file when done building
|
||||
app_config - location of a chosen mbed_app.json file
|
||||
|
@ -711,14 +690,13 @@ def build_library(src_paths, build_path, target, toolchain_name,
|
|||
# Pass all params to the unified prepare_toolchain()
|
||||
toolchain = prepare_toolchain(
|
||||
src_paths, build_path, target, toolchain_name, macros=macros,
|
||||
clean=clean, jobs=jobs, notify=notify, silent=silent,
|
||||
verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
|
||||
clean=clean, jobs=jobs, notify=notify, app_config=app_config,
|
||||
build_profile=build_profile)
|
||||
|
||||
# The first path will give the name to the library
|
||||
if name is None:
|
||||
name = basename(normpath(abspath(src_paths[0])))
|
||||
toolchain.info("Building library %s (%s, %s)" %
|
||||
notify.info("Building library %s (%s, %s)" %
|
||||
(name, toolchain.target.name, toolchain_name))
|
||||
|
||||
# Initialize reporting
|
||||
|
@ -783,7 +761,6 @@ def build_library(src_paths, build_path, target, toolchain_name,
|
|||
if report != None:
|
||||
end = time()
|
||||
cur_result["elapsed_time"] = end - start
|
||||
cur_result["output"] = toolchain.get_output()
|
||||
cur_result["result"] = "OK"
|
||||
|
||||
|
||||
|
@ -801,10 +778,6 @@ def build_library(src_paths, build_path, target, toolchain_name,
|
|||
|
||||
cur_result["elapsed_time"] = end - start
|
||||
|
||||
toolchain_output = toolchain.get_output()
|
||||
if toolchain_output:
|
||||
cur_result["output"] += toolchain_output
|
||||
|
||||
add_result_to_report(report, cur_result)
|
||||
|
||||
# Let Exception propagate
|
||||
|
@ -818,9 +791,8 @@ def mbed2_obj_path(target_name, toolchain_name):
|
|||
real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
|
||||
return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)
|
||||
|
||||
def build_lib(lib_id, target, toolchain_name, verbose=False,
|
||||
clean=False, macros=None, notify=None, jobs=1, silent=False,
|
||||
report=None, properties=None, extra_verbose=False,
|
||||
def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
|
||||
notify=None, jobs=1, report=None, properties=None,
|
||||
build_profile=None):
|
||||
""" Legacy method for building mbed libraries
|
||||
|
||||
|
@ -831,14 +803,11 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
|
|||
|
||||
Keyword arguments:
|
||||
clean - Rebuild everything if True
|
||||
verbose - Write the actual tools command lines used if True
|
||||
macros - additional macros
|
||||
notify - Notify function for logs
|
||||
jobs - how many compilers we can run at once
|
||||
silent - suppress printing of progress indicators
|
||||
report - a dict where a result may be appended
|
||||
properties - UUUUHHHHH beats me
|
||||
extra_verbose - even more output!
|
||||
build_profile - a dict of flags that will be passed to the compiler
|
||||
"""
|
||||
lib = Library(lib_id)
|
||||
|
@ -903,10 +872,9 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
|
|||
|
||||
toolchain = prepare_toolchain(
|
||||
src_paths, tmp_path, target, toolchain_name, macros=macros,
|
||||
notify=notify, silent=silent, extra_verbose=extra_verbose,
|
||||
build_profile=build_profile, jobs=jobs, clean=clean)
|
||||
notify=notify, build_profile=build_profile, jobs=jobs, clean=clean)
|
||||
|
||||
toolchain.info("Building library %s (%s, %s)" %
|
||||
notify.info("Building library %s (%s, %s)" %
|
||||
(name.upper(), target.name, toolchain_name))
|
||||
|
||||
# Take into account the library configuration (MBED_CONFIG_FILE)
|
||||
|
@ -960,7 +928,6 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
|
|||
if report != None and needed_update:
|
||||
end = time()
|
||||
cur_result["elapsed_time"] = end - start
|
||||
cur_result["output"] = toolchain.get_output()
|
||||
cur_result["result"] = "OK"
|
||||
|
||||
add_result_to_report(report, cur_result)
|
||||
|
@ -972,10 +939,6 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
|
|||
cur_result["result"] = "FAIL"
|
||||
cur_result["elapsed_time"] = end - start
|
||||
|
||||
toolchain_output = toolchain.get_output()
|
||||
if toolchain_output:
|
||||
cur_result["output"] += toolchain_output
|
||||
|
||||
add_result_to_report(report, cur_result)
|
||||
|
||||
# Let Exception propagate
|
||||
|
@ -983,9 +946,8 @@ def build_lib(lib_id, target, toolchain_name, verbose=False,
|
|||
|
||||
# We do have unique legacy conventions about how we build and package the mbed
|
||||
# library
|
||||
def build_mbed_libs(target, toolchain_name, verbose=False,
|
||||
clean=False, macros=None, notify=None, jobs=1, silent=False,
|
||||
report=None, properties=None, extra_verbose=False,
|
||||
def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
|
||||
notify=None, jobs=1, report=None, properties=None,
|
||||
build_profile=None):
|
||||
""" Function returns True is library was built and false if building was
|
||||
skipped
|
||||
|
@ -995,15 +957,12 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
|
|||
toolchain_name - the name of the build tools
|
||||
|
||||
Keyword arguments:
|
||||
verbose - Write the actual tools command lines used if True
|
||||
clean - Rebuild everything if True
|
||||
macros - additional macros
|
||||
notify - Notify function for logs
|
||||
jobs - how many compilers we can run at once
|
||||
silent - suppress printing of progress indicators
|
||||
report - a dict where a result may be appended
|
||||
properties - UUUUHHHHH beats me
|
||||
extra_verbose - even more output!
|
||||
build_profile - a dict of flags that will be passed to the compiler
|
||||
"""
|
||||
|
||||
|
@ -1047,8 +1006,7 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
|
|||
mkdir(tmp_path)
|
||||
|
||||
toolchain = prepare_toolchain(
|
||||
[""], tmp_path, target, toolchain_name, macros=macros,verbose=verbose,
|
||||
notify=notify, silent=silent, extra_verbose=extra_verbose,
|
||||
[""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
|
||||
build_profile=build_profile, jobs=jobs, clean=clean)
|
||||
|
||||
# Take into account the library configuration (MBED_CONFIG_FILE)
|
||||
|
@ -1057,7 +1015,7 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
|
|||
toolchain.set_config_data(toolchain.config.get_config_data())
|
||||
|
||||
# mbed
|
||||
toolchain.info("Building library %s (%s, %s)" %
|
||||
notify.info("Building library %s (%s, %s)" %
|
||||
('MBED', target.name, toolchain_name))
|
||||
|
||||
# Common Headers
|
||||
|
@ -1124,7 +1082,6 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
|
|||
if report != None:
|
||||
end = time()
|
||||
cur_result["elapsed_time"] = end - start
|
||||
cur_result["output"] = toolchain.get_output()
|
||||
cur_result["result"] = "OK"
|
||||
|
||||
add_result_to_report(report, cur_result)
|
||||
|
@ -1137,10 +1094,6 @@ def build_mbed_libs(target, toolchain_name, verbose=False,
|
|||
cur_result["result"] = "FAIL"
|
||||
cur_result["elapsed_time"] = end - start
|
||||
|
||||
toolchain_output = toolchain.get_output()
|
||||
if toolchain_output:
|
||||
cur_result["output"] += toolchain_output
|
||||
|
||||
cur_result["output"] += str(exc)
|
||||
|
||||
add_result_to_report(report, cur_result)
|
||||
|
|
|
@ -1,80 +0,0 @@
|
|||
# mbed SDK
|
||||
# Copyright (c) 2016 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
""" This python file is responsible for generating colorized notifiers.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import re
|
||||
from colorama import init, Fore, Back, Style
|
||||
init()
|
||||
|
||||
COLORS = {
|
||||
'none' : "",
|
||||
'default' : Style.RESET_ALL,
|
||||
|
||||
'black' : Fore.BLACK,
|
||||
'red' : Fore.RED,
|
||||
'green' : Fore.GREEN,
|
||||
'yellow' : Fore.YELLOW,
|
||||
'blue' : Fore.BLUE,
|
||||
'magenta' : Fore.MAGENTA,
|
||||
'cyan' : Fore.CYAN,
|
||||
'white' : Fore.WHITE,
|
||||
|
||||
'on_black' : Back.BLACK,
|
||||
'on_red' : Back.RED,
|
||||
'on_green' : Back.GREEN,
|
||||
'on_yellow' : Back.YELLOW,
|
||||
'on_blue' : Back.BLUE,
|
||||
'on_magenta' : Back.MAGENTA,
|
||||
'on_cyan' : Back.CYAN,
|
||||
'on_white' : Back.WHITE,
|
||||
}
|
||||
|
||||
COLOR_MATCHER = re.compile(r"(\w+)(\W+on\W+\w+)?")
|
||||
def colorstring_to_escapecode(color_string):
|
||||
""" Convert a color string from a string into an ascii escape code that
|
||||
will print that color on the terminal.
|
||||
|
||||
Positional arguments:
|
||||
color_string - the string to parse
|
||||
"""
|
||||
match = re.match(COLOR_MATCHER, color_string)
|
||||
if match:
|
||||
return COLORS[match.group(1)] + \
|
||||
(COLORS[match.group(2).strip().replace(" ", "_")]
|
||||
if match.group(2) else "")
|
||||
else:
|
||||
return COLORS['default']
|
||||
|
||||
|
||||
def print_in_color_notifier(color_map, print_fn):
|
||||
""" Wrap a toolchain notifier in a colorizer. This colorizer will wrap
|
||||
notifications in a color if the severity matches a color in the *color_map*.
|
||||
"""
|
||||
def wrap(event, silent=False):
|
||||
"""The notification function inself"""
|
||||
file_desc = sys.stdout
|
||||
self = event['toolchain']
|
||||
if file_desc.isatty() and 'severity' in event and \
|
||||
event['severity'] in color_map:
|
||||
file_desc.write(colorstring_to_escapecode(
|
||||
color_map[event['severity']]))
|
||||
print_fn(self, event, silent)
|
||||
file_desc.write(colorstring_to_escapecode('default'))
|
||||
else:
|
||||
print_fn(self, event, silent)
|
||||
return wrap
|
|
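The module above (removed by this change) mapped severities to color strings of the form "yellow" or "red on white"; COLOR_MATCHER splits the foreground name from the optional "on <background>" part. The same parsing reappears below as TerminalNotifier.colorstring_to_escapecode. A small self-contained sketch of the behaviour, assuming colorama is installed:

    import re
    from colorama import Fore, Back, Style

    # Re-statement of the parsing above for illustration; not part of the change.
    COLORS = {'red': Fore.RED, 'yellow': Fore.YELLOW, 'on_white': Back.WHITE,
              'default': Style.RESET_ALL}
    COLOR_MATCHER = re.compile(r"(\w+)(\W+on\W+\w+)?")

    def to_escape(color_string):
        match = re.match(COLOR_MATCHER, color_string)
        if not match:
            return COLORS['default']
        foreground = COLORS[match.group(1)]
        background = (COLORS[match.group(2).strip().replace(" ", "_")]
                      if match.group(2) else "")
        return foreground + background

    assert to_escape("red on white") == Fore.RED + Back.WHITE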
@ -163,7 +163,24 @@ def generate_project_files(resources, export_path, target, name, toolchain, ide,
|
|||
return files, exporter
|
||||
|
||||
|
||||
def zip_export(file_name, prefix, resources, project_files, inc_repos):
|
||||
def _inner_zip_export(resources, inc_repos):
|
||||
for loc, res in resources.items():
|
||||
to_zip = (
|
||||
res.headers + res.s_sources + res.c_sources +\
|
||||
res.cpp_sources + res.libraries + res.hex_files + \
|
||||
[res.linker_script] + res.bin_files + res.objects + \
|
||||
res.json_files + res.lib_refs + res.lib_builds)
|
||||
if inc_repos:
|
||||
for directory in res.repo_dirs:
|
||||
for root, _, files in walk(directory):
|
||||
for repo_file in files:
|
||||
source = join(root, repo_file)
|
||||
to_zip.append(source)
|
||||
res.file_basepath[source] = res.base_path
|
||||
to_zip += res.repo_files
|
||||
yield loc, to_zip
|
||||
|
||||
def zip_export(file_name, prefix, resources, project_files, inc_repos, notify):
|
||||
"""Create a zip file from an exported project.
|
||||
|
||||
Positional Parameters:
|
||||
|
@ -173,29 +190,25 @@ def zip_export(file_name, prefix, resources, project_files, inc_repos):
|
|||
project_files - a list of extra files to be added to the root of the prefix
|
||||
directory
|
||||
"""
|
||||
to_zip_list = list(_inner_zip_export(resources, inc_repos))
|
||||
total_files = sum(len(to_zip) for _, to_zip in to_zip_list)
|
||||
total_files += len(project_files)
|
||||
zipped = 0
|
||||
with zipfile.ZipFile(file_name, "w") as zip_file:
|
||||
for prj_file in project_files:
|
||||
zip_file.write(prj_file, join(prefix, basename(prj_file)))
|
||||
for loc, res in resources.items():
|
||||
to_zip = (
|
||||
res.headers + res.s_sources + res.c_sources +\
|
||||
res.cpp_sources + res.libraries + res.hex_files + \
|
||||
[res.linker_script] + res.bin_files + res.objects + \
|
||||
res.json_files + res.lib_refs + res.lib_builds)
|
||||
if inc_repos:
|
||||
for directory in res.repo_dirs:
|
||||
for root, _, files in walk(directory):
|
||||
for repo_file in files:
|
||||
source = join(root, repo_file)
|
||||
to_zip.append(source)
|
||||
res.file_basepath[source] = res.base_path
|
||||
to_zip += res.repo_files
|
||||
for loc, to_zip in to_zip_list:
|
||||
res = resources[loc]
|
||||
for source in to_zip:
|
||||
if source:
|
||||
zip_file.write(
|
||||
source,
|
||||
join(prefix, loc,
|
||||
relpath(source, res.file_basepath[source])))
|
||||
notify.progress("Zipping", source,
|
||||
100 * (zipped / total_files))
|
||||
zipped += 1
|
||||
for lib, res in resources.items():
|
||||
for source in res.lib_builds:
|
||||
target_dir, _ = splitext(source)
|
||||
dest = join(prefix, loc,
|
||||
|
@ -206,10 +219,9 @@ def zip_export(file_name, prefix, resources, project_files, inc_repos):
|
|||
|
||||
|
||||
def export_project(src_paths, export_path, target, ide, libraries_paths=None,
|
||||
linker_script=None, notify=None, verbose=False, name=None,
|
||||
inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
|
||||
config=None, macros=None, zip_proj=None, inc_repos=False,
|
||||
build_profile=None, app_config=None):
|
||||
linker_script=None, notify=None, name=None, inc_dirs=None,
|
||||
jobs=1, config=None, macros=None, zip_proj=None,
|
||||
inc_repos=False, build_profile=None, app_config=None):
|
||||
"""Generates a project file and creates a zip archive if specified
|
||||
|
||||
Positional Arguments:
|
||||
|
@ -223,13 +235,9 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
|
|||
linker_script - path to the linker script for the specified target
|
||||
notify - function is passed all events, and expected to handle notification
|
||||
of the user, emit the events to a log, etc.
|
||||
verbose - assigns the notify function to toolchains print_notify_verbose
|
||||
name - project name
|
||||
inc_dirs - additional include directories
|
||||
jobs - number of threads
|
||||
silent - silent build - no output
|
||||
extra_verbose - assigns the notify function to toolchains
|
||||
print_notify_verbose
|
||||
config - toolchain's config object
|
||||
macros - User-defined macros
|
||||
zip_proj - string name of the zip archive you wish to creat (exclude arg
|
||||
|
@ -260,8 +268,7 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
|
|||
# Pass all params to the unified prepare_resources()
|
||||
toolchain = prepare_toolchain(
|
||||
paths, "", target, toolchain_name, macros=macros, jobs=jobs,
|
||||
notify=notify, silent=silent, verbose=verbose,
|
||||
extra_verbose=extra_verbose, config=config, build_profile=build_profile,
|
||||
notify=notify, config=config, build_profile=build_profile,
|
||||
app_config=app_config)
|
||||
|
||||
toolchain.RESPONSE_FILES = False
|
||||
|
@ -300,10 +307,10 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
|
|||
resource.add(res)
|
||||
if isinstance(zip_proj, basestring):
|
||||
zip_export(join(export_path, zip_proj), name, resource_dict,
|
||||
files + list(exporter.static_files), inc_repos)
|
||||
files + list(exporter.static_files), inc_repos, notify)
|
||||
else:
|
||||
zip_export(zip_proj, name, resource_dict,
|
||||
files + list(exporter.static_files), inc_repos)
|
||||
files + list(exporter.static_files), inc_repos, notify)
|
||||
else:
|
||||
for static_file in exporter.static_files:
|
||||
if not exists(join(export_path, basename(static_file))):
|
||||
|
|
|
@ -45,6 +45,7 @@ from tools.targets import TARGET_MAP
|
|||
from tools.options import get_default_options_parser
|
||||
from tools.options import extract_profile
|
||||
from tools.options import extract_mcus
|
||||
from tools.notifier.term import TerminalNotifier
|
||||
from tools.build_api import build_project
|
||||
from tools.build_api import mcu_toolchain_matrix
|
||||
from tools.build_api import mcu_toolchain_list
|
||||
|
@ -54,7 +55,6 @@ from utils import argparse_filestring_type
|
|||
from utils import argparse_many
|
||||
from utils import argparse_dir_not_parent
|
||||
from tools.toolchains import mbedToolchain, TOOLCHAIN_CLASSES, TOOLCHAIN_PATHS
|
||||
from tools.settings import CLI_COLOR_MAP
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Parse Options
|
||||
|
@ -232,16 +232,7 @@ if __name__ == '__main__':
|
|||
args_error(parser, "argument --build is required when argument --source is provided")
|
||||
|
||||
|
||||
if options.color:
|
||||
# This import happens late to prevent initializing colorization when we don't need it
|
||||
import colorize
|
||||
if options.verbose:
|
||||
notify = mbedToolchain.print_notify_verbose
|
||||
else:
|
||||
notify = mbedToolchain.print_notify
|
||||
notify = colorize.print_in_color_notifier(CLI_COLOR_MAP, notify)
|
||||
else:
|
||||
notify = None
|
||||
notify = TerminalNotifier(options.verbose, options.silent, options.color)
|
||||
|
||||
if not TOOLCHAIN_CLASSES[toolchain].check_executable():
|
||||
search_path = TOOLCHAIN_PATHS[toolchain] or "No path set"
|
||||
|
@ -283,10 +274,8 @@ if __name__ == '__main__':
|
|||
set(test.dependencies),
|
||||
linker_script=options.linker_script,
|
||||
clean=options.clean,
|
||||
verbose=options.verbose,
|
||||
notify=notify,
|
||||
report=build_data_blob,
|
||||
silent=options.silent,
|
||||
macros=options.macros,
|
||||
jobs=options.jobs,
|
||||
name=options.artifact_name,
|
||||
@@ -0,0 +1,106 @@
# mbed SDK
# Copyright (c) 2011-2013 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function, division, absolute_import

from abc import ABCMeta, abstractmethod


class Notifier(object):
    """
    Notifiers send build system events to a front end or may implement a front
    end themselves, displaying warnings and errors for a user.

    This is different from a logger in a few ways:
     * The structure of the events are defined by this interface.
     * A "progress" level is included allowing signaling completion status to
       users.
     * It's tailored to providing events from a build system.

    The structure of a message is a dict with a 'type' key. The type key
    determines the remaining keys as follows:
     type       | description and remaining keys
     ---------- | ------------------------------
     info       | A simple message. The 'message' key contains the message
     debug      | Another simple message; this one is less useful when compiles
                | are working. Again, the 'message' key contains the message
     progress   | A progress indicator, which may include progress as a
                | percentage. The action key includes what action was taken to
                | make this progress, the file key what file was used to make
                | this progress, and the percent key, when present, indicates
                | how far along the build is.
     tool_error | When a compile fails, this contains the entire output of the
                | compiler.
     var        | Provides a key, in the 'key' key, and a value, in the 'value'
                | key, for use in a UI. At the time of writing it's used to
                | communicate the binary location to the online IDE.
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def notify(self, event):
        """
        Send the user a notification specified in the event.
        """
        raise NotImplemented

    def info(self, message):
        """
        Send the user a simple message.
        """
        self.notify({'type': 'info', 'message': message})

    def debug(self, message):
        """
        Send a debug message to the user.
        """
        if isinstance(message, list):
            message = ' '.join(message)
        self.notify({'type': 'debug', 'message': message})

    def cc_info(self, info=None):
        if info is not None:
            info['type'] = 'cc'
            self.notify(info)

    def cc_verbose(self, message, file=""):
        self.notify({
            'type': 'cc',
            'severity': 'verbose',
            'file': file,
            'message': message
        })

    def progress(self, action, file, percent=None):
        """
        Indicate compilation progress to a user.
        """
        msg = {'type': 'progress', 'action': action, 'file': file}
        if percent:
            msg['percent'] = percent
        self.notify(msg)

    def tool_error(self, message):
        """
        Communicate a full fatal error to a user.
        """
        self.notify({'type': 'tool_error', 'message': message})

    def var(self, key, value):
        """
        Update a UI with a key, value pair
        """
        self.notify({'type': 'var', 'key': key, 'val': value})
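The table above fixes the event structure, so a front end only has to implement notify(). As an illustration (not part of this commit), a hypothetical notifier that appends every event to a JSON-lines file could be as small as:

    import json

    from tools.notifier import Notifier

    class JsonLogNotifier(Notifier):
        """Hypothetical example: persist build events as JSON lines."""
        def __init__(self, path):
            self.path = path

        def notify(self, event):
            # 'event' is a dict shaped as described in the table above.
            with open(self.path, "a") as log:
                log.write(json.dumps(event) + "\n")

The helper methods (info, debug, progress, cc_info, and so on) all funnel through notify(), so a subclass like this receives every event the build emits.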
@@ -0,0 +1,27 @@
# mbed SDK
# Copyright (c) 2011-2013 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function, division, absolute_import

from . import Notifier

class MockNotifier(Notifier):
    """Collect notifications
    """
    def __init__(self):
        self.messages = []

    def notify(self, message):
        self.messages.append(message)
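MockNotifier just records events, which is how the updated unit tests further down assert on build behaviour without a terminal. A short sketch of that usage (mirroring the assertions in the build_api test changes below; the event values are examples):

    from tools.notifier.mock import MockNotifier

    notify = MockNotifier()
    notify.progress("compile", "main.cpp", 100.0)   # example event
    assert any(msg.get('percent') == 100.0 for msg in notify.messages)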
@@ -0,0 +1,145 @@
# mbed SDK
# Copyright (c) 2011-2013 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function, division, absolute_import

import re
import sys
from os import getcwd
from os.path import basename

from . import Notifier
from ..settings import (PRINT_COMPILER_OUTPUT_AS_LINK,
                        CLI_COLOR_MAP, COLOR)

class TerminalNotifier(Notifier):
    """
    Writes notifications to a terminal based on silent, verbose and color flags.
    """

    def __init__(self, verbose=False, silent=False, color=False):
        self.verbose = verbose
        self.silent = silent
        self.output = ""
        self.color = color or COLOR
        if self.color:
            from colorama import init, Fore, Back, Style
            init()
            self.COLORS = {
                'none' : "",
                'default' : Style.RESET_ALL,

                'black' : Fore.BLACK,
                'red' : Fore.RED,
                'green' : Fore.GREEN,
                'yellow' : Fore.YELLOW,
                'blue' : Fore.BLUE,
                'magenta' : Fore.MAGENTA,
                'cyan' : Fore.CYAN,
                'white' : Fore.WHITE,

                'on_black' : Back.BLACK,
                'on_red' : Back.RED,
                'on_green' : Back.GREEN,
                'on_yellow' : Back.YELLOW,
                'on_blue' : Back.BLUE,
                'on_magenta' : Back.MAGENTA,
                'on_cyan' : Back.CYAN,
                'on_white' : Back.WHITE,
            }

    def get_output(self):
        return self.output

    def notify(self, event):
        if self.verbose:
            msg = self.print_notify_verbose(event)
        else:
            msg = self.print_notify(event)
        if msg:
            if not self.silent:
                if self.color:
                    self.print_in_color(event, msg)
                else:
                    print(msg)
            self.output += msg + "\n"

    def print_notify(self, event):
        """ Command line notification
        """
        if event['type'] in ('tool_error', 'info'):
            return event['message']

        elif event['type'] == 'cc' and event['severity'] != 'verbose':
            event['severity'] = event['severity'].title()

            if PRINT_COMPILER_OUTPUT_AS_LINK:
                event['file'] = getcwd() + event['file'].strip('.')
                return '[{severity}] {file}:{line}:{col}: {message}'.format(
                    **event)
            else:
                event['file'] = basename(event['file'])
                return '[{severity}] {file}@{line},{col}: {message}'.format(
                    **event)

        elif event['type'] == 'progress':
            event['action'] = event['action'].title()
            event['file'] = basename(event['file'])
            if 'percent' in event:
                format_string = '{action} [{percent:>5.1f}%]: {file}'
            else:
                format_string = '{action}: {file}'
            return format_string.format(**event)

    def print_notify_verbose(self, event):
        """ Command line notification with more verbose mode
        """
        if event['type'] == 'info' or (event['type'] == 'cc' and
                                       event['severity'] == 'verbose'):
            return event['message']
        elif event['type'] == 'debug':
            return "[DEBUG] {message}".format(**event)
        elif event['type'] in ('progress', 'cc'):
            return self.print_notify(event)

    COLOR_MATCHER = re.compile(r"(\w+)(\W+on\W+\w+)?")
    def colorstring_to_escapecode(self, color_string):
        """ Convert a color string from a string into an ascii escape code that
        will print that color on the terminal.

        Positional arguments:
        color_string - the string to parse
        """
        match = re.match(self.COLOR_MATCHER, color_string)
        if match:
            return self.COLORS[match.group(1)] + \
                (self.COLORS[match.group(2).strip().replace(" ", "_")]
                 if match.group(2) else "")
        else:
            return self.COLORS['default']

    def print_in_color(self, event, msg):
        """ Wrap a toolchain notifier in a colorizer. This colorizer will wrap
        notifications in a color if the severity matches a color in the
        CLI_COLOR_MAP.
        """
        """The notification function inself"""
        if sys.stdout.isatty() and event.get('severity', None) in CLI_COLOR_MAP:
            sys.stdout.write(self.colorstring_to_escapecode(
                CLI_COLOR_MAP[event['severity']]))
            print(msg)
            sys.stdout.write(self.colorstring_to_escapecode('default'))
        else:
            print(msg)
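For reference, a small usage sketch of the class above; the constructor arguments mirror the call sites elsewhere in this change (e.g. TerminalNotifier(options.verbose, options.silent, options.color)), and the rendered warning follows print_notify's non-link format. All values are examples:

    from tools.notifier.term import TerminalNotifier

    notify = TerminalNotifier(verbose=False, silent=False, color=False)
    notify.info("Building project blinky (K64F, GCC_ARM)")
    notify.cc_info({'severity': 'warning', 'file': 'main.cpp', 'line': 12,
                    'col': 5, 'message': 'unused variable "x"'})
    # With PRINT_COMPILER_OUTPUT_AS_LINK left False, the cc_info call prints:
    #   [Warning] main.cpp@12,5: unused variable "x"
    print(notify.get_output())  # accumulated text, used for build reports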
@ -26,6 +26,7 @@ from tools.utils import argparse_force_uppercase_type
|
|||
from tools.utils import print_large_string
|
||||
from tools.utils import NotSupportedException
|
||||
from tools.options import extract_profile, list_profiles, extract_mcus
|
||||
from tools.notifier.term import TerminalNotifier
|
||||
|
||||
def setup_project(ide, target, program=None, source_dir=None, build=None, export_path=None):
|
||||
"""Generate a name, if not provided, and find dependencies
|
||||
|
@ -71,7 +72,7 @@ def setup_project(ide, target, program=None, source_dir=None, build=None, export
|
|||
|
||||
|
||||
def export(target, ide, build=None, src=None, macros=None, project_id=None,
|
||||
zip_proj=False, build_profile=None, export_path=None, silent=False,
|
||||
zip_proj=False, build_profile=None, export_path=None, notify=None,
|
||||
app_config=None):
|
||||
"""Do an export of a project.
|
||||
|
||||
|
@ -96,7 +97,7 @@ def export(target, ide, build=None, src=None, macros=None, project_id=None,
|
|||
|
||||
return export_project(src, project_dir, target, ide, name=name,
|
||||
macros=macros, libraries_paths=lib, zip_proj=zip_name,
|
||||
build_profile=build_profile, silent=silent,
|
||||
build_profile=build_profile, notify=notify,
|
||||
app_config=app_config)
|
||||
|
||||
|
||||
|
@ -247,6 +248,8 @@ def main():
|
|||
|
||||
zip_proj = not bool(options.source_dir)
|
||||
|
||||
notify = TerminalNotifier()
|
||||
|
||||
if (options.program is None) and (not options.source_dir):
|
||||
args_error(parser, "one of -p, -n, or --source is required")
|
||||
exporter, toolchain_name = get_exporter_toolchain(options.ide)
|
||||
|
@ -270,7 +273,7 @@ def main():
|
|||
src=options.source_dir, macros=options.macros,
|
||||
project_id=options.program, zip_proj=zip_proj,
|
||||
build_profile=profile, app_config=options.app_config,
|
||||
export_path=options.build_dir)
|
||||
export_path=options.build_dir, notify = notify)
|
||||
except NotSupportedException as exc:
|
||||
print("[ERROR] %s" % str(exc))
|
||||
|
||||
@@ -45,6 +45,7 @@ IAR_PATH = ""
# Goanna static analyser. Please overload it in mbed_settings.py
GOANNA_PATH = ""


# cppcheck path (command) and output message format
CPPCHECK_CMD = ["cppcheck", "--enable=all"]
CPPCHECK_MSG_FORMAT = ["--template=[{severity}] {file}@{line}: {id}:{message}"]
@@ -57,9 +58,12 @@ MBED_ORG_USER = ""
# Print compiler warnings and errors as link format
PRINT_COMPILER_OUTPUT_AS_LINK = False

# Print warnings/errors in color
COLOR = False

CLI_COLOR_MAP = {
    "warning": "yellow",
    "error" : "red"
    "Warning": "yellow",
    "Error" : "red"
}

##############################################################################
@@ -77,7 +81,7 @@ except ImportError:
# User Settings (env vars)
##############################################################################
_ENV_PATHS = ['ARM_PATH', 'GCC_ARM_PATH', 'GCC_CR_PATH', 'IAR_PATH',
              'ARMC6_PATH', 'PRINT_COMPILER_OUTPUT_AS_LINK']
              'ARMC6_PATH']

for _n in _ENV_PATHS:
    if getenv('MBED_'+_n):
@@ -87,6 +91,12 @@ for _n in _ENV_PATHS:
        print("WARNING: MBED_%s set as environment variable but doesn't"
              " exist" % _n)

_ENV_VARS = ['PRINT_COMPILER_OUTPUT_AS_LINK', 'COLOR']
for _n in _ENV_VARS:
    value = getenv('MBED_%s' % _n)
    if value:
        globals()[_n] = value


##############################################################################
# Test System Settings
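The new _ENV_VARS loop means color (and the link-style compiler output) can be switched on from the environment rather than by editing mbed_settings.py; note that the raw string from getenv is stored, so any non-empty value counts as enabled. A sketch of the effect:

    import os

    os.environ["MBED_COLOR"] = "1"      # any non-empty string is truthy
    import tools.settings as settings   # the loop above runs at import time

    assert settings.COLOR == "1"        # TerminalNotifier sees this as color enabled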
@ -370,7 +370,7 @@ class LPCTargetCode(object):
|
|||
@staticmethod
|
||||
def lpc_patch(t_self, resources, elf, binf):
|
||||
"""Patch an elf file"""
|
||||
t_self.debug("LPC Patch: %s" % os.path.split(binf)[1])
|
||||
t_self.notify.debug("LPC Patch: %s" % os.path.split(binf)[1])
|
||||
patch(binf)
|
||||
|
||||
class LPC4088Code(object):
|
||||
|
@ -404,7 +404,7 @@ class LPC4088Code(object):
|
|||
# file to 'binf'
|
||||
shutil.rmtree(binf, True)
|
||||
os.rename(binf + '.temp', binf)
|
||||
t_self.debug("Generated custom binary file (internal flash + SPIFI)")
|
||||
t_self.notify.debug("Generated custom binary file (internal flash + SPIFI)")
|
||||
LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
|
||||
|
||||
class TEENSY3_1Code(object):
|
||||
|
@ -471,8 +471,8 @@ class MCU_NRF51Code(object):
|
|||
in t_self.target.EXPECTED_SOFTDEVICES_WITH_OFFSETS:
|
||||
for hexf in resources.hex_files:
|
||||
if hexf.find(softdevice_and_offset_entry['name']) != -1:
|
||||
t_self.debug("SoftDevice file found %s."
|
||||
% softdevice_and_offset_entry['name'])
|
||||
t_self.notify.debug("SoftDevice file found %s."
|
||||
% softdevice_and_offset_entry['name'])
|
||||
sdf = hexf
|
||||
|
||||
if sdf is not None:
|
||||
|
@ -481,7 +481,7 @@ class MCU_NRF51Code(object):
|
|||
break
|
||||
|
||||
if sdf is None:
|
||||
t_self.debug("Hex file not found. Aborting.")
|
||||
t_self.notify.debug("Hex file not found. Aborting.")
|
||||
return
|
||||
|
||||
# Look for bootloader file that matches this soft device or bootloader
|
||||
|
@ -490,13 +490,13 @@ class MCU_NRF51Code(object):
|
|||
if t_self.target.MERGE_BOOTLOADER is True:
|
||||
for hexf in resources.hex_files:
|
||||
if hexf.find(t_self.target.OVERRIDE_BOOTLOADER_FILENAME) != -1:
|
||||
t_self.debug("Bootloader file found %s."
|
||||
% t_self.target.OVERRIDE_BOOTLOADER_FILENAME)
|
||||
t_self.notify.debug("Bootloader file found %s."
|
||||
% t_self.target.OVERRIDE_BOOTLOADER_FILENAME)
|
||||
blf = hexf
|
||||
break
|
||||
elif hexf.find(softdevice_and_offset_entry['boot']) != -1:
|
||||
t_self.debug("Bootloader file found %s."
|
||||
% softdevice_and_offset_entry['boot'])
|
||||
t_self.notify.debug("Bootloader file found %s."
|
||||
% softdevice_and_offset_entry['boot'])
|
||||
blf = hexf
|
||||
break
|
||||
|
||||
|
@ -510,13 +510,13 @@ class MCU_NRF51Code(object):
|
|||
binh.loadbin(binf, softdevice_and_offset_entry['offset'])
|
||||
|
||||
if t_self.target.MERGE_SOFT_DEVICE is True:
|
||||
t_self.debug("Merge SoftDevice file %s"
|
||||
% softdevice_and_offset_entry['name'])
|
||||
t_self.notify.debug("Merge SoftDevice file %s"
|
||||
% softdevice_and_offset_entry['name'])
|
||||
sdh = IntelHex(sdf)
|
||||
binh.merge(sdh)
|
||||
|
||||
if t_self.target.MERGE_BOOTLOADER is True and blf is not None:
|
||||
t_self.debug("Merge BootLoader file %s" % blf)
|
||||
t_self.notify.debug("Merge BootLoader file %s" % blf)
|
||||
blh = IntelHex(blf)
|
||||
binh.merge(blh)
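These hunks route the target hooks' logging through the toolchain's notifier (t_self.notify.debug) instead of t_self.debug. A hypothetical hook in the same shape as lpc_patch above would now look like:

    class ExampleCode(object):
        """Hypothetical hook with the same signature as lpc_patch above."""
        @staticmethod
        def example_patch(t_self, resources, elf, binf):
            # t_self is the toolchain; its notifier replaces the old t_self.debug()
            t_self.notify.debug("Example patch: %s" % binf)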
|
||||
|
||||
|
|
|
@ -35,6 +35,7 @@ from tools.build_api import build_project, build_library
|
|||
from tools.build_api import print_build_memory_usage
|
||||
from tools.build_api import merge_build_data
|
||||
from tools.targets import TARGET_MAP
|
||||
from tools.notifier.term import TerminalNotifier
|
||||
from tools.utils import mkdir, ToolException, NotSupportedException, args_error
|
||||
from tools.test_exporters import ReportExporter, ResultExporterType
|
||||
from tools.utils import argparse_filestring_type, argparse_lowercase_type, argparse_many
|
||||
|
@ -170,16 +171,6 @@ if __name__ == '__main__':
|
|||
else:
|
||||
tests = all_tests
|
||||
|
||||
if options.color:
|
||||
# This import happens late to prevent initializing colorization when we don't need it
|
||||
import colorize
|
||||
if options.verbose:
|
||||
notify = mbedToolchain.print_notify_verbose
|
||||
else:
|
||||
notify = mbedToolchain.print_notify
|
||||
notify = colorize.print_in_color_notifier(CLI_COLOR_MAP, notify)
|
||||
else:
|
||||
notify = None
|
||||
|
||||
if options.list:
|
||||
# Print available tests in order and exit
|
||||
|
@ -203,11 +194,12 @@ if __name__ == '__main__':
|
|||
profile = extract_profile(parser, options, toolchain)
|
||||
try:
|
||||
# Build sources
|
||||
notify = TerminalNotifier(options.verbose)
|
||||
build_library(base_source_paths, options.build_dir, mcu,
|
||||
toolchain, jobs=options.jobs,
|
||||
clean=options.clean, report=build_report,
|
||||
properties=build_properties, name="mbed-build",
|
||||
macros=options.macros, verbose=options.verbose,
|
||||
macros=options.macros,
|
||||
notify=notify, archive=False,
|
||||
app_config=config,
|
||||
build_profile=profile)
|
||||
|
@ -227,13 +219,12 @@ if __name__ == '__main__':
|
|||
print("Failed to build library")
|
||||
else:
|
||||
# Build all the tests
|
||||
|
||||
notify = TerminalNotifier(options.verbose)
|
||||
test_build_success, test_build = build_tests(tests, [options.build_dir], options.build_dir, mcu, toolchain,
|
||||
clean=options.clean,
|
||||
report=build_report,
|
||||
properties=build_properties,
|
||||
macros=options.macros,
|
||||
verbose=options.verbose,
|
||||
notify=notify,
|
||||
jobs=options.jobs,
|
||||
continue_on_build_fail=options.continue_on_build_fail,
|
||||
|
|
|
@ -21,6 +21,7 @@ from mock import patch, MagicMock
|
|||
from tools.build_api import prepare_toolchain, build_project, build_library,\
|
||||
scan_resources
|
||||
from tools.toolchains import TOOLCHAINS
|
||||
from tools.notifier.mock import MockNotifier
|
||||
|
||||
"""
|
||||
Tests for build_api.py
|
||||
|
@ -28,6 +29,7 @@ Tests for build_api.py
|
|||
make_mock_target = namedtuple(
|
||||
"Target", "init_hooks name features core supported_toolchains")
|
||||
|
||||
|
||||
class BuildApiTests(unittest.TestCase):
|
||||
"""
|
||||
Test cases for Build Api
|
||||
|
@ -61,19 +63,19 @@ class BuildApiTests(unittest.TestCase):
|
|||
@patch('tools.toolchains.mbedToolchain.dump_build_profile')
|
||||
@patch('tools.utils.run_cmd', return_value=(b'', b'', 0))
|
||||
def test_always_complete_build(self, *_):
|
||||
with MagicMock() as notify:
|
||||
toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
|
||||
self.toolchain_name, notify=notify)
|
||||
notify = MockNotifier()
|
||||
toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
|
||||
self.toolchain_name, notify=notify)
|
||||
|
||||
res = scan_resources(self.src_paths, toolchain)
|
||||
res = scan_resources(self.src_paths, toolchain)
|
||||
|
||||
toolchain.RESPONSE_FILES=False
|
||||
toolchain.config_processed = True
|
||||
toolchain.config_file = "junk"
|
||||
toolchain.compile_sources(res)
|
||||
toolchain.RESPONSE_FILES=False
|
||||
toolchain.config_processed = True
|
||||
toolchain.config_file = "junk"
|
||||
toolchain.compile_sources(res)
|
||||
|
||||
assert any('percent' in msg[0] and msg[0]['percent'] == 100.0
|
||||
for _, msg, _ in notify.mock_calls if msg)
|
||||
assert any('percent' in msg and msg['percent'] == 100.0
|
||||
for msg in notify.messages if msg)
|
||||
|
||||
|
||||
@patch('tools.build_api.Config')
|
||||
|
@ -128,6 +130,7 @@ class BuildApiTests(unittest.TestCase):
|
|||
:param __: mock of function scan_resources (not tested)
|
||||
:return:
|
||||
"""
|
||||
notify = MockNotifier()
|
||||
app_config = "app_config"
|
||||
mock_exists.return_value = False
|
||||
mock_prepare_toolchain().link_program.return_value = 1, 2
|
||||
|
@ -135,7 +138,7 @@ class BuildApiTests(unittest.TestCase):
|
|||
"Config", "has_regions name lib_config_data")(None, None, {})
|
||||
|
||||
build_project(self.src_paths, self.build_path, self.target,
|
||||
self.toolchain_name, app_config=app_config)
|
||||
self.toolchain_name, app_config=app_config, notify=notify)
|
||||
|
||||
args = mock_prepare_toolchain.call_args
|
||||
self.assertTrue('app_config' in args[1],
|
||||
|
@ -157,6 +160,7 @@ class BuildApiTests(unittest.TestCase):
|
|||
:param __: mock of function scan_resources (not tested)
|
||||
:return:
|
||||
"""
|
||||
notify = MockNotifier()
|
||||
mock_exists.return_value = False
|
||||
# Needed for the unpacking of the returned value
|
||||
mock_prepare_toolchain().link_program.return_value = 1, 2
|
||||
|
@ -164,7 +168,7 @@ class BuildApiTests(unittest.TestCase):
|
|||
"Config", "has_regions name lib_config_data")(None, None, {})
|
||||
|
||||
build_project(self.src_paths, self.build_path, self.target,
|
||||
self.toolchain_name)
|
||||
self.toolchain_name, notify=notify)
|
||||
|
||||
args = mock_prepare_toolchain.call_args
|
||||
self.assertTrue('app_config' in args[1],
|
||||
|
@ -186,11 +190,12 @@ class BuildApiTests(unittest.TestCase):
|
|||
:param __: mock of function scan_resources (not tested)
|
||||
:return:
|
||||
"""
|
||||
notify = MockNotifier()
|
||||
app_config = "app_config"
|
||||
mock_exists.return_value = False
|
||||
|
||||
build_library(self.src_paths, self.build_path, self.target,
|
||||
self.toolchain_name, app_config=app_config)
|
||||
self.toolchain_name, app_config=app_config, notify=notify)
|
||||
|
||||
args = mock_prepare_toolchain.call_args
|
||||
self.assertTrue('app_config' in args[1],
|
||||
|
@ -212,10 +217,11 @@ class BuildApiTests(unittest.TestCase):
|
|||
:param __: mock of function scan_resources (not tested)
|
||||
:return:
|
||||
"""
|
||||
notify = MockNotifier()
|
||||
mock_exists.return_value = False
|
||||
|
||||
build_library(self.src_paths, self.build_path, self.target,
|
||||
self.toolchain_name)
|
||||
self.toolchain_name, notify=notify)
|
||||
|
||||
args = mock_prepare_toolchain.call_args
|
||||
self.assertTrue('app_config' in args[1],
|
||||
|
|
|
@ -14,6 +14,7 @@ sys.path.insert(0, ROOT)
|
|||
from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES,\
|
||||
Resources, TOOLCHAIN_PATHS, mbedToolchain
|
||||
from tools.targets import TARGET_MAP
|
||||
from tools.notifier.mock import MockNotifier
|
||||
|
||||
ALPHABET = [char for char in printable if char not in [u'.', u'/']]
|
||||
|
||||
|
@ -32,7 +33,8 @@ def test_toolchain_profile_c(profile, source_file):
|
|||
to_compile = os.path.join(*filename)
|
||||
with patch('os.mkdir') as _mkdir:
|
||||
for _, tc_class in TOOLCHAIN_CLASSES.items():
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile)
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile,
|
||||
notify=MockNotifier())
|
||||
toolchain.inc_md5 = ""
|
||||
toolchain.build_dir = ""
|
||||
toolchain.config = MagicMock(app_config_location=None)
|
||||
|
@ -62,7 +64,8 @@ def test_toolchain_profile_cpp(profile, source_file):
|
|||
to_compile = os.path.join(*filename)
|
||||
with patch('os.mkdir') as _mkdir:
|
||||
for _, tc_class in TOOLCHAIN_CLASSES.items():
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile)
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile,
|
||||
notify=MockNotifier())
|
||||
toolchain.inc_md5 = ""
|
||||
toolchain.build_dir = ""
|
||||
toolchain.config = MagicMock(app_config_location=None)
|
||||
|
@ -92,7 +95,8 @@ def test_toolchain_profile_asm(profile, source_file):
|
|||
to_compile = os.path.join(*filename)
|
||||
with patch('os.mkdir') as _mkdir:
|
||||
for _, tc_class in TOOLCHAIN_CLASSES.items():
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile)
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile,
|
||||
notify=MockNotifier)
|
||||
toolchain.inc_md5 = ""
|
||||
toolchain.build_dir = ""
|
||||
for parameter in profile['asm']:
|
||||
|
@ -109,7 +113,7 @@ def test_toolchain_profile_asm(profile, source_file):
|
|||
parameter)
|
||||
|
||||
for name, Class in TOOLCHAIN_CLASSES.items():
|
||||
CLS = Class(TARGET_MAP["K64F"])
|
||||
CLS = Class(TARGET_MAP["K64F"], notify=MockNotifier())
|
||||
assert name == CLS.name or name == LEGACY_TOOLCHAIN_NAMES[CLS.name]
|
||||
|
||||
@given(fixed_dictionaries({
|
||||
|
@ -128,7 +132,8 @@ def test_toolchain_profile_ld(profile, source_file):
|
|||
with patch('os.mkdir') as _mkdir,\
|
||||
patch('tools.toolchains.mbedToolchain.default_cmd') as _dflt_cmd:
|
||||
for _, tc_class in TOOLCHAIN_CLASSES.items():
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile)
|
||||
toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile,
|
||||
notify=MockNotifier())
|
||||
toolchain.RESPONSE_FILES = False
|
||||
toolchain.inc_md5 = ""
|
||||
toolchain.build_dir = ""
|
||||
|
@ -146,7 +151,7 @@ def test_toolchain_profile_ld(profile, source_file):
|
|||
parameter)
|
||||
|
||||
for name, Class in TOOLCHAIN_CLASSES.items():
|
||||
CLS = Class(TARGET_MAP["K64F"])
|
||||
CLS = Class(TARGET_MAP["K64F"], notify=MockNotifier())
|
||||
assert name == CLS.name or name == LEGACY_TOOLCHAIN_NAMES[CLS.name]
|
||||
|
||||
|
||||
|
@ -155,20 +160,20 @@ def test_detect_duplicates(filenames):
     c_sources = [os.path.join(name, "dupe.c") for name in filenames]
     s_sources = [os.path.join(name, "dupe.s") for name in filenames]
     cpp_sources = [os.path.join(name, "dupe.cpp") for name in filenames]
-    with MagicMock() as notify:
-        toolchain = TOOLCHAIN_CLASSES["ARM"](TARGET_MAP["K64F"], notify=notify)
-        res = Resources()
-        res.c_sources = c_sources
-        res.s_sources = s_sources
-        res.cpp_sources = cpp_sources
-        assert res.detect_duplicates(toolchain) == 1,\
-            "Not Enough duplicates found"
+    notify = MockNotifier()
+    toolchain = TOOLCHAIN_CLASSES["ARM"](TARGET_MAP["K64F"], notify=notify)
+    res = Resources()
+    res.c_sources = c_sources
+    res.s_sources = s_sources
+    res.cpp_sources = cpp_sources
+    assert res.detect_duplicates(toolchain) == 1,\
+        "Not Enough duplicates found"

-    _, (notification, _), _ = notify.mock_calls[1]
-    assert "dupe.o" in notification["message"]
-    assert "dupe.s" in notification["message"]
-    assert "dupe.c" in notification["message"]
-    assert "dupe.cpp" in notification["message"]
+    notification = notify.messages[0]
+    assert "dupe.o" in notification["message"]
+    assert "dupe.s" in notification["message"]
+    assert "dupe.c" in notification["message"]
+    assert "dupe.cpp" in notification["message"]

 @given(text(alphabet=ALPHABET + ["/"], min_size=1))
 @given(booleans())

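The test changes above assume a notifier object that simply records every event it is handed (the real one is imported further down as tools.notifier.mock.MockNotifier). As a rough, standalone illustration of the interface those assertions rely on -- the class name and details below are invented for this sketch; only the notify()/messages shape and the event-dict keys are taken from the diff itself:

class RecordingNotifier(object):
    """Stand-in for a MockNotifier-style object: it keeps every event dict
    it is given so a test can inspect them afterwards."""

    def __init__(self):
        self.messages = []

    def notify(self, event):
        # Events are plain dicts; the duplicate-detection test above reads
        # event["message"] out of messages[0].
        self.messages.append(event)

    def tool_error(self, message):
        self.notify({"type": "tool_error", "message": message})


if __name__ == "__main__":
    notify = RecordingNotifier()
    notify.tool_error("Object file dupe.o is not unique! It could be made "
                      "from: a/dupe.c b/dupe.s c/dupe.cpp")
    assert "dupe.o" in notify.messages[0]["message"]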
@ -32,7 +32,7 @@ import ctypes
 import functools
 from colorama import Fore, Back, Style
 from prettytable import PrettyTable
-from copy import copy
+from copy import copy, deepcopy

 from time import sleep, time
 try:

@ -75,6 +75,7 @@ from tools.utils import argparse_filestring_type
 from tools.utils import argparse_uppercase_type
 from tools.utils import argparse_lowercase_type
 from tools.utils import argparse_many
+from tools.notifier.mock import MockNotifier

 import tools.host_tests.host_tests_plugins as host_tests_plugins


@ -2078,7 +2079,7 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None):

     # Prepare the toolchain
     toolchain = prepare_toolchain([base_dir], None, target_name, toolchain_name,
-                                  silent=True, app_config=app_config)
+                                  app_config=app_config)

     # Scan the directory for paths to probe for 'TESTS' folders
     base_resources = scan_resources([base_dir], toolchain)
@ -2206,7 +2207,7 @@ def build_test_worker(*args, **kwargs):


 def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
-                clean=False, notify=None, verbose=False, jobs=1, macros=None,
+                clean=False, notify=None, jobs=1, macros=None,
                 silent=False, report=None, properties=None,
                 continue_on_build_fail=False, app_config=None,
                 build_profile=None, stats_depth=None):

@ -2258,12 +2259,11 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
             'project_id': test_name,
             'report': report,
             'properties': properties,
-            'verbose': verbose,
             'app_config': app_config,
             'build_profile': build_profile,
-            'silent': True,
             'toolchain_paths': TOOLCHAIN_PATHS,
-            'stats_depth': stats_depth
+            'stats_depth': stats_depth,
+            'notify': MockNotifier()
         }

         results.append(p.apply_async(build_test_worker, args, kwargs))
@ -2286,9 +2286,15 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
                 worker_result = r.get()
                 results.remove(r)

+                # Push all deferred notifications out to the actual notifier
+                new_notify = deepcopy(notify)
+                for message in worker_result['kwargs']['notify'].messages:
+                    new_notify.notify(message)
+
                 # Take report from the kwargs and merge it into existing report
                 if report:
                     report_entry = worker_result['kwargs']['report'][target_name][toolchain_name]
+                    report_entry[worker_result['kwargs']['project_id'].upper()][0][0]['output'] = new_notify.get_output()
                     for test_key in report_entry.keys():
                         report[target_name][toolchain_name][test_key] = report_entry[test_key]

@ -2299,6 +2305,7 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
                     result = False
                     break

+
                 # Adding binary path to test build result
                 if ('result' in worker_result and
                         worker_result['result'] and
@ -2314,8 +2321,6 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
                 }

                 test_key = worker_result['kwargs']['project_id'].upper()
-                if report:
-                    print(report[target_name][toolchain_name][test_key][0][0]['output'].rstrip())
                 print('Image: %s\n' % bin_file)

             except:

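The build_tests() changes above implement a deferred-notification pattern: each worker process records its events into the MockNotifier passed through kwargs, and the parent replays those recorded messages into a copy of its real notifier once the worker result arrives, then pulls the combined text out with get_output(). A self-contained sketch of that pattern, using hypothetical class names rather than the repository's actual notifier classes:

from copy import deepcopy


class QueueingNotifier(object):
    """Worker-side notifier: queues events instead of printing them."""
    def __init__(self):
        self.messages = []

    def notify(self, event):
        self.messages.append(event)


class PrintingNotifier(QueueingNotifier):
    """Parent-side notifier: prints events and keeps them for a text log."""
    def notify(self, event):
        QueueingNotifier.notify(self, event)
        print(event["message"])

    def get_output(self):
        return "\n".join(event["message"] for event in self.messages)


def replay(worker_notifier, parent_notifier):
    # Same shape as the new loop in build_tests(): push every deferred
    # message from the worker's notifier into a copy of the real one.
    new_notify = deepcopy(parent_notifier)
    for message in worker_notifier.messages:
        new_notify.notify(message)
    return new_notify.get_output()


worker = QueueingNotifier()
worker.notify({"type": "info", "message": "Building TESTS-mbedmicro-rtos"})
print(replay(worker, PrintingNotifier()))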
@ -37,6 +37,7 @@ from ..utils import (run_cmd, mkdir, rel_path, ToolException,
                      NotSupportedException, split_path, compile_worker)
 from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK
 from .. import hooks
+from ..notifier.term import TerminalNotifier
 from ..memap import MemapParser


@ -267,13 +268,13 @@ class Resources:
         for objname, filenames in dupe_dict.items():
             if len(filenames) > 1:
                 count+=1
-                toolchain.tool_error(
+                toolchain.notify.tool_error(
                     "Object file %s.o is not unique! It could be made from: %s"\
                         % (objname, " ".join(filenames)))
         for headername, locations in dupe_headers.items():
             if len(locations) > 1:
                 count+=1
-                toolchain.tool_error(
+                toolchain.notify.tool_error(
                     "Header file %s is not unique! It could be: %s" %\
                         (headername, " ".join(locations)))
         return count

@ -392,8 +393,8 @@ class mbedToolchain:

     profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}

-    def __init__(self, target, notify=None, macros=None, silent=False,
-                 extra_verbose=False, build_profile=None, build_dir=None):
+    def __init__(self, target, notify=None, macros=None, build_profile=None,
+                 build_dir=None):
         self.target = target
         self.name = self.__class__.__name__

@ -455,17 +456,9 @@ class mbedToolchain:
         #                  or an application was linked
         #       *Silent* is a boolean
         if notify:
-            self.notify_fun = notify
-        elif extra_verbose:
-            self.notify_fun = self.print_notify_verbose
+            self.notify = notify
         else:
-            self.notify_fun = self.print_notify
-
-        # Silent builds (no output)
-        self.silent = silent
-
-        # Print output buffer
-        self.output = str()
+            self.notify = TerminalNotifier()

         # uVisor spepcific rules
         if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
@ -488,70 +481,7 @@ class mbedToolchain:
         return True

     def get_output(self):
-        return self.output
-
-    def print_notify(self, event, silent=False):
-        """ Default command line notification
-        """
-        msg = None
-
-        if not self.VERBOSE and event['type'] == 'tool_error':
-            msg = event['message']
-
-        elif event['type'] in ['info', 'debug']:
-            msg = event['message']
-
-        elif event['type'] == 'cc':
-            event['severity'] = event['severity'].title()
-
-            if PRINT_COMPILER_OUTPUT_AS_LINK:
-                event['file'] = getcwd() + event['file'].strip('.')
-                msg = '[%(severity)s] %(file)s:%(line)s:%(col)s: %(message)s' % event
-            else:
-                event['file'] = basename(event['file'])
-                msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
-
-        elif event['type'] == 'progress':
-            if 'percent' in event:
-                msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
-                                                  event['percent'],
-                                                  basename(event['file']))
-            else:
-                msg = '{}: {}'.format(event['action'].title(),
-                                      basename(event['file']))
-
-        if msg:
-            if not silent:
-                print(msg)
-            self.output += msg + "\n"
-
-    def print_notify_verbose(self, event, silent=False):
-        """ Default command line notification with more verbose mode
-        """
-        if event['type'] in ['info', 'debug']:
-            self.print_notify(event, silent=silent) # standard handle
-
-        elif event['type'] == 'cc':
-            event['severity'] = event['severity'].title()
-            event['file'] = basename(event['file'])
-            event['mcu_name'] = "None"
-            event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
-            event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
-            msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
-            if not silent:
-                print(msg)
-            self.output += msg + "\n"
-
-        elif event['type'] == 'progress':
-            self.print_notify(event) # standard handle
-
-    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
-    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
-    def notify(self, event):
-        """ Little closure for notify functions
-        """
-        event['toolchain'] = self
-        return self.notify_fun(event, self.silent)
+        return self.notifier.get_output()

     def get_symbols(self, for_asm=False):
         if for_asm:
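Most of this hunk deletes the old inline formatting helpers, so the toolchain itself no longer turns event dicts into strings. For reference, the removed code produced compiler-diagnostic messages of roughly the following shape (a standalone reproduction of one of the deleted format strings, not code that remains anywhere in the repository):

from os.path import basename


def format_cc_event(event):
    """Rebuilds the '[Severity] file@line,col: message' string that the
    removed print_notify() used for compiler diagnostics."""
    event = dict(event, severity=event["severity"].title(),
                 file=basename(event["file"]))
    return "[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s" % event


print(format_cc_event({"severity": "warning", "file": "mbed-os/main.cpp",
                       "line": 12, "col": 5,
                       "message": "unused variable 'x'"}))
# -> [Warning] main.cpp@12,5: unused variable 'x'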
@ -931,7 +861,7 @@ class mbedToolchain:
         self.to_be_compiled = len(files_to_compile)
         self.compiled = 0

-        self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
+        self.notify.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))

         inc_paths = resources.inc_dirs
         if inc_dirs is not None:

@ -990,7 +920,7 @@ class mbedToolchain:
                 self.compiled += 1
                 self.progress("compile", item['source'], build_update=True)
                 for res in result['results']:
-                    self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
+                    self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                     self.compile_output([
                         res['code'],
                         res['output'],

@ -1028,7 +958,7 @@ class mbedToolchain:
                 self.compiled += 1
                 self.progress("compile", result['source'], build_update=True)
                 for res in result['results']:
-                    self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
+                    self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                     self.compile_output([
                         res['code'],
                         res['output'],

@ -1139,9 +1069,9 @@ class mbedToolchain:

         # Parse output for Warnings and Errors
         self.parse_output(_stderr)
-        self.debug("Return: %s"% _rc)
+        self.notify.debug("Return: %s"% _rc)
         for error_line in _stderr.splitlines():
-            self.debug("Output: %s"% error_line)
+            self.notify.debug("Output: %s"% error_line)

         # Check return code
         if _rc != 0:

@ -1168,7 +1098,7 @@ class mbedToolchain:
         ext = self.target.OUTPUT_EXT

         if hasattr(self.target, 'OUTPUT_NAMING'):
-            self.var("binary_naming", self.target.OUTPUT_NAMING)
+            self.notify.var("binary_naming", self.target.OUTPUT_NAMING)
             if self.target.OUTPUT_NAMING == "8.3":
                 name = name[0:8]
                 ext = ext[0:3]

@ -1203,8 +1133,8 @@ class mbedToolchain:
         # Initialize memap and process map file. This doesn't generate output.
         self.mem_stats(map)

-        self.var("compile_succeded", True)
-        self.var("binary", filename)
+        self.notify.var("compile_succeded", True)
+        self.notify.var("binary", filename)

         return full_path, needed_update

@ -1212,54 +1142,24 @@ class mbedToolchain:
     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def default_cmd(self, command):
         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
-        self.debug("Return: %s"% _rc)
+        self.notify.debug("Return: %s"% _rc)

         for output_line in _stdout.splitlines():
-            self.debug("Output: %s"% output_line)
+            self.notify.debug("Output: %s"% output_line)
         for error_line in _stderr.splitlines():
-            self.debug("Errors: %s"% error_line)
+            self.notify.debug("Errors: %s"% error_line)

         if _rc != 0:
             for line in _stderr.splitlines():
-                self.tool_error(line)
+                self.notify.tool_error(line)
             raise ToolException(_stderr)

-    ### NOTIFICATIONS ###
-    def info(self, message):
-        self.notify({'type': 'info', 'message': message})
-
-    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
-    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
-    def debug(self, message):
-        if self.VERBOSE:
-            if isinstance(message, list):
-                message = ' '.join(message)
-            message = "[DEBUG] " + message
-            self.notify({'type': 'debug', 'message': message})
-
-    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
-    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
-    def cc_info(self, info=None):
-        if info is not None:
-            info['type'] = 'cc'
-            self.notify(info)
-
-    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
-    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
-    def cc_verbose(self, message, file=""):
-        self.debug(message)
-
     def progress(self, action, file, build_update=False):
-        msg = {'type': 'progress', 'action': action, 'file': file}
         if build_update:
-            msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
-            self.notify(msg)
-
-    def tool_error(self, message):
-        self.notify({'type': 'tool_error', 'message': message})
-
-    def var(self, key, value):
-        self.notify({'type': 'var', 'key': key, 'val': value})
+            percent = 100. * float(self.compiled) / float(self.to_be_compiled)
+        else:
+            percent = None
+        self.notify.progress(action, file, percent)

     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
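After this hunk, progress reporting is a single call into the notifier: the toolchain only computes the percentage and hands action, file, and percent to self.notify.progress(). A toy sketch of that split, with an invented class standing in for the real TerminalNotifier (only the progress() call and the percentage arithmetic are taken from the diff):

class EchoNotifier(object):
    """Illustrative notifier exposing the progress() call used above."""
    def progress(self, action, resource, percent=None):
        if percent is not None:
            print("{} [{:>5.1f}%]: {}".format(action.title(), percent, resource))
        else:
            print("{}: {}".format(action.title(), resource))


class ToyToolchain(object):
    def __init__(self, notify, to_be_compiled):
        self.notify = notify
        self.compiled = 0
        self.to_be_compiled = to_be_compiled

    def progress(self, action, resource, build_update=False):
        # Mirrors the rewritten mbedToolchain.progress(): a percentage is
        # computed only for build updates, otherwise None is passed through.
        if build_update:
            percent = 100. * float(self.compiled) / float(self.to_be_compiled)
        else:
            percent = None
        self.notify.progress(action, resource, percent)


tc = ToyToolchain(EchoNotifier(), to_be_compiled=4)
tc.compiled = 1
tc.progress("compile", "main.cpp", build_update=True)  # Compile [ 25.0%]: main.cpp
tc.progress("link", "app.elf")                          # Link: app.elf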
@ -1289,8 +1189,10 @@ class mbedToolchain:
     def add_regions(self):
         """Add regions to the build profile, if there are any.
         """
-        print("Using regions in this build:")
-        for region in self.config.regions:
+        regions = list(self.config.regions)
+        self.notify.info("Using regions %s in this build."
+                         % ", ".join(region.name for region in regions))
+        for region in regions:
             for define in [(region.name.upper() + "_ADDR", region.start),
                            (region.name.upper() + "_SIZE", region.size)]:
                 define_string = "-D%s=0x%x" % define

@ -1303,8 +1205,8 @@
                 define_string = self.make_ld_define(*define)
                 self.ld.append(define_string)
                 self.flags["ld"].append(define_string)
-            print("  Region %s size 0x%x, offset 0x%x"
-                  % (region.name, region.size, region.start))
+            self.notify.info("  Region %s: size 0x%x, offset 0x%x"
+                             % (region.name, region.size, region.start))

     # Set the configuration data
     def set_config_data(self, config_data):

@ -48,12 +48,10 @@ class ARM(mbedToolchain):
         return mbedToolchain.generic_check_executable("ARM", 'armcc', 2, 'bin')

     def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None,
-                 build_dir=None):
-        mbedToolchain.__init__(self, target, notify, macros, silent,
-                               build_dir=build_dir,
-                               extra_verbose=extra_verbose,
-                               build_profile=build_profile)
+                 build_profile=None, build_dir=None):
+        mbedToolchain.__init__(
+            self, target, notify, macros, build_dir=build_dir,
+            build_profile=build_profile)
         if target.core not in self.SUPPORTED_CORES:
             raise NotSupportedException(
                 "this compiler does not support the core %s" % target.core)
@ -102,7 +100,7 @@ class ARM(mbedToolchain):
             match = ARM.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
                 if msg is not None:
-                    self.cc_info(msg)
+                    self.notify.cc_info(msg)
                     msg = None
                 msg = {
                     'severity': match.group('severity').lower(),

@ -119,13 +117,13 @@ class ARM(mbedToolchain):
                 match = ARM.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))
-                    self.cc_info(msg)
+                    self.notify.cc_info(msg)
                     msg = None
                 else:
                     msg['text'] += line+"\n"

         if msg is not None:
-            self.cc_info(msg)
+            self.notify.cc_info(msg)

     def get_dep_option(self, object):
         base, _ = splitext(object)
@ -242,7 +240,7 @@ class ARM(mbedToolchain):
             link_files = self.get_link_file(cmd[1:])
             cmd = [cmd_linker, '--via', link_files]

-        self.cc_verbose("Link: %s" % ' '.join(cmd))
+        self.notify.cc_verbose("Link: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

     @hook_tool

@ -268,7 +266,7 @@ class ARM(mbedToolchain):
             else:
                 rmtree(bin)

-        self.cc_verbose("FromELF: %s" % ' '.join(cmd))
+        self.notify.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

     @staticmethod
@ -290,10 +288,8 @@ class ARM(mbedToolchain):

 class ARM_STD(ARM):
     def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None,
-                 build_dir=None):
-        ARM.__init__(self, target, notify, macros, silent,
-                     build_dir=build_dir, extra_verbose=extra_verbose,
+                 build_profile=None, build_dir=None):
+        ARM.__init__(self, target, notify, macros, build_dir=build_dir,
                      build_profile=build_profile)
         if "ARM" not in target.supported_toolchains:
             raise NotSupportedException("ARM compiler support is required for ARM build")

@ -304,8 +300,7 @@ class ARM_MICRO(ARM):
     def __init__(self, target, notify=None, macros=None,
                  silent=False, extra_verbose=False, build_profile=None,
                  build_dir=None):
-        ARM.__init__(self, target, notify, macros, silent,
-                     build_dir=build_dir, extra_verbose=extra_verbose,
+        ARM.__init__(self, target, notify, macros, build_dir=build_dir,
                      build_profile=build_profile)
         if not set(("ARM", "uARM")).intersection(set(target.supported_toolchains)):
             raise NotSupportedException("ARM/uARM compiler support is required for ARM build")

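With the ARM, GCC and IAR constructors trimmed the same way, silent and extra_verbose are gone and the notifier is forwarded straight to mbedToolchain. Constructing a toolchain by hand under the new API would look roughly like the sketch below, which mirrors the class-name loop in the toolchain tests earlier in this diff (it is meant to be run from an mbed-os checkout, not as standalone code):

from tools.targets import TARGET_MAP
from tools.toolchains import TOOLCHAIN_CLASSES
from tools.notifier.mock import MockNotifier

# Pass an explicit notifier instead of the old silent/extra_verbose flags.
for name, toolchain_cls in TOOLCHAIN_CLASSES.items():
    toolchain = toolchain_cls(TARGET_MAP["K64F"], notify=MockNotifier())
    print(name, toolchain.name)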
@ -28,11 +28,9 @@ class GCC(mbedToolchain):
     STD_LIB_NAME = "lib%s.a"
     DIAGNOSTIC_PATTERN = re.compile('((?P<file>[^:]+):(?P<line>\d+):)(?P<col>\d+):? (?P<severity>warning|[eE]rror|fatal error): (?P<message>.+)')

-    def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None,
+    def __init__(self, target, notify=None, macros=None, build_profile=None,
                  build_dir=None):
-        mbedToolchain.__init__(self, target, notify, macros, silent,
-                               extra_verbose=extra_verbose,
+        mbedToolchain.__init__(self, target, notify, macros,
                                build_profile=build_profile, build_dir=build_dir)

         tool_path=TOOLCHAIN_PATHS['GCC_ARM']
@ -119,7 +117,7 @@ class GCC(mbedToolchain):
             match = self.DIAGNOSTIC_PATTERN.search(line)
             if match is not None:
                 if msg is not None:
-                    self.cc_info(msg)
+                    self.notify.cc_info(msg)
                     msg = None
                 msg = {
                     'severity': match.group('severity').lower(),

@ -133,7 +131,7 @@ class GCC(mbedToolchain):
                 }

         if msg is not None:
-            self.cc_info(msg)
+            self.notify.cc_info(msg)

     def get_dep_option(self, object):
         base, _ = splitext(object)

@ -200,7 +198,7 @@ class GCC(mbedToolchain):
             preproc_output = join(dirname(output), ".link_script.ld")
             cmd = (self.preproc + [mem_map] + self.ld[1:] +
                    [ "-o", preproc_output])
-            self.cc_verbose("Preproc: %s" % ' '.join(cmd))
+            self.notify.cc_verbose("Preproc: %s" % ' '.join(cmd))
             self.default_cmd(cmd)
             mem_map = preproc_output

@ -230,10 +228,10 @@ class GCC(mbedToolchain):
             cmd = [cmd_linker, "@%s" % link_files]

         # Exec command
-        self.cc_verbose("Link: %s" % ' '.join(cmd))
+        self.notify.cc_verbose("Link: %s" % ' '.join(cmd))
         self.default_cmd(cmd)
         if self.target.core == "Cortex-M23" or self.target.core == "Cortex-M33":
-            self.info("Secure Library Object %s" %secure_file)
+            self.notify.info("Secure Library Object %s" %secure_file)

     @hook_tool
     def archive(self, objects, lib_path):

@ -256,7 +254,7 @@ class GCC(mbedToolchain):
         cmd = self.hook.get_cmdline_binary(cmd)

         # Exec command
-        self.cc_verbose("FromELF: %s" % ' '.join(cmd))
+        self.notify.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

     @staticmethod

@ -36,12 +36,9 @@ class IAR(mbedToolchain):
         Returns False otherwise."""
         return mbedToolchain.generic_check_executable("IAR", 'iccarm', 2, "bin")

-    def __init__(self, target, notify=None, macros=None,
-                 silent=False, extra_verbose=False, build_profile=None,
+    def __init__(self, target, notify=None, macros=None, build_profile=None,
                  build_dir=None):
-        mbedToolchain.__init__(self, target, notify, macros, silent,
-                               build_dir=build_dir,
-                               extra_verbose=extra_verbose,
+        mbedToolchain.__init__(self, target, notify, macros, build_dir=build_dir,
                                build_profile=build_profile)
         if target.core == "Cortex-M7F" or target.core == "Cortex-M7FD":
             cpuchoice = "Cortex-M7"
@ -104,7 +101,7 @@ class IAR(mbedToolchain):
             match = IAR.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
                 if msg is not None:
-                    self.cc_info(msg)
+                    self.notify.cc_info(msg)
                     msg = None
                 msg = {
                     'severity': match.group('severity').lower(),

@ -121,13 +118,13 @@ class IAR(mbedToolchain):
                 match = IAR.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))
-                    self.cc_info(msg)
+                    self.notify.cc_info(msg)
                     msg = None
                 else:
                     msg['text'] += line+"\n"

         if msg is not None:
-            self.cc_info(msg)
+            self.notify.cc_info(msg)

     def get_dep_option(self, object):
         base, _ = splitext(object)
@ -207,7 +204,7 @@ class IAR(mbedToolchain):
             cmd = [cmd_linker, '-f', link_files]

         # Exec command
-        self.cc_verbose("Link: %s" % ' '.join(cmd))
+        self.notify.cc_verbose("Link: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

     @hook_tool

@ -233,7 +230,7 @@ class IAR(mbedToolchain):
         cmd = self.hook.get_cmdline_binary(cmd)

         # Exec command
-        self.cc_verbose("FromELF: %s" % ' '.join(cmd))
+        self.notify.cc_verbose("FromELF: %s" % ' '.join(cmd))
         self.default_cmd(cmd)

     @staticmethod