mirror of https://github.com/ARMmbed/mbed-os.git
adding build output to build report
parent 736cae108e
commit 20cc9c6090
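
Every change in this commit follows one pattern: the toolchain notification and file-handling helpers now return the text they emit, and the build functions concatenate those strings into cur_result["output"] so the build report carries the full build log. A minimal sketch of the idea (hypothetical names, not the workspace_tools API):

class Notifier(object):
    def info(self, message):
        print(message)
        return message + "\n"   # returned so callers can collect what was printed

def build_step(notifier, cur_result):
    cur_result["output"] += notifier.info("Compiling main.cpp")
    cur_result["output"] += notifier.info("Linking firmware.elf")

cur_result = {"output": "", "result": None}
build_step(Notifier(), cur_result)
# cur_result["output"] now holds the same text that went to the console
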
@@ -95,10 +95,10 @@ def build_project(src_path, build_path, target, toolchain_name,
     if name is None:
         # We will use default project name based on project folder name
         name = PROJECT_BASENAME
-        toolchain.info("Building project %s (%s, %s)" % (PROJECT_BASENAME.upper(), target.name, toolchain_name))
+        cur_result["output"] += toolchain.info("Building project %s (%s, %s)" % (PROJECT_BASENAME.upper(), target.name, toolchain_name))
     else:
         # User used custom global project name to have the same name for the
-        toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name))
+        cur_result["output"] += toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name))
 
     start = time()
     id_name = project_id.upper()
@@ -141,16 +141,17 @@ def build_project(src_path, build_path, target, toolchain_name,
         # Compile Sources
         for path in src_paths:
             src = toolchain.scan_resources(path)
-            objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
+            objects, build_output = toolchain.compile_sources(src, build_path, resources.inc_dirs)
             resources.objects.extend(objects)
+            cur_result["output"] += build_output
 
         # Link Program
-        res, needed_update = toolchain.link_program(resources, build_path, name)
+        res, needed_update, build_output = toolchain.link_program(resources, build_path, name)
+        cur_result["output"] += build_output
 
         if report != None and needed_update:
             end = time()
             cur_result["elapsed_time"] = end - start
-            cur_result["output"] = ""
             cur_result["result"] = "OK"
 
             add_result_to_report(report, cur_result)
@@ -220,7 +221,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
         toolchain.jobs = jobs
         toolchain.build_all = clean
 
-        toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+        cur_result["output"] += toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
 
         # Scan Resources
         resources = []
@@ -252,31 +253,35 @@ def build_library(src_paths, build_path, target, toolchain_name,
 
         # Copy Headers
         for resource in resources:
-            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
+            cur_result["output"] += toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
         dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
 
         # Compile Sources
         objects = []
        for resource in resources:
-            objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+            tmp_objects, tmp_output = toolchain.compile_sources(resource, tmp_path, dependencies_include_dir)
+            objects.extend(tmp_objects)
+            cur_result["output"] += tmp_output
 
-        needed_update = toolchain.build_library(objects, bin_path, name)
+        needed_update, build_output = toolchain.build_library(objects, bin_path, name)
 
+        cur_result["output"] += build_output
+
         if report != None and needed_update:
             end = time()
             cur_result["elapsed_time"] = end - start
-            cur_result["output"] = ""
             cur_result["result"] = "OK"
 
             add_result_to_report(report, cur_result)
 
     except Exception, e:
-        end = time()
-        cur_result["result"] = "FAIL"
-        cur_result["output"] = str(e)
-        cur_result["elapsed_time"] = end - start
+        if report != None:
+            end = time()
+            cur_result["result"] = "FAIL"
+            cur_result["output"] += str(e)
+            cur_result["elapsed_time"] = end - start
 
-        add_result_to_report(report, cur_result)
+            add_result_to_report(report, cur_result)
 
         # Let Exception propagate
         raise e
@@ -351,34 +356,37 @@ def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=F
         mkdir(TMP_PATH)
 
         # CMSIS
-        toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
+        cur_result["output"] += toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
         cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
         resources = toolchain.scan_resources(cmsis_src)
 
-        toolchain.copy_files(resources.headers, BUILD_TARGET)
-        toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
-        toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
+        cur_result["output"] += toolchain.copy_files(resources.headers, BUILD_TARGET)
+        cur_result["output"] += toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
+        cur_result["output"] += toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
 
-        objects = toolchain.compile_sources(resources, TMP_PATH)
-        toolchain.copy_files(objects, BUILD_TOOLCHAIN)
+        objects, build_output = toolchain.compile_sources(resources, TMP_PATH)
+        cur_result["output"] += build_output
+        cur_result["output"] += toolchain.copy_files(objects, BUILD_TOOLCHAIN)
 
         # mbed
-        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+        cur_result["output"] += toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
 
         # Common Headers
-        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
-        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+        cur_result["output"] += toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
+        cur_result["output"] += toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
 
         # Target specific sources
         HAL_SRC = join(MBED_TARGETS_PATH, "hal")
         hal_implementation = toolchain.scan_resources(HAL_SRC)
-        toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
+        cur_result["output"] += toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
         incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
-        objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        objects, build_output = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        cur_result["output"] += build_output
 
         # Common Sources
         mbed_resources = toolchain.scan_resources(MBED_COMMON)
-        objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        objects, build_output = toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        cur_result["output"] += build_output
 
         # A number of compiled files need to be copied as objects as opposed to
         # being part of the mbed library, for reasons that have to do with the way
@@ -396,27 +404,29 @@ def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=F
         for o in separate_objects:
             objects.remove(o)
 
-        needed_update = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+        needed_update, build_output = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+        cur_result["output"] += build_output
 
         for o in separate_objects:
-            toolchain.copy_files(o, BUILD_TOOLCHAIN)
+            cur_result["output"] += toolchain.copy_files(o, BUILD_TOOLCHAIN)
 
         if report != None and needed_update:
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["output"] = ""
             cur_result["result"] = "OK"
 
             add_result_to_report(report, cur_result)
 
         return True
-    except Exception, e:
-        end = time()
-        cur_result["result"] = "FAIL"
-        cur_result["output"] = str(e)
-        cur_result["elapsed_time"] = end - start
-
-        add_result_to_report(report, cur_result)
+    except Exception, e:
+        if report != None:
+            end = time()
+            cur_result["result"] = "FAIL"
+            cur_result["output"] += str(e)
+            cur_result["elapsed_time"] = end - start
+
+            add_result_to_report(report, cur_result)
 
         # Let Exception propagate
         raise e
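
The build_project / build_library / build_mbed_libs changes above all share the same success/failure reporting shape: time the step, accumulate its output, record "OK" or "FAIL", add the entry to the report, and re-raise on failure. A simplified, self-contained sketch of that flow (the do_build callable and this add_result_to_report stub are placeholders, not the real workspace_tools helpers):

from time import time

def add_result_to_report(report, result):
    report.append(dict(result))   # stand-in for the helper referenced in the diff

def reported_build(do_build, report, cur_result):
    start = time()
    try:
        output, needed_update = do_build()
        cur_result["output"] += output
        if report is not None and needed_update:
            cur_result["elapsed_time"] = time() - start
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)
    except Exception as e:
        if report is not None:
            cur_result["result"] = "FAIL"
            cur_result["output"] += str(e)
            cur_result["elapsed_time"] = time() - start
            add_result_to_report(report, cur_result)
        raise

report = []
entry = {"output": "", "result": None}
reported_build(lambda: ("Compile: main.cpp\n", True), report, entry)
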
@@ -37,23 +37,29 @@ CPU_COUNT_MIN = 1
 def print_notify(event, silent=False):
     """ Default command line notification
     """
+    msg = None
+
     if event['type'] in ['info', 'debug']:
-        print event['message']
+        msg = event['message']
 
     elif event['type'] == 'cc':
         event['severity'] = event['severity'].title()
         event['file'] = basename(event['file'])
-        print '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
+        msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
 
     elif event['type'] == 'progress':
         if not silent:
-            print '%s: %s' % (event['action'].title(), basename(event['file']))
+            msg = '%s: %s' % (event['action'].title(), basename(event['file']))
+
+    if msg:
+        print msg
+        return msg + "\n"
 
 def print_notify_verbose(event, silent=False):
     """ Default command line notification with more verbose mode
     """
     if event['type'] in ['info', 'debug']:
-        print_notify(event) # standard handle
+        return print_notify(event) # standard handle
 
     elif event['type'] == 'cc':
         event['severity'] = event['severity'].title()
@@ -62,10 +68,12 @@ def print_notify_verbose(event, silent=False):
             event['toolchain'] = "None"
         event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
         event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
-        print '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
+        msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
+        print msg
+        return msg + "\n"
 
     elif event['type'] == 'progress':
-        print_notify(event) # standard handle
+        return print_notify(event) # standard handle
 
 def compile_worker(job):
     results = []
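
print_notify now returns the line it printed plus a trailing newline, and implicitly returns None when nothing is printed (for example a progress event with silent=True), so callers can both display and collect the log. A small usage sketch against the function defined above; the event values are made up:

log = ""
log += print_notify({'type': 'info', 'message': 'Building project BLINKY'}) or ""
log += print_notify({'type': 'cc', 'severity': 'warning', 'file': '/src/main.cpp',
                     'line': '12', 'message': "unused variable 'x'"}) or ""
# log now holds the same text that was printed, ready for cur_result["output"]
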
@@ -415,6 +423,8 @@ class mbedToolchain:
         return resources
 
     def copy_files(self, files_paths, trg_path, rel_path=None):
+        output = ""
+
         # Handle a single file
         if type(files_paths) != ListType: files_paths = [files_paths]
 
@@ -431,10 +441,12 @@ class mbedToolchain:
             target = join(trg_path, relative_path)
 
             if (target != source) and (self.need_update(target, [source])):
-                self.progress("copy", relative_path)
+                output += self.progress("copy", relative_path)
                 mkdir(dirname(target))
                 copyfile(source, target)
 
+        return output
+
     def relative_object_path(self, build_path, base_dir, source):
         source_dir, name, _ = split_path(source)
         obj_dir = join(build_path, relpath(source_dir, base_dir))
@@ -493,22 +505,25 @@ class mbedToolchain:
             return self.compile_seq(queue, objects)
 
     def compile_seq(self, queue, objects):
+        output = ""
+
         for item in queue:
             result = compile_worker(item)
 
             self.compiled += 1
-            self.progress("compile", item['source'], build_update=True)
+            output += self.progress("compile", item['source'], build_update=True)
             for res in result['results']:
-                self.debug("Command: %s" % ' '.join(res['command']))
-                self.compile_output([
+                output += self.debug("Command: %s" % ' '.join(res['command']))
+                output += self.compile_output([
                     res['code'],
                     res['output'],
                     res['command']
                 ])
             objects.append(result['object'])
-        return objects
+        return objects, output
 
     def compile_queue(self, queue, objects):
+        output = ""
         jobs_count = int(self.jobs if self.jobs else cpu_count())
         p = Pool(processes=jobs_count)
 
@@ -532,10 +547,10 @@ class mbedToolchain:
                         results.remove(r)
 
                         self.compiled += 1
-                        self.progress("compile", result['source'], build_update=True)
+                        output += self.progress("compile", result['source'], build_update=True)
                         for res in result['results']:
-                            self.debug("Command: %s" % ' '.join(res['command']))
-                            self.compile_output([
+                            output += self.debug("Command: %s" % ' '.join(res['command']))
+                            output += self.compile_output([
                                 res['code'],
                                 res['output'],
                                 res['command']
|
@ -560,7 +575,7 @@ class mbedToolchain:
|
|||
p.terminate()
|
||||
p.join()
|
||||
|
||||
return objects
|
||||
return objects, output
|
||||
|
||||
def compile_command(self, source, object, includes):
|
||||
# Check dependencies
|
||||
|
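
compile_seq and compile_queue now return the accumulated output alongside the object list, so compile_sources callers unpack a pair instead of a bare list. A minimal, self-contained sketch of that return convention (illustrative names only, not the workspace_tools API):

def compile_one(source):
    log = "Compile: %s\n" % source             # text a notifier would print
    return source.replace(".c", ".o"), log

def compile_all(sources):
    objects, output = [], ""
    for src in sources:
        obj, log = compile_one(src)
        objects.append(obj)
        output += log
    return objects, output                      # same shape as compile_sources now

objs, log = compile_all(["main.c", "serial_api.c"])
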
@@ -586,22 +601,27 @@ class mbedToolchain:
         return None
 
     def compile_output(self, output=[]):
+        tmp_output = ""
         _rc = output[0]
         _stderr = output[1]
         command = output[2]
 
         # Parse output for Warnings and Errors
-        self.parse_output(_stderr)
-        self.debug("Return: %s"% _rc)
+        tmp_output += self.parse_output(_stderr)
+        tmp_output += self.debug("Return: %s"% _rc)
         for error_line in _stderr.splitlines():
-            self.debug("Output: %s"% error_line)
+            tmp_output += self.debug("Output: %s"% error_line)
+
 
         # Check return code
         if _rc != 0:
             for line in _stderr.splitlines():
-                self.tool_error(line)
+                tmp_output += self.tool_error(line)
+
             raise ToolException(_stderr)
 
+        return tmp_output
+
     def compile(self, cc, source, object, includes):
         _, ext = splitext(source)
         ext = ext.lower()
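
compile_output keeps its raise-on-error behaviour but now also hands back the collected diagnostics when the compiler exits cleanly. A reduced sketch of that contract; the ToolException class and the notifier argument below are stand-ins, not the workspace_tools implementations:

class ToolException(Exception):
    pass

def collect_compile_output(rc, stderr, notify):
    collected = ""
    for line in stderr.splitlines():
        collected += notify("Output: %s" % line)   # notifier returns what it printed
    if rc != 0:
        raise ToolException(stderr)
    return collected

log = collect_compile_output(0, "warning: unused variable 'x'", lambda m: m + "\n")
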
@@ -626,16 +646,18 @@ class mbedToolchain:
 
     def build_library(self, objects, dir, name):
         needed_update = False
+        output = ""
         lib = self.STD_LIB_NAME % name
         fout = join(dir, lib)
         if self.need_update(fout, objects):
-            self.info("Library: %s" % lib)
+            output = self.info("Library: %s" % lib)
             self.archive(objects, fout)
             needed_update = True
 
-        return needed_update
+        return needed_update, output
 
     def link_program(self, r, tmp_path, name):
+        output = ""
         needed_update = False
         ext = 'bin'
         if hasattr(self.target, 'OUTPUT_EXT'):
@@ -653,19 +675,19 @@ class mbedToolchain:
 
         if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
             needed_update = True
-            self.progress("link", name)
+            output += self.progress("link", name)
             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
 
         if self.need_update(bin, [elf]):
             needed_update = True
-            self.progress("elf2bin", name)
+            output += self.progress("elf2bin", name)
 
             self.binary(r, elf, bin)
 
         self.var("compile_succeded", True)
         self.var("binary", filename)
 
-        return bin, needed_update
+        return bin, needed_update, output
 
     def default_cmd(self, command):
         _stdout, _stderr, _rc = run_cmd(command)
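
build_library and link_program likewise gain an output element in their return values: (needed_update, output) and (bin, needed_update, output). A self-contained sketch of those shapes (illustrative names, not the mbedToolchain implementation):

def build_library_sketch(objects, name):
    output = "Library: lib%s.a\n" % name
    return True, output

def link_program_sketch(objects, name):
    output = "Link: %s\n" % name
    output += "Elf2Bin: %s\n" % name
    return "%s.bin" % name, True, output

updated, lib_log = build_library_sketch(["main.o"], "mbed")
bin_file, needed_update, link_log = link_program_sketch(["main.o"], "firmware")
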
@@ -688,17 +710,20 @@ class mbedToolchain:
 
     ### NOTIFICATIONS ###
     def info(self, message):
-        self.notify({'type': 'info', 'message': message})
+        return self.notify({'type': 'info', 'message': message})
 
     def debug(self, message):
+        output = ""
         if self.VERBOSE:
             if type(message) is ListType:
                 message = ' '.join(message)
             message = "[DEBUG] " + message
-            self.notify({'type': 'debug', 'message': message})
+            output = self.notify({'type': 'debug', 'message': message})
+
+        return output
 
     def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
-        self.notify({'type': 'cc',
+        return self.notify({'type': 'cc',
             'severity': severity,
             'file': file,
             'line': line,
@@ -710,13 +735,13 @@ class mbedToolchain:
         msg = {'type': 'progress', 'action': action, 'file': file}
         if build_update:
             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
-        self.notify(msg)
+        return self.notify(msg)
 
     def tool_error(self, message):
-        self.notify({'type': 'tool_error', 'message': message})
+        return self.notify({'type': 'tool_error', 'message': message})
 
     def var(self, key, value):
-        self.notify({'type': 'var', 'key': key, 'val': value})
+        return self.notify({'type': 'var', 'key': key, 'val': value})
 
 from workspace_tools.settings import ARM_BIN
 from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH, GCC_CS_PATH, CW_EWL_PATH, CW_GCC_PATH
@@ -99,10 +99,11 @@ class ARM(mbedToolchain):
         return dependencies
 
     def parse_output(self, output):
+        tmp_output = ""
         for line in output.splitlines():
             match = ARM.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
-                self.cc_info(
+                tmp_output += self.cc_info(
                     match.group('severity').lower(),
                     match.group('file'),
                     match.group('line'),
@@ -112,16 +113,18 @@ class ARM(mbedToolchain):
                 )
             match = self.goanna_parse_line(line)
             if match is not None:
-                self.cc_info(
+                tmp_output += self.cc_info(
                     match.group('severity').lower(),
                     match.group('file'),
                     match.group('line'),
                     match.group('message')
                 )
 
+        return tmp_output
+
     def get_dep_opt(self, dep_path):
         return ["--depend", dep_path]
 
 
     def archive(self, objects, lib_path):
         self.default_cmd([self.ar, '-r', lib_path] + objects)
@@ -115,12 +115,13 @@ class GCC(mbedToolchain):
 
     def parse_output(self, output):
         # The warning/error notification is multiline
+        tmp_output = ""
         WHERE, WHAT = 0, 1
         state, file, message = WHERE, None, None
         for line in output.splitlines():
             match = self.goanna_parse_line(line)
             if match is not None:
-                self.cc_info(
+                tmp_output += self.cc_info(
                     match.group('severity').lower(),
                     match.group('file'),
                     match.group('line'),
@@ -147,12 +148,14 @@ class GCC(mbedToolchain):
                     state = WHERE
                     continue
 
-                self.cc_info(
+                tmp_output += self.cc_info(
                     match.group('severity'),
                     file, match.group('line'),
                     message + match.group('message')
                 )
 
+        return tmp_output
+
     def archive(self, objects, lib_path):
         self.default_cmd([self.ar, "rcs", lib_path] + objects)
 
@@ -65,10 +65,11 @@ class IAR(mbedToolchain):
         self.elf2bin = join(IAR_BIN, "ielftool")
 
     def parse_output(self, output):
+        tmp_output = ""
         for line in output.splitlines():
             match = IAR.DIAGNOSTIC_PATTERN.match(line)
             if match is not None:
-                self.cc_info(
+                tmp_output += self.cc_info(
                     match.group('severity').lower(),
                     match.group('file'),
                     match.group('line'),
@@ -78,12 +79,14 @@ class IAR(mbedToolchain):
                 )
             match = self.goanna_parse_line(line)
             if match is not None:
-                self.cc_info(
+                tmp_output += self.cc_info(
                     match.group('severity').lower(),
                     match.group('file'),
                     match.group('line'),
                     match.group('message')
                 )
 
+        return tmp_output
+
     def get_dep_opt(self, dep_path):
         return ["--dependencies", dep_path]
@@ -94,7 +97,7 @@ class IAR(mbedToolchain):
     def parse_dependencies(self, dep_path):
         return [path.strip() for path in open(dep_path).readlines()
                 if (path and not path.isspace())]
 
 
     def assemble(self, source, object, includes):
         return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
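
The ARM, GCC and IAR parse_output methods all follow the same scheme: match a compiler diagnostic line, forward it through cc_info()/notify(), and return the concatenated text so compile_output() can attach it to the build report. A simplified stand-in showing that chain end to end (the regex and notifier are illustrative, not the real DIAGNOSTIC_PATTERN or mbedToolchain.notify):

import re

DIAGNOSTIC = re.compile(r'"(?P<file>[^"]+)", line (?P<line>\d+): (?P<severity>\w+):\s+(?P<message>.+)')

def notify(event):
    msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
    print(msg)
    return msg + "\n"

def parse_output(output):
    tmp_output = ""
    for line in output.splitlines():
        match = DIAGNOSTIC.match(line)
        if match is not None:
            tmp_output += notify(match.groupdict())
    return tmp_output

log = parse_output('"main.cpp", line 7: Warning: unused variable "x"')
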