Merge pull request #2990 from bridadan/parallel-test-build

[tools] Parallel building of tests
pull/3076/head
Sam Grove 2016-10-19 05:41:24 -05:00 committed by GitHub
commit 6bd44c59d3
4 changed files with 190 additions and 79 deletions

View File

@ -455,12 +455,29 @@ def build_project(src_paths, build_path, target, toolchain_name,
# Link Program
res, _ = toolchain.link_program(resources, build_path, name)
memap_instance = getattr(toolchain, 'memap_instance', None)
memap_table = ''
if memap_instance:
# Write output to stdout in text (pretty table) format
memap_table = memap_instance.generate_output('table')
if not silent:
print memap_table
# Write output to file in JSON format
map_out = join(build_path, name + "_map.json")
memap_instance.generate_output('json', map_out)
# Write output to file in CSV format for the CI
map_csv = join(build_path, name + "_map.csv")
memap_instance.generate_output('csv-ci', map_csv)
resources.detect_duplicates(toolchain)
if report != None:
end = time()
cur_result["elapsed_time"] = end - start
cur_result["output"] = toolchain.get_output()
cur_result["output"] = toolchain.get_output() + memap_table
cur_result["result"] = "OK"
cur_result["memory_usage"] = toolchain.map_outputs

View File

@ -404,6 +404,8 @@ class MemapParser(object):
Keyword arguments:
file_desc - descriptor (either stdout or file)
Returns: generated string for the 'table' format, otherwise None
"""
try:
@ -418,11 +420,13 @@ class MemapParser(object):
to_call = {'json': self.generate_json,
'csv-ci': self.generate_csv,
'table': self.generate_table}[export_format]
to_call(file_desc)
output = to_call(file_desc)
if file_desc is not sys.stdout:
file_desc.close()
return output
def generate_json(self, file_desc):
"""Generate a json file from a memory map
@ -432,6 +436,8 @@ class MemapParser(object):
file_desc.write(json.dumps(self.mem_report, indent=4))
file_desc.write('\n')
return None
def generate_csv(self, file_desc):
"""Generate a CSV file from a memory map
@ -472,11 +478,15 @@ class MemapParser(object):
csv_writer.writerow(csv_module_section)
csv_writer.writerow(csv_sizes)
return None
def generate_table(self, file_desc):
"""Generate a table from a memory map
Positional arguments:
file_desc - the file to write out the final report to
Returns: string of the generated table
"""
# Create table
columns = ['Module']
@ -504,28 +514,29 @@ class MemapParser(object):
table.add_row(subtotal_row)
file_desc.write(table.get_string())
file_desc.write('\n')
output = table.get_string()
output += '\n'
if self.mem_summary['heap'] == 0:
file_desc.write("Allocated Heap: unknown\n")
output += "Allocated Heap: unknown\n"
else:
file_desc.write("Allocated Heap: %s bytes\n" %
str(self.mem_summary['heap']))
output += "Allocated Heap: %s bytes\n" % \
str(self.mem_summary['heap'])
if self.mem_summary['stack'] == 0:
file_desc.write("Allocated Stack: unknown\n")
output += "Allocated Stack: unknown\n"
else:
file_desc.write("Allocated Stack: %s bytes\n" %
str(self.mem_summary['stack']))
output += "Allocated Stack: %s bytes\n" % \
str(self.mem_summary['stack'])
file_desc.write("Total Static RAM memory (data + bss): %s bytes\n" %
(str(self.mem_summary['static_ram'])))
file_desc.write(
"Total RAM memory (data + bss + heap + stack): %s bytes\n"
% (str(self.mem_summary['total_ram'])))
file_desc.write("Total Flash memory (text + data + misc): %s bytes\n" %
(str(self.mem_summary['total_flash'])))
output += "Total Static RAM memory (data + bss): %s bytes\n" % \
str(self.mem_summary['static_ram'])
output += "Total RAM memory (data + bss + heap + stack): %s bytes\n" % \
str(self.mem_summary['total_ram'])
output += "Total Flash memory (text + data + misc): %s bytes\n" % \
str(self.mem_summary['total_flash'])
return output
toolchains = ["ARM", "ARM_STD", "ARM_MICRO", "GCC_ARM", "IAR"]
@ -647,11 +658,15 @@ def main():
if memap.parse(args.file, args.toolchain) is False:
sys.exit(0)
returned_string = None
# Write output in file
if args.output != None:
memap.generate_output(args.export, args.output)
returned_string = memap.generate_output(args.export, args.output)
else: # Write output in screen
memap.generate_output(args.export)
returned_string = memap.generate_output(args.export)
if args.export == 'table' and returned_string:
print returned_string
sys.exit(0)

View File

@ -37,6 +37,7 @@ from time import sleep, time
from Queue import Queue, Empty
from os.path import join, exists, basename, relpath
from threading import Thread, Lock
from multiprocessing import Pool, cpu_count
from subprocess import Popen, PIPE
# Imports related to mbed build api
@ -2068,6 +2069,48 @@ def norm_relative_path(path, start):
path = path.replace("\\", "/")
return path
def build_test_worker(*args, **kwargs):
    """Worker function for the parallel building of tests.

    The `args` and `kwargs` are passed directly to `build_project`.

    Returns a dictionary with the following structure:
    {
        'result': `True` if no exceptions were thrown, `False` otherwise
        'reason': Instance of exception that was thrown on failure
        'bin_file': Path to the created binary if `build_project` was
                    successful. Not present otherwise
        'kwargs': The keyword arguments that were passed to `build_project`.
                  This includes arguments that were modified (ex. report)
    }
    """
    ret = {
        'result': False,
        'args': args,
        'kwargs': kwargs
    }

    try:
        bin_file = build_project(*args, **kwargs)
        ret['result'] = True
        ret['bin_file'] = bin_file
    except (NotSupportedException, ToolException, KeyboardInterrupt) as e:
        ret['reason'] = e
    except Exception as e:
        # Record the failure so consumers that read 'reason' do not hit a
        # KeyError, and print the unexpected traceback here for diagnosis
        # (worker tracebacks are otherwise lost across the process pool).
        ret['reason'] = e
        import traceback
        traceback.print_exc(file=sys.stdout)

    return ret
def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
clean=False, notify=None, verbose=False, jobs=1, macros=None,
silent=False, report=None, properties=None,
@ -2095,58 +2138,101 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
result = True
map_outputs_total = list()
jobs_count = int(jobs if jobs else cpu_count())
p = Pool(processes=jobs_count)
results = []
for test_name, test_path in tests.iteritems():
test_build_path = os.path.join(build_path, test_path)
src_path = base_source_paths + [test_path]
bin_file = None
test_case_folder_name = os.path.basename(test_path)
args = (src_path, test_build_path, target, toolchain_name)
kwargs = {
'jobs': jobs,
'clean': clean,
'macros': macros,
'name': test_case_folder_name,
'project_id': test_name,
'report': report,
'properties': properties,
'verbose': verbose,
'app_config': app_config,
'build_profile': build_profile,
'silent': True
}
try:
bin_file = build_project(src_path, test_build_path, target, toolchain_name,
jobs=jobs,
clean=clean,
macros=macros,
name=test_case_folder_name,
project_id=test_name,
report=report,
properties=properties,
verbose=verbose,
app_config=app_config,
build_profile=build_profile)
results.append(p.apply_async(build_test_worker, args, kwargs))
except NotSupportedException:
pass
except ToolException:
result = False
if continue_on_build_fail:
continue
else:
p.close()
result = True
itr = 0
while len(results):
itr += 1
if itr > 360000:
p.terminate()
p.join()
raise ToolException("Compile did not finish in 10 minutes")
else:
sleep(0.01)
pending = 0
for r in results:
if r.ready() is True:
try:
worker_result = r.get()
results.remove(r)
# Take report from the kwargs and merge it into existing report
report_entry = worker_result['kwargs']['report'][target_name][toolchain_name]
for test_key in report_entry.keys():
report[target_name][toolchain_name][test_key] = report_entry[test_key]
# Set the overall result to a failure if a build failure occurred
if not worker_result['result'] and not isinstance(worker_result['reason'], NotSupportedException):
result = False
break
# Adding binary path to test build result
if worker_result['result'] and 'bin_file' in worker_result:
bin_file = norm_relative_path(worker_result['bin_file'], execution_directory)
test_build['tests'][worker_result['kwargs']['project_id']] = {
"binaries": [
{
"path": bin_file
}
]
}
test_key = worker_result['kwargs']['project_id'].upper()
print report[target_name][toolchain_name][test_key][0][0]['output'].rstrip()
print 'Image: %s\n' % bin_file
except:
if p._taskqueue.queue:
p._taskqueue.queue.clear()
sleep(0.5)
p.terminate()
p.join()
raise
else:
pending += 1
if pending >= jobs_count:
break
# Break as soon as possible if there is a failure and we are not
# continuing on build failures
if not result and not continue_on_build_fail:
if p._taskqueue.queue:
p._taskqueue.queue.clear()
sleep(0.5)
p.terminate()
break
# If a clean build was carried out last time, disable it for the next build.
# Otherwise the previously built test will be deleted.
if clean:
clean = False
# Normalize the path
if bin_file:
bin_file = norm_relative_path(bin_file, execution_directory)
test_build['tests'][test_name] = {
"binaries": [
{
"path": bin_file
}
]
}
print 'Image: %s'% bin_file
p.join()
test_builds = {}
test_builds["%s-%s" % (target_name, toolchain_name)] = test_build
return result, test_builds

View File

@ -370,24 +370,24 @@ class mbedToolchain:
msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
elif event['type'] == 'progress':
if not silent:
if 'percent' in event:
msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
event['percent'],
basename(event['file']))
else:
msg = '{}: {}'.format(event['action'].title(),
basename(event['file']))
if 'percent' in event:
msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
event['percent'],
basename(event['file']))
else:
msg = '{}: {}'.format(event['action'].title(),
basename(event['file']))
if msg:
print msg
if not silent:
print msg
self.output += msg + "\n"
def print_notify_verbose(self, event, silent=False):
""" Default command line notification with more verbose mode
"""
if event['type'] in ['info', 'debug']:
self.print_notify(event) # standard handle
self.print_notify(event, silent=silent) # standard handle
elif event['type'] == 'cc':
event['severity'] = event['severity'].title()
@ -396,7 +396,8 @@ class mbedToolchain:
event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
print msg
if not silent:
print msg
self.output += msg + "\n"
elif event['type'] == 'progress':
@ -1074,16 +1075,8 @@ class mbedToolchain:
self.info("Unknown toolchain for memory statistics %s" % toolchain)
return None
# Write output to stdout in text (pretty table) format
memap.generate_output('table')
# Write output to file in JSON format
map_out = splitext(map)[0] + "_map.json"
memap.generate_output('json', map_out)
# Write output to file in CSV format for the CI
map_csv = splitext(map)[0] + "_map.csv"
memap.generate_output('csv-ci', map_csv)
# Store the memap instance for later use
self.memap_instance = memap
# Here we return memory statistics structure (constructed after
# call to generate_output) which contains raw data in bytes