mirror of https://github.com/ARMmbed/mbed-os.git
Add summary for test building
When users build tests with `mbed compile --tests`, they get a memory map breakdown report by default. A command line switch to suppress it may be added in the future; for now it is shown on every test build. The list is sorted by the project name created with the `build_project` API.

Changes:
* `build_project` now returns a tuple (this breaks the build_api.build_project API!)
* The memap script gained an aggregation function that prints a summary from the small 'JSON' partial reports.
* The report is generated by the `test_api.build_tests` function only!

Example:
```
| name | target | toolchain | total_flash | static_ram | stack | heap | total_ram |
|------|--------|-----------|-------------|------------|-------|------|-----------|
| features-feature_ipv4-tests-mbedmicro-net-nist_internet_time_service | K64F | GCC_ARM | 132136 | 62288 | 32768 | 65536 | 160592 |
| features-feature_ipv4-tests-mbedmicro-net-tcp_client_echo | K64F | GCC_ARM | 125613 | 62448 | 32768 | 65540 | 160756 |
| features-feature_ipv4-tests-mbedmicro-net-tcp_client_hello_world | K64F | GCC_ARM | 125949 | 62448 | 32768 | 65540 | 160756 |
| features-feature_ipv4-tests-mbedmicro-net-udp_echo_client | K64F | GCC_ARM | 123613 | 62276 | 32768 | 65536 | 160580 |
| features-storage-tests-cfstore-add_del | K64F | GCC_ARM | 96080 | 13052 | 32768 | 65540 | 111360 |
| features-storage-tests-cfstore-close | K64F | GCC_ARM | 95520 | 12004 | 32768 | 65540 | 110312 |
| features-storage-tests-cfstore-create | K64F | GCC_ARM | 99144 | 13036 | 32768 | 65540 | 111344 |
| features-storage-tests-cfstore-example1 | K64F | GCC_ARM | 98592 | 12368 | 32768 | 65536 | 110672 |
| features-storage-tests-cfstore-example2 | K64F | GCC_ARM | 95232 | 12012 | 32768 | 65540 | 110320 |
| features-storage-tests-cfstore-example3 | K64F | GCC_ARM | 95264 | 11856 | 32768 | 65536 | 110160 |
| features-storage-tests-cfstore-example4 | K64F | GCC_ARM | 92632 | 12012 | 32768 | 65540 | 110320 |
| features-storage-tests-cfstore-example5 | K64F | GCC_ARM | 92344 | 11856 | 32768 | 65536 | 110160 |
| features-storage-tests-cfstore-find | K64F | GCC_ARM | 96344 | 13028 | 32768 | 65540 | 111336 |
| features-storage-tests-cfstore-find2 | K64F | GCC_ARM | 93192 | 12004 | 32768 | 65540 | 110312 |
| features-storage-tests-cfstore-flash | K64F | GCC_ARM | 97784 | 12532 | 32768 | 65540 | 110840 |
| features-storage-tests-cfstore-flush | K64F | GCC_ARM | 96464 | 12012 | 32768 | 65540 | 110320 |
| features-storage-tests-cfstore-flush2 | K64F | GCC_ARM | 95056 | 12004 | 32768 | 65540 | 110312 |
| features-storage-tests-cfstore-init | K64F | GCC_ARM | 93120 | 12012 | 32768 | 65540 | 110320 |
| features-storage-tests-cfstore-misc | K64F | GCC_ARM | 96808 | 12516 | 32768 | 65540 | 110824 |
| features-storage-tests-cfstore-open | K64F | GCC_ARM | 98632 | 12540 | 32768 | 65540 | 110848 |
| features-storage-tests-cfstore-read | K64F | GCC_ARM | 94112 | 12540 | 32768 | 65540 | 110848 |
| features-storage-tests-cfstore-write | K64F | GCC_ARM | 94488 | 12004 | 32768 | 65540 | 110312 |
| features-storage-tests-flash_journal-basicapi | K64F | GCC_ARM | 104712 | 21236 | 32768 | 65540 | 119544 |
| frameworks-utest-tests-unit_tests-basic_test | K64F | GCC_ARM | 71534 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-case_async_validate | K64F | GCC_ARM | 74598 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-case_control_async | K64F | GCC_ARM | 74630 | 11476 | 32768 | 65540 | 109784 |
| frameworks-utest-tests-unit_tests-case_control_repeat | K64F | GCC_ARM | 72790 | 11452 | 32768 | 65540 | 109760 |
| frameworks-utest-tests-unit_tests-case_selection | K64F | GCC_ARM | 72302 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-case_setup_failure | K64F | GCC_ARM | 72630 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-case_teardown_failure | K64F | GCC_ARM | 72790 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-control_type | K64F | GCC_ARM | 82462 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-minimal_async_scheduler | K64F | GCC_ARM | 72182 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-minimal_scheduler | K64F | GCC_ARM | 71998 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-test_assertion_failure_test_setup | K64F | GCC_ARM | 71710 | 11460 | 32768 | 65540 | 109768 |
| frameworks-utest-tests-unit_tests-test_setup_case_selection_failure | K64F | GCC_ARM | 71702 | 11468 | 32768 | 65540 | 109776 |
| frameworks-utest-tests-unit_tests-test_setup_failure | K64F | GCC_ARM | 71710 | 11468 | 32768 | 65540 | 109776 |
| tests-integration-basic | K64F | GCC_ARM | 67566 | 10780 | 32768 | 65540 | 109088 |
| tests-integration-threaded_blinky | K64F | GCC_ARM | 68326 | 10780 | 32768 | 65540 | 109088 |
| tests-mbed_drivers-c_strings | K64F | GCC_ARM | 74438 | 11468 | 32768 | 65540 | 109776 |
| tests-mbed_drivers-callback | K64F | GCC_ARM | 88310 | 11972 | 32768 | 65540 | 110280 |
| tests-mbed_drivers-dev_null | K64F | GCC_ARM | 90213 | 10784 | 32768 | 65540 | 109092 |
| tests-mbed_drivers-echo | K64F | GCC_ARM | 71918 | 11468 | 32768 | 65540 | 109776 |
| tests-mbed_drivers-generic_tests | K64F | GCC_ARM | 77624 | 11468 | 32768 | 65540 | 109776 |
| tests-mbed_drivers-rtc | K64F | GCC_ARM | 85854 | 11308 | 32768 | 65540 | 109616 |
| tests-mbed_drivers-stl_features | K64F | GCC_ARM | 80726 | 11476 | 32768 | 65540 | 109784 |
| tests-mbed_drivers-ticker | K64F | GCC_ARM | 70974 | 11308 | 32768 | 65540 | 109616 |
| tests-mbed_drivers-ticker_2 | K64F | GCC_ARM | 70790 | 11308 | 32768 | 65540 | 109616 |
| tests-mbed_drivers-ticker_3 | K64F | GCC_ARM | 71038 | 11308 | 32768 | 65540 | 109616 |
| tests-mbed_drivers-timeout | K64F | GCC_ARM | 70886 | 11308 | 32768 | 65540 | 109616 |
| tests-mbed_drivers-wait_us | K64F | GCC_ARM | 70414 | 11308 | 32768 | 65540 | 109616 |
| tests-mbedmicro-mbed-attributes | K64F | GCC_ARM | 71534 | 11460 | 32768 | 65540 | 109768 |
| tests-mbedmicro-mbed-call_before_main | K64F | GCC_ARM | 73112 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-mbed-cpp | K64F | GCC_ARM | 73400 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-mbed-div | K64F | GCC_ARM | 73176 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-rtos-mbed-basic | K64F | GCC_ARM | 68390 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-rtos-mbed-isr | K64F | GCC_ARM | 74480 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-rtos-mbed-mail | K64F | GCC_ARM | 74992 | 11300 | 32768 | 65540 | 109608 |
| tests-mbedmicro-rtos-mbed-mutex | K64F | GCC_ARM | 74048 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-rtos-mbed-queue | K64F | GCC_ARM | 74912 | 11300 | 32768 | 65540 | 109608 |
| tests-mbedmicro-rtos-mbed-semaphore | K64F | GCC_ARM | 74296 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-rtos-mbed-signals | K64F | GCC_ARM | 74328 | 10780 | 32768 | 65540 | 109088 |
| tests-mbedmicro-rtos-mbed-threads | K64F | GCC_ARM | 75214 | 11460 | 32768 | 65540 | 109768 |
| tests-mbedmicro-rtos-mbed-timer | K64F | GCC_ARM | 68430 | 10780 | 32768 | 65540 | 109088 |
| tests-storage_abstraction-basicapi | K64F | GCC_ARM | 107808 | 28908 | 32768 | 65540 | 127216 |
```

Refactored after code review
Refactored parse() function
Polishing
Moved memory usage reporting function call to test.py to group all reporters in one place
Bug-fix: on ARM toolchain we were not fetching statistics from the last element of the memap result list

pull/2047/head
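For context, a minimal sketch of one per-project partial 'JSON' report that the aggregation step consumes is shown below. The module names and section sizes are illustrative (the real file lists every printed section); only the trailing summary record, built in tools/memap.py further down, feeds the summary table.

```
# Illustrative partial report for one project, as written next to its map
# file (e.g. *_map.json). The module entries here are hypothetical; the
# trailing 'summary' record is what the aggregated table reads (bytes).
partial_report = [
    {"module": "main.o",         "size": {".text": 1234, ".data": 8, ".bss": 56}},
    {"module": "libc.a(exit.o)", "size": {".text": 96,   ".data": 0, ".bss": 0}},
    {"summary": {"static_ram": 11308, "stack": 32768, "heap": 65540,
                 "total_ram": 109616, "total_flash": 85854}},
]
```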
parent 458b46c803
commit 12a01f61ca
tools/build_api.py

@@ -176,7 +176,7 @@ def build_project(src_path, build_path, target, toolchain_name,
     if report != None:
         start = time()

         # If project_id is specified, use that over the default name
         id_name = project_id.upper() if project_id else name.upper()
         description = project_description if project_description else name
@@ -232,6 +232,7 @@ def build_project(src_path, build_path, target, toolchain_name,
         cur_result["elapsed_time"] = end - start
         cur_result["output"] = toolchain.get_output()
         cur_result["result"] = "OK"
+        cur_result["memory_usage"] = toolchain.map_outputs

         add_result_to_report(report, cur_result)
@@ -294,7 +295,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
     if report != None:
         start = time()

         # If project_id is specified, use that over the default name
         id_name = project_id.upper() if project_id else name.upper()
         description = name
@@ -377,7 +378,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
         toolchain.copy_files(resources.libraries, build_path, resources=resources)
         if resources.linker_script:
             toolchain.copy_files(resources.linker_script, build_path, resources=resources)

         if resource.hex_files:
             toolchain.copy_files(resources.hex_files, build_path, resources=resources)
@@ -399,12 +400,12 @@ def build_library(src_paths, build_path, target, toolchain_name,
     except Exception, e:
         if report != None:
             end = time()

             if isinstance(e, ToolException):
                 cur_result["result"] = "FAIL"
             elif isinstance(e, NotSupportedException):
                 cur_result["result"] = "NOT_SUPPORTED"

             cur_result["elapsed_time"] = end - start

             toolchain_output = toolchain.get_output()
@@ -428,7 +429,7 @@ def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean
     if not lib.is_supported(target, toolchain_name):
         print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
         return False

     # We need to combine macros from parameter list with macros from library definition
     MACROS = lib.macros if lib.macros else []
     if macros:
@@ -441,7 +442,7 @@ def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean
     dependencies_paths = lib.dependencies
     inc_dirs = lib.inc_dirs
     inc_dirs_ext = lib.inc_dirs_ext

     """ src_path: the path of the source directory
     build_path: the path of the build directory
     target: ['LPC1768', 'LPC11U24', 'LPC2368']
@@ -522,7 +523,7 @@ def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean
     # Copy Headers
     for resource in resources:
         toolchain.copy_files(resource.headers, build_path, resources=resource)

     dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)

     # Compile Sources
@@ -716,7 +717,7 @@ def mcu_toolchain_matrix(verbose_html=False, platform_filter=None):
             perm_counter += 1
         else:
             text = "-"

         row.append(text)
     pt.add_row(row)
@@ -942,6 +943,49 @@ def print_build_results(result_list, build_name):
         result += "\n"
     return result

+def print_build_memory_usage_results(report):
+    """ Generate result table with memory usage values for build results
+    Agregates (puts together) reports obtained from self.get_memory_summary()
+    @param report Report generated during build procedure. See
+    """
+    from prettytable import PrettyTable
+    columns_text = ['name', 'target', 'toolchain']
+    columns_int = ['static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
+    table = PrettyTable(columns_text + columns_int)
+
+    for col in columns_text:
+        table.align[col] = 'l'
+
+    for col in columns_int:
+        table.align[col] = 'r'
+
+    for target in report:
+        for toolchain in report[target]:
+            for name in report[target][toolchain]:
+                for dlist in report[target][toolchain][name]:
+                    for dlistelem in dlist:
+                        # Get 'memory_usage' record and build table with statistics
+                        record = dlist[dlistelem]
+                        if 'memory_usage' in record and record['memory_usage']:
+                            # Note that summary should be in the last record of
+                            # 'memory_usage' section. This is why we are grabbing
+                            # last "[-1]" record.
+                            row = [
+                                record['description'],
+                                record['target_name'],
+                                record['toolchain_name'],
+                                record['memory_usage'][-1]['summary']['static_ram'],
+                                record['memory_usage'][-1]['summary']['stack'],
+                                record['memory_usage'][-1]['summary']['heap'],
+                                record['memory_usage'][-1]['summary']['total_ram'],
+                                record['memory_usage'][-1]['summary']['total_flash'],
+                            ]
+                            table.add_row(row)
+
+    result = "Memory map breakdown for built projects (values in Bytes):\n"
+    result += table.get_string(sortby='name')
+    return result
+
 def write_build_report(build_report, template_filename, filename):
     build_report_failing = []
     build_report_passing = []
@@ -963,14 +1007,14 @@ def write_build_report(build_report, template_filename, filename):
 def scan_for_source_paths(path, exclude_paths=None):
     ignorepatterns = []
     paths = []

     def is_ignored(file_path):
         for pattern in ignorepatterns:
             if fnmatch.fnmatch(file_path, pattern):
                 return True
         return False

     """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
     When topdown is True, the caller can modify the dirnames list in-place
     (perhaps using del or slice assignment), and walk() will only recurse into
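To make the nested loops in `print_build_memory_usage_results()` easier to follow, here is a minimal sketch of the `report` structure they walk: target to toolchain to project name to a list of `{unique_id: record}` dicts. The id key and the numeric values below are illustrative; the field names are the ones the function actually reads.

```
from tools.build_api import print_build_memory_usage_results

# Hypothetical single-entry build report; only records carrying a non-empty
# 'memory_usage' list contribute a row, and only its last element is read.
report = {
    "K64F": {
        "GCC_ARM": {
            "tests-mbed_drivers-rtc": [
                {
                    "K64F::GCC_ARM::TESTS-MBED_DRIVERS-RTC": {  # id key is illustrative
                        "description": "tests-mbed_drivers-rtc",
                        "target_name": "K64F",
                        "toolchain_name": "GCC_ARM",
                        "memory_usage": [
                            {"summary": {"static_ram": 11308, "stack": 32768,
                                         "heap": 65540, "total_ram": 109616,
                                         "total_flash": 85854}},
                        ],
                    }
                }
            ]
        }
    }
}

print print_build_memory_usage_results(report)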
tools/memap.py

@@ -26,9 +26,9 @@ class MemapParser(object):
         self.misc_flash_sections = ('.interrupts', '.flash_config')

-        self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab', \
-                               '.ARM.exidx', '.ARM.attributes', '.eh_frame', \
-                               '.init_array', '.fini_array', '.jcr', '.stab', \
+        self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab',
+                               '.ARM.exidx', '.ARM.attributes', '.eh_frame',
+                               '.init_array', '.fini_array', '.jcr', '.stab',
                                '.stabstr', '.ARM.exidx', '.ARM')

         # sections to print info (generic for all toolchains)
@@ -43,6 +43,9 @@ class MemapParser(object):
         # list of all object files and mappting to module names
         self.object_to_module = dict()

+        # Memory usage summary structure
+        self.mem_summary = dict()
+
     def module_add(self, module_name, size, section):
         """
         Adds a module / section to the list
@@ -67,7 +70,7 @@ class MemapParser(object):
             return i # should name of the section (assuming it's a known one)

         if line.startswith('.'):
-            return 'unknown' # all others are clasified are unknown
+            return 'unknown' # all others are classified are unknown
         else:
             return False # everything else, means no change in section
@@ -363,11 +366,12 @@ class MemapParser(object):
         # Create table
         columns = ['Module']
-        for i in list(self.print_sections):
-            columns.append(i)
+        columns.extend(self.print_sections)

         table = PrettyTable(columns)
         table.align["Module"] = "l"
+        for col in self.print_sections:
+            table.align[col] = 'r'

-        for i in list(self.print_sections):
-            table.align[i] = 'r'
@@ -388,8 +392,12 @@ class MemapParser(object):
             for k in self.print_sections:
                 row.append(self.modules[i][k])

-            json_obj.append({"module":i, "size":{\
-                k:self.modules[i][k] for k in self.print_sections}})
+            json_obj.append({
+                "module":i,
+                "size":{
+                    k:self.modules[i][k] for k in self.print_sections
+                }
+            })

             table.add_row(row)
@@ -399,16 +407,19 @@ class MemapParser(object):
         table.add_row(subtotal_row)

-        if export_format == 'json':
-            json_obj.append({\
-                'summary':{\
-                    'total_static_ram':(subtotal['.data']+subtotal['.bss']),\
-                    'allocated_heap':(subtotal['.heap']),\
-                    'allocated_stack':(subtotal['.stack']),\
-                    'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),\
-                    'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),}})
+        summary = {
+            'summary':{
+                'static_ram':(subtotal['.data']+subtotal['.bss']),
+                'heap':(subtotal['.heap']),
+                'stack':(subtotal['.stack']),
+                'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),
+                'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),
+            }
+        }

-        file_desc.write(json.dumps(json_obj, indent=4))
+        if export_format == 'json':
+            json_to_file = json_obj + [summary]
+            file_desc.write(json.dumps(json_to_file, indent=4))
             file_desc.write('\n')

         elif export_format == 'csv-ci': # CSV format for the CI system
@@ -467,33 +478,38 @@ class MemapParser(object):
         if file_desc is not sys.stdout:
             file_desc.close()

+        self.mem_summary = json_obj + [summary]
+
         return True

+    def get_memory_summary(self):
+        """! Object is available only after self.generate_output('json') is called
+        @return Return memory summary object
+        """
+        return self.mem_summary
+
     def parse(self, mapfile, toolchain):
         """
         Parse and decode map file depending on the toolchain
         """
+        result = True
         try:
-            file_input = open(mapfile, 'rt')
+            with open(mapfile, 'rt') as file_input:
+                if toolchain == "ARM" or toolchain == "ARM_STD" or toolchain == "ARM_MICRO":
+                    self.search_objects(os.path.abspath(mapfile), "ARM")
+                    self.parse_map_file_armcc(file_input)
+                elif toolchain == "GCC_ARM":
+                    self.parse_map_file_gcc(file_input)
+                elif toolchain == "IAR":
+                    self.search_objects(os.path.abspath(mapfile), toolchain)
+                    self.parse_map_file_iar(file_input)
+                else:
+                    result = False
         except IOError as error:
             print "I/O error({0}): {1}".format(error.errno, error.strerror)
-            return False
-
-        if toolchain == "ARM" or toolchain == "ARM_STD" or toolchain == "ARM_MICRO":
-            self.search_objects(os.path.abspath(mapfile), "ARM")
-            self.parse_map_file_armcc(file_input)
-        elif toolchain == "GCC_ARM":
-            self.parse_map_file_gcc(file_input)
-        elif toolchain == "IAR":
-            self.search_objects(os.path.abspath(mapfile), toolchain)
-            self.parse_map_file_iar(file_input)
-        else:
-            return False
-
-        file_input.close()
-
-        return True
+            result = False
+        return result

 def main():
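A short usage sketch of the refactored flow follows. As the new docstring notes, `get_memory_summary()` is only populated after `generate_output('json', ...)` has run; the paths and toolchain name here are illustrative.

```
from tools.memap import MemapParser

# parse() returns False for an unknown toolchain or an unreadable map file.
memap = MemapParser()
if memap.parse("BUILD/tests-mbed_drivers-rtc.map", "GCC_ARM"):
    memap.generate_output('json', "BUILD/tests-mbed_drivers-rtc_map.json")
    summary = memap.get_memory_summary()  # module records + trailing summary
    print summary[-1]['summary']['total_flash']
```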
tools/test.py

@@ -29,6 +29,7 @@ sys.path.insert(0, ROOT)
 from tools.test_api import test_path_to_name, find_tests, print_tests, build_tests, test_spec_from_test_builds
 from tools.options import get_default_options_parser
 from tools.build_api import build_project, build_library
+from tools.build_api import print_build_memory_usage_results
 from tools.targets import TARGET_MAP
 from tools.utils import mkdir, ToolException, NotSupportedException
 from tools.test_exporters import ReportExporter, ResultExporterType
@@ -37,12 +38,12 @@ if __name__ == '__main__':
     try:
         # Parse Options
         parser = get_default_options_parser()

         parser.add_option("-D", "",
                           action="append",
                           dest="macros",
                           help="Add a macro definition")

         parser.add_option("-j", "--jobs",
                           type="int",
                           dest="jobs",
@@ -60,25 +61,25 @@ if __name__ == '__main__':
         parser.add_option("-p", "--paths", dest="paths",
                           default=None, help="Limit the tests to those within the specified comma separated list of paths")

         format_choices = ["list", "json"]
         format_default_choice = "list"
         format_help = "Change the format in which tests are listed. Choices include: %s. Default: %s" % (", ".join(format_choices), format_default_choice)
         parser.add_option("-f", "--format", type="choice", dest="format",
                           choices=format_choices, default=format_default_choice, help=format_help)

         parser.add_option("--continue-on-build-fail", action="store_true", dest="continue_on_build_fail",
                           default=None, help="Continue trying to build all tests if a build failure occurs")

         parser.add_option("-n", "--names", dest="names",
                           default=None, help="Limit the tests to a comma separated list of names")

         parser.add_option("--test-spec", dest="test_spec",
                           default=None, help="Destination path for a test spec file that can be used by the Greentea automated test tool")

         parser.add_option("--build-report-junit", dest="build_report_junit",
                           default=None, help="Destination path for a build report in the JUnit xml format")

         parser.add_option("-v", "--verbose",
                           action="store_true",
                           dest="verbose",
@@ -87,24 +88,24 @@ if __name__ == '__main__':
         (options, args) = parser.parse_args()

         # Filter tests by path if specified
         if options.paths:
             all_paths = options.paths.split(",")
         else:
             all_paths = ["."]

         all_tests = {}
         tests = {}

         # Find all tests in the relevant paths
         for path in all_paths:
             all_tests.update(find_tests(path))

         # Filter tests by name if specified
         if options.names:
             all_names = options.names.split(",")
             all_names = [x.lower() for x in all_names]

             for name in all_names:
                 if any(fnmatch.fnmatch(testname, name) for testname in all_tests):
                     for testname, test in all_tests.items():
@@ -124,16 +125,16 @@ if __name__ == '__main__':
         if not options.build_dir:
             print "[ERROR] You must specify a build path"
             sys.exit(1)

         base_source_paths = options.source_dir

         # Default base source path is the current directory
         if not base_source_paths:
             base_source_paths = ['.']

         target = options.mcu

         build_report = {}
         build_properties = {}
@@ -150,7 +151,7 @@ if __name__ == '__main__':
                                 macros=options.macros,
                                 verbose=options.verbose,
                                 archive=False)

             library_build_success = True
         except ToolException, e:
             # ToolException output is handled by the build log
@@ -161,7 +162,7 @@ if __name__ == '__main__':
         except Exception, e:
             # Some other exception occurred, print the error message
             print e

         if not library_build_success:
             print "Failed to build library"
         else:
@@ -175,32 +176,37 @@ if __name__ == '__main__':
                                 verbose=options.verbose,
                                 jobs=options.jobs,
                                 continue_on_build_fail=options.continue_on_build_fail)

             # If a path to a test spec is provided, write it to a file
             if options.test_spec:
                 test_spec_data = test_spec_from_test_builds(test_build)

                 # Create the target dir for the test spec if necessary
                 # mkdir will not create the dir if it already exists
                 test_spec_dir = os.path.dirname(options.test_spec)
                 if test_spec_dir:
                     mkdir(test_spec_dir)

                 try:
                     with open(options.test_spec, 'w') as f:
                         f.write(json.dumps(test_spec_data, indent=2))
                 except IOError, e:
                     print "[ERROR] Error writing test spec to file"
                     print e

             # If a path to a JUnit build report spec is provided, write it to a file
             if options.build_report_junit:
                 report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
                 report_exporter.report_to_file(build_report, options.build_report_junit, test_suite_properties=build_properties)

+            # Print memory map summary on screen
+            if build_report:
+                print
+                print print_build_memory_usage_results(build_report)
+
             print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
             status = print_report_exporter.report(build_report)

             if status:
                 sys.exit(0)
             else:
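For reference, roughly the test spec JSON that `--test-spec` writes, reconstructed from `build_tests()` and `test_spec_from_test_builds()` in this change. The top-level "builds" key and the binary "path" field are assumptions, since `test_spec_from_test_builds()` is truncated in the hunks below; the "binary_type" and "tests" fields come from `build_tests()`.

```
# Assumed overall shape; the path is illustrative.
test_spec_data = {
    "builds": {
        "K64F-GCC_ARM": {
            "binary_type": "bootable",
            "tests": {
                "tests-mbed_drivers-rtc": {
                    "binaries": [
                        {"path": "BUILD/tests/TESTS/mbed_drivers/rtc/rtc.bin"}
                    ]
                }
            }
        }
    }
}
```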
tools/test_api.py

@@ -46,6 +46,7 @@ from tools.paths import HOST_TESTS
 from tools.utils import ToolException
 from tools.utils import NotSupportedException
 from tools.utils import construct_enum
+from tools.memap import MemapParser
 from tools.targets import TARGET_MAP
 from tools.test_db import BaseDBAccess
 from tools.build_api import build_project, build_mbed_libs, build_lib
@@ -1970,12 +1971,12 @@ def test_path_to_name(path):
     while (tail and tail != "."):
         name_parts.insert(0, tail)
         head, tail = os.path.split(head)

     return "-".join(name_parts).lower()

 def find_tests(base_dir):
     """Given any directory, walk through the subdirectories and find all tests"""

     def find_test_in_directory(directory, tests_path):
         """Given a 'TESTS' directory, return a dictionary of test names and test paths.
         The formate of the dictionary is {"test-name": "./path/to/test"}"""
@@ -1989,20 +1990,20 @@ def find_tests(base_dir):
             "name": test_path_to_name(directory),
             "path": directory
         }

         return test

     tests_path = 'TESTS'
     tests = {}
     dirs = scan_for_source_paths(base_dir)

     for directory in dirs:
         test = find_test_in_directory(directory, tests_path)
         if test:
             tests[test['name']] = test['path']

     return tests

 def print_tests(tests, format="list", sort=True):
     """Given a dictionary of tests (as returned from "find_tests"), print them
     in the specified format"""
@@ -2033,12 +2034,11 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
                 continue_on_build_fail=False):
     """Given the data structure from 'find_tests' and the typical build parameters,
     build all the tests

     Returns a tuple of the build result (True or False) followed by the test
     build data structure"""

     execution_directory = "."
     base_path = norm_relative_path(build_path, execution_directory)

     target_name = target if isinstance(target, str) else target.name
@@ -2051,9 +2051,10 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
         "binary_type": "bootable",
         "tests": {}
     }

     result = True

+    map_outputs_total = list()
     for test_name, test_path in tests.iteritems():
         test_build_path = os.path.join(build_path, test_path)
         src_path = base_source_paths + [test_path]
@@ -2072,21 +2073,21 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
         except Exception, e:
             if not isinstance(e, NotSupportedException):
                 result = False

             if continue_on_build_fail:
                 continue
             else:
                 break

         # If a clean build was carried out last time, disable it for the next build.
         # Otherwise the previously built test will be deleted.
         if clean:
             clean = False

         # Normalize the path
         if bin_file:
             bin_file = norm_relative_path(bin_file, execution_directory)

             test_build['tests'][test_name] = {
                 "binaries": [
                     {
@@ -2094,15 +2095,15 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
                     }
                 ]
             }

             print 'Image: %s'% bin_file

     test_builds = {}
     test_builds["%s-%s" % (target_name, toolchain_name)] = test_build

     return result, test_builds


 def test_spec_from_test_builds(test_builds):
     return {
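A hypothetical call site matching the tuple return that the `build_tests()` docstring above now documents; the arguments are illustrative.

```
from tools.test_api import find_tests, build_tests, test_spec_from_test_builds

# result is the overall pass/fail flag; test_builds feeds the test spec.
tests = find_tests('.')
result, test_builds = build_tests(tests, ['.'], 'BUILD/tests',
                                  'K64F', 'GCC_ARM')
if result:
    test_spec = test_spec_from_test_builds(test_builds)
```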
tools/toolchains/__init__.py

@@ -233,28 +233,28 @@ class mbedToolchain:
     def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
         self.target = target
         self.name = self.__class__.__name__

         # compile/assemble/link/binary hooks
         self.hook = hooks.Hook(target, self)

         # Toolchain flags
         self.flags = deepcopy(self.DEFAULT_FLAGS)

         # User-defined macros
         self.macros = macros or []

         # Macros generated from toolchain and target rules/features
         self.symbols = None

         # Labels generated from toolchain and target rules/features (used for selective build)
         self.labels = None

         # This will hold the configuration data (as returned by Config.get_config_data())
         self.config_data = None

         # Non-incremental compile
         self.build_all = False

         # Build output dir
         self.build_dir = None
         self.timestamp = time()
@@ -265,7 +265,7 @@ class mbedToolchain:
         # Number of concurrent build jobs. 0 means auto (based on host system cores)
         self.jobs = 0

         self.CHROOT = None

         # Ignore patterns from .mbedignore files
         self.ignore_patterns = []
@@ -280,12 +280,13 @@ class mbedToolchain:
             self.notify_fun = self.print_notify_verbose
         else:
             self.notify_fun = self.print_notify

         # Silent builds (no output)
         self.silent = silent

         # Print output buffer
-        self.output = ""
+        self.output = str()
+        self.map_outputs = list() # Place to store memmap scan results in JSON like data structures

         # Build options passed by -o flag
         self.options = options if options is not None else []
@@ -295,7 +296,7 @@ class mbedToolchain:
         if self.options:
             self.info("Build Options: %s" % (', '.join(self.options)))

         # uVisor spepcific rules
         if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
             self.target.core = re.sub(r"F$", '', self.target.core)
@@ -310,10 +311,10 @@ class mbedToolchain:
         if not self.VERBOSE and event['type'] == 'tool_error':
             msg = event['message']

         elif event['type'] in ['info', 'debug']:
             msg = event['message']

         elif event['type'] == 'cc':
             event['severity'] = event['severity'].title()
             event['file'] = basename(event['file'])
@@ -615,7 +616,7 @@ class mbedToolchain:
     def relative_object_path(self, build_path, base_dir, source):
         source_dir, name, _ = split_path(source)

         obj_dir = join(build_path, relpath(source_dir, base_dir))
         mkdir(obj_dir)
         return join(obj_dir, name + '.o')
@@ -627,7 +628,7 @@ class mbedToolchain:
         cmd_list = []
         for c in includes:
             if c:
                 cmd_list.append(('-I%s' % c).replace("\\", "/"))
         string = " ".join(cmd_list)
         f.write(string)
         return include_file
@@ -822,12 +823,12 @@ class mbedToolchain:
         if self.target.OUTPUT_NAMING == "8.3":
             name = name[0:8]
             ext = ext[0:3]

         # Create destination directory
         head, tail = split(name)
         new_path = join(tmp_path, head)
         mkdir(new_path)

         filename = name+'.'+ext
         elf = join(tmp_path, name + '.elf')
         bin = join(tmp_path, filename)
@@ -844,7 +845,7 @@ class mbedToolchain:
         self.binary(r, elf, bin)

-        self.mem_stats(map)
+        self.map_outputs = self.mem_stats(map)

         self.var("compile_succeded", True)
         self.var("binary", filename)
@@ -900,7 +901,11 @@ class mbedToolchain:
         self.notify({'type': 'var', 'key': key, 'val': value})

     def mem_stats(self, map):
-        # Creates parser object
+        """! Creates parser object
+        @param map Path to linker map file to parse and decode
+        @return Memory summary structure with memory usage statistics
+                None if map file can't be opened and processed
+        """
         toolchain = self.__class__.__name__

         # Create memap object
@@ -909,7 +914,7 @@ class mbedToolchain:
         # Parse and decode a map file
         if memap.parse(abspath(map), toolchain) is False:
             self.info("Unknown toolchain for memory statistics %s" % toolchain)
-            return
+            return None

         # Write output to stdout in text (pretty table) format
         memap.generate_output('table')
@@ -917,11 +922,16 @@ class mbedToolchain:
         # Write output to file in JSON format
         map_out = splitext(map)[0] + "_map.json"
         memap.generate_output('json', map_out)

         # Write output to file in CSV format for the CI
         map_csv = splitext(map)[0] + "_map.csv"
         memap.generate_output('csv-ci', map_csv)

+        # Here we return memory statistics structure (constructed after
+        # call to generate_output) which contains raw data in bytes
+        # about sections + summary
+        return memap.get_memory_summary()

     # Set the configuration data
     def set_config_data(self, config_data):
         self.config_data = config_data
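Finally, a small sketch of the new `mem_stats()` contract as used in the linking step above: it now returns the memory summary (or None for an unknown toolchain), which is stashed on the toolchain as `map_outputs` and later copied into `cur_result["memory_usage"]` by `build_project`.

```
# 'toolchain' stands for any constructed mbedToolchain subclass instance;
# the map path is illustrative.
stats = toolchain.mem_stats("BUILD/project.map")
if stats is not None:
    # Same structure as MemapParser.get_memory_summary(): per-module records
    # followed by one {'summary': {...}} record.
    print stats[-1]['summary']['static_ram']
```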