Merge pull request #2047 from PrzemekWirkus/devel_mmap_proj

[Tools] Add summary for test building
pull/2117/merge
Martin Kojtal 2016-07-07 09:35:19 +01:00 committed by GitHub
commit 9f33ba87b0
5 changed files with 186 additions and 109 deletions


@@ -232,6 +232,7 @@ def build_project(src_path, build_path, target, toolchain_name,
cur_result["elapsed_time"] = end - start
cur_result["output"] = toolchain.get_output()
cur_result["result"] = "OK"
cur_result["memory_usage"] = toolchain.map_outputs
add_result_to_report(report, cur_result)
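For context, toolchain.map_outputs is the JSON-like list built by the memory map parser (see the memap.py and toolchains.py hunks below): one record per module plus a trailing summary record. A rough sketch of the value stored under cur_result["memory_usage"], with hypothetical module names and sizes:

# Hypothetical illustration only; the per-module "size" keys follow
# MemapParser.print_sections and the summary keys match the new
# 'summary' block added in memap.py below.
cur_result["memory_usage"] = [
    {"module": "main.o", "size": {".text": 3072, ".data": 8, ".bss": 120}},
    {"module": "mbed-os/rtos", "size": {".text": 9216, ".data": 64, ".bss": 1024}},
    {"summary": {"static_ram": 1216, "stack": 32768, "heap": 65540,
                 "total_ram": 99524, "total_flash": 12360}},  # always the last element
]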
@@ -942,6 +943,49 @@ def print_build_results(result_list, build_name):
result += "\n"
return result
def print_build_memory_usage_results(report):
""" Generate result table with memory usage values for build results
Aggregates reports obtained from self.get_memory_summary()
@param report Report generated during the build procedure
"""
from prettytable import PrettyTable
columns_text = ['name', 'target', 'toolchain']
columns_int = ['static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
table = PrettyTable(columns_text + columns_int)
for col in columns_text:
table.align[col] = 'l'
for col in columns_int:
table.align[col] = 'r'
for target in report:
for toolchain in report[target]:
for name in report[target][toolchain]:
for dlist in report[target][toolchain][name]:
for dlistelem in dlist:
# Get 'memory_usage' record and build table with statistics
record = dlist[dlistelem]
if 'memory_usage' in record and record['memory_usage']:
# Note that the summary is stored in the last record of the
# 'memory_usage' section, which is why we take the last
# ("[-1]") element.
row = [
record['description'],
record['target_name'],
record['toolchain_name'],
record['memory_usage'][-1]['summary']['static_ram'],
record['memory_usage'][-1]['summary']['stack'],
record['memory_usage'][-1]['summary']['heap'],
record['memory_usage'][-1]['summary']['total_ram'],
record['memory_usage'][-1]['summary']['total_flash'],
]
table.add_row(row)
result = "Memory map breakdown for built projects (values in Bytes):\n"
result += table.get_string(sortby='name')
return result
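A minimal usage sketch, assuming the nested report layout the loops above expect (report[target][toolchain][name] is a list of dicts whose values are the per-build result records); all names and sizes here are hypothetical:

record = {
    "description": "tests-mbed_drivers-ticker",   # hypothetical test name
    "target_name": "K64F",
    "toolchain_name": "GCC_ARM",
    "memory_usage": [
        # ... per-module records ...
        {"summary": {"static_ram": 1216, "stack": 32768, "heap": 65540,
                     "total_ram": 99524, "total_flash": 12360}},
    ],
}
# The key of the inner dict is only iterated over, never used by the printer.
report = {"K64F": {"GCC_ARM": {"tests-mbed_drivers-ticker": [{"GCC_ARM": record}]}}}
print print_build_memory_usage_results(report)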
def write_build_report(build_report, template_filename, filename):
build_report_failing = []
build_report_passing = []


@@ -26,9 +26,9 @@ class MemapParser(object):
self.misc_flash_sections = ('.interrupts', '.flash_config')
self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab', \
'.ARM.exidx', '.ARM.attributes', '.eh_frame', \
'.init_array', '.fini_array', '.jcr', '.stab', \
self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab',
'.ARM.exidx', '.ARM.attributes', '.eh_frame',
'.init_array', '.fini_array', '.jcr', '.stab',
'.stabstr', '.ARM.exidx', '.ARM')
# sections to print info (generic for all toolchains)
@@ -43,6 +43,9 @@ class MemapParser(object):
# list of all object files and their mapping to module names
self.object_to_module = dict()
# Memory usage summary structure
self.mem_summary = dict()
def module_add(self, module_name, size, section):
"""
Adds a module / section to the list
@@ -67,7 +70,7 @@ class MemapParser(object):
return i # should be the name of the section (assuming it's a known one)
if line.startswith('.'):
return 'unknown' # all others are clasified are unknown
return 'unknown' # all others are classified as unknown
else:
return False # everything else, means no change in section
@@ -363,11 +366,12 @@ class MemapParser(object):
# Create table
columns = ['Module']
for i in list(self.print_sections):
columns.append(i)
columns.extend(self.print_sections)
table = PrettyTable(columns)
table.align["Module"] = "l"
for col in self.print_sections:
table.align[col] = 'r'
for i in list(self.print_sections):
table.align[i] = 'r'
@@ -388,8 +392,12 @@ class MemapParser(object):
for k in self.print_sections:
row.append(self.modules[i][k])
json_obj.append({"module":i, "size":{\
k:self.modules[i][k] for k in self.print_sections}})
json_obj.append({
"module":i,
"size":{
k:self.modules[i][k] for k in self.print_sections
}
})
table.add_row(row)
@@ -399,16 +407,19 @@ class MemapParser(object):
table.add_row(subtotal_row)
if export_format == 'json':
json_obj.append({\
'summary':{\
'total_static_ram':(subtotal['.data']+subtotal['.bss']),\
'allocated_heap':(subtotal['.heap']),\
'allocated_stack':(subtotal['.stack']),\
'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),\
'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),}})
summary = {
'summary':{
'static_ram':(subtotal['.data']+subtotal['.bss']),
'heap':(subtotal['.heap']),
'stack':(subtotal['.stack']),
'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),
'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),
}
}
file_desc.write(json.dumps(json_obj, indent=4))
if export_format == 'json':
json_to_file = json_obj + [summary]
file_desc.write(json.dumps(json_to_file, indent=4))
file_desc.write('\n')
elif export_format == 'csv-ci': # CSV format for the CI system
@@ -467,19 +478,24 @@ class MemapParser(object):
if file_desc is not sys.stdout:
file_desc.close()
self.mem_summary = json_obj + [summary]
return True
def get_memory_summary(self):
"""! Object is available only after self.generate_output('json') is called
@return Return memory summary object
"""
return self.mem_summary
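A short usage sketch of the new accessor; per the docstring above, the summary is only populated once generate_output('json') has run (with no file argument it is assumed to write the JSON to stdout). The map file path and toolchain name below are hypothetical:

memap = MemapParser()
if memap.parse("BUILD/K64F/GCC_ARM/app.map", "GCC_ARM"):
    memap.generate_output('json')            # also fills self.mem_summary
    summary = memap.get_memory_summary()     # list of module records + summary
    print summary[-1]['summary']['total_flash']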
def parse(self, mapfile, toolchain):
"""
Parse and decode map file depending on the toolchain
"""
result = True
try:
file_input = open(mapfile, 'rt')
except IOError as error:
print "I/O error({0}): {1}".format(error.errno, error.strerror)
return False
with open(mapfile, 'rt') as file_input:
if toolchain == "ARM" or toolchain == "ARM_STD" or toolchain == "ARM_MICRO":
self.search_objects(os.path.abspath(mapfile), "ARM")
self.parse_map_file_armcc(file_input)
@@ -489,11 +505,11 @@ class MemapParser(object):
self.search_objects(os.path.abspath(mapfile), toolchain)
self.parse_map_file_iar(file_input)
else:
return False
file_input.close()
return True
result = False
except IOError as error:
print "I/O error({0}): {1}".format(error.errno, error.strerror)
result = False
return result
def main():


@@ -29,6 +29,7 @@ sys.path.insert(0, ROOT)
from tools.test_api import test_path_to_name, find_tests, print_tests, build_tests, test_spec_from_test_builds
from tools.options import get_default_options_parser
from tools.build_api import build_project, build_library
from tools.build_api import print_build_memory_usage_results
from tools.targets import TARGET_MAP
from tools.utils import mkdir, ToolException, NotSupportedException
from tools.test_exporters import ReportExporter, ResultExporterType
@@ -198,6 +199,11 @@ if __name__ == '__main__':
report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
report_exporter.report_to_file(build_report, options.build_report_junit, test_suite_properties=build_properties)
# Print memory map summary on screen
if build_report:
print
print print_build_memory_usage_results(build_report)
print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
status = print_report_exporter.report(build_report)


@@ -46,6 +46,7 @@ from tools.paths import HOST_TESTS
from tools.utils import ToolException
from tools.utils import NotSupportedException
from tools.utils import construct_enum
from tools.memap import MemapParser
from tools.targets import TARGET_MAP
from tools.test_db import BaseDBAccess
from tools.build_api import build_project, build_mbed_libs, build_lib
@@ -2038,7 +2039,6 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
build data structure"""
execution_directory = "."
base_path = norm_relative_path(build_path, execution_directory)
target_name = target if isinstance(target, str) else target.name
@@ -2054,6 +2054,7 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
result = True
map_outputs_total = list()
for test_name, test_path in tests.iteritems():
test_build_path = os.path.join(build_path, test_path)
src_path = base_source_paths + [test_path]


@@ -285,7 +285,8 @@ class mbedToolchain:
self.silent = silent
# Print output buffer
self.output = ""
self.output = str()
self.map_outputs = list() # Place to store memory map scan results in JSON-like data structures
# Build options passed by -o flag
self.options = options if options is not None else []
@@ -844,7 +845,7 @@ class mbedToolchain:
self.binary(r, elf, bin)
self.mem_stats(map)
self.map_outputs = self.mem_stats(map)
self.var("compile_succeded", True)
self.var("binary", filename)
@@ -900,7 +901,11 @@ class mbedToolchain:
self.notify({'type': 'var', 'key': key, 'val': value})
def mem_stats(self, map):
# Creates parser object
"""! Creates parser object
@param map Path to linker map file to parse and decode
@return Memory summary structure with memory usage statistics
None if map file can't be opened and processed
"""
toolchain = self.__class__.__name__
# Create memap object
@@ -909,7 +914,7 @@ class mbedToolchain:
# Parse and decode a map file
if memap.parse(abspath(map), toolchain) is False:
self.info("Unknown toolchain for memory statistics %s" % toolchain)
return
return None
# Write output to stdout in text (pretty table) format
memap.generate_output('table')
@@ -922,6 +927,11 @@ class mbedToolchain:
map_csv = splitext(map)[0] + "_map.csv"
memap.generate_output('csv-ci', map_csv)
# Return the memory statistics structure (constructed after the
# call to generate_output), which contains raw per-section data
# in bytes plus the summary
return memap.get_memory_summary()
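Since mem_stats now returns None on failure, callers can tell a failed scan apart from real statistics. A small sketch of the pattern around the call site updated above (self.map_outputs = self.mem_stats(map)); the explicit guard is added here only for illustration:

stats = self.mem_stats(map)        # list of per-module records + summary, or None
if stats is not None:
    self.map_outputs = stats       # later stored as cur_result["memory_usage"]
else:
    self.info("Memory statistics not available for this build")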
# Set the configuration data
def set_config_data(self, config_data):
self.config_data = config_data