mirror of https://github.com/ARMmbed/mbed-os.git
Merge remote-tracking branch 'upstream/master' into pinmap_stm32f1
commit 88e7489862
@@ -64,16 +64,7 @@ void k64f_init_eth_hardware(void)
     PORT_HAL_SetPullCmd(PORTB_BASE, 0, true);

     PORT_HAL_SetMuxMode(PORTB_BASE, 1, kPortMuxAlt4);
-    /* Configure GPIO for MII interface */
-    PORT_HAL_SetMuxMode(PORTA_BASE, 9, kPortMuxAlt4);  /*!< ENET MII0_RXD3*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 10, kPortMuxAlt4); /*!< ENET MII0_RXD2*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 11, kPortMuxAlt4); /*!< ENET MII0_RXCLK*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 24, kPortMuxAlt4); /*!< ENET MII0_TXD2*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 25, kPortMuxAlt4); /*!< ENET MII0_TXCLK*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 26, kPortMuxAlt4); /*!< ENET MII0_TXD3*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 27, kPortMuxAlt4); /*!< ENET MII0_CRS*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 28, kPortMuxAlt4); /*!< ENET MII0_TXER*/
-    PORT_HAL_SetMuxMode(PORTA_BASE, 29, kPortMuxAlt4); /*!< ENET MII0_COL*/
 #if FSL_FEATURE_ENET_SUPPORT_PTP
     PORT_HAL_SetMuxMode(PORTC_BASE, (16 + ENET_TIMER_CHANNEL_NUM), kPortMuxAlt4); /* ENET ENET0_1588_TMR0*/
     PORT_HAL_SetDriveStrengthMode(PORTC_BASE, (16 + ENET_TIMER_CHANNEL_NUM), kPortHighDriveStrength);

@@ -254,9 +254,12 @@ if __name__ == '__main__':
     print "Completed in: (%.2f)s" % (time() - start)
     print

-    print print_build_results(successes, "Build successes:"),
-    print print_build_results(skipped, "Build skipped:"),
-    print print_build_results(failures, "Build failures:"),
+    for report, report_name in [(successes, "Build successes:"),
+                                (skipped, "Build skipped:"),
+                                (failures, "Build failures:"),
+                               ]:
+        if report:
+            print print_build_results(report, report_name),

     if failures:
         sys.exit(1)

@@ -526,7 +526,7 @@ def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
 def print_build_results(result_list, build_name):
     """ Generate result string for build results """
     result = ""
-    if result_list:
+    if len(result_list) > 0:
         result += build_name + "\n"
         result += "\n".join([" * %s" % f for f in result_list])
         result += "\n"

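For context, a minimal usage sketch of print_build_results as the callers above now use it, assuming workspace_tools is importable; the result lists here are illustrative (real entries are "toolchain::target" strings, as built by execute_thread_slice further down):

    from workspace_tools.build_api import print_build_results

    # Illustrative inputs; an empty list makes print_build_results return "",
    # which is why callers guard with `if report:` before printing.
    successes = ["ARM::K64F", "GCC_ARM::K64F"]
    skipped = []

    for report, report_name in [(successes, "Build successes:"),
                                (skipped, "Build skipped:")]:
        if report:
            print print_build_results(report, report_name)

    # Prints:
    # Build successes:
    #  * ARM::K64F
    #  * GCC_ARM::K64F
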
@@ -49,6 +49,7 @@ from workspace_tools.test_db import BaseDBAccess
 from workspace_tools.build_api import build_project, build_mbed_libs, build_lib
 from workspace_tools.build_api import get_target_supported_toolchains
 from workspace_tools.build_api import write_build_report
+from workspace_tools.build_api import print_build_results
 from workspace_tools.libraries import LIBRARIES, LIBRARY_MAP
 from workspace_tools.toolchains import TOOLCHAIN_BIN_PATH
 from workspace_tools.test_exporters import ReportExporter, ResultExporterType

@@ -124,6 +125,7 @@ class SingleTestRunner(object):
     TEST_RESULT_TIMEOUT = "TIMEOUT"
     TEST_RESULT_NO_IMAGE = "NO_IMAGE"
     TEST_RESULT_MBED_ASSERT = "MBED_ASSERT"
+    TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"

     GLOBAL_LOOPS_COUNT = 1 # How many times each test should be repeated
     TEST_LOOPS_LIST = [] # We redefine no.of loops per test_id

@@ -142,7 +144,8 @@ class SingleTestRunner(object):
                            "timeout" : TEST_RESULT_TIMEOUT,
                            "no_image" : TEST_RESULT_NO_IMAGE,
                            "end" : TEST_RESULT_UNDEF,
-                           "mbed_assert" : TEST_RESULT_MBED_ASSERT
+                           "mbed_assert" : TEST_RESULT_MBED_ASSERT,
+                           "build_failed" : TEST_RESULT_BUILD_FAILED
     }

     def __init__(self,

@@ -182,6 +185,11 @@ class SingleTestRunner(object):
             from colorama import init
             init()

+        # Build results
+        build_failures = []
+        build_successes = []
+        build_skipped = []
+
         PATTERN = "\\{(" + "|".join(self.TEST_RESULT_MAPPING.keys()) + ")\\}"
         self.RE_DETECT_TESTCASE_RESULT = re.compile(PATTERN)
         # Settings related to test loops counters

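The new "build_failed" mapping entry feeds the result-detection regex built just above. A minimal sketch of that mechanism (the mapping is trimmed to three entries and the sample token is invented):

    import re

    TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"
    TEST_RESULT_MAPPING = {"success" : "OK",     # trimmed for the example
                           "failure" : "FAIL",
                           "build_failed" : TEST_RESULT_BUILD_FAILED}

    # Same construction as in the class: \{(success|failure|build_failed)\}
    PATTERN = "\\{(" + "|".join(TEST_RESULT_MAPPING.keys()) + ")\\}"
    RE_DETECT_TESTCASE_RESULT = re.compile(PATTERN)

    line = "{{success}}"   # hypothetical token emitted by a test run
    m = RE_DETECT_TESTCASE_RESULT.search(line)
    if m:
        print TEST_RESULT_MAPPING[m.group(1)]   # -> OK
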
@@ -299,6 +307,8 @@ class SingleTestRunner(object):

     def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report):
         for toolchain in toolchains:
+            tt_id = "%s::%s" % (toolchain, target)
+
             # Toolchain specific build successes and failures
             build_report[toolchain] = {
                 "mbed_failure": False,

@@ -310,13 +320,14 @@ class SingleTestRunner(object):
             }
             # print target, toolchain
             # Test suite properties returned to external tools like CI
-            test_suite_properties = {}
-            test_suite_properties['jobs'] = self.opts_jobs
-            test_suite_properties['clean'] = clean
-            test_suite_properties['target'] = target
-            test_suite_properties['test_ids'] = ', '.join(test_ids)
-            test_suite_properties['toolchain'] = toolchain
-            test_suite_properties['shuffle_random_seed'] = self.shuffle_random_seed
+            test_suite_properties = {
+                'jobs': self.opts_jobs,
+                'clean': clean,
+                'target': target,
+                'test_ids': ', '.join(test_ids),
+                'toolchain': toolchain,
+                'shuffle_random_seed': self.shuffle_random_seed
+            }


             # print '=== %s::%s ===' % (target, toolchain)

@@ -329,6 +340,7 @@ class SingleTestRunner(object):
             build_mbed_libs_options = ["analyze"] if self.opts_goanna_for_mbed_sdk else None
             clean_mbed_libs_options = True if self.opts_goanna_for_mbed_sdk or clean or self.opts_clean else None

+
             try:
                 build_mbed_libs_result = build_mbed_libs(T,
                                                          toolchain,

@@ -337,12 +349,15 @@ class SingleTestRunner(object):
                                                          jobs=self.opts_jobs)

                 if not build_mbed_libs_result:
+                    self.build_skipped.append(tt_id)
                     print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Toolchain %s is not yet supported for this target'% (T.name, toolchain))
                     continue
+                else:
+                    self.build_successes.append(tt_id)
             except ToolException:
-                print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
+                self.build_failures.append(tt_id)
                 build_report[toolchain]["mbed_failure"] = True
-                #return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
+                print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
                 continue

             build_dir = join(BUILD_DIR, "test", target, toolchain)

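A sketch of the bookkeeping this introduces: every toolchain/target pair visited here lands in exactly one of the three lists, keyed by the tt_id format defined above (sample values are illustrative):

    build_successes = []
    build_skipped = []
    build_failures = []

    def record(target, toolchain, outcome):
        # Mirrors the append sites above: skipped when the toolchain is not
        # supported for the target, failure on ToolException, success otherwise.
        tt_id = "%s::%s" % (toolchain, target)
        {"ok": build_successes,
         "skip": build_skipped,
         "fail": build_failures}[outcome].append(tt_id)

    record("K64F", "ARM", "ok")
    record("K64F", "IAR", "skip")
    print build_successes, build_skipped   # ['ARM::K64F'] ['IAR::K64F']
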
@@ -411,12 +426,9 @@ class SingleTestRunner(object):
                     print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building library %s'% (lib_id))
                     build_report[toolchain]["library_failure"] = True
                     build_report[toolchain]["library_build_failing"].append(lib_id)
-                    #return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
                     continue


-
-
             for test_id in valid_test_map_keys:
                 test = TEST_MAP[test_id]

@@ -437,6 +449,14 @@ class SingleTestRunner(object):
                 test_uuid = uuid.uuid4()
                 MACROS.append('TEST_SUITE_UUID="%s"'% str(test_uuid))

+                # Prepare extended test results data structure (it can be used to generate detailed test report)
+                if toolchain not in self.test_summary_ext:
+                    self.test_summary_ext[toolchain] = {} # test_summary_ext : toolchain
+                if target not in self.test_summary_ext[toolchain]:
+                    self.test_summary_ext[toolchain][target] = {} # test_summary_ext : toolchain : target
+
+                tt_test_id = "%s::%s::%s" % (toolchain, target, test_id) # For logging only
+
                 project_name = self.opts_firmware_global_name if self.opts_firmware_global_name else None
                 try:
                     path = build_project(test.source_dir,

@@ -457,7 +477,26 @@ class SingleTestRunner(object):
                     project_name_str = project_name if project_name is not None else test_id
                     print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building project %s'% (project_name_str))
                     build_report[toolchain]["test_build_failing"].append(test_id)
-                    # return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
+                    self.build_failures.append(tt_test_id)
+
+                    # Append test results to global test summary
+                    self.test_summary.append(
+                        (self.TEST_RESULT_BUILD_FAILED, target, toolchain, test_id, 'Toolchain build failed', 0, 0, '-')
+                    )
+
+                    # Add detailed test result to test summary structure
+                    if target not in self.test_summary_ext[toolchain][target]:
+                        self.test_summary_ext[toolchain][target][test_id] = { 0: {
+                            'single_test_result' : self.TEST_RESULT_BUILD_FAILED,
+                            'single_test_output' : '',
+                            'target_name' : target,
+                            'toolchain_name' : toolchain,
+                            'test_id' : test_id,
+                            'test_description' : 'Toolchain build failed',
+                            'elapsed_time' : 0,
+                            'duration' : 0,
+                            'copy_method' : None
+                        }}
                     continue

                 if self.opts_only_build_tests:

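For reference, the shape of the record this builds, reconstructed from the code above (the toolchain, target and test id values are illustrative):

    # test_summary_ext : toolchain : target : test_id : iteration
    test_summary_ext = {
        "ARM": {
            "K64F": {
                "MBED_A1": {
                    0: {
                        'single_test_result' : "BUILD_FAILED",
                        'single_test_output' : '',
                        'target_name' : "K64F",
                        'toolchain_name' : "ARM",
                        'test_id' : "MBED_A1",
                        'test_description' : 'Toolchain build failed',
                        'elapsed_time' : 0,
                        'duration' : 0,
                        'copy_method' : None,
                    }
                }
            }
        }
    }
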
@@ -479,17 +518,17 @@ class SingleTestRunner(object):
                 test_suite_properties['test.path.%s.%s.%s'% (target, toolchain, test_id)] = path

                 # read MUTs, test specification and perform tests
-                single_test_result, detailed_test_results = self.handle(test_spec, target, toolchain, test_loops=test_loops)
+                handle_result = self.handle(test_spec, target, toolchain, test_loops=test_loops)
+                if handle_result:
+                    single_test_result, detailed_test_results = handle_result
+                else:
+                    continue

                 # Append test results to global test summary
                 if single_test_result is not None:
                     self.test_summary.append(single_test_result)

-                # Prepare extended test results data structure (it can be used to generate detailed test report)
-                if toolchain not in self.test_summary_ext:
-                    self.test_summary_ext[toolchain] = {} # test_summary_ext : toolchain
-                if target not in self.test_summary_ext[toolchain]:
-                    self.test_summary_ext[toolchain][target] = {} # test_summary_ext : toolchain : target
+                # Add detailed test result to test summary structure
                 if target not in self.test_summary_ext[toolchain][target]:
                     self.test_summary_ext[toolchain][target][test_id] = detailed_test_results # test_summary_ext : toolchain : target : test_it

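The new guard matters when self.handle returns nothing usable: the old one-liner unpacked the return value directly, which raises a TypeError if that value is None. A minimal sketch of the pattern, with a stubbed handle:

    def handle(test_spec, target, toolchain, test_loops=1):
        return None   # stub: stands in for a run that produced no result

    handle_result = handle({}, "K64F", "ARM")
    if handle_result:
        single_test_result, detailed_test_results = handle_result
    else:
        pass   # the old `a, b = handle(...)` form would raise
               # "TypeError: 'NoneType' object is not iterable" here
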
@@ -511,6 +550,9 @@ class SingleTestRunner(object):
             self.shuffle_random_seed = round(float(self.opts_shuffle_test_seed), self.SHUFFLE_SEED_ROUND)

         build_reports = []
+        self.build_failures = []
+        self.build_successes = []
+        self.build_skipped = []

         if self.opts_parallel_test_exec:
             ###################################################################

@@ -554,7 +596,6 @@ class SingleTestRunner(object):
             }

             for toolchain in sorted(target_build_report["report"], key=target_build_report["report"].get):
-                print "%s - %s" % (target_build_report["target"], toolchain)
                 report = target_build_report["report"][toolchain]

                 if report["mbed_failure"]:

@@ -703,6 +744,7 @@ class SingleTestRunner(object):
         """ Prints well-formed summary with results (SQL table like)
             table shows target x test results matrix across
         """
+        success_code = 0 # Success code that can be leter returned to
         result = "Test summary:\n"
         # Pretty table package is used to print results
         pt = PrettyTable(["Result", "Target", "Toolchain", "Test ID", "Test Description",

@@ -723,7 +765,8 @@ class SingleTestRunner(object):
                        self.TEST_RESULT_IOERR_SERIAL : 0,
                        self.TEST_RESULT_NO_IMAGE : 0,
                        self.TEST_RESULT_TIMEOUT : 0,
-                       self.TEST_RESULT_MBED_ASSERT : 0
+                       self.TEST_RESULT_MBED_ASSERT : 0,
+                       self.TEST_RESULT_BUILD_FAILED : 0
         }

         for test in test_summary:

@@ -1413,6 +1456,8 @@ def progress_bar(percent_progress, saturation=0):

 def singletest_in_cli_mode(single_test):
     """ Runs SingleTestRunner object in CLI (Command line interface) mode
+
+    @return returns success code (0 == success) for building and running tests
     """
     start = time()
     # Execute tests depending on options and filter applied

@@ -1427,7 +1472,17 @@ def singletest_in_cli_mode(single_test):
     # prints well-formed summary with results (SQL table like)
     # table shows text x toolchain test result matrix
     print single_test.generate_test_summary_by_target(test_summary, shuffle_seed)

     print "Completed in %.2f sec"% (elapsed_time)
+    print
+    # Write summary of the builds
+
+    for report, report_name in [(single_test.build_successes, "Build successes:"),
+                                (single_test.build_skipped, "Build skipped:"),
+                                (single_test.build_failures, "Build failures:"),
+                               ]:
+        if report:
+            print print_build_results(report, report_name)
+
     # Store extra reports in files
     if single_test.opts_report_html_file_name:

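Taken together, the tail of a CLI run would now end with something like the following (timings, targets and toolchains are invented):

    Completed in 73.21 sec

    Build successes:
     * ARM::K64F
     * GCC_ARM::K64F

    Build failures:
     * IAR::LPC1768

In build_release.py, a non-empty failures list additionally triggers sys.exit(1), as shown in the second hunk above.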