mbed-os/tools/singletest.py

#!/usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2014 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>
"""
"""
File format example: test_spec.json:
2014-03-12 10:59:19 +00:00
{
"targets": {
"KL46Z": ["ARM", "GCC_ARM"],
"LPC1768": ["ARM", "GCC_ARM", "GCC_CR", "IAR"],
2014-03-12 10:59:19 +00:00
"LPC11U24": ["uARM"],
"NRF51822": ["ARM"]
}
}
File format example: muts_all.json:
2014-03-12 10:59:19 +00:00
{
"1" : {"mcu": "LPC1768",
"port":"COM4",
"disk":"J:\\",
"peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
2014-03-12 10:59:19 +00:00
},
"2" : {"mcu": "KL25Z",
"port":"COM7",
"disk":"G:\\",
"peripherals": ["digital_loop", "port_loop", "analog_loop"]
}
2014-03-12 10:59:19 +00:00
}
"""
# Be sure that the tools directory is in the search path
import sys
from os.path import join, abspath, dirname
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
# Check: Extra modules which are required by core test suite
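# (prettytable renders the configuration and result tables, pyserial provides
# the serial connection used to talk to the boards under test)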
from tools.utils import check_required_modules
check_required_modules(['prettytable', 'serial'])
# Imports related to mbed build api
from tools.build_api import mcu_toolchain_matrix
# Imports from TEST API
from tools.test_api import SingleTestRunner
from tools.test_api import singletest_in_cli_mode
from tools.test_api import detect_database_verbose
from tools.test_api import get_json_data_from_file
from tools.test_api import get_avail_tests_summary_table
from tools.test_api import get_default_test_options_parser
from tools.test_api import print_muts_configuration_from_json
from tools.test_api import print_test_configuration_from_json
from tools.test_api import get_autodetected_MUTS_list
from tools.test_api import get_autodetected_TEST_SPEC
from tools.test_api import get_module_avail
from tools.test_exporters import ReportExporter, ResultExporterType
# Import extra modules which may not be installed; when available they extend the test suite functionality
try:
    import mbed_lstools
    from tools.compliance.ioper_runner import IOperTestRunner
    from tools.compliance.ioper_runner import get_available_oper_test_scopes
except:
    pass
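
# mbed_lstools (mbed-ls) and the compliance runner are optional: when they are
# missing, the bare except above silently skips them and the dependent code
# paths below stay guarded (hasattr(opts, 'auto_detect'), get_module_avail()).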

def get_version():
    """ Returns test script version
    """
    single_test_version_major = 1
    single_test_version_minor = 5

    return (single_test_version_major, single_test_version_minor)

if __name__ == '__main__':
    # Command line options
    parser = get_default_test_options_parser()

    parser.description = """This script allows you to run mbed defined test cases for particular MCU(s) and corresponding toolchain(s)."""
    parser.epilog = """Example: singletest.py -i test_spec.json -M muts_all.json"""

    (opts, args) = parser.parse_args()

    # Print script version
    if opts.version:
        print parser.description
        print parser.epilog
        print "Version %d.%d"% get_version()
        exit(0)

    if opts.db_url and opts.verbose_test_configuration_only:
        detect_database_verbose(opts.db_url)
        exit(0)

    # Print summary / information about automation test status
    if opts.test_automation_report:
        print get_avail_tests_summary_table(platform_filter=opts.general_filter_regex)
        exit(0)

    # Print summary / information about automation test status
    if opts.test_case_report:
        test_case_report_cols = ['id',
                                 'automated',
                                 'description',
                                 'peripherals',
                                 'host_test',
                                 'duration',
                                 'source_dir']
        print get_avail_tests_summary_table(cols=test_case_report_cols,
                                            result_summary=False,
                                            join_delim='\n',
                                            platform_filter=opts.general_filter_regex)
        exit(0)

    # Only prints matrix of supported toolchains
    if opts.supported_toolchains:
        print mcu_toolchain_matrix(platform_filter=opts.general_filter_regex)
        exit(0)
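
    # test_spec maps each target to the toolchains it should be built and
    # tested with; MUTs maps an index to a connected board (mcu, port, disk,
    # peripherals). Both are either auto-detected via mbed-ls or loaded from
    # the JSON files described in the module docstring.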
    test_spec = None
    MUTs = None

    if hasattr(opts, 'auto_detect') and opts.auto_detect:
        # If auto_detect attribute is present, we assume other auto-detection
        # parameters like 'toolchains_filter' are also set.
        print "MBEDLS: Detecting connected mbed-enabled devices... "

        MUTs = get_autodetected_MUTS_list()

        for mut in MUTs.values():
            print "MBEDLS: Detected %s, port: %s, mounted: %s"% (mut['mcu_unique'] if 'mcu_unique' in mut else mut['mcu'],
                                                                 mut['port'],
                                                                 mut['disk'])

        # Set up parameters for test specification filter function (we need to set toolchains per target here)
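        # 'default' in the toolchains filter selects each target's default
        # toolchain, 'all' selects every supported toolchain, and any other
        # comma-separated list is passed through as an explicit filter.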
        use_default_toolchain = 'default' in opts.toolchains_filter.split(',') if opts.toolchains_filter is not None else True
        use_supported_toolchains = 'all' in opts.toolchains_filter.split(',') if opts.toolchains_filter is not None else False
        toolchain_filter = opts.toolchains_filter
        platform_name_filter = opts.general_filter_regex.split(',') if opts.general_filter_regex is not None else opts.general_filter_regex

        # Test specification with information about each target and associated toolchain
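        # (the auto-detected spec has the same shape as the test_spec.json
        # example in the module docstring, just built from the detected MUTs)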
        test_spec = get_autodetected_TEST_SPEC(MUTs.values(),
                                               use_default_toolchain=use_default_toolchain,
                                               use_supported_toolchains=use_supported_toolchains,
                                               toolchain_filter=toolchain_filter,
                                               platform_name_filter=platform_name_filter)
    else:
        # Open file with test specification
        # test_spec_filename tells the script which targets and which of their
        # toolchain(s) should be covered by the test scenario
        opts.auto_detect = False
        test_spec = get_json_data_from_file(opts.test_spec_filename) if opts.test_spec_filename else None
        if test_spec is None:
            if not opts.test_spec_filename:
                parser.print_help()
            exit(-1)

        # Get extra MUTs if applicable
        MUTs = get_json_data_from_file(opts.muts_spec_filename) if opts.muts_spec_filename else None
        if MUTs is None:
            if not opts.muts_spec_filename:
                parser.print_help()
            exit(-1)

    if opts.verbose_test_configuration_only:
        print "MUTs configuration in %s:" % ('auto-detected' if opts.auto_detect else opts.muts_spec_filename)
        if MUTs:
            print print_muts_configuration_from_json(MUTs, platform_filter=opts.general_filter_regex)
        print
        print "Test specification in %s:" % ('auto-detected' if opts.auto_detect else opts.test_spec_filename)
        if test_spec:
            print print_test_configuration_from_json(test_spec)
        exit(0)
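
    # The interoperability checks rely on the optional mbed_lstools package
    # imported at the top of this file; without it this whole block is skipped.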
    if get_module_avail('mbed_lstools'):
        if opts.operability_checks:
            # Check if test scope is valid and run tests
            test_scope = get_available_oper_test_scopes()
            if opts.operability_checks in test_scope:
                tests = IOperTestRunner(scope=opts.operability_checks)
                test_results = tests.run()

                # Export results in form of JUnit XML report to separate file
                if opts.report_junit_file_name:
                    report_exporter = ReportExporter(ResultExporterType.JUNIT_OPER)
                    report_exporter.report_to_file(test_results, opts.report_junit_file_name)
            else:
                print "Unknown interoperability test scope name: '%s'" % (opts.operability_checks)
                print "Available test scopes: %s" % (','.join(["'%s'" % n for n in test_scope]))

            exit(0)
    # Verbose test specification and MUTs configuration
    if MUTs and opts.verbose:
        print print_muts_configuration_from_json(MUTs)
    if test_spec and opts.verbose:
        print print_test_configuration_from_json(test_spec)

    if opts.only_build_tests:
        # We are skipping the testing phase, so suppress the summary
        opts.suppress_summary = True

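    # All remaining options are forwarded to SingleTestRunner, which builds the
    # SDK, libraries and test projects and, unless only_build_tests is set,
    # flashes and runs them on the configured MUTs.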
    single_test = SingleTestRunner(_global_loops_count=opts.test_global_loops_value,
                                   _test_loops_list=opts.test_loops_list,
                                   _muts=MUTs,
                                   _clean=opts.clean,
                                   _opts_db_url=opts.db_url,
                                   _opts_log_file_name=opts.log_file_name,
                                   _opts_report_html_file_name=opts.report_html_file_name,
                                   _opts_report_junit_file_name=opts.report_junit_file_name,
                                   _opts_report_build_file_name=opts.report_build_file_name,
                                   _test_spec=test_spec,
                                   _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
                                   _opts_goanna_for_tests=opts.goanna_for_tests,
                                   _opts_shuffle_test_order=opts.shuffle_test_order,
                                   _opts_shuffle_test_seed=opts.shuffle_test_seed,
                                   _opts_test_by_names=opts.test_by_names,
                                   _opts_peripheral_by_names=opts.peripheral_by_names,
                                   _opts_test_only_peripheral=opts.test_only_peripheral,
                                   _opts_test_only_common=opts.test_only_common,
                                   _opts_verbose_skipped_tests=opts.verbose_skipped_tests,
                                   _opts_verbose_test_result_only=opts.verbose_test_result_only,
                                   _opts_verbose=opts.verbose,
                                   _opts_firmware_global_name=opts.firmware_global_name,
                                   _opts_only_build_tests=opts.only_build_tests,
                                   _opts_parallel_test_exec=opts.parallel_test_exec,
                                   _opts_suppress_summary=opts.suppress_summary,
                                   _opts_test_x_toolchain_summary=opts.test_x_toolchain_summary,
                                   _opts_copy_method=opts.copy_method,
                                   _opts_mut_reset_type=opts.mut_reset_type,
                                   _opts_jobs=opts.jobs,
                                   _opts_waterfall_test=opts.waterfall_test,
                                   _opts_consolidate_waterfall_test=opts.consolidate_waterfall_test,
                                   _opts_extend_test_timeout=opts.extend_test_timeout,
                                   _opts_auto_detect=opts.auto_detect)

    # Runs test suite in CLI mode
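    # The process exit code reflects the overall result (0 on success, -1 on
    # failure), so callers and CI jobs can gate on it directly.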
    if (singletest_in_cli_mode(single_test)):
        exit(0)
    else:
        exit(-1)