mirror of https://github.com/ARMmbed/mbed-os.git
commit 289b8064e0
@@ -6,3 +6,4 @@ install:
 - "sudo $TRAVIS_BUILD_DIR/travis/install_dependencies.sh > /dev/null"
 - sudo pip install colorama
 - sudo pip install prettytable
+- sudo pip install jinja2
@@ -29,6 +29,8 @@ from workspace_tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, M
 from workspace_tools.targets import TARGET_NAMES, TARGET_MAP
 from workspace_tools.libraries import Library
 from workspace_tools.toolchains import TOOLCHAIN_CLASSES
+from jinja2 import FileSystemLoader
+from jinja2.environment import Environment


 def build_project(src_path, build_path, target, toolchain_name,
@@ -529,3 +531,20 @@ def print_build_results(result_list, build_name):
     result += "\n".join([" * %s" % f for f in result_list])
     result += "\n"
     return result
+
+def write_build_report(build_report, template_filename, filename):
+    build_report_failing = []
+    build_report_passing = []
+
+    for report in build_report:
+        if len(report["failing"]) > 0:
+            build_report_failing.append(report)
+        else:
+            build_report_passing.append(report)
+
+    env = Environment(extensions=['jinja2.ext.with_'])
+    env.loader = FileSystemLoader('ci_templates')
+    template = env.get_template(template_filename)
+
+    with open(filename, 'w+') as f:
+        f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
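For orientation, a minimal usage sketch of the write_build_report helper added above. The report structure mirrors the per-target dictionaries assembled later in this commit in the release script; the target and toolchain names are illustrative only, and the call assumes the working directory contains the ci_templates folder so the FileSystemLoader path resolves.

    from workspace_tools.build_api import write_build_report

    # Hypothetical report data; the keys match what the release script collects per target.
    sample_report = [{
        "target": "LPC1768",
        "passing": [{"toolchain": "ARM"}, {"toolchain": "GCC_ARM"}],
        "failing": [{"toolchain": "IAR"}],
        "skipped": [],
    }]

    # Renders ci_templates/library_build/report.html into the given output file.
    write_build_report(sample_report, 'library_build/report.html', 'build_report.html')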
@@ -24,6 +24,7 @@ ROOT = abspath(join(dirname(__file__), ".."))
 sys.path.insert(0, ROOT)

 from workspace_tools.build_api import build_mbed_libs
+from workspace_tools.build_api import write_build_report
 from workspace_tools.targets import TARGET_MAP

 OFFICIAL_MBED_LIBRARY_BUILD = (
@@ -98,31 +99,67 @@ if __name__ == '__main__':
                       default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
     parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                       default=False, help="Verbose diagnostic output")
+    parser.add_option("-t", "--toolchains", dest="toolchains", help="Use toolchains names separated by comma")
+
+    parser.add_option("", "--report-build", dest="report_build_file_name", help="Output the build results to an html file")
+
     options, args = parser.parse_args()
     start = time()
     failures = []
     successes = []
+    skips = []
+    build_report = []
     for target_name, toolchain_list in OFFICIAL_MBED_LIBRARY_BUILD:
         if options.official_only:
             toolchains = (getattr(TARGET_MAP[target_name], 'default_toolchain', 'ARM'),)
         else:
             toolchains = toolchain_list
+
+        if options.toolchains:
+            print "Only building using the following toolchains: %s" % (options.toolchains)
+            toolchainSet = set(toolchains)
+            toolchains = toolchainSet and set((options.toolchains).split(','))
+
+        cur_target_build_report = { "target": target_name, "passing": [], "failing": [], "skipped": []}
+
         for toolchain in toolchains:
            id = "%s::%s" % (target_name, toolchain)
            try:
-                build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs)
+                built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs)
+
+                if built_mbed_lib:
                     successes.append(id)
+                    cur_target_build_report["passing"].append({ "toolchain": toolchain })
+                else:
+                    skips.append(id)
+                    cur_target_build_report["skipped"].append({ "toolchain": toolchain })
+
            except Exception, e:
                failures.append(id)
+                cur_target_build_report["failing"].append({ "toolchain": toolchain })
                print e

+        if len(toolchains) > 0:
+            build_report.append(cur_target_build_report)
+
     # Write summary of the builds
+    if options.report_build_file_name:
+        write_build_report(build_report, 'library_build/report.html', options.report_build_file_name)
+
     print "\n\nCompleted in: (%.2f)s" % (time() - start)

     if successes:
         print "\n\nBuild successes:"
         print "\n".join([" * %s" % s for s in successes])

+    if skips:
+        print "\n\nBuild skips:"
+        print "\n".join([" * %s" % s for s in skips])
+
     if failures:
         print "\n\nBuild failures:"
         print "\n".join([" * %s" % f for f in failures])
@@ -0,0 +1,31 @@
+<div class="toggleshow{% if report.failing|length == 0 %} toggleshow-hide{% endif %}">
+    <h3>
+        <a href="#" class="toggleshow-title">
+            <span class="toggleshow-arrow"></span>
+            {% if report.failing|length > 0 %}
+            <span class="redbold">[FAIL]</span>
+            {% else %}
+            <span class="greenbold">[PASS]</span>
+            {% endif %}
+
+            {{report.target}} - Passing: {{report.passing|length}}, Failing: {{report.failing|length}}, Skipped: {{report.skipped|length}}
+        </a>
+    </h3>
+
+    <div class="toggleshow-body">
+        <h4 class="redbold">Failing</h4>
+        {% with build = report.failing %}
+        {% include 'library_build/build_report_table.html' %}
+        {% endwith %}
+
+        <h4 class="greenbold">Passing</h4>
+        {% with build = report.passing %}
+        {% include 'library_build/build_report_table.html' %}
+        {% endwith %}
+
+        <h4>Skipped</h4>
+        {% with build = report.skipped %}
+        {% include 'library_build/build_report_table.html' %}
+        {% endwith %}
+    </div>
+</div>
@@ -0,0 +1,10 @@
+<table class="sortable pane bigtable stripped-odd">
+    <tr>
+        <th>Toolchain</th>
+    </tr>
+    {% for run in build %}
+    <tr>
+        <td>{{run.toolchain}}</td>
+    </tr>
+    {% endfor %}
+</table>
@@ -0,0 +1,11 @@
+<h2>{{failing_builds|length}} Failing Builds</h2>
+{% for report in failing_builds %}
+{% include 'library_build/build_report.html' %}
+{% endfor %}
+
+<h2>{{passing_builds|length}} Passing Builds</h2>
+{% for report in passing_builds %}
+{% include 'library_build/build_report.html' %}
+{% endfor %}
+
+{% include 'scripts.js' %}
@@ -0,0 +1,53 @@
+<script>
+var elements = document.querySelectorAll(".toggleshow"),
+    hideClass = 'toggleshow-hide';
+
+for (var i = 0; i < elements.length; i++) {
+    var arrow = elements[i].querySelector(".toggleshow-arrow");
+    // Initial hide/show based on class
+    // Update arrow as well
+    if (containsClass(elements[i], 'toggleshow-hide')) {
+        toggleDisplay(elements[i]);
+        changeArrow(arrow, false);
+    } else {
+        changeArrow(arrow, true);
+    }
+
+    // Add click handler
+    addClick(elements[i], toggleDisplay);
+}
+
+function containsClass(element, className) {
+    var eleClassName = ' ' + element.className + ' ';
+    return eleClassName.indexOf(' ' + className + ' ') > -1;
+}
+
+function toggleDisplay(parentElement) {
+    var body = parentElement.querySelector(".toggleshow-body"),
+        arrow = parentElement.querySelector(".toggleshow-arrow");
+
+    if (body.style.display == 'block' || body.style.display == '') {
+        body.style.display = 'none';
+        changeArrow(arrow, false);
+    } else {
+        body.style.display = 'block';
+        changeArrow(arrow, true);
+    }
+}
+
+function changeArrow(element, visible) {
+    if (visible) {
+        element.innerHTML = '▲';
+    } else {
+        element.innerHTML = '▼';
+    }
+}
+
+function addClick(parentElement, func) {
+    parentElement.querySelector(".toggleshow-title").addEventListener("click", function(e) {
+        func(parentElement);
+        e.preventDefault();
+        return false;
+    });
+}
+</script>
@@ -0,0 +1,31 @@
+<div class="toggleshow{% if report.failing|length == 0 %} toggleshow-hide{% endif %}">
+    <h3>
+        <a href="#" class="toggleshow-title">
+            <span class="toggleshow-arrow"></span>
+            {% if report.failing|length > 0 %}
+            <span class="redbold">[FAIL]</span>
+            {% else %}
+            <span class="greenbold">[PASS]</span>
+            {% endif %}
+
+            {{report.target}} - Passing: {{report.passing|length}}, Failing: {{report.failing|length}}, Skipped: {{report.skipped|length}}
+        </a>
+    </h3>
+
+    <div class="toggleshow-body">
+        <h4 class="redbold">Failing</h4>
+        {% with build = report.failing %}
+        {% include 'tests_build/build_report_table.html' %}
+        {% endwith %}
+
+        <h4 class="greenbold">Passing</h4>
+        {% with build = report.passing %}
+        {% include 'tests_build/build_report_table.html' %}
+        {% endwith %}
+
+        <h4>Skipped</h4>
+        {% with build = report.skipped %}
+        {% include 'tests_build/build_report_table.html' %}
+        {% endwith %}
+    </div>
+</div>
@@ -0,0 +1,12 @@
+<table class="sortable pane bigtable stripped-odd">
+    <tr>
+        <th>Toolchain</th>
+        <th>Project</th>
+    </tr>
+    {% for run in build %}
+    <tr>
+        <td>{{run.toolchain}}</td>
+        <td>{{run.project}}</td>
+    </tr>
+    {% endfor %}
+</table>
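As a reference for the extra Project column, each run entry that this table iterates over is a small dictionary; the shape below mirrors what the test build aggregation later in this commit appends (a toolchain plus a "Library::<id>", "Test::<id>" or "mbed library" project label). The concrete toolchain, library and test names are illustrative only.

    # Hypothetical entries for the 'build' list consumed by tests_build/build_report_table.html:
    build = [
        {"toolchain": "ARM", "project": "Library::rtos"},
        {"toolchain": "ARM", "project": "Test::MBED_A1"},
        {"toolchain": "GCC_ARM", "project": "mbed library"},
    ]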
@@ -0,0 +1,11 @@
+<h2>{{failing_builds|length}} Failing Builds</h2>
+{% for report in failing_builds %}
+{% include 'tests_build/build_report.html' %}
+{% endfor %}
+
+<h2>{{passing_builds|length}} Passing Builds</h2>
+{% for report in passing_builds %}
+{% include 'tests_build/build_report.html' %}
+{% endfor %}
+
+{% include 'scripts.js' %}
@@ -209,6 +209,7 @@ if __name__ == '__main__':
                               _opts_log_file_name=opts.log_file_name,
                               _opts_report_html_file_name=opts.report_html_file_name,
                               _opts_report_junit_file_name=opts.report_junit_file_name,
+                              _opts_report_build_file_name=opts.report_build_file_name,
                               _test_spec=test_spec,
                               _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
                               _opts_goanna_for_tests=opts.goanna_for_tests,
@@ -48,6 +48,7 @@ from workspace_tools.targets import TARGET_MAP
 from workspace_tools.test_db import BaseDBAccess
 from workspace_tools.build_api import build_project, build_mbed_libs, build_lib
 from workspace_tools.build_api import get_target_supported_toolchains
+from workspace_tools.build_api import write_build_report
 from workspace_tools.libraries import LIBRARIES, LIBRARY_MAP
 from workspace_tools.toolchains import TOOLCHAIN_BIN_PATH
 from workspace_tools.test_exporters import ReportExporter, ResultExporterType
@@ -153,6 +154,7 @@ class SingleTestRunner(object):
                  _opts_log_file_name=None,
                  _opts_report_html_file_name=None,
                  _opts_report_junit_file_name=None,
+                 _opts_report_build_file_name=None,
                  _test_spec={},
                  _opts_goanna_for_mbed_sdk=None,
                  _opts_goanna_for_tests=None,
@@ -205,6 +207,7 @@ class SingleTestRunner(object):
         self.opts_log_file_name = _opts_log_file_name
         self.opts_report_html_file_name = _opts_report_html_file_name
         self.opts_report_junit_file_name = _opts_report_junit_file_name
+        self.opts_report_build_file_name = _opts_report_build_file_name
         self.opts_goanna_for_mbed_sdk = _opts_goanna_for_mbed_sdk
         self.opts_goanna_for_tests = _opts_goanna_for_tests
         self.opts_shuffle_test_order = _opts_shuffle_test_order
@@ -294,8 +297,17 @@ class SingleTestRunner(object):
     test_summary_ext = {}
     execute_thread_slice_lock = Lock()

-    def execute_thread_slice(self, q, target, toolchains, clean, test_ids):
+    def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report):
         for toolchain in toolchains:
+            # Toolchain specific build successes and failures
+            build_report[toolchain] = {
+                "mbed_failure": False,
+                "library_failure": False,
+                "library_build_passing": [],
+                "library_build_failing": [],
+                "test_build_passing": [],
+                "test_build_failing": []
+            }
             # print target, toolchain
             # Test suite properties returned to external tools like CI
             test_suite_properties = {}
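To make the data flow concrete: after one thread slice finishes for a toolchain, the per-toolchain entry initialised above ends up looking roughly like the sketch below. The library and test ids shown are illustrative; the lists are filled in by the library-build and project-build hunks further down in this commit.

    # Hypothetical contents of build_report["GCC_ARM"] after one thread slice:
    {
        "mbed_failure": False,
        "library_failure": False,
        "library_build_passing": ["rtos", "eth"],
        "library_build_failing": [],
        "test_build_passing": ["MBED_A1", "MBED_A2"],
        "test_build_failing": ["MBED_A3"]
    }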
@@ -306,6 +318,7 @@ class SingleTestRunner(object):
             test_suite_properties['toolchain'] = toolchain
             test_suite_properties['shuffle_random_seed'] = self.shuffle_random_seed

+
             # print '=== %s::%s ===' % (target, toolchain)
             # Let's build our test
             if target not in TARGET_MAP:
@@ -328,9 +341,9 @@ class SingleTestRunner(object):
                     continue
             except ToolException:
                 print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
+                build_report[toolchain]["mbed_failure"] = True
                 #return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
-                q.put(target + '_'.join(toolchains))
-                return
+                continue

             build_dir = join(BUILD_DIR, "test", target, toolchain)

@@ -340,6 +353,7 @@ class SingleTestRunner(object):

             # Enumerate through all tests and shuffle test order if requested
             test_map_keys = sorted(TEST_MAP.keys())
+
             if self.opts_shuffle_test_order:
                 random.shuffle(test_map_keys, self.shuffle_random_func)
                 # Update database with shuffle seed if applicable
@@ -358,61 +372,29 @@ class SingleTestRunner(object):
                 self.db_logger.update_build_id_info(self.db_logger_build_id, _extra=json.dumps(self.dump_options()))
                 self.db_logger.disconnect();

-            for test_id in test_map_keys:
+            valid_test_map_keys = self.get_valid_tests(test_map_keys, target, toolchain, test_ids)
+            skipped_test_map_keys = self.get_skipped_tests(test_map_keys, valid_test_map_keys)
+
+            for skipped_test_id in skipped_test_map_keys:
+                test_suite_properties['skipped'].append(skipped_test_id)
+
+
+            # First pass through all tests and determine which libraries need to be built
+            libraries = set()
+            for test_id in valid_test_map_keys:
                 test = TEST_MAP[test_id]
-                if self.opts_test_by_names and test_id not in self.opts_test_by_names.split(','):
-                    continue
-
-                if test_ids and test_id not in test_ids:
-                    continue
-
-                if self.opts_test_only_peripheral and not test.peripherals:
-                    if self.opts_verbose_skipped_tests:
-                        print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
-                    test_suite_properties['skipped'].append(test_id)
-                    continue
-
-                if self.opts_peripheral_by_names and test.peripherals and not len([i for i in test.peripherals if i in self.opts_peripheral_by_names.split(',')]):
-                    # We will skip tests not forced with -p option
-                    if self.opts_verbose_skipped_tests:
-                        print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
-                    test_suite_properties['skipped'].append(test_id)
-                    continue
-
-                if self.opts_test_only_common and test.peripherals:
-                    if self.opts_verbose_skipped_tests:
-                        print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral test skipped for target %s'% (target))
-                    test_suite_properties['skipped'].append(test_id)
-                    continue
-
-                if test.automated and test.is_supported(target, toolchain):
-                    if test.peripherals is None and self.opts_only_build_tests:
-                        # When users are using 'build only flag' and test do not have
-                        # specified peripherals we can allow test building by default
-                        pass
-                    elif self.opts_peripheral_by_names and test_id not in self.opts_peripheral_by_names.split(','):
-                        # If we force peripheral with option -p we expect test
-                        # to pass even if peripheral is not in MUTs file.
-                        pass
-                    elif not self.is_peripherals_available(target, test.peripherals):
-                        if self.opts_verbose_skipped_tests:
-                            if test.peripherals:
-                                print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral %s test skipped for target %s'% (",".join(test.peripherals), target))
-                            else:
-                                print self.logger.log_line(self.logger.LogType.INFO, 'Test %s skipped for target %s'% (test_id, target))
-                        test_suite_properties['skipped'].append(test_id)
-                        continue
+
+                # Detect which lib should be added to test
+                # Some libs have to compiled like RTOS or ETH
+                for lib in LIBRARIES:
+                    if lib['build_dir'] in test.dependencies:
+                        libraries.add(lib['id'])
+

             build_project_options = ["analyze"] if self.opts_goanna_for_tests else None
             clean_project_options = True if self.opts_goanna_for_tests or clean or self.opts_clean else None

-                # Detect which lib should be added to test
-                # Some libs have to compiled like RTOS or ETH
-                libraries = []
-                for lib in LIBRARIES:
-                    if lib['build_dir'] in test.dependencies:
-                        libraries.append(lib['id'])
-                # Build libs for test
+            # Build all required libraries
             for lib_id in libraries:
                 try:
                     build_lib(lib_id,
@@ -422,11 +404,21 @@ class SingleTestRunner(object):
                               verbose=self.opts_verbose,
                               clean=clean_mbed_libs_options,
                               jobs=self.opts_jobs)
+
+                    build_report[toolchain]["library_build_passing"].append(lib_id)
+
                 except ToolException:
                     print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building library %s'% (lib_id))
+                    build_report[toolchain]["library_failure"] = True
+                    build_report[toolchain]["library_build_failing"].append(lib_id)
                     #return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
-                    q.put(target + '_'.join(toolchains))
-                    return
+                    continue
+
+
+            for test_id in valid_test_map_keys:
+                test = TEST_MAP[test_id]

                 test_suite_properties['test.libs.%s.%s.%s'% (target, toolchain, test_id)] = ', '.join(libraries)
@@ -459,12 +451,15 @@ class SingleTestRunner(object):
                                           macros=MACROS,
                                           inc_dirs=INC_DIRS,
                                           jobs=self.opts_jobs)
+                    build_report[toolchain]["test_build_passing"].append(test_id)
+
                 except ToolException:
                     project_name_str = project_name if project_name is not None else test_id
                     print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building project %s'% (project_name_str))
+                    build_report[toolchain]["test_build_failing"].append(test_id)
                     # return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
-                    q.put(target + '_'.join(toolchains))
-                    return
+                    continue

                 if self.opts_only_build_tests:
                     # With this option we are skipping testing phase
                     continue
@@ -500,6 +495,7 @@ class SingleTestRunner(object):

             test_suite_properties['skipped'] = ', '.join(test_suite_properties['skipped'])
             self.test_suite_properties_ext[target][toolchain] = test_suite_properties
+
         # return self.test_summary, self.shuffle_random_seed, test_summary_ext, self.test_suite_properties_ext
         q.put(target + '_'.join(toolchains))
         return
@@ -514,6 +510,8 @@ class SingleTestRunner(object):
         if self.opts_shuffle_test_seed is not None and self.is_shuffle_seed_float():
             self.shuffle_random_seed = round(float(self.opts_shuffle_test_seed), self.SHUFFLE_SEED_ROUND)

+        build_reports = []
+
         if self.opts_parallel_test_exec:
             ###################################################################
             # Experimental, parallel test execution per singletest instance.
@@ -526,7 +524,9 @@ class SingleTestRunner(object):
             # get information about available MUTs (per target).
             for target, toolchains in self.test_spec['targets'].iteritems():
                 self.test_suite_properties_ext[target] = {}
-                t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids))
+                cur_build_report = {}
+                t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids, cur_build_report))
+                build_reports.append({ "target": target, "report": cur_build_report})
                 t.daemon = True
                 t.start()
                 execute_threads.append(t)
@@ -538,16 +538,118 @@ class SingleTestRunner(object):
             for target, toolchains in self.test_spec['targets'].iteritems():
                 if target not in self.test_suite_properties_ext:
                     self.test_suite_properties_ext[target] = {}
-                self.execute_thread_slice(q, target, toolchains, clean, test_ids)
+
+                cur_build_report = {}
+                self.execute_thread_slice(q, target, toolchains, clean, test_ids, cur_build_report)
+                build_reports.append({ "target": target, "report": cur_build_report})
                 q.get()

+        build_report = []
+
+        for target_build_report in build_reports:
+            cur_report = {
+                "target": target_build_report["target"],
+                "passing": [],
+                "failing": []
+            }
+
+            for toolchain in sorted(target_build_report["report"], key=target_build_report["report"].get):
+                print "%s - %s" % (target_build_report["target"], toolchain)
+                report = target_build_report["report"][toolchain]
+
+                if report["mbed_failure"]:
+                    cur_report["failing"].append({
+                        "toolchain": toolchain,
+                        "project": "mbed library"
+                    })
+                else:
+                    for passing_library in report["library_build_failing"]:
+                        cur_report["failing"].append({
+                            "toolchain": toolchain,
+                            "project": "Library::%s" % (passing_library)
+                        })
+
+                    for failing_library in report["library_build_passing"]:
+                        cur_report["passing"].append({
+                            "toolchain": toolchain,
+                            "project": "Library::%s" % (failing_library)
+                        })
+
+                    for passing_test in report["test_build_passing"]:
+                        cur_report["passing"].append({
+                            "toolchain": toolchain,
+                            "project": "Test::%s" % (passing_test)
+                        })
+
+                    for failing_test in report["test_build_failing"]:
+                        cur_report["failing"].append({
+                            "toolchain": toolchain,
+                            "project": "Test::%s" % (failing_test)
+                        })
+
+            build_report.append(cur_report)
+
         if self.db_logger:
             self.db_logger.reconnect();
             if self.db_logger.is_connected():
                 self.db_logger.update_build_id_info(self.db_logger_build_id, _status_fk=self.db_logger.BUILD_ID_STATUS_COMPLETED)
                 self.db_logger.disconnect();

-        return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext
+        return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, build_report
+
+    def get_valid_tests(self, test_map_keys, target, toolchain, test_ids):
+        valid_test_map_keys = []
+
+        for test_id in test_map_keys:
+            test = TEST_MAP[test_id]
+            if self.opts_test_by_names and test_id not in self.opts_test_by_names.split(','):
+                continue
+
+            if test_ids and test_id not in test_ids:
+                continue
+
+            if self.opts_test_only_peripheral and not test.peripherals:
+                if self.opts_verbose_skipped_tests:
+                    print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
+                continue
+
+            if self.opts_peripheral_by_names and test.peripherals and not len([i for i in test.peripherals if i in self.opts_peripheral_by_names.split(',')]):
+                # We will skip tests not forced with -p option
+                if self.opts_verbose_skipped_tests:
+                    print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
+                continue
+
+            if self.opts_test_only_common and test.peripherals:
+                if self.opts_verbose_skipped_tests:
+                    print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral test skipped for target %s'% (target))
+                continue
+
+            if test.automated and test.is_supported(target, toolchain):
+                if test.peripherals is None and self.opts_only_build_tests:
+                    # When users are using 'build only flag' and test do not have
+                    # specified peripherals we can allow test building by default
+                    pass
+                elif self.opts_peripheral_by_names and test_id not in self.opts_peripheral_by_names.split(','):
+                    # If we force peripheral with option -p we expect test
+                    # to pass even if peripheral is not in MUTs file.
+                    pass
+                elif not self.is_peripherals_available(target, test.peripherals):
+                    if self.opts_verbose_skipped_tests:
+                        if test.peripherals:
+                            print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral %s test skipped for target %s'% (",".join(test.peripherals), target))
+                        else:
+                            print self.logger.log_line(self.logger.LogType.INFO, 'Test %s skipped for target %s'% (test_id, target))
+                    continue
+
+            # The test has made it through all the filters, so add it to the valid tests list
+            valid_test_map_keys.append(test_id)
+
+        return valid_test_map_keys
+
+    def get_skipped_tests(self, all_test_map_keys, valid_test_map_keys):
+        # NOTE: This will not preserve order
+        return list(set(all_test_map_keys) - set(valid_test_map_keys))
+
     def generate_test_summary_by_target(self, test_summary, shuffle_seed=None):
         """ Prints well-formed summary with results (SQL table like)
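Putting the pieces together, a sketch of the aggregated entry that the loop above appends to build_report, and of how singletest_in_cli_mode (next hunk) hands the full list to write_build_report. Target, toolchain and test names are illustrative only.

    # Hypothetical aggregated entry produced by execute() for one target:
    cur_report = {
        "target": "K64F",
        "passing": [
            {"toolchain": "GCC_ARM", "project": "Library::rtos"},
            {"toolchain": "GCC_ARM", "project": "Test::MBED_A1"},
        ],
        "failing": [
            {"toolchain": "ARM", "project": "mbed library"},
        ],
    }

    # singletest_in_cli_mode then renders the list with the tests_build templates:
    # write_build_report(build_report, 'tests_build/report.html', single_test.opts_report_build_file_name)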
@@ -1314,7 +1416,7 @@ def singletest_in_cli_mode(single_test):
     """
     start = time()
     # Execute tests depending on options and filter applied
-    test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext = single_test.execute()
+    test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, build_report = single_test.execute()
    elapsed_time = time() - start

     # Human readable summary
@@ -1333,9 +1435,12 @@ def singletest_in_cli_mode(single_test):
         report_exporter = ReportExporter(ResultExporterType.HTML)
         report_exporter.report_to_file(test_summary_ext, single_test.opts_report_html_file_name, test_suite_properties=test_suite_properties_ext)
     if single_test.opts_report_junit_file_name:
-        # Export results in form of HTML report to separate file
+        # Export results in form of JUnit XML report to separate file
         report_exporter = ReportExporter(ResultExporterType.JUNIT)
         report_exporter.report_to_file(test_summary_ext, single_test.opts_report_junit_file_name, test_suite_properties=test_suite_properties_ext)
+    if single_test.opts_report_build_file_name:
+        # Export build results as html report to separate file
+        write_build_report(build_report, 'tests_build/report.html', single_test.opts_report_build_file_name)


 class TestLogger():
@@ -1706,6 +1811,10 @@ def get_default_test_options_parser():
                       dest='report_junit_file_name',
                       help='You can log test suite results in form of JUnit compliant XML report')

+    parser.add_option("", "--report-build",
+                      dest="report_build_file_name",
+                      help="Output the build results to an html file")
+
     parser.add_option('', '--verbose-skipped',
                       dest='verbose_skipped_tests',
                       default=False,