mirror of https://github.com/ARMmbed/mbed-os.git
Consolidating reporting in build_release and test_api
parent 6ee94dee1e
commit 736cae108e
@@ -23,6 +23,7 @@ import colorama
 from types import ListType
 from shutil import rmtree
 from os.path import join, exists, basename
+from time import time

 from workspace_tools.utils import mkdir, run_cmd, run_cmd_ext
 from workspace_tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
@@ -33,9 +34,50 @@ from jinja2 import FileSystemLoader
 from jinja2.environment import Environment


+def prep_report(report, target_name, toolchain_name, id_name):
+    # Setup report keys
+    if not target_name in report:
+        report[target_name] = {}
+
+    if not toolchain_name in report[target_name]:
+        report[target_name][toolchain_name] = {}
+
+    if not id_name in report[target_name][toolchain_name]:
+        report[target_name][toolchain_name][id_name] = []
+
+def prep_properties(properties, target_name, toolchain_name, id_name):
+    # Setup test properties
+    if not target_name in properties:
+        properties[target_name] = {}
+
+    if not toolchain_name in properties[target_name]:
+        properties[target_name][toolchain_name] = {}
+
+    properties[target_name][toolchain_name]["target"] = target_name
+    properties[target_name][toolchain_name]["toolchain"] = toolchain_name
+
+def create_result(target_name, toolchain_name, id_name, description):
+    cur_result = {}
+    cur_result["target_name"] = target_name
+    cur_result["toolchain_name"] = toolchain_name
+    cur_result["id"] = id_name
+    cur_result["description"] = description
+    cur_result["elapsed_time"] = 0
+    cur_result["output"] = ""
+
+    return cur_result
+
+def add_result_to_report(report, result):
+    target = result["target_name"]
+    toolchain = result["toolchain_name"]
+    id_name = result['id']
+    result_wrap = { 0: result }
+    report[target][toolchain][id_name].append(result_wrap)
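The four helpers above are the core of the consolidated reporting. A minimal sketch of how they compose (the target/toolchain names are hypothetical; "MBED" and "mbed SDK" are the id and description that build_mbed_libs itself uses):

    from workspace_tools.build_api import prep_report, prep_properties, create_result, add_result_to_report

    report = {}
    properties = {}
    prep_report(report, "LPC1768", "ARM", "MBED")
    prep_properties(properties, "LPC1768", "ARM", "MBED")
    cur_result = create_result("LPC1768", "ARM", "MBED", "mbed SDK")
    cur_result["result"] = "OK"
    add_result_to_report(report, cur_result)
    # report is now:
    # {"LPC1768": {"ARM": {"MBED": [{0: cur_result}]}}}

Each result is wrapped as {0: result} so a single id can accumulate multiple runs; the print exporter added further down unwraps it with test_run = test_runner[0].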
 def build_project(src_path, build_path, target, toolchain_name,
         libraries_paths=None, options=None, linker_script=None,
-        clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None, jobs=1, silent=False):
+        clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
+        jobs=1, silent=False, report=None, properties=None, project_id=None):
     """ This function builds a project. A project can be, for example, one test / UT
     """
     # Toolchain instance
@@ -48,7 +90,6 @@ def build_project(src_path, build_path, target, toolchain_name,
     # We need to remove all paths which are repeated to avoid
     # multiple compilations and linking with the same objects
     src_paths = [src_paths[0]] + list(set(src_paths[1:]))

     PROJECT_BASENAME = basename(src_paths[0])

     if name is None:
@@ -59,44 +100,79 @@ def build_project(src_path, build_path, target, toolchain_name,
         # User used custom global project name to have the same name for the
     toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name))

-    # Scan src_path and libraries_paths for resources
-    resources = toolchain.scan_resources(src_paths[0])
-    for path in src_paths[1:]:
-        resources.add(toolchain.scan_resources(path))
-    if libraries_paths is not None:
-        src_paths.extend(libraries_paths)
-        for path in libraries_paths:
-            resources.add(toolchain.scan_resources(path))
+    start = time()
+    id_name = project_id.upper()
+    description = project_id
+    cur_result = None
+
+    if report != None:
+        prep_report(report, target.name, toolchain_name, id_name)
+        cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+    if properties != None:
+        prep_properties(properties, target.name, toolchain_name, id_name)
+
+    try:
+        # Scan src_path and libraries_paths for resources
+        resources = toolchain.scan_resources(src_paths[0])
+        for path in src_paths[1:]:
+            resources.add(toolchain.scan_resources(path))
+        if libraries_paths is not None:
+            src_paths.extend(libraries_paths)
+            for path in libraries_paths:
+                resources.add(toolchain.scan_resources(path))

-    if linker_script is not None:
-        resources.linker_script = linker_script
+        if linker_script is not None:
+            resources.linker_script = linker_script

-    # Build Directory
-    if clean:
-        if exists(build_path):
-            rmtree(build_path)
-    mkdir(build_path)
+        # Build Directory
+        if clean:
+            if exists(build_path):
+                rmtree(build_path)
+        mkdir(build_path)

-    # We need to add additional include directories if necessary
-    if inc_dirs:
-        if type(inc_dirs) == ListType:
-            resources.inc_dirs.extend(inc_dirs)
-        else:
-            resources.inc_dirs.append(inc_dirs)
+        # We need to add additional include directories if necessary
+        if inc_dirs:
+            if type(inc_dirs) == ListType:
+                resources.inc_dirs.extend(inc_dirs)
+            else:
+                resources.inc_dirs.append(inc_dirs)

-    # Compile Sources
-    for path in src_paths:
-        src = toolchain.scan_resources(path)
-        objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
-        resources.objects.extend(objects)
+        # Compile Sources
+        for path in src_paths:
+            src = toolchain.scan_resources(path)
+            objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
+            resources.objects.extend(objects)

-    # Link Program
-    return toolchain.link_program(resources, build_path, name)
+        # Link Program
+        res, needed_update = toolchain.link_program(resources, build_path, name)
+
+        if report != None and needed_update:
+            end = time()
+            cur_result["elapsed_time"] = end - start
+            cur_result["output"] = ""
+            cur_result["result"] = "OK"
+
+            add_result_to_report(report, cur_result)
+
+        return res
+
+    except Exception, e:
+        end = time()
+        cur_result["result"] = "FAIL"
+        cur_result["output"] = str(e)
+        cur_result["elapsed_time"] = end - start
+
+        add_result_to_report(report, cur_result)
+
+        # Let Exception propagate
+        raise e

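With the new parameters, build_project() records its own result under project_id instead of leaving the bookkeeping to the caller. A hypothetical call (the path, test id, and target object are invented for illustration):

    from workspace_tools.build_api import build_project

    test_source_path = "libraries/tests/mbed/basic"  # hypothetical test source folder
    report = {}
    properties = {}
    bin_file = build_project(test_source_path, "build/test", target, "ARM",
                             report=report, properties=properties,
                             project_id="MBED_A1")

On success an "OK" entry lands in report only when the link step actually ran (needed_update); on any exception a "FAIL" entry carrying str(e) is recorded before the exception propagates.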
 def build_library(src_paths, build_path, target, toolchain_name,
         dependencies_paths=None, options=None, name=None, clean=False,
-        notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None, jobs=1, silent=False):
+        notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None,
+        jobs=1, silent=False, report=None, properties=None):
     """ src_path: the path of the source directory
         build_path: the path of the build directory
         target: ['LPC1768', 'LPC11U24', 'LPC2368']
@@ -111,62 +187,101 @@ def build_library(src_paths, build_path, target, toolchain_name,
     if type(src_paths) != ListType:
         src_paths = [src_paths]

-    for src_path in src_paths:
-        if not exists(src_path):
-            raise Exception("The library source folder does not exist: %s", src_path)
-
-    # Toolchain instance
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-    toolchain.build_all = clean
-
     # The first path will give the name to the library
     name = basename(src_paths[0])
-    toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))

-    # Scan Resources
-    resources = []
+    start = time()
+    id_name = name.upper()
+    description = name
+    cur_result = None
+
+    if report != None:
+        prep_report(report, target.name, toolchain_name, id_name)
+        cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+    if properties != None:
+        prep_properties(properties, target.name, toolchain_name, id_name)
+
     for src_path in src_paths:
-        resources.append(toolchain.scan_resources(src_path))
+        if not exists(src_path):
+            error_msg = "The library source folder does not exist: %s", src_path

-    # Add extra include directories / files which are required by library
-    # These files usually are not in the same directory as source files so
-    # previous scan will not include them
-    if inc_dirs_ext is not None:
-        for inc_ext in inc_dirs_ext:
-            resources.append(toolchain.scan_resources(inc_ext))
+            if report != None:
+                cur_result["output"] = error_msg
+                cur_result["result"] = "FAIL"
+                add_result_to_report(report, cur_result)

-    # Dependencies Include Paths
-    dependencies_include_dir = []
-    if dependencies_paths is not None:
-        for path in dependencies_paths:
-            lib_resources = toolchain.scan_resources(path)
-            dependencies_include_dir.extend(lib_resources.inc_dirs)
+            raise Exception(error_msg)

-    if inc_dirs:
-        dependencies_include_dir.extend(inc_dirs)
+    try:
+        # Toolchain instance
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent)
+        toolchain.VERBOSE = verbose
+        toolchain.jobs = jobs
+        toolchain.build_all = clean

-    # Create the desired build directory structure
-    bin_path = join(build_path, toolchain.obj_path)
-    mkdir(bin_path)
-    tmp_path = join(build_path, '.temp', toolchain.obj_path)
-    mkdir(tmp_path)
+        toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))

-    # Copy Headers
-    for resource in resources:
-        toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
-    dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+        # Scan Resources
+        resources = []
+        for src_path in src_paths:
+            resources.append(toolchain.scan_resources(src_path))

-    # Compile Sources
-    objects = []
-    for resource in resources:
-        objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+        # Add extra include directories / files which are required by library
+        # These files usually are not in the same directory as source files so
+        # previous scan will not include them
+        if inc_dirs_ext is not None:
+            for inc_ext in inc_dirs_ext:
+                resources.append(toolchain.scan_resources(inc_ext))

-    toolchain.build_library(objects, bin_path, name)
+        # Dependencies Include Paths
+        dependencies_include_dir = []
+        if dependencies_paths is not None:
+            for path in dependencies_paths:
+                lib_resources = toolchain.scan_resources(path)
+                dependencies_include_dir.extend(lib_resources.inc_dirs)

+        if inc_dirs:
+            dependencies_include_dir.extend(inc_dirs)

-def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False):
+        # Create the desired build directory structure
+        bin_path = join(build_path, toolchain.obj_path)
+        mkdir(bin_path)
+        tmp_path = join(build_path, '.temp', toolchain.obj_path)
+        mkdir(tmp_path)
+
+        # Copy Headers
+        for resource in resources:
+            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
+        dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+
+        # Compile Sources
+        objects = []
+        for resource in resources:
+            objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+
+        needed_update = toolchain.build_library(objects, bin_path, name)
+
+        if report != None and needed_update:
+            end = time()
+            cur_result["elapsed_time"] = end - start
+            cur_result["output"] = ""
+            cur_result["result"] = "OK"
+
+            add_result_to_report(report, cur_result)
+
+    except Exception, e:
+        end = time()
+        cur_result["result"] = "FAIL"
+        cur_result["output"] = str(e)
+        cur_result["elapsed_time"] = end - start
+
+        add_result_to_report(report, cur_result)
+
+        # Let Exception propagate
+        raise e
+
+def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None):
     """ Wrapper for the build_library function.
         Builds the library in the proper directory using all dependencies and macros defined by the user.
     """
@@ -177,7 +292,7 @@ def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=Fals
     if macros:
         MACROS.extend(macros)

-    build_library(lib.source_dir, lib.build_dir, target, toolchain, lib.dependencies, options,
+    return build_library(lib.source_dir, lib.build_dir, target, toolchain, lib.dependencies, options,
                   verbose=verbose,
                   silent=silent,
                   clean=clean,
@@ -185,83 +300,126 @@ def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=Fals
                   notify=notify,
                   inc_dirs=lib.inc_dirs,
                   inc_dirs_ext=lib.inc_dirs_ext,
-                  jobs=jobs)
+                  jobs=jobs,
+                  report=report,
+                  properties=properties)
     else:
         print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
         return False


 # We do have unique legacy conventions about how we build and package the mbed library
-def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False):
+def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None):
     """ Function returns True if the library was built and False if building was skipped """
+    start = time()
+    id_name = "MBED"
+    description = "mbed SDK"
+    cur_result = None
+
+    if report != None:
+        prep_report(report, target.name, toolchain_name, id_name)
+        cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+    if properties != None:
+        prep_properties(properties, target.name, toolchain_name, id_name)
+
     # Check toolchain support
     if toolchain_name not in target.supported_toolchains:
         supported_toolchains_text = ", ".join(target.supported_toolchains)
         print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
         print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')
+
+        if report != None:
+            cur_result["result"] = "SKIP"
+            add_result_to_report(report, cur_result)
+
         return False

-    # Toolchain
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-    toolchain.build_all = clean
+    try:
+        # Toolchain
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent)
+        toolchain.VERBOSE = verbose
+        toolchain.jobs = jobs
+        toolchain.build_all = clean

-    # Source and Build Paths
-    BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
-    BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
-    mkdir(BUILD_TOOLCHAIN)
+        # Source and Build Paths
+        BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
+        BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
+        mkdir(BUILD_TOOLCHAIN)

-    TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
-    mkdir(TMP_PATH)
+        TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+        mkdir(TMP_PATH)

-    # CMSIS
-    toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
-    cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
-    resources = toolchain.scan_resources(cmsis_src)
+        # CMSIS
+        toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
+        cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
+        resources = toolchain.scan_resources(cmsis_src)

-    toolchain.copy_files(resources.headers, BUILD_TARGET)
-    toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
-    toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
+        toolchain.copy_files(resources.headers, BUILD_TARGET)
+        toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
+        toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)

-    objects = toolchain.compile_sources(resources, TMP_PATH)
-    toolchain.copy_files(objects, BUILD_TOOLCHAIN)
+        objects = toolchain.compile_sources(resources, TMP_PATH)
+        toolchain.copy_files(objects, BUILD_TOOLCHAIN)

-    # mbed
-    toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+        # mbed
+        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))

-    # Common Headers
-    toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+        # Common Headers
+        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
+        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

-    # Target specific sources
-    HAL_SRC = join(MBED_TARGETS_PATH, "hal")
-    hal_implementation = toolchain.scan_resources(HAL_SRC)
-    toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
-    incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
-    objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        # Target specific sources
+        HAL_SRC = join(MBED_TARGETS_PATH, "hal")
+        hal_implementation = toolchain.scan_resources(HAL_SRC)
+        toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
+        incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
+        objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)

-    # Common Sources
-    mbed_resources = toolchain.scan_resources(MBED_COMMON)
-    objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        # Common Sources
+        mbed_resources = toolchain.scan_resources(MBED_COMMON)
+        objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)

-    # A number of compiled files need to be copied as objects as opposed to
-    # being part of the mbed library, for reasons that have to do with the way
-    # the linker searches for symbols in archives. These are:
-    # - retarget.o: to make sure that the C standard lib symbols get overridden
-    # - board.o: mbed_die is weak
-    # - mbed_overrides.o: this contains platform overrides of various weak SDK functions
-    separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []
-    for o in objects:
-        for name in separate_names:
-            if o.endswith(name):
-                separate_objects.append(o)
-    for o in separate_objects:
-        objects.remove(o)
-    toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
-    for o in separate_objects:
-        toolchain.copy_files(o, BUILD_TOOLCHAIN)
-    return True
+        # A number of compiled files need to be copied as objects as opposed to
+        # being part of the mbed library, for reasons that have to do with the way
+        # the linker searches for symbols in archives. These are:
+        # - retarget.o: to make sure that the C standard lib symbols get overridden
+        # - board.o: mbed_die is weak
+        # - mbed_overrides.o: this contains platform overrides of various weak SDK functions
+        separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []
+
+        for o in objects:
+            for name in separate_names:
+                if o.endswith(name):
+                    separate_objects.append(o)
+
+        for o in separate_objects:
+            objects.remove(o)
+
+        needed_update = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+
+        for o in separate_objects:
+            toolchain.copy_files(o, BUILD_TOOLCHAIN)
+
+        if report != None and needed_update:
+            end = time()
+            cur_result["elapsed_time"] = end - start
+            cur_result["output"] = ""
+            cur_result["result"] = "OK"
+
+            add_result_to_report(report, cur_result)
+
+        return True
+    except Exception, e:
+        end = time()
+        cur_result["result"] = "FAIL"
+        cur_result["output"] = str(e)
+        cur_result["elapsed_time"] = end - start
+
+        add_result_to_report(report, cur_result)
+
+        # Let Exception propagate
+        raise e

 def get_unique_supported_toolchains():
     """ Get list of all unique toolchains supported by targets """

@@ -142,15 +142,11 @@ if __name__ == '__main__':

     parser.add_option("-p", "--platforms", dest="platforms", default="", help="Build only for the platform names separated by comma")

-    parser.add_option("", "--report-build", dest="report_build_file_name", help="Output the build results to an html file")
+    parser.add_option("", "--report-build", dest="report_build_file_name", help="Output the build results to a junit xml file")


     options, args = parser.parse_args()
-    id_name = "MBED"
     start = time()
-    failures = []
-    successes = []
-    skips = []
     report = {}
     properties = {}

@@ -174,74 +170,23 @@ if __name__ == '__main__':
         toolchains = toolchainSet and set((options.toolchains).split(','))

         for toolchain in toolchains:
-            if not target_name in report:
-                report[target_name] = {}
-
-            if not toolchain in report[target_name]:
-                report[target_name][toolchain] = {}
-
-            if not id_name in report[target_name][toolchain]:
-                report[target_name][toolchain][id_name] = []
-
-            if not target_name in properties:
-                properties[target_name] = {}
-
-            if not toolchain in properties[target_name]:
-                properties[target_name][toolchain] = {}
-
-            properties[target_name][toolchain]["target"] = target_name
-            properties[target_name][toolchain]["toolchain"] = toolchain
-
-
             id = "%s::%s" % (target_name, toolchain)

-            start = time()
-            cur_result = {}
-            cur_result["toolchain_name"] = toolchain
-            cur_result["target_name"] = target_name
-            cur_result["id"] = id_name
-            cur_result["description"] = "mbed SDK"
-
             try:
-                built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs)
-                end = time()
-
-                cur_result["elapsed_time"] = end - start
-                cur_result["output"] = ""
-
-                if built_mbed_lib:
-                    cur_result["result"] = "OK"
-                else:
-                    cur_result["result"] = "SKIP"
+                built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs, report=report, properties=properties)

             except Exception, e:
                 exc_type, exc_value, exc_tb = sys.exc_info()
-                end = time()
-                cur_result["result"] = "FAIL"
-                cur_result["output"] = str(e)
-                cur_result["elapsed_time"] = end - start
                 print str(e)

-            cur_result_wrap = { 0: cur_result }
-            report[target_name][toolchain][id_name].append(cur_result_wrap)
-
     # Write summary of the builds

     if options.report_build_file_name:
-        report_exporter = ReportExporter(ResultExporterType.JUNIT)
-        report_exporter.report_to_file(report, options.report_build_file_name, test_suite_properties=properties)
+        file_report_exporter = ReportExporter(ResultExporterType.JUNIT)
+        file_report_exporter.report_to_file(report, options.report_build_file_name, test_suite_properties=properties)

     print "\n\nCompleted in: (%.2f)s" % (time() - start)

-    if successes:
-        print "\n\nBuild successes:"
-        print "\n".join([" * %s" % s for s in successes])
+    print_report_exporter = ReportExporter(ResultExporterType.PRINT)
+    status = print_report_exporter.report(report)

-    if skips:
-        print "\n\nBuild skips:"
-        print "\n".join([" * %s" % s for s in skips])
-
-    if failures:
-        print "\n\nBuild failures:"
-        print "\n".join([" * %s" % f for f in failures])
+    if not status:
+        sys.exit(1)
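Taken together, this hunk (apparently the release script's __main__) replaces the hand-rolled successes/failures/skips bookkeeping: a shared report/properties pair is threaded through every build_mbed_libs() call, and the exporters render it. A condensed sketch of the resulting flow (pairs_to_build is a hypothetical flattening of the real target/toolchain loops):

    from workspace_tools.build_api import build_mbed_libs
    from workspace_tools.test_exporters import ReportExporter, ResultExporterType

    report = {}
    properties = {}

    for target_name, toolchain in pairs_to_build:  # hypothetical stand-in for the real loops
        try:
            build_mbed_libs(TARGET_MAP[target_name], toolchain,
                            report=report, properties=properties)
        except Exception, e:
            print str(e)  # the FAIL entry was already recorded inside build_mbed_libs

    if options.report_build_file_name:
        ReportExporter(ResultExporterType.JUNIT).report_to_file(
            report, options.report_build_file_name, test_suite_properties=properties)

    if not ReportExporter(ResultExporterType.PRINT).report(report):
        sys.exit(1)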

@@ -50,7 +50,10 @@ from workspace_tools.test_db import BaseDBAccess
 from workspace_tools.build_api import build_project, build_mbed_libs, build_lib
 from workspace_tools.build_api import get_target_supported_toolchains
-from workspace_tools.build_api import write_build_report
 from workspace_tools.build_api import print_build_results
+from workspace_tools.build_api import prep_report
+from workspace_tools.build_api import prep_properties
+from workspace_tools.build_api import create_result
+from workspace_tools.build_api import add_result_to_report
 from workspace_tools.libraries import LIBRARIES, LIBRARY_MAP
 from workspace_tools.toolchains import TOOLCHAIN_BIN_PATH
 from workspace_tools.test_exporters import ReportExporter, ResultExporterType
@@ -188,11 +191,6 @@ class SingleTestRunner(object):
             from colorama import init
             init()

-        # Build results
-        build_failures = []
-        build_successes = []
-        build_skipped = []
-
         PATTERN = "\\{(" + "|".join(self.TEST_RESULT_MAPPING.keys()) + ")\\}"
         self.RE_DETECT_TESTCASE_RESULT = re.compile(PATTERN)
         # Settings related to test loops counters
@@ -310,19 +308,10 @@ class SingleTestRunner(object):
     test_summary_ext = {}
     execute_thread_slice_lock = Lock()

-    def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report):
+    def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report, build_properties):
         for toolchain in toolchains:
             tt_id = "%s::%s" % (toolchain, target)

-            # Toolchain specific build successes and failures
-            build_report[toolchain] = {
-                "mbed_failure": False,
-                "library_failure": False,
-                "library_build_passing": [],
-                "library_build_failing": [],
-                "test_build_passing": [],
-                "test_build_failing": []
-            }
-            # print target, toolchain
             # Test suite properties returned to external tools like CI
             test_suite_properties = {
@@ -352,17 +341,15 @@ class SingleTestRunner(object):
                                                          options=build_mbed_libs_options,
                                                          clean=clean_mbed_libs_options,
                                                          verbose=self.opts_verbose,
-                                                         jobs=self.opts_jobs)
+                                                         jobs=self.opts_jobs,
+                                                         report=build_report,
+                                                         properties=build_properties)

                 if not build_mbed_libs_result:
-                    self.build_skipped.append(tt_id)
                     print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Toolchain %s is not yet supported for this target'% (T.name, toolchain))
                     continue
-                else:
-                    self.build_successes.append(tt_id)

             except ToolException:
-                self.build_failures.append(tt_id)
-                build_report[toolchain]["mbed_failure"] = True
                 print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
                 continue

@@ -424,14 +411,12 @@ class SingleTestRunner(object):
                                          options=build_project_options,
                                          verbose=self.opts_verbose,
                                          clean=clean_mbed_libs_options,
-                                         jobs=self.opts_jobs)
-
-                    build_report[toolchain]["library_build_passing"].append(lib_id)
+                                         jobs=self.opts_jobs,
+                                         report=build_report,
+                                         properties=build_properties)

                 except ToolException:
                     print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building library %s'% (lib_id))
-                    build_report[toolchain]["library_failure"] = True
-                    build_report[toolchain]["library_build_failing"].append(lib_id)
                     continue


@@ -476,14 +461,14 @@ class SingleTestRunner(object):
                                                 name=project_name,
                                                 macros=MACROS,
                                                 inc_dirs=INC_DIRS,
-                                                jobs=self.opts_jobs)
-                        build_report[toolchain]["test_build_passing"].append(test_id)
+                                                jobs=self.opts_jobs,
+                                                report=build_report,
+                                                properties=build_properties,
+                                                project_id=test_id)

                     except ToolException:
                         project_name_str = project_name if project_name is not None else test_id
                         print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building project %s'% (project_name_str))
-                        build_report[toolchain]["test_build_failing"].append(test_id)
                         self.build_failures.append(tt_test_id)

                     # Append test results to global test summary
                     self.test_summary.append(
@@ -572,10 +557,8 @@ class SingleTestRunner(object):
         if self.opts_shuffle_test_seed is not None and self.is_shuffle_seed_float():
             self.shuffle_random_seed = round(float(self.opts_shuffle_test_seed), self.SHUFFLE_SEED_ROUND)

-        build_reports = []
-        self.build_failures = []
-        self.build_successes = []
-        self.build_skipped = []
+        build_report = {}
+        build_properties = {}

         if self.opts_parallel_test_exec:
             ###################################################################
@@ -589,9 +572,7 @@ class SingleTestRunner(object):
             # get information about available MUTs (per target).
             for target, toolchains in self.test_spec['targets'].iteritems():
                 self.test_suite_properties_ext[target] = {}
-                cur_build_report = {}
-                t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids, cur_build_report))
-                build_reports.append({ "target": target, "report": cur_build_report})
+                t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids, build_report, build_properties))
                 t.daemon = True
                 t.start()
                 execute_threads.append(t)
@@ -604,63 +585,16 @@ class SingleTestRunner(object):
                 if target not in self.test_suite_properties_ext:
                     self.test_suite_properties_ext[target] = {}

-                cur_build_report = {}
-                self.execute_thread_slice(q, target, toolchains, clean, test_ids, cur_build_report)
-                build_reports.append({ "target": target, "report": cur_build_report})
+                self.execute_thread_slice(q, target, toolchains, clean, test_ids, build_report, build_properties)
                 q.get()

-        build_report = []
-
-        for target_build_report in build_reports:
-            cur_report = {
-                "target": target_build_report["target"],
-                "passing": [],
-                "failing": []
-            }
-
-            for toolchain in sorted(target_build_report["report"], key=target_build_report["report"].get):
-                report = target_build_report["report"][toolchain]
-
-                if report["mbed_failure"]:
-                    cur_report["failing"].append({
-                        "toolchain": toolchain,
-                        "project": "mbed library"
-                    })
-                else:
-                    for passing_library in report["library_build_failing"]:
-                        cur_report["failing"].append({
-                            "toolchain": toolchain,
-                            "project": "Library::%s" % (passing_library)
-                        })
-
-                    for failing_library in report["library_build_passing"]:
-                        cur_report["passing"].append({
-                            "toolchain": toolchain,
-                            "project": "Library::%s" % (failing_library)
-                        })
-
-                    for passing_test in report["test_build_passing"]:
-                        cur_report["passing"].append({
-                            "toolchain": toolchain,
-                            "project": "Test::%s" % (passing_test)
-                        })
-
-                    for failing_test in report["test_build_failing"]:
-                        cur_report["failing"].append({
-                            "toolchain": toolchain,
-                            "project": "Test::%s" % (failing_test)
-                        })
-
-            build_report.append(cur_report)
-
         if self.db_logger:
             self.db_logger.reconnect();
             if self.db_logger.is_connected():
                 self.db_logger.update_build_id_info(self.db_logger_build_id, _status_fk=self.db_logger.BUILD_ID_STATUS_COMPLETED)
                 self.db_logger.disconnect();

-        return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, build_report
+        return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, build_report, build_properties

     def get_valid_tests(self, test_map_keys, target, toolchain, test_ids):
         valid_test_map_keys = []
@@ -1523,7 +1457,7 @@ def singletest_in_cli_mode(single_test):
     """
     start = time()
     # Execute tests depending on options and filter applied
-    test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, build_report = single_test.execute()
+    test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, build_report, build_properties = single_test.execute()
     elapsed_time = time() - start

     # Human readable summary
@@ -1539,12 +1473,8 @@ def singletest_in_cli_mode(single_test):
         print
     # Write summary of the builds

-    for report, report_name in [(single_test.build_successes, "Build successes:"),
-                                (single_test.build_skipped, "Build skipped:"),
-                                (single_test.build_failures, "Build failures:"),
-                               ]:
-        if report:
-            print print_build_results(report, report_name)
+    print_report_exporter = ReportExporter(ResultExporterType.PRINT)
+    status = print_report_exporter.report(build_report)

     # Store extra reports in files
     if single_test.opts_report_html_file_name:
@@ -1557,10 +1487,11 @@ def singletest_in_cli_mode(single_test):
         report_exporter.report_to_file(test_summary_ext, single_test.opts_report_junit_file_name, test_suite_properties=test_suite_properties_ext)
     if single_test.opts_report_build_file_name:
-        # Export build results as html report to separate file
-        write_build_report(build_report, 'tests_build/report.html', single_test.opts_report_build_file_name)
+        report_exporter = ReportExporter(ResultExporterType.JUNIT)
+        report_exporter.report_to_file(build_report, single_test.opts_report_build_file_name, test_suite_properties=build_properties)

     # Returns True if no build failures of the test projects or their dependencies
-    return len(single_test.build_failures) == 0
+    return status

 class TestLogger():
     """ Super-class for logging and printing ongoing events for test suite pass
@@ -1965,7 +1896,7 @@ def get_default_test_options_parser():

     parser.add_option("", "--report-build",
                       dest="report_build_file_name",
-                      help="Output the build results to an html file")
+                      help="Output the build results to a junit xml file")

     parser.add_option('', '--verbose-skipped',
                       dest='verbose_skipped_tests',

@@ -23,7 +23,8 @@ from workspace_tools.utils import construct_enum
 ResultExporterType = construct_enum(HTML='Html_Exporter',
                                     JUNIT='JUnit_Exporter',
                                     JUNIT_OPER='JUnit_Exporter_Interoperability',
-                                    BUILD='Build_Exporter')
+                                    BUILD='Build_Exporter',
+                                    PRINT='Print_Exporter')


 class ReportExporter():
@@ -82,6 +83,9 @@ class ReportExporter():
         elif self.result_exporter_type == ResultExporterType.JUNIT_OPER:
             # JUNIT exporter for interoperability test
             return self.exporter_junit_ioper(test_summary_ext, test_suite_properties)
+        elif self.result_exporter_type == ResultExporterType.PRINT:
+            # Print exporter for console summaries
+            return self.exporter_print(test_summary_ext)
         return None

     def report_to_file(self, test_summary_ext, file_name, test_suite_properties=None):
@@ -189,7 +193,7 @@ class ReportExporter():
         result += '<table><tr>'
-        for target in targets:
-            toolchains = sorted(test_result_ext[target].keys())
+        for target in targets:
+            toolchains = sorted(test_result_ext[target].keys())
             for toolchain in toolchains:
-                result += '<td></td>'
+                result += '<td></td>'

@@ -233,6 +237,8 @@ class ReportExporter():
                 tc.add_failure_info(description, _stdout)
             elif result == 'ERROR':
                 tc.add_error_info(description, _stdout)
+            elif result == 'SKIP':
+                tc.add_skipped_info(description, _stdout)

             test_cases.append(tc)
         ts = TestSuite("test.suite.ioper.%s" % (platform), test_cases)
@@ -274,6 +280,9 @@ class ReportExporter():
             if test_result['result'] == 'FAIL':
                 message = test_result['result']
                 tc.add_failure_info(message, _stdout)
+            elif test_result['result'] == 'SKIP':
+                message = test_result['result']
+                tc.add_skipped_info(message, _stdout)
             elif test_result['result'] != 'OK':
                 message = test_result['result']
                 tc.add_error_info(message, _stdout)
@@ -283,3 +292,51 @@ class ReportExporter():
             ts = TestSuite("test.suite.%s.%s"% (target, toolchain), test_cases, properties=test_suite_properties[target][toolchain])
             test_suites.append(ts)
         return TestSuite.to_xml_string(test_suites)
+
+    def exporter_print_helper(self, array):
+        for item in array:
+            print " * %s::%s::%s" % (item["target_name"], item["toolchain_name"], item["id"])
+
+    def exporter_print(self, test_result_ext):
+        """ Export test results in print format.
+        """
+        failures = []
+        skips = []
+        successes = []
+
+        unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
+        targets = sorted(test_result_ext.keys())
+
+        for target in targets:
+            toolchains = sorted(test_result_ext[target].keys())
+            for toolchain in toolchains:
+                tests = sorted(test_result_ext[target][toolchain].keys())
+                for test in tests:
+                    test_runs = test_result_ext[target][toolchain][test]
+                    for test_runner in test_runs:
+                        #test_run = test_result_ext[target][toolchain][test][test_run_number][0]
+                        test_run = test_runner[0]
+
+                        if test_run["result"] == "FAIL":
+                            failures.append(test_run)
+                        elif test_run["result"] == "SKIP":
+                            skips.append(test_run)
+                        elif test_run["result"] == "OK":
+                            successes.append(test_run)
+                        else:
+                            raise Exception("Unhandled result type: %s" % (test_run["result"]))
+
+        if successes:
+            print "\n\nBuild successes:"
+            self.exporter_print_helper(successes)
+
+        if skips:
+            print "\n\nBuild skips:"
+            self.exporter_print_helper(skips)
+
+        if failures:
+            print "\n\nBuild failures:"
+            self.exporter_print_helper(failures)
+            return False
+        else:
+            return True
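exporter_print() is what gives ResultExporterType.PRINT its contract: it prints the grouped summary and returns False when any recorded result is a FAIL, so callers can turn the report directly into an exit code. A minimal sketch (the report contents are hypothetical):

    from workspace_tools.test_exporters import ReportExporter, ResultExporterType

    report = {"LPC1768": {"ARM": {"MBED": [{0: {"target_name": "LPC1768",
                                                "toolchain_name": "ARM",
                                                "id": "MBED",
                                                "result": "OK"}}]}}}
    status = ReportExporter(ResultExporterType.PRINT).report(report)  # True here: no FAIL entries
    if not status:
        sys.exit(1)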
@@ -393,7 +393,7 @@ class mbedToolchain:

         elif ext == '.hex':
             resources.hex_files.append(file_path)

         elif ext == '.bin':
             resources.bin_files.append(file_path)

@@ -625,13 +625,18 @@ class mbedToolchain:
         return self.compile(self.cppc, source, object, includes)

     def build_library(self, objects, dir, name):
+        needed_update = False
         lib = self.STD_LIB_NAME % name
         fout = join(dir, lib)
         if self.need_update(fout, objects):
             self.info("Library: %s" % lib)
             self.archive(objects, fout)
+            needed_update = True
+
+        return needed_update

     def link_program(self, r, tmp_path, name):
+        needed_update = False
         ext = 'bin'
         if hasattr(self.target, 'OUTPUT_EXT'):
             ext = self.target.OUTPUT_EXT

@@ -647,10 +652,12 @@ class mbedToolchain:
         bin = join(tmp_path, filename)

         if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
+            needed_update = True
             self.progress("link", name)
             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

         if self.need_update(bin, [elf]):
+            needed_update = True
             self.progress("elf2bin", name)
             self.binary(r, elf, bin)

@@ -658,7 +665,7 @@ class mbedToolchain:
         self.var("compile_succeded", True)
         self.var("binary", filename)

-        return bin
+        return bin, needed_update

     def default_cmd(self, command):
         _stdout, _stderr, _rc = run_cmd(command)
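These toolchain changes close the loop on the "OK" entries recorded in build_api: build_library() and link_program() now report whether anything was actually rebuilt, and a result is only added to the report when needed_update is true, so up-to-date builds are not recorded again. The caller-side pattern, as used in build_project() above:

    res, needed_update = toolchain.link_program(resources, build_path, name)

    if report != None and needed_update:
        cur_result["elapsed_time"] = time() - start
        cur_result["result"] = "OK"
        add_result_to_report(report, cur_result)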