mirror of https://github.com/ARMmbed/mbed-os.git
Rewrite test detection to avoid relying on "inc_dirs"
parent 8166889885
commit 545553b6bc
@@ -480,7 +480,6 @@ def build_project(src_paths, build_path, target, toolchain_name,
     stats_depth - depth level for memap to display file/dirs
     ignore - list of paths to add to mbedignore
     """
-
     # Convert src_path to a list if needed
     if not isinstance(src_paths, list):
         src_paths = [src_paths]
@@ -628,6 +627,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
     # Convert src_path to a list if needed
     if not isinstance(src_paths, list):
         src_paths = [src_paths]
+    src_paths = [relpath(s) for s in src_paths]

     # Build path
     if archive:
@@ -679,28 +679,25 @@ def build_library(src_paths, build_path, target, toolchain_name,
             raise Exception(error_msg)

     try:
-        resources = Resources(notify).scan_with_toolchain(
+        res = Resources(notify).scan_with_toolchain(
             src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)

         # Copy headers, objects and static libraries - all files needed for
         # static lib
-        toolchain.copy_files(resources.headers, build_path, resources=resources)
-        toolchain.copy_files(resources.objects, build_path, resources=resources)
-        toolchain.copy_files(resources.libraries, build_path,
-                             resources=resources)
-        toolchain.copy_files(resources.json_files, build_path,
-                             resources=resources)
-        if resources.linker_script:
-            toolchain.copy_files(resources.linker_script, build_path,
-                                 resources=resources)
-
-        if resources.hex_files:
-            toolchain.copy_files(resources.hex_files, build_path,
-                                 resources=resources)
-
+        to_copy = (
+            res.get_file_refs(FileType.HEADER) +
+            res.get_file_refs(FileType.OBJECT) +
+            res.get_file_refs(FileType.LIB) +
+            res.get_file_refs(FileType.JSON) +
+            res.get_file_refs(FileType.LD_SCRIPT) +
+            res.get_file_refs(FileType.HEX) +
+            res.get_file_refs(FileType.BIN)
+        )
+        toolchain.copy_files(to_copy, build_path)
         # Compile Sources
-        objects = toolchain.compile_sources(resources, resources.inc_dirs)
-        resources.objects.extend(objects)
+        objects = toolchain.compile_sources(
+            res, res.get_file_paths(FileType.INC_DIR))
+        res.add_files_to_type(FileType.OBJECT, objects)

         if archive:
             toolchain.build_library(objects, build_path, name)
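Note: the rewritten copy step above builds one flat list of `FileRef` entries from typed lookups (`res.get_file_refs(FileType.HEADER)` and so on) instead of copying each `resources.*` attribute separately. Below is a minimal, self-contained sketch of how such a typed resource container can behave; `FileType`, `FileRef`, and the method names mirror the calls in the diff, but the class body itself is illustrative, not the mbed-os implementation.

```python
from collections import defaultdict, namedtuple
from enum import Enum
from os.path import basename

# Hypothetical stand-ins for the mbed-os FileType/FileRef types used in the diff.
FileRef = namedtuple("FileRef", "name path")

class FileType(Enum):
    HEADER = "header"
    OBJECT = "object"
    INC_DIR = "inc_dir"

class TypedResources(object):
    """Illustrative container that groups scanned files by FileType."""

    def __init__(self):
        self._refs = defaultdict(list)

    def add_files_to_type(self, file_type, paths):
        # Store each path together with the name it should have at the destination.
        self._refs[file_type].extend(FileRef(basename(p), p) for p in paths)

    def get_file_refs(self, file_type):
        return list(self._refs[file_type])

    def get_file_paths(self, file_type):
        return [ref.path for ref in self._refs[file_type]]

# Usage resembling the hunk above: concatenate typed refs into one copy list.
res = TypedResources()
res.add_files_to_type(FileType.HEADER, ["mbed.h", "hal/gpio_api.h"])
to_copy = res.get_file_refs(FileType.HEADER) + res.get_file_refs(FileType.OBJECT)
```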
@@ -714,8 +711,6 @@ def build_library(src_paths, build_path, target, toolchain_name,
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["result"] = "OK"
-
-
             add_result_to_report(report, cur_result)
         return True

@@ -840,8 +835,8 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
         inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)

     # Copy Headers
-    toolchain.copy_files(resources.headers, build_path,
-                         resources=resources)
+    toolchain.copy_files(
+        resources.get_file_refs(FileType.HEADER), build_path)

     dependencies_include_dir = Resources(notify).scan_with_toolchain([build_path], toolchain).inc_dirs

@@ -968,14 +963,18 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
     toolchain.set_config_data(toolchain.config.get_config_data())

     # distribute header files
-    toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
+    toolchain.copy_files(
+        [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
     library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

     for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                       (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                       (MBED_HAL, MBED_LIBRARIES_HAL)]:
         resources = Resources(notify).scan_with_toolchain([dir], toolchain)
-        toolchain.copy_files(resources.headers, dest)
+        toolchain.copy_files(
+            [FileRef(basename(p), p) for p
+             in resources.get_file_paths(FileType.HEADER)],
+            dest)
         library_incdirs.append(dest)

     # collect resources of the libs to compile
@@ -1011,7 +1010,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
     hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])

     # Copy everything into the build directory
-    to_copy = sum([
+    to_copy = [FileRef(basename(p), p) for p in sum([
         hal_res.headers,
         hal_res.hex_files,
         hal_res.bin_files,
@@ -1022,7 +1021,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
         cmsis_objects,
         hal_objects,
         separate_objects,
-    ], [])
+    ], [])]
     toolchain.copy_files(to_copy, build_toolchain)

     if report is not None:
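In the two hunks above, the bare paths collected from `hal_res` are wrapped as `FileRef(basename(p), p)` pairs before being handed to `copy_files`, with `sum(list_of_lists, [])` flattening the per-category lists first. A small standalone illustration of that wrapping follows; the `FileRef` namedtuple here is an assumed two-field shape (destination name plus source path), and the example paths are invented.

```python
from collections import namedtuple
from os.path import basename

FileRef = namedtuple("FileRef", "name path")  # assumed (dest name, source path) shape

# Example per-category path lists, analogous to hal_res.headers / hex_files / bin_files.
headers = ["mbed-os/hal/gpio_api.h", "mbed-os/platform/mbed_toolchain.h"]
hex_files = ["bootloader/bl.hex"]
bin_files = []

# sum(..., []) flattens the category lists; each path keeps only its basename
# as the name it will have under the destination directory.
to_copy = [FileRef(basename(p), p) for p in sum([headers, hex_files, bin_files], [])]
print([ref.name for ref in to_copy])  # ['gpio_api.h', 'mbed_toolchain.h', 'bl.hex']
```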
@@ -40,7 +40,7 @@ try:
     from Queue import Queue, Empty
 except ImportError:
     from queue import Queue, Empty
-from os.path import join, exists, basename, relpath
+from os.path import join, exists, basename, relpath, isdir
 from threading import Thread, Lock
 from multiprocessing import Pool, cpu_count
 from subprocess import Popen, PIPE
@@ -2083,49 +2083,33 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None):
     commons = []

     # Scan the directory for paths to probe for 'TESTS' folders
-    base_resources = Resources(MockNotifier())
+    base_resources = Resources(MockNotifier(), collect_ignores=True)
     base_resources.add_directory(base_dir)

-    dirs = base_resources.inc_dirs
+    dirs = [d for d in base_resources.ignored_dirs if basename(d) == 'TESTS']
     for directory in dirs:
-        subdirs = os.listdir(directory)
-
-        # If the directory contains a subdirectory called 'TESTS', scan it for test cases
-        if 'TESTS' in subdirs:
-            walk_base_dir = join(directory, 'TESTS')
-            test_resources = Resources(MockNotifier())
-            test_resources.add_directory(walk_base_dir, base_dir)
-
-            # Loop through all subdirectories
-            for d in test_resources.inc_dirs:
-
-                # If the test case folder is not called 'host_tests' or 'COMMON' and it is
-                # located two folders down from the main 'TESTS' folder (ex. TESTS/testgroup/testcase)
-                # then add it to the tests
-                relative_path = relpath(d, walk_base_dir)
-                relative_path_parts = os.path.normpath(relative_path).split(os.sep)
-                if len(relative_path_parts) == 2:
-                    test_group_directory_path, test_case_directory = os.path.split(d)
-                    test_group_directory = os.path.basename(test_group_directory_path)
-
-                    # Check to make sure discovered folder is not in a host test directory or common directory
-                    special_dirs = ['host_tests', 'COMMON']
-                    if test_group_directory not in special_dirs and test_case_directory not in special_dirs:
-                        test_name = test_path_to_name(d, base_dir)
-                        tests[(test_name, walk_base_dir, test_group_directory, test_case_directory)] = [d]
-
-                # Also find any COMMON paths, we'll add these later once we find all the base tests
-                if 'COMMON' in relative_path_parts:
-                    if relative_path_parts[0] != 'COMMON':
-                        def predicate(base_pred, group_pred, name_base_group_case):
-                            (name, base, group, case) = name_base_group_case
-                            return base == base_pred and group == group_pred
-                        commons.append((functools.partial(predicate, walk_base_dir, relative_path_parts[0]), d))
-                    else:
-                        def predicate(base_pred, name_base_group_case):
-                            (name, base, group, case) = name_base_group_case
-                            return base == base_pred
-                        commons.append((functools.partial(predicate, walk_base_dir), d))
+        for test_group_directory in os.listdir(directory):
+            grp_dir = join(directory, test_group_directory)
+            if not isdir(grp_dir):
+                continue
+            for test_case_directory in os.listdir(grp_dir):
+                d = join(directory, test_group_directory, test_case_directory)
+                if not isdir(d):
+                    continue
+                special_dirs = ['host_tests', 'COMMON']
+                if test_group_directory not in special_dirs and test_case_directory not in special_dirs:
+                    test_name = test_path_to_name(d, base_dir)
+                    tests[(test_name, directory, test_group_directory, test_case_directory)] = [d]
+                if test_case_directory == 'COMMON':
+                    def predicate(base_pred, group_pred, name_base_group_case):
+                        (name, base, group, case) = name_base_group_case
+                        return base == base_pred and group == group_pred
+                    commons.append((functools.partial(predicate, directory, test_group_directory), d))
+                if test_group_directory == 'COMMON':
+                    def predicate(base_pred, name_base_group_case):
+                        (name, base, group, case) = name_base_group_case
+                        return base == base_pred
+                    commons.append((functools.partial(predicate, directory), grp_dir))

     # Apply common directories
     for pred, path in commons:
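The rewritten `find_tests` above no longer infers test folders from `inc_dirs`; it takes the ignored `TESTS` directories and walks them exactly two levels deep (`TESTS/<group>/<case>`) with `os.listdir`, skipping the `host_tests` and `COMMON` folders. A self-contained sketch of that traversal is shown below; `test_path_to_name` and the ignore scan are mbed-os internals, so the test name here is derived with `relpath` purely for illustration.

```python
import os
from os.path import isdir, join, relpath

def discover_tests(tests_dirs, base_dir):
    """Walk TESTS/<group>/<case> directories and map a test name to its case dir."""
    special_dirs = {'host_tests', 'COMMON'}
    tests = {}
    for tests_dir in tests_dirs:
        for group in os.listdir(tests_dir):
            grp_dir = join(tests_dir, group)
            if not isdir(grp_dir):
                continue
            for case in os.listdir(grp_dir):
                case_dir = join(grp_dir, case)
                if not isdir(case_dir):
                    continue
                if group in special_dirs or case in special_dirs:
                    continue  # COMMON/host_tests hold shared code, not test cases
                # Illustrative naming only; mbed-os uses test_path_to_name() here.
                name = relpath(case_dir, base_dir).replace(os.sep, '-').lower()
                tests[name] = case_dir
    return tests
```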
@@ -290,17 +290,10 @@ class mbedToolchain:
         if not isinstance(files_paths, list):
             files_paths = [files_paths]

-        for source in files_paths:
-            if source is None:
-                files_paths.remove(source)
-
-        for source in files_paths:
-            _, relative_path = split(source)
-
-            target = join(trg_path, relative_path)
-
+        for dest, source in files_paths:
+            target = join(trg_path, dest)
             if (target != source) and (self.need_update(target, [source])):
-                self.progress("copy", relative_path)
+                self.progress("copy", dest)
                 mkdir(dirname(target))
                 copyfile(source, target)

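With the change above, `copy_files` no longer derives a destination name by splitting the source path; every entry already carries one as the first element of a `(dest, source)` pair, so the `None`-filtering pass and the `split()` call disappear. Below is a simplified, dependency-free version of the resulting loop (the `need_update` check and progress reporting are omitted, and the helper name is hypothetical).

```python
import os
from os.path import dirname, join
from shutil import copyfile

def copy_file_refs(file_refs, trg_path):
    """Copy (dest_name, source_path) pairs into trg_path, creating directories as needed."""
    for dest, source in file_refs:
        target = join(trg_path, dest)
        if target == source:
            continue  # already in place, nothing to copy
        os.makedirs(dirname(target) or ".", exist_ok=True)
        copyfile(source, target)

# Example: copy_file_refs([("mbed.h", "mbed-os/mbed.h")], "BUILD/include")
```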