Rewrite test detection to avoid relying on "inc_dirs"

pull/7183/head
Jimmy Brisson 2018-06-19 15:23:50 -05:00
parent 8166889885
commit 545553b6bc
3 changed files with 53 additions and 77 deletions

View File

@ -480,7 +480,6 @@ def build_project(src_paths, build_path, target, toolchain_name,
stats_depth - depth level for memap to display file/dirs
ignore - list of paths to add to mbedignore
"""
# Convert src_path to a list if needed
if not isinstance(src_paths, list):
src_paths = [src_paths]
@ -628,6 +627,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
# Convert src_path to a list if needed
if not isinstance(src_paths, list):
src_paths = [src_paths]
src_paths = [relpath(s) for s in src_paths]
# Build path
if archive:
@ -679,28 +679,25 @@ def build_library(src_paths, build_path, target, toolchain_name,
raise Exception(error_msg)
try:
resources = Resources(notify).scan_with_toolchain(
res = Resources(notify).scan_with_toolchain(
src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)
# Copy headers, objects and static libraries - all files needed for
# static lib
toolchain.copy_files(resources.headers, build_path, resources=resources)
toolchain.copy_files(resources.objects, build_path, resources=resources)
toolchain.copy_files(resources.libraries, build_path,
resources=resources)
toolchain.copy_files(resources.json_files, build_path,
resources=resources)
if resources.linker_script:
toolchain.copy_files(resources.linker_script, build_path,
resources=resources)
if resources.hex_files:
toolchain.copy_files(resources.hex_files, build_path,
resources=resources)
to_copy = (
res.get_file_refs(FileType.HEADER) +
res.get_file_refs(FileType.OBJECT) +
res.get_file_refs(FileType.LIB) +
res.get_file_refs(FileType.JSON) +
res.get_file_refs(FileType.LD_SCRIPT) +
res.get_file_refs(FileType.HEX) +
res.get_file_refs(FileType.BIN)
)
toolchain.copy_files(to_copy, build_path)
# Compile Sources
objects = toolchain.compile_sources(resources, resources.inc_dirs)
resources.objects.extend(objects)
objects = toolchain.compile_sources(
res, res.get_file_paths(FileType.INC_DIR))
res.add_files_to_type(FileType.OBJECT, objects)
if archive:
toolchain.build_library(objects, build_path, name)
@ -714,8 +711,6 @@ def build_library(src_paths, build_path, target, toolchain_name,
end = time()
cur_result["elapsed_time"] = end - start
cur_result["result"] = "OK"
add_result_to_report(report, cur_result)
return True
@ -840,8 +835,8 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)
# Copy Headers
toolchain.copy_files(resources.headers, build_path,
resources=resources)
toolchain.copy_files(
resources.get_file_refs(FileType.HEADER), build_path)
dependencies_include_dir = Resources(notify).scan_with_toolchain([build_path], toolchain).inc_dirs
@ -968,14 +963,18 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
toolchain.set_config_data(toolchain.config.get_config_data())
# distribute header files
toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
toolchain.copy_files(
[FileRef(basename(MBED_HEADER),MBED_HEADER)], MBED_LIBRARIES)
library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
(MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
(MBED_HAL, MBED_LIBRARIES_HAL)]:
resources = Resources(notify).scan_with_toolchain([dir], toolchain)
toolchain.copy_files(resources.headers, dest)
toolchain.copy_files(
[FileRef(basename(p), p) for p
in resources.get_file_paths(FileType.HEADER)],
dest)
library_incdirs.append(dest)
# collect resources of the libs to compile
@ -1011,7 +1010,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])
# Copy everything into the build directory
to_copy = sum([
to_copy = [FileRef(basename(p), p) for p in sum([
hal_res.headers,
hal_res.hex_files,
hal_res.bin_files,
@ -1022,7 +1021,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
cmsis_objects,
hal_objects,
separate_objects,
], [])
], [])]
toolchain.copy_files(to_copy, build_toolchain)
if report is not None:

View File

@ -40,7 +40,7 @@ try:
from Queue import Queue, Empty
except ImportError:
from queue import Queue, Empty
from os.path import join, exists, basename, relpath
from os.path import join, exists, basename, relpath, isdir
from threading import Thread, Lock
from multiprocessing import Pool, cpu_count
from subprocess import Popen, PIPE
@ -2083,49 +2083,33 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None):
commons = []
# Scan the directory for paths to probe for 'TESTS' folders
base_resources = Resources(MockNotifier())
base_resources = Resources(MockNotifier(), collect_ignores=True)
base_resources.add_directory(base_dir)
dirs = base_resources.inc_dirs
dirs = [d for d in base_resources.ignored_dirs if basename(d) == 'TESTS']
for directory in dirs:
subdirs = os.listdir(directory)
# If the directory contains a subdirectory called 'TESTS', scan it for test cases
if 'TESTS' in subdirs:
walk_base_dir = join(directory, 'TESTS')
test_resources = Resources(MockNotifier())
test_resources.add_directory(walk_base_dir, base_dir)
# Loop through all subdirectories
for d in test_resources.inc_dirs:
# If the test case folder is not called 'host_tests' or 'COMMON' and it is
# located two folders down from the main 'TESTS' folder (ex. TESTS/testgroup/testcase)
# then add it to the tests
relative_path = relpath(d, walk_base_dir)
relative_path_parts = os.path.normpath(relative_path).split(os.sep)
if len(relative_path_parts) == 2:
test_group_directory_path, test_case_directory = os.path.split(d)
test_group_directory = os.path.basename(test_group_directory_path)
# Check to make sure discovered folder is not in a host test directory or common directory
special_dirs = ['host_tests', 'COMMON']
if test_group_directory not in special_dirs and test_case_directory not in special_dirs:
test_name = test_path_to_name(d, base_dir)
tests[(test_name, walk_base_dir, test_group_directory, test_case_directory)] = [d]
# Also find any COMMON paths, we'll add these later once we find all the base tests
if 'COMMON' in relative_path_parts:
if relative_path_parts[0] != 'COMMON':
def predicate(base_pred, group_pred, name_base_group_case):
(name, base, group, case) = name_base_group_case
return base == base_pred and group == group_pred
commons.append((functools.partial(predicate, walk_base_dir, relative_path_parts[0]), d))
else:
def predicate(base_pred, name_base_group_case):
(name, base, group, case) = name_base_group_case
return base == base_pred
commons.append((functools.partial(predicate, walk_base_dir), d))
for test_group_directory in os.listdir(directory):
grp_dir = join(directory, test_group_directory)
if not isdir(grp_dir):
continue
for test_case_directory in os.listdir(grp_dir):
d = join(directory, test_group_directory, test_case_directory)
if not isdir(d):
continue
special_dirs = ['host_tests', 'COMMON']
if test_group_directory not in special_dirs and test_case_directory not in special_dirs:
test_name = test_path_to_name(d, base_dir)
tests[(test_name, directory, test_group_directory, test_case_directory)] = [d]
if test_case_directory == 'COMMON':
def predicate(base_pred, group_pred, name_base_group_case):
(name, base, group, case) = name_base_group_case
return base == base_pred and group == group_pred
commons.append((functools.partial(predicate, directory, test_group_directory), d))
if test_group_directory == 'COMMON':
def predicate(base_pred, name_base_group_case):
(name, base, group, case) = name_base_group_case
return base == base_pred
commons.append((functools.partial(predicate, directory), grp_dir))
# Apply common directories
for pred, path in commons:

View File

@ -290,17 +290,10 @@ class mbedToolchain:
if not isinstance(files_paths, list):
files_paths = [files_paths]
for source in files_paths:
if source is None:
files_paths.remove(source)
for source in files_paths:
_, relative_path = split(source)
target = join(trg_path, relative_path)
for dest, source in files_paths:
target = join(trg_path, dest)
if (target != source) and (self.need_update(target, [source])):
self.progress("copy", relative_path)
self.progress("copy", dest)
mkdir(dirname(target))
copyfile(source, target)