Merge pull request #2244 from bridadan/test-discovery-refactor

Test discovery refactor
pull/2259/head
Sam Grove 2016-07-26 15:45:41 -05:00 committed by GitHub
commit 64928b02df
4 changed files with 73 additions and 93 deletions

View File

@ -970,63 +970,3 @@ def write_build_report(build_report, template_filename, filename):
with open(filename, 'w+') as f:
f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
def scan_for_source_paths(path, exclude_paths=None):
    """Walk `path` and return the list of directories usable as source or
    include paths.

    Directories are pruned (not descended into and not returned) when they:
      * are hidden (name starts with '.'),
      * match a glob pattern listed in a '.mbedignore' file found along the
        walk (patterns are rooted at the directory containing the file), or
      * fall under one of `exclude_paths`.

    path: root directory to scan
    exclude_paths: optional list of directory paths to exclude from the scan
    Returns: list of absolute/relative directory paths (mirrors `path`'s form)
    """
    ignorepatterns = []
    paths = []

    def is_ignored(file_path):
        # True if file_path matches any ignore pattern collected so far.
        for pattern in ignorepatterns:
            if fnmatch.fnmatch(file_path, pattern):
                return True
        return False

    # os.walk with topdown=True (the default) lets us prune the walk by
    # mutating `dirs` in place: only the names left in `dirs` are recursed
    # into. followlinks=True so symlinked source trees are included.
    for root, dirs, files in walk(path, followlinks=True):
        # A '.mbedignore' file contributes glob patterns to the ignore list.
        if ".mbedignore" in files:
            with open(join(root, ".mbedignore"), "r") as f:
                lines = f.readlines()
            lines = [l.strip() for l in lines]                   # Strip whitespaces
            lines = [l for l in lines if l != ""]                # Strip empty lines
            lines = [l for l in lines if not re.match("^#", l)]  # Strip comment lines
            # Root the glob patterns at the directory containing the file.
            ignorepatterns.extend([join(root, line.strip()) for line in lines])

        # Iterate over a copy because we mutate `dirs` while looping.
        for d in copy(dirs):
            dir_path = join(root, d)
            # Always ignore hidden directories.
            if d.startswith('.'):
                dirs.remove(d)
                # Skip the remaining checks: a second dirs.remove(d) for a
                # directory that is both hidden and ignored/excluded would
                # raise ValueError.
                continue
            # Prune dirs matching the ignore patterns to avoid travelling
            # into them and to keep them out of the include paths.
            # The trailing separator (join with "") lets 'dir/*'-style
            # patterns match the directory itself.
            if is_ignored(join(dir_path, "")):
                dirs.remove(d)
                continue
            if exclude_paths:
                for exclude_path in exclude_paths:
                    rel_path = relpath(dir_path, exclude_path)
                    # A relative path not starting with '..' means dir_path
                    # lies inside exclude_path.
                    if not rel_path.startswith('..'):
                        dirs.remove(d)
                        break

        # Every directory reached by the (pruned) walk is an include path.
        paths.append(root)

    return paths

View File

@ -31,7 +31,7 @@ from tools.options import get_default_options_parser
from tools.build_api import build_project, build_library
from tools.build_api import print_build_memory_usage_results
from tools.targets import TARGET_MAP
from tools.utils import mkdir, ToolException, NotSupportedException
from tools.utils import mkdir, ToolException, NotSupportedException, args_error
from tools.test_exporters import ReportExporter, ResultExporterType
from utils import argparse_filestring_type, argparse_lowercase_type, argparse_many
from utils import argparse_dir_not_parent
@ -105,9 +105,19 @@ if __name__ == '__main__':
all_tests = {}
tests = {}
# Target
if options.mcu is None :
args_error(parser, "[ERROR] You should specify an MCU")
mcu = options.mcu[0]
# Toolchain
if options.tool is None:
args_error(parser, "[ERROR] You should specify a TOOLCHAIN")
toolchain = options.tool[0]
# Find all tests in the relevant paths
for path in all_paths:
all_tests.update(find_tests(path))
all_tests.update(find_tests(path, mcu, toolchain, options.options))
# Filter tests by name if specified
if options.names:
@ -151,16 +161,13 @@ if __name__ == '__main__':
if not base_source_paths:
base_source_paths = ['.']
target = options.mcu[0]
build_report = {}
build_properties = {}
library_build_success = False
try:
# Build sources
build_library(base_source_paths, options.build_dir, target, options.tool[0],
build_library(base_source_paths, options.build_dir, mcu, toolchain,
options=options.options,
jobs=options.jobs,
clean=options.clean,
@ -187,7 +194,7 @@ if __name__ == '__main__':
print "Failed to build library"
else:
# Build all the tests
test_build_success, test_build = build_tests(tests, [options.build_dir], options.build_dir, target, options.tool[0],
test_build_success, test_build = build_tests(tests, [options.build_dir], options.build_dir, mcu, toolchain,
options=options.options,
clean=options.clean,
report=build_report,

View File

@ -31,10 +31,11 @@ import ctypes
from types import ListType
from colorama import Fore, Back, Style
from prettytable import PrettyTable
from copy import copy
from time import sleep, time
from Queue import Queue, Empty
from os.path import join, exists, basename
from os.path import join, exists, basename, relpath
from threading import Thread, Lock
from subprocess import Popen, PIPE
@ -56,7 +57,8 @@ from tools.build_api import prep_report
from tools.build_api import prep_properties
from tools.build_api import create_result
from tools.build_api import add_result_to_report
from tools.build_api import scan_for_source_paths
from tools.build_api import prepare_toolchain
from tools.build_api import scan_resources
from tools.libraries import LIBRARIES, LIBRARY_MAP
from tools.toolchains import TOOLCHAIN_PATHS
from tools.toolchains import TOOLCHAINS
@ -65,6 +67,7 @@ from tools.utils import argparse_filestring_type
from tools.utils import argparse_uppercase_type
from tools.utils import argparse_lowercase_type
from tools.utils import argparse_many
from tools.utils import get_path_depth
import tools.host_tests.host_tests_plugins as host_tests_plugins
@ -1987,33 +1990,46 @@ def test_path_to_name(path):
return "-".join(name_parts).lower()
# NOTE(review): this span is a diff rendering with the '+'/'-' markers and
# indentation stripped, so the OLD (removed) and NEW (added) versions of
# find_tests are interleaved below. It is not valid Python as-is; comments
# mark which lines appear to belong to which version — confirm against the
# actual repository before relying on this.
# OLD signature (removed by the refactor):
def find_tests(base_dir):
"""Given any directory, walk through the subdirectories and find all tests"""
# NEW signature (added): takes target/toolchain so discovery can reuse the
# build system's resource scanning.
def find_tests(base_dir, target_name, toolchain_name, options=None):
""" Finds all tests in a directory recursively
base_dir: path to the directory to scan for tests (ex. 'path/to/project')
target_name: name of the target to use for scanning (ex. 'K64F')
toolchain_name: name of the toolchain to use for scanning (ex. 'GCC_ARM')
options: Compile options to pass to the toolchain (ex. ['debug-info'])
"""
# Nested helper — appears to belong to the OLD version (the NEW loop below
# does its own TESTS/<group>/<case> matching via path depth instead).
def find_test_in_directory(directory, tests_path):
"""Given a 'TESTS' directory, return a dictionary of test names and test paths.
The format of the dictionary is {"test-name": "./path/to/test"}"""
test = None
if tests_path in directory:
head, test_case_directory = os.path.split(directory)
if test_case_directory != tests_path and test_case_directory != "host_tests":
head, test_group_directory = os.path.split(head)
if test_group_directory != tests_path and test_case_directory != "host_tests":
test = {
"name": test_path_to_name(directory),
"path": directory
}
return test
tests_path = 'TESTS'
tests = {}
# OLD: directory discovery via scan_for_source_paths (removed).
dirs = scan_for_source_paths(base_dir)
# NEW: Prepare the toolchain
toolchain = prepare_toolchain(base_dir, target_name, toolchain_name, options=options, silent=True)
# NEW: Scan the directory for paths to probe for 'TESTS' folders
base_resources = scan_resources(base_dir, toolchain)
dirs = base_resources.inc_dirs
for directory in dirs:
# OLD loop body (removed): per-directory helper lookup.
test = find_test_in_directory(directory, tests_path)
if test:
tests[test['name']] = test['path']
# NEW loop body (added): probe each include dir for a 'TESTS' subfolder.
subdirs = os.listdir(directory)
# If the directory contains a subdirectory called 'TESTS', scan it for test cases
if 'TESTS' in subdirs:
walk_base_dir = join(directory, 'TESTS')
test_resources = toolchain.scan_resources(walk_base_dir, base_path=base_dir)
# Loop through all subdirectories
for d in test_resources.inc_dirs:
# If the test case folder is not called 'host_tests' and it is
# located two folders down from the main 'TESTS' folder (ex. TESTS/testgroup/testcase)
# then add it to the tests
path_depth = get_path_depth(relpath(d, walk_base_dir))
if path_depth == 2:
test_group_directory_path, test_case_directory = os.path.split(d)
test_group_directory = os.path.basename(test_group_directory_path)
# Check to make sure discovered folder is not in a host test directory
if test_case_directory != 'host_tests' and test_group_directory != 'host_tests':
test_name = test_path_to_name(d)
tests[test_name] = d
return tests

View File

@ -21,7 +21,7 @@ import argparse
import math
from os import listdir, remove, makedirs
from shutil import copyfile
from os.path import isdir, join, exists, split, relpath, splitext, abspath, commonprefix
from os.path import isdir, join, exists, split, relpath, splitext, abspath, commonprefix, normpath
from subprocess import Popen, PIPE, STDOUT, call
import json
from collections import OrderedDict
@ -173,6 +173,23 @@ def split_path(path):
return base, name, ext
def get_path_depth(path):
    """Return the number of directory levels in `path`.

    Roughly the number of path separators (os.sep) plus one.
    Ex. "path/to/dir" -> 3. Special cases: "." and "/" -> 0.
    """
    # Normalize first so redundant separators and "." components do not
    # inflate the count, then peel components off the tail one at a time.
    remaining, component = split(normpath(path))
    depth = 0
    while component and component != '.':
        depth += 1
        remaining, component = split(remaining)
    return depth
def args_error(parser, message):
print "\n\n%s\n\n" % message
parser.print_help()