Merge pull request #395 from screamerbg/master

Support for multiple compile queues/jobs, stats cache and compile order consistency
Bogdan Marinescu 2014-08-07 11:19:13 +01:00
commit 2df4afd948
8 changed files with 283 additions and 69 deletions
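In short: the build, release and make scripts gain a -j/--jobs option; the job count is passed through the build API into the toolchain classes; dependency mtimes are cached in a per-toolchain stat cache; compile commands are dispatched to a multiprocessing pool when enough jobs are requested; and the file list is sorted so the compile order is deterministic.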

View File

@@ -107,6 +107,9 @@ if __name__ == '__main__':
dest='general_filter_regex',
default=None,
help='For some commands you can use filter to filter out results')
parser.add_option("-j", "--jobs", type="int", dest="jobs",
default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
parser.add_option("-v", "--verbose",
action="store_true",
@@ -182,12 +185,12 @@ if __name__ == '__main__':
try:
mcu = TARGET_MAP[target]
# CMSIS and MBED libs analysis
static_analysis_scan(mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, verbose=options.verbose)
static_analysis_scan(mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, verbose=options.verbose, jobs=options.jobs)
for lib_id in libraries:
# Static check for library
static_analysis_scan_lib(lib_id, mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
options=options.options,
notify=notify, verbose=options.verbose, clean=options.clean,
notify=notify, verbose=options.verbose, jobs=options.jobs, clean=options.clean,
macros=options.macros)
pass
except Exception, e:
@@ -204,7 +207,7 @@ if __name__ == '__main__':
try:
mcu = TARGET_MAP[target]
lib_build_res = build_mbed_libs(mcu, toolchain, options=options.options,
notify=notify, verbose=options.verbose, clean=options.clean,
notify=notify, verbose=options.verbose, jobs=options.jobs, clean=options.clean,
macros=options.macros)
for lib_id in libraries:

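A note on the new -j/--jobs option: a value of 0 asks for auto-detection. A minimal sketch of how such a value can be resolved to the host CPU count (the same expression compile_sources() uses further down):

# Minimal sketch, not part of the diff: resolving jobs=0 to the host CPU count.
from multiprocessing import cpu_count

def resolve_jobs(jobs):
    # 0 means "auto": one job per CPU on the host machine
    return jobs if jobs else cpu_count()
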
View File

@@ -31,11 +31,12 @@ from workspace_tools.targets import TARGET_NAMES, TARGET_MAP
def build_project(src_path, build_path, target, toolchain_name,
libraries_paths=None, options=None, linker_script=None,
clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None):
clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None, jobs=1):
""" This function builds project. Project can be for example one test / UT """
# Toolchain instance
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
src_paths = [src_path] if type(src_path) != ListType else src_path
@@ -90,7 +91,7 @@ def build_project(src_path, build_path, target, toolchain_name,
def build_library(src_paths, build_path, target, toolchain_name,
dependencies_paths=None, options=None, name=None, clean=False,
notify=None, verbose=False, macros=None, inc_dirs=None):
notify=None, verbose=False, macros=None, inc_dirs=None, jobs=1):
""" src_path: the path of the source directory
build_path: the path of the build directory
target: ['LPC1768', 'LPC11U24', 'LPC2368']
@@ -110,6 +111,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
# Toolchain instance
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
# The first path will give the name to the library
@@ -166,7 +168,7 @@ def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=Fals
# We do have unique legacy conventions about how we build and package the mbed library
def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None):
def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1):
""" Function returns True is library was built and false if building was skipped """
# Check toolchain support
if toolchain_name not in target.supported_toolchains:
@@ -178,6 +180,7 @@ def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=F
# Toolchain
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
# Source and Build Paths
@@ -296,10 +299,11 @@ def get_target_supported_toolchains(target):
return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP else None
def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None):
def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1):
# Toolchain
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
# Source and Build Paths
@@ -420,7 +424,7 @@ def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd, cppcheck_m
def static_analysis_scan_library(src_paths, build_path, target, toolchain_name, cppcheck_cmd, cppcheck_msg_format,
dependencies_paths=None, options=None, name=None, clean=False,
notify=None, verbose=False, macros=None):
notify=None, verbose=False, macros=None, jobs=1):
""" Function scans library (or just some set of sources/headers) for staticly detectable defects """
if type(src_paths) != ListType:
src_paths = [src_paths]
@@ -432,6 +436,7 @@ def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
# Toolchain instance
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
# The first path will give the name to the library
name = basename(src_paths[0])

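With the extended signatures above, a caller can forward its parsed jobs value straight into the build API. A hypothetical invocation, assuming these functions live in workspace_tools.build_api (the module path is not shown in this view); the target and toolchain names are taken from examples elsewhere in the diff:

# Hypothetical usage sketch; the module path is an assumption.
from workspace_tools.targets import TARGET_MAP
from workspace_tools.build_api import build_mbed_libs

# Build the mbed libraries for LPC1768 with the ARM toolchain using 4 parallel jobs.
build_mbed_libs(TARGET_MAP['LPC1768'], 'ARM', verbose=True, jobs=4)
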
View File

@@ -72,6 +72,8 @@ if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-o', '--official', dest="official_only", default=False, action="store_true",
help="Build using only the official toolchain for each target")
parser.add_option("-j", "--jobs", type="int", dest="jobs",
default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="Verbose diagnostic output")
options, args = parser.parse_args()
@@ -86,7 +88,7 @@ if __name__ == '__main__':
for toolchain in toolchains:
id = "%s::%s" % (target_name, toolchain)
try:
build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose)
build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs)
successes.append(id)
except Exception, e:
failures.append(id)

View File

@@ -47,6 +47,8 @@ if __name__ == '__main__':
help="The index of the desired test program: [0-%d]" % (len(TESTS)-1))
parser.add_option("-n", dest="program_name",
help="The name of the desired test program")
parser.add_option("-j", "--jobs", type="int", dest="jobs",
default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="Verbose diagnostic output")
parser.add_option("-D", "", action="append", dest="macros",

View File

@@ -26,9 +26,16 @@ from workspace_tools.utils import run_cmd, mkdir, rel_path, ToolException, split
from workspace_tools.patch import patch
from workspace_tools.settings import BUILD_OPTIONS, MBED_ORG_USER
from multiprocessing import Pool, Manager, cpu_count
from time import sleep
from pprint import pprint
import workspace_tools.hooks as hooks
import re
# Minimum job count needed to enable multiprocessing; set it higher than the host machine's CPU count to disable multiprocessing
CPU_COUNT_MIN = 1
def print_notify(event):
# Default command line notification
if event['type'] in ['info', 'debug']:
@@ -60,6 +67,22 @@ def print_notify_verbose(event):
elif event['type'] == 'progress':
print_notify(event) # standard handle
def compile_worker(job):
results = []
for command in job['commands']:
_, stderr, rc = run_cmd(command, job['work_dir'])
results.append({
'code': rc,
'output': stderr,
'command': command
})
return {
'source': job['source'],
'object': job['object'],
'commands': job['commands'],
'results': results
}
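
compile_worker() is the function executed in worker processes: it runs each command of a job in the job's work directory and records the return code and stderr so the parent process can report them. A hypothetical job dict of the shape that compile_sources() queues up further down:

# Hypothetical job dict; paths and the command line are examples only.
job = {
    'source': 'main.cpp',
    'object': 'BUILD/main.o',
    'commands': [['arm-none-eabi-g++', '-c', 'main.cpp', '-o', 'BUILD/main.o']],
    'work_dir': 'BUILD',
    'chroot': None,
}
# compile_worker(job) returns a dict echoing 'source', 'object' and 'commands',
# plus a 'results' list with one {'code', 'output', 'command'} entry per command.
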
class Resources:
def __init__(self, base_path=None):
@@ -199,6 +222,7 @@ class mbedToolchain:
self.options = []
else:
self.options = options
self.macros = macros or []
self.options.extend(BUILD_OPTIONS)
if self.options:
@@ -209,9 +233,19 @@ class mbedToolchain:
self.symbols = None
self.labels = None
self.has_config = False
self.stat_cache = {}
self.build_all = False
self.timestamp = time()
self.jobs = 1
self.CHROOT = None
self.mp_pool = None
def __exit__(self):
if self.mp_pool is not None:
self.mp_pool.terminate()
def goanna_parse_line(self, line):
if "analyze" in self.options:
@@ -279,7 +313,32 @@ class mbedToolchain:
return True
return False
def need_update_new(self, target, dependencies):
if self.build_all:
return True
if not exists(target):
return True
target_mod_time = stat(target).st_mtime
for d in dependencies:
# Some objects are not provided with a full path and here we do not have
# information about the library paths. Safe option: assume an update is needed
if not d:
return True
if self.stat_cache.has_key(d):
if self.stat_cache[d] >= target_mod_time:
return True
else:
if not exists(d): return True
self.stat_cache[d] = stat(d).st_mtime
if self.stat_cache[d] >= target_mod_time: return True
return False
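
need_update_new() memoizes each dependency's st_mtime in self.stat_cache, so a header shared by many objects is only stat()-ed once per build instead of once per target. A standalone sketch of the same idea (not the toolchain code itself):

# Minimal sketch of the mtime-caching dependency check, assuming plain file paths.
from os import stat
from os.path import exists

stat_cache = {}

def needs_rebuild(target, dependencies):
    if not exists(target):
        return True
    target_mtime = stat(target).st_mtime
    for dep in dependencies:
        if not dep:
            # Unknown dependency path: play it safe and rebuild.
            return True
        if dep not in stat_cache:
            if not exists(dep):
                return True
            stat_cache[dep] = stat(dep).st_mtime
        if stat_cache[dep] >= target_mtime:
            return True
    return False
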
def scan_resources(self, path):
labels = self.get_labels()
resources = Resources(path)
@@ -395,73 +454,193 @@ class mbedToolchain:
obj_dir = join(build_path, relpath(source_dir, base_dir))
mkdir(obj_dir)
return join(obj_dir, name + '.o')
def compile_sources(self, resources, build_path, inc_dirs=None):
# Web IDE progress bar for project build
self.to_be_compiled = len(resources.s_sources) + len(resources.c_sources) + len(resources.cpp_sources)
files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
self.to_be_compiled = len(files_to_compile)
self.compiled = 0
objects = []
#for i in self.build_params:
# self.debug(i)
# self.debug("%s" % self.build_params[i])
inc_paths = resources.inc_dirs
if inc_dirs is not None:
inc_paths.extend(inc_dirs)
objects=[]
queue=[]
prev_dir=None
# The dependency checking for C/C++ is delegated to the compiler
base_path = resources.base_path
for source in resources.s_sources:
files_to_compile.sort()
for source in files_to_compile:
_, name, _ = split_path(source)
object = self.relative_object_path(build_path, base_path, source)
# Avoid multiple mkdir() calls on same work directory
work_dir = dirname(object)
if work_dir is not prev_dir:
prev_dir = work_dir
mkdir(work_dir)
# Queue mode (multiprocessing)
commands = self._compile_command(source, object, inc_paths)
if commands is not None:
queue.append({
'source': source,
'object': object,
'commands': commands,
'work_dir': work_dir,
'chroot': self.CHROOT
})
else:
objects.append(object)
# Use the multiprocessing queue only if the job count is above the minimum and there are more queued files than jobs
jobs = self.jobs if self.jobs else cpu_count()
if jobs > CPU_COUNT_MIN and len(queue) > jobs:
return self.compile_queue(queue, objects)
else:
return self.compile_seq(queue, objects)
def compile_seq(self, queue, objects):
for item in queue:
result = compile_worker(item)
self.compiled += 1
object = self.relative_object_path(build_path, base_path, source)
if self.need_update(object, [source]):
self.progress("assemble", source, build_update=True)
self.assemble(source, object, inc_paths)
objects.append(object)
# The dependency checking for C/C++ is delegated to the specific compiler
for source in resources.c_sources:
object = self.relative_object_path(build_path, base_path, source)
self.compile_c(source, object, inc_paths)
objects.append(object)
for source in resources.cpp_sources:
object = self.relative_object_path(build_path, base_path, source)
self.compile_cpp(source, object, inc_paths)
objects.append(object)
self.progress("compile", item['source'], build_update=True)
for res in result['results']:
self.debug("Command: %s" % ' '.join(res['command']))
self._compile_output([
res['code'],
res['output'],
res['command']
])
objects.append(result['object'])
return objects
def compile(self, cc, source, object, includes):
def compile_queue(self, queue, objects):
jobs_count = int(self.jobs if self.jobs else cpu_count())
p = Pool(processes=jobs_count)
results = []
for i in range(len(queue)):
results.append(p.apply_async(compile_worker, [queue[i]]))
itr = 0
while True:
itr += 1
if itr > 6000:
p.terminate()
p.join()
raise ToolException("Compile did not finish in 5 minutes")
pending = 0
for r in results:
if r._ready is True:
try:
result = r.get()
results.remove(r)
self.compiled += 1
self.progress("compile", result['source'], build_update=True)
for res in result['results']:
self.debug("Command: %s" % ' '.join(res['command']))
self._compile_output([
res['code'],
res['output'],
res['command']
])
objects.append(result['object'])
except ToolException, err:
p.terminate()
p.join()
raise ToolException(err)
else:
pending += 1
if pending > jobs_count:
break
if len(results) == 0:
break
sleep(0.01)
results = None
p.terminate()
p.join()
return objects
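
compile_queue() submits every queued job to a multiprocessing.Pool via apply_async(), then polls the pending results so warnings and errors can be reported as each file finishes, terminating the pool on error or after a fixed number of polls. A minimal standalone sketch of that submit-and-poll pattern (the worker is a stand-in, and the sketch uses the public AsyncResult.ready() instead of the private _ready attribute):

# Minimal sketch of the submit-and-poll pattern; square() is a stand-in worker.
from multiprocessing import Pool
from time import sleep

def square(n):
    return n * n

if __name__ == '__main__':
    pool = Pool(processes=4)
    pending = [pool.apply_async(square, [n]) for n in range(8)]
    finished = []
    polls = 0
    while pending and polls < 6000:
        polls += 1
        for r in pending[:]:
            if r.ready():
                finished.append(r.get())   # get() re-raises any worker exception
                pending.remove(r)
        sleep(0.01)
    pool.terminate()
    pool.join()
    print(finished)
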
def _compile_command(self, source, object, includes):
# Check dependencies
base, _ = splitext(object)
dep_path = base + '.d'
_, ext = splitext(source)
ext = ext.lower()
if ext == '.c' or ext == '.cpp':
base, _ = splitext(object)
dep_path = base + '.d'
deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
if len(deps) == 0 or self.need_update(object, deps):
return self._compile(source, object, includes)
elif ext == '.s':
deps = [source]
if self.need_update(object, deps):
return self._assemble(source, object, includes)
else:
return False
return None
def _compile_output(self, output=[]):
rc = output[0]
stderr = output[1]
command = output[2]
# Parse output for Warnings and Errors
self.parse_output(stderr)
self.debug("Return: %s" % rc)
self.debug("Output: %s" % stderr)
# Check return code
if rc != 0:
raise ToolException(stderr)
self.compiled += 1
if (not exists(dep_path) or
self.need_update(object, self.parse_dependencies(dep_path))):
self.progress("compile", source, build_update=True)
# Compile
command = cc + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source]
if hasattr(self, "get_dep_opt"):
command.extend(self.get_dep_opt(dep_path))
if hasattr(self, "cc_extra"):
command.extend(self.cc_extra(base))
self.debug(command)
_, stderr, rc = run_cmd(self.hook.get_cmdline_compiler(command), dirname(object))
# Parse output for Warnings and Errors
self.parse_output(stderr)
# Check return code
if rc != 0:
raise ToolException(stderr)
def _compile(self, source, object, includes):
_, ext = splitext(source)
ext = ext.lower()
cc = self.cppc if ext == ".cpp" else self.cc
command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source]
if hasattr(self, "get_dep_opt"):
base, _ = splitext(object)
dep_path = base + '.d'
command.extend(self.get_dep_opt(dep_path))
if hasattr(self, "cc_extra"):
command.extend(self.cc_extra(base))
return [command]
def compile(self, source, object, includes):
self.progress("compile", source, build_update=True)
commands = self._compile(source, object, includes)
for command in commands:
self.debug("Command: %s" % ' '.join(command))
_, stderr, rc = run_cmd(command, dirname(object))
self._compile_output([rc, stderr, command])
def compile_c(self, source, object, includes):
self.compile(self.cc, source, object, includes)
self.compile(source, object, includes)
def compile_cpp(self, source, object, includes):
self.compile(self.cppc, source, object, includes)
self.compile(source, object, includes)
def build_library(self, objects, dir, name):
lib = self.STD_LIB_NAME % name
@@ -494,7 +673,7 @@ class mbedToolchain:
self.binary(r, elf, bin)
if self.target.name.startswith('LPC'):
self.debug("LPC Patch %s" % filename)
self.debug("LPC Patch: %s" % filename)
patch(bin)
self.var("compile_succeded", True)
@@ -506,9 +685,11 @@ class mbedToolchain:
return bin
def default_cmd(self, command):
self.debug(command)
self.debug("Command: %s" % ' '.join(command))
stdout, stderr, rc = run_cmd(command)
self.debug(stdout)
self.debug("Return: %s" % rc)
self.debug("Output: %s" % ' '.join(stdout))
if rc != 0:
for line in stderr.splitlines():
self.tool_error(line)
@@ -522,6 +703,7 @@ class mbedToolchain:
if self.VERBOSE:
if type(message) is ListType:
message = ' '.join(message)
message = "[DEBUG] " + message
self.notify({'type': 'debug', 'message': message})
def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):

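Taken together, the new compile path splits three responsibilities: _compile_command() decides from the dependency file whether a rebuild is needed and returns the command list (or None/False to skip), _compile() and _assemble() only construct commands, and _compile_output() parses one result and raises ToolException on failure. A hypothetical helper showing how the pieces chain for a single file on the sequential path (tc is an already-constructed toolchain instance; compile_worker is assumed to be importable from the toolchains module):

# Hypothetical glue code, not part of the diff.
from os.path import dirname

def compile_one(tc, source, object, inc_paths):
    commands = tc._compile_command(source, object, inc_paths)
    if not commands:
        # None (object is up to date) or False (unsupported extension): skip.
        return object
    result = compile_worker({'source': source, 'object': object,
                             'commands': commands,
                             'work_dir': dirname(object),
                             'chroot': tc.CHROOT})
    for res in result['results']:
        tc._compile_output([res['code'], res['output'], res['command']])
    return result['object']
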
View File

@@ -80,11 +80,18 @@ class ARM(mbedToolchain):
if option in tool:
tool.remove(option)
def assemble(self, source, object, includes):
def _assemble(self, source, object, includes):
# Preprocess first, then assemble
tempfile = object + '.E.s'
self.default_cmd(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source])
self.default_cmd(self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile]))
return [
self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source],
self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile])
]
def assemble(self, source, object, includes):
commands = self._assemble(source, object, includes);
for command in commands:
self.default_cmd(command)
def parse_dependencies(self, dep_path):
dependencies = []
@@ -114,7 +121,10 @@ class ARM(mbedToolchain):
match.group('line'),
match.group('message')
)
def get_dep_opt(self, dep_path):
return ["--depend", dep_path]
def archive(self, objects, lib_path):
self.default_cmd([self.ar, '-r', lib_path] + objects)

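The same refactoring is applied to the ARM toolchain above and to GCC and IAR below: _assemble() only builds the command list(s), the public assemble() wrapper runs them immediately, and _compile_command() can hand the same lists to the multiprocessing queue. A minimal sketch of that pattern with placeholder tool names:

# Minimal sketch of the build-commands/run-commands split; the tool name and
# default_cmd body are placeholders, not mbed code.
class ExampleToolchain(object):
    asm = ['example-as']

    def _assemble(self, source, object, includes):
        # Build the command without executing it.
        return [self.asm + ["-I%s" % i for i in includes] + ["-o", object, source]]

    def assemble(self, source, object, includes):
        # Immediate-execution path; the queued path reuses _assemble() elsewhere.
        for command in self._assemble(source, object, includes):
            self.default_cmd(command)

    def default_cmd(self, command):
        print(' '.join(command))
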
View File

@@ -81,8 +81,13 @@ class GCC(mbedToolchain):
self.ar = join(tool_path, "arm-none-eabi-ar")
self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")
def _assemble(self, source, object, includes):
return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
def assemble(self, source, object, includes):
self.default_cmd(self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source]))
commands = self._assemble(source, object, includes);
for command in commands:
self.default_cmd(command)
def parse_dependencies(self, dep_path):
dependencies = []

View File

@@ -93,9 +93,14 @@ class IAR(mbedToolchain):
def parse_dependencies(self, dep_path):
return [path.strip() for path in open(dep_path).readlines()
if (path and not path.isspace())]
def _assemble(self, source, object, includes):
return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
def assemble(self, source, object, includes):
self.default_cmd(self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source]))
commands = self._assemble(source, object, includes);
for command in commands:
self.default_cmd(command)
def archive(self, objects, lib_path):
if exists(lib_path):