mirror of https://github.com/ARMmbed/mbed-os.git
Remove static analysis scan
An earlier patch in this series relies on the assumption that all toolchain construction goes through `prepare_toolchain`. That is not yet the case: the only remaining user of the `mbedToolchain` object that does not go through `prepare_toolchain` is the static analysis scanner, which is effectively dead code at this point. I say we remove it, so this patch removes it.
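For context, the invariant the series drives toward is that nothing instantiates an `mbedToolchain` subclass directly; every caller asks a single factory, `prepare_toolchain`, so per-build configuration lives in one place. Below is a self-contained toy sketch of that pattern only — the names and signatures are made up for illustration and are not the mbed-os API:

# Toy illustration of the "single construction path" invariant; not mbed-os code.
TOOLCHAIN_CLASSES = {}  # name -> class, registered by each toolchain module

class Toolchain(object):
    def __init__(self, target):
        self.target = target
        self.jobs = 0  # configuration the factory is responsible for

class GccArm(Toolchain):
    pass

TOOLCHAIN_CLASSES["GCC_ARM"] = GccArm

def prepare_toolchain(target, toolchain_name, jobs=1):
    """The one place toolchains get built and configured."""
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target)
    toolchain.jobs = jobs
    return toolchain

# Every build path calls the factory; the removed scanner was the last caller
# still doing TOOLCHAIN_CLASSES[name](target) by hand.
tc = prepare_toolchain("K64F", "GCC_ARM", jobs=4)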
pull/3852/head
parent 51aa3330dd
commit d20bcba89b

115 tools/build.py
@@ -34,7 +34,6 @@ from tools.options import get_default_options_parser
 from tools.options import extract_profile
 from tools.build_api import build_library, build_mbed_libs, build_lib
 from tools.build_api import mcu_toolchain_matrix
-from tools.build_api import static_analysis_scan, static_analysis_scan_lib, static_analysis_scan_library
 from tools.build_api import print_build_results
 from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
 from utils import argparse_filestring_type, args_error

@@ -120,12 +119,6 @@ if __name__ == '__main__':
                         default=None,
                         help='For some commands you can use filter to filter out results')
 
-    parser.add_argument("--cppcheck",
-                        action="store_true",
-                        dest="cppcheck_validation",
-                        default=False,
-                        help="Forces 'cppcheck' static code analysis")
-
     parser.add_argument("-j", "--jobs", type=int, dest="jobs",
                         default=0, help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)")
     parser.add_argument("-N", "--artifact-name", dest="artifact_name",

@@ -212,84 +205,56 @@ if __name__ == '__main__':
 
     for toolchain in toolchains:
         for target in targets:
-            # CPPCHECK code validation
-            if options.cppcheck_validation:
-                try:
-                    mcu = TARGET_MAP[target]
-                    # CMSIS and MBED libs analysis
-                    profile = extract_profile(parser, options, toolchain)
-                    static_analysis_scan(
-                        mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
-                        verbose=options.verbose, jobs=options.jobs,
-                        build_profile=profile)
-                    for lib_id in libraries:
-                        # Static check for library
-                        static_analysis_scan_lib(
-                            lib_id, mcu, toolchain, CPPCHECK_CMD,
-                            CPPCHECK_MSG_FORMAT,
-                            extra_verbose=options.extra_verbose_notify,
-                            verbose=options.verbose, jobs=options.jobs,
-                            clean=options.clean, macros=options.macros,
-                            build_profile=profile)
-                        pass
-                except Exception, e:
-                    if options.verbose:
-                        import traceback
-                        traceback.print_exc(file=sys.stdout)
-                        sys.exit(1)
-                    print e
-            else:
-                # Build
-                tt_id = "%s::%s" % (toolchain, target)
-                if toolchain not in TARGET_MAP[target].supported_toolchains:
-                    # Log this later
-                    print "%s skipped: toolchain not supported" % tt_id
-                    skipped.append(tt_id)
-                else:
-                    try:
-                        mcu = TARGET_MAP[target]
-                        profile = extract_profile(parser, options, toolchain)
-                        if options.source_dir:
-                            lib_build_res = build_library(options.source_dir, options.build_dir, mcu, toolchain,
-                                                          extra_verbose=options.extra_verbose_notify,
-                                                          verbose=options.verbose,
-                                                          silent=options.silent,
-                                                          jobs=options.jobs,
-                                                          clean=options.clean,
-                                                          archive=(not options.no_archive),
-                                                          macros=options.macros,
-                                                          name=options.artifact_name,
-                                                          build_profile=profile)
-                        else:
-                            lib_build_res = build_mbed_libs(mcu, toolchain,
-                                                            extra_verbose=options.extra_verbose_notify,
-                                                            verbose=options.verbose,
-                                                            silent=options.silent,
-                                                            jobs=options.jobs,
-                                                            clean=options.clean,
-                                                            macros=options.macros,
-                                                            build_profile=profile)
-
-                        for lib_id in libraries:
-                            build_lib(lib_id, mcu, toolchain,
-                                      extra_verbose=options.extra_verbose_notify,
-                                      verbose=options.verbose,
-                                      silent=options.silent,
-                                      clean=options.clean,
-                                      macros=options.macros,
-                                      jobs=options.jobs,
-                                      build_profile=profile)
-                        if lib_build_res:
-                            successes.append(tt_id)
-                        else:
-                            skipped.append(tt_id)
-                    except Exception, e:
-                        if options.verbose:
-                            import traceback
-                            traceback.print_exc(file=sys.stdout)
-                            sys.exit(1)
-                        failures.append(tt_id)
-                        print e
+            tt_id = "%s::%s" % (toolchain, target)
+            if toolchain not in TARGET_MAP[target].supported_toolchains:
+                # Log this later
+                print "%s skipped: toolchain not supported" % tt_id
+                skipped.append(tt_id)
+            else:
+                try:
+                    mcu = TARGET_MAP[target]
+                    profile = extract_profile(parser, options, toolchain)
+                    if options.source_dir:
+                        lib_build_res = build_library(options.source_dir, options.build_dir, mcu, toolchain,
+                                                      extra_verbose=options.extra_verbose_notify,
+                                                      verbose=options.verbose,
+                                                      silent=options.silent,
+                                                      jobs=options.jobs,
+                                                      clean=options.clean,
+                                                      archive=(not options.no_archive),
+                                                      macros=options.macros,
+                                                      name=options.artifact_name,
+                                                      build_profile=profile)
+                    else:
+                        lib_build_res = build_mbed_libs(mcu, toolchain,
+                                                        extra_verbose=options.extra_verbose_notify,
+                                                        verbose=options.verbose,
+                                                        silent=options.silent,
+                                                        jobs=options.jobs,
+                                                        clean=options.clean,
+                                                        macros=options.macros,
+                                                        build_profile=profile)
+
+                    for lib_id in libraries:
+                        build_lib(lib_id, mcu, toolchain,
+                                  extra_verbose=options.extra_verbose_notify,
+                                  verbose=options.verbose,
+                                  silent=options.silent,
+                                  clean=options.clean,
+                                  macros=options.macros,
+                                  jobs=options.jobs,
+                                  build_profile=profile)
+                    if lib_build_res:
+                        successes.append(tt_id)
+                    else:
+                        skipped.append(tt_id)
+                except Exception, e:
+                    if options.verbose:
+                        import traceback
+                        traceback.print_exc(file=sys.stdout)
+                        sys.exit(1)
+                    failures.append(tt_id)
+                    print e
 
 
     # Write summary of the builds

tools/build_api.py

@@ -1199,283 +1199,6 @@ def get_target_supported_toolchains(target):
         else None
 
 
-def static_analysis_scan(target, toolchain_name, cppcheck_cmd,
-                         cppcheck_msg_format, verbose=False,
-                         clean=False, macros=None, notify=None, jobs=1,
-                         extra_verbose=False, build_profile=None):
-    """Perform static analysis on a target and toolchain combination
-
-    Positional arguments:
-    target - the target to fake the build for
-    toolchain_name - pretend you would compile with this toolchain
-    cppcheck_cmd - the command used to do static analysis
-    cppcheck_msg_format - the format of the check messages
-
-    Keyword arguments:
-    verbose - more printing!
-    clean - start from a clean slate
-    macros - extra macros to compile with
-    notify - the notification event handling function
-    jobs - number of commands to run at once
-    extra_verbose - even moar printing
-    build_profile - a dict of flags that will be passed to the compiler
-    """
-    # Toolchain
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
-                                                  notify=notify,
-                                                  extra_verbose=extra_verbose,
-                                                  build_profile=build_profile)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-    toolchain.build_all = clean
-
-    # Source and Build Paths
-    build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
-    build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
-    mkdir(build_toolchain)
-
-    tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
-    mkdir(tmp_path)
-
-    # CMSIS
-    toolchain.info("Static analysis for %s (%s, %s)" %
-                   ('CMSIS', target.name, toolchain_name))
-    cmsis_src = MBED_CMSIS_PATH
-    resources = toolchain.scan_resources(cmsis_src)
-
-    # Copy files before analysis
-    toolchain.copy_files(resources.headers, build_target)
-    toolchain.copy_files(resources.linker_script, build_toolchain)
-
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
-    # command line
-    includes = ["-I%s"% i for i in resources.inc_dirs]
-    includes.append("-I%s"% str(build_target))
-    c_sources = " ".join(resources.c_sources)
-    cpp_sources = " ".join(resources.cpp_sources)
-    macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
-
-    includes = [inc.strip() for inc in includes]
-    macros = [mac.strip() for mac in macros]
-
-    check_cmd = cppcheck_cmd
-    check_cmd += cppcheck_msg_format
-    check_cmd += includes
-    check_cmd += macros
-
-    # We need to pass some params via file to avoid "command line too long in
-    # some OSs"
-    tmp_file = tempfile.NamedTemporaryFile(delete=False)
-    tmp_file.writelines(line + '\n' for line in c_sources.split())
-    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
-    tmp_file.close()
-    check_cmd += ["--file-list=%s"% tmp_file.name]
-
-    _stdout, _stderr, _ = run_cmd(check_cmd)
-    if verbose:
-        print _stdout
-        print _stderr
-
-    # =========================================================================
-
-    # MBED
-    toolchain.info("Static analysis for %s (%s, %s)" %
-                   ('MBED', target.name, toolchain_name))
-
-    # Common Headers
-    toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_DRIVERS).headers,
-                         MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_PLATFORM).headers,
-                         MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
-                         MBED_LIBRARIES)
-
-    # Target specific sources
-    hal_src = join(MBED_TARGETS_PATH, "hal")
-    hal_implementation = toolchain.scan_resources(hal_src)
-
-    # Copy files before analysis
-    toolchain.copy_files(hal_implementation.headers +
-                         hal_implementation.hex_files, build_target,
-                         resources=hal_implementation)
-    incdirs = toolchain.scan_resources(build_target)
-
-    target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
-    target_includes.append("-I%s"% str(build_target))
-    target_includes.append("-I%s"% str(hal_src))
-    target_c_sources = " ".join(incdirs.c_sources)
-    target_cpp_sources = " ".join(incdirs.cpp_sources)
-    target_macros = ["-D%s"% s for s in
-                     toolchain.get_symbols() + toolchain.macros]
-
-    # Common Sources
-    mbed_resources = toolchain.scan_resources(MBED_COMMON)
-
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
-    # command line
-    mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
-    mbed_includes.append("-I%s"% str(build_target))
-    mbed_includes.append("-I%s"% str(MBED_DRIVERS))
-    mbed_includes.append("-I%s"% str(MBED_PLATFORM))
-    mbed_includes.append("-I%s"% str(MBED_HAL))
-    mbed_c_sources = " ".join(mbed_resources.c_sources)
-    mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
-
-    target_includes = [inc.strip() for inc in target_includes]
-    mbed_includes = [inc.strip() for inc in mbed_includes]
-    target_macros = [mac.strip() for mac in target_macros]
-
-    check_cmd = cppcheck_cmd
-    check_cmd += cppcheck_msg_format
-    check_cmd += target_includes
-    check_cmd += mbed_includes
-    check_cmd += target_macros
-
-    # We need to pass some parames via file to avoid "command line too long in
-    # some OSs"
-    tmp_file = tempfile.NamedTemporaryFile(delete=False)
-    tmp_file.writelines(line + '\n' for line in target_c_sources.split())
-    tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
-    tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
-    tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
-    tmp_file.close()
-    check_cmd += ["--file-list=%s"% tmp_file.name]
-
-    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
-    if verbose:
-        print _stdout
-        print _stderr
-
-
-def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd,
-                             cppcheck_msg_format, verbose=False,
-                             clean=False, macros=None, notify=None, jobs=1,
-                             extra_verbose=False, build_profile=None):
-    """Perform static analysis on a library as if it were to be compiled for a
-    particular target and toolchain combination
-    """
-    lib = Library(lib_id)
-    if lib.is_supported(target, toolchain):
-        static_analysis_scan_library(
-            lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd,
-            cppcheck_msg_format, lib.dependencies, verbose=verbose,
-            clean=clean, macros=macros, notify=notify, jobs=jobs,
-            extra_verbose=extra_verbose, build_profile=build_profile)
-    else:
-        print('Library "%s" is not yet supported on target %s with toolchain %s'
-              % (lib_id, target.name, toolchain))
-
-
-def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
-                                 cppcheck_cmd, cppcheck_msg_format,
-                                 dependencies_paths=None,
-                                 name=None, clean=False, notify=None,
-                                 verbose=False, macros=None, jobs=1,
-                                 extra_verbose=False, build_profile=None):
-    """ Function scans library for statically detectable defects
-
-    Positional arguments:
-    src_paths - the list of library paths to scan
-    build_path - the location directory of result files
-    target - the target to fake the build for
-    toolchain_name - pretend you would compile with this toolchain
-    cppcheck_cmd - the command used to do static analysis
-    cppcheck_msg_format - the format of the check messages
-
-    Keyword arguments:
-    dependencies_paths - the paths to sources that this library depends on
-    name - the name of this library
-    clean - start from a clean slate
-    notify - the notification event handling function
-    verbose - more printing!
-    macros - extra macros to compile with
-    jobs - number of commands to run at once
-    extra_verbose - even moar printing
-    build_profile - a dict of flags that will be passed to the compiler
-    """
-    if type(src_paths) != ListType:
-        src_paths = [src_paths]
-
-    for src_path in src_paths:
-        if not exists(src_path):
-            raise Exception("The library source folder does not exist: %s",
-                            src_path)
-
-    # Toolchain instance
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
-                                                  notify=notify,
-                                                  extra_verbose=extra_verbose,
-                                                  build_profile=build_profile)
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-
-    # The first path will give the name to the library
-    name = basename(src_paths[0])
-    toolchain.info("Static analysis for library %s (%s, %s)" %
-                   (name.upper(), target.name, toolchain_name))
-
-    # Scan Resources
-    resources = []
-    for src_path in src_paths:
-        resources.append(toolchain.scan_resources(src_path))
-
-    # Dependencies Include Paths
-    dependencies_include_dir = []
-    if dependencies_paths is not None:
-        for path in dependencies_paths:
-            lib_resources = toolchain.scan_resources(path)
-            dependencies_include_dir.extend(lib_resources.inc_dirs)
-
-    # Create the desired build directory structure
-    bin_path = join(build_path, toolchain.obj_path)
-    mkdir(bin_path)
-    tmp_path = join(build_path, '.temp', toolchain.obj_path)
-    mkdir(tmp_path)
-
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
-    # command line
-    includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
-    c_sources = " "
-    cpp_sources = " "
-    macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]
-
-    # Copy Headers
-    for resource in resources:
-        toolchain.copy_files(resource.headers, build_path, resources=resource)
-        includes += ["-I%s" % i for i in resource.inc_dirs]
-        c_sources += " ".join(resource.c_sources) + " "
-        cpp_sources += " ".join(resource.cpp_sources) + " "
-
-    dependencies_include_dir.extend(
-        toolchain.scan_resources(build_path).inc_dirs)
-
-    includes = [inc.strip() for inc in includes]
-    macros = [mac.strip() for mac in macros]
-
-    check_cmd = cppcheck_cmd
-    check_cmd += cppcheck_msg_format
-    check_cmd += includes
-    check_cmd += macros
-
-    # We need to pass some parameters via file to avoid "command line too long
-    # in some OSs". A temporary file is created to store e.g. cppcheck list of
-    # files for command line
-    tmp_file = tempfile.NamedTemporaryFile(delete=False)
-    tmp_file.writelines(line + '\n' for line in c_sources.split())
-    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
-    tmp_file.close()
-    check_cmd += ["--file-list=%s"% tmp_file.name]
-
-    # This will allow us to grab result from both stdio and stderr outputs (so
-    # we can show them) We assume static code analysis tool is outputting
-    # defects on STDERR
-    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
-    if verbose:
-        print _stdout
-        print _stderr
-
-
 def print_build_results(result_list, build_name):
     """ Generate result string for build results
 

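One detail of the removed helpers worth noting: they worked around OS command-line length limits by writing the source list to a temporary file and passing it to cppcheck via --file-list. For anyone who still wants to run cppcheck by hand, here is a standalone sketch of the same pattern; the helper name, paths, and defines are hypothetical, and the template string is just an example of cppcheck's --template option:

import subprocess
import tempfile

def run_cppcheck(sources, include_dirs, defines):
    # Write the (possibly huge) source list to a file so the command line
    # stays short on every OS, then point cppcheck at it with --file-list.
    list_file = tempfile.NamedTemporaryFile(mode="w", suffix=".lst", delete=False)
    list_file.write("\n".join(sources) + "\n")
    list_file.close()

    cmd = ["cppcheck", "--enable=all",
           "--template={file}:{line}: {severity}: {message}"]
    cmd += ["-I" + path for path in include_dirs]
    cmd += ["-D" + define for define in defines]
    cmd += ["--file-list=" + list_file.name]

    # Like the removed code, capture both streams; cppcheck reports its
    # findings on stderr.
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return out, err

# Example (hypothetical paths):
# out, err = run_cppcheck(["drivers/Ticker.cpp"], ["./mbed"], ["TARGET_K64F"])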
tools/toolchains/__init__.py

@@ -299,9 +299,6 @@ class mbedToolchain:
         self.build_dir = build_dir
         self.timestamp = time()
 
-        # Output build naming based on target+toolchain combo (mbed 2.0 builds)
-        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
-
         # Number of concurrent build jobs. 0 means auto (based on host system cores)
         self.jobs = 0
 
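For reference, the removed obj_path attribute only ever combined the target and toolchain names into a per-build output subdirectory, and with the scanner gone this commit can drop it. A tiny illustration of what it computed, with made-up target/toolchain names:

from os.path import join

# e.g. "TARGET_K64F/TOOLCHAIN_GCC_ARM" (names here are examples, not taken from the diff)
obj_path = join("TARGET_" + "K64F", "TOOLCHAIN_" + "GCC_ARM")
print(obj_path)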