Improve resources API to include more use cases

pull/7183/head
Jimmy Brisson 2018-06-12 15:00:56 -05:00
parent 06f3fca6cf
commit de913e1ea2
8 changed files with 294 additions and 303 deletions

View File

@ -42,7 +42,7 @@ from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
BUILD_DIR)
from .resources import Resources
from .resources import Resources, FileType
from .notifier.mock import MockNotifier
from .targets import TARGET_NAMES, TARGET_MAP
from .libraries import Library
@ -529,7 +529,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
# Compile Sources
objects = toolchain.compile_sources(resources, resources.inc_dirs)
resources.objects.extend(objects)
resources.add_files_to_type(FileType.OBJECT, objects)
# Link Program
if toolchain.config.has_regions:

View File

@ -30,6 +30,7 @@ from jinja2 import FileSystemLoader, StrictUndefined
from jinja2.environment import Environment
from jsonschema import Draft4Validator, RefResolver
from ..resources import FileType
from ..utils import (json_file_to_dict, intelhex_offset, integer,
NotSupportedException)
from ..arm_pack_manager import Cache
@ -61,6 +62,14 @@ RAM_OVERRIDES = set([
BOOTLOADER_OVERRIDES = ROM_OVERRIDES | RAM_OVERRIDES
# Feature names that may legally appear in a target/application
# configuration.  Config validates `target.features` against this list;
# it lives at module scope so the exporters can import it as well.
ALLOWED_FEATURES = [
"UVISOR", "BLE", "CLIENT", "IPV4", "LWIP", "COMMON_PAL", "STORAGE",
"NANOSTACK","CRYPTOCELL310",
# Nanostack configurations
"LOWPAN_BORDER_ROUTER", "LOWPAN_HOST", "LOWPAN_ROUTER", "NANOSTACK_FULL",
"THREAD_BORDER_ROUTER", "THREAD_END_DEVICE", "THREAD_ROUTER",
"ETHERNET_HOST",
]
# Base class for all configuration exceptions
class ConfigException(Exception):
@ -396,13 +405,6 @@ class Config(object):
__unused_overrides = set(["target.bootloader_img", "target.restrict_size",
"target.mbed_app_start", "target.mbed_app_size"])
# Allowed features in configurations
__allowed_features = [
"UVISOR", "BLE", "CLIENT", "IPV4", "LWIP", "COMMON_PAL", "STORAGE", "NANOSTACK","CRYPTOCELL310",
# Nanostack configurations
"LOWPAN_BORDER_ROUTER", "LOWPAN_HOST", "LOWPAN_ROUTER", "NANOSTACK_FULL", "THREAD_BORDER_ROUTER", "THREAD_END_DEVICE", "THREAD_ROUTER", "ETHERNET_HOST"
]
@classmethod
def find_app_config(cls, top_level_dirs):
app_config_location = None
@ -1043,7 +1045,7 @@ class Config(object):
.update_target(self.target)
for feature in self.target.features:
if feature not in self.__allowed_features:
if feature not in ALLOWED_FEATURES:
raise ConfigException(
"Feature '%s' is not a supported features" % feature)
@ -1084,7 +1086,9 @@ class Config(object):
while True:
# Add/update the configuration with any .json files found while
# scanning
self.add_config_files(resources.json_files)
self.add_config_files(
f.path for f in resources.get_file_refs(FileType.JSON)
)
# Add features while we find new ones
features = set(self.get_features())

View File

@ -18,14 +18,15 @@
from __future__ import print_function, division, absolute_import
import sys
from os.path import join, abspath, dirname, exists
from os.path import join, abspath, dirname, exists, isfile
from os.path import basename, relpath, normpath, splitext
from os import makedirs, walk
import copy
from shutil import rmtree, copyfile
import zipfile
from ..resources import Resources
from ..resources import Resources, FileType, FileRef
from ..config import ALLOWED_FEATURES
from ..build_api import prepare_toolchain
from ..targets import TARGET_NAMES
from . import (lpcxpresso, ds5_5, iar, makefile, embitz, coide, kds, simplicity,
@ -161,22 +162,23 @@ def generate_project_files(resources, export_path, target, name, toolchain, ide,
return files, exporter
# Legacy (pre-change) implementation: for each (location, Resources)
# pair, yield the location and a flat list of every file path to zip.
def _inner_zip_export(resources, inc_repos):
for loc, res in resources.items():
# Collect every file the legacy Resources object tracked as
# separate flat path lists.
to_zip = (
res.headers + res.s_sources + res.c_sources +\
res.cpp_sources + res.libraries + res.hex_files + \
[res.linker_script] + res.bin_files + res.objects + \
res.json_files + res.lib_refs + res.lib_builds)
if inc_repos:
# Include every file under each repository directory, recording
# its base path so zip_export can later compute a relative name.
for directory in res.repo_dirs:
for root, _, files in walk(directory):
for repo_file in files:
source = join(root, repo_file)
to_zip.append(source)
res.file_basepath[source] = res.base_path
to_zip += res.repo_files
yield loc, to_zip
def _inner_zip_export(resources, prj_files, inc_repos):
    """Build the list of FileRef(name-in-zip, on-disk path) pairs to zip.

    Positional arguments:
    resources - a Resources object holding the scanned project files
    prj_files - extra project files to place at the archive root
    inc_repos - when True, also include the contents of repository dirs
    """
    # Flatten every typed FileRef list into one list.  A nested
    # comprehension replaces the quadratic sum(list_of_lists, []) idiom.
    to_zip = [
        ref
        for ftype in Resources.ALL_FILE_TYPES
        for ref in resources.get_file_refs(ftype)
    ]
    to_zip.extend(FileRef(basename(pfile), pfile) for pfile in prj_files)
    # Rewrite each .bld reference so it lands at <name>/.bld/bldrc
    # inside the archive.
    for dest, source in resources.get_file_refs(FileType.BLD_REF):
        target_dir, _ = splitext(dest)
        dest = join(target_dir, ".bld", "bldrc")
        to_zip.append(FileRef(dest, source))
    if inc_repos:
        # BUG FIX: the attribute is FileType.REPO_DIR (singular);
        # FileType.REPO_DIRS does not exist and raised AttributeError
        # whenever repositories were included in the export.
        for dest, source in resources.get_file_refs(FileType.REPO_DIR):
            for root, _, files in walk(source):
                for repo_file in files:
                    file_source = join(root, repo_file)
                    file_dest = join(dest, relpath(file_source, source))
                    to_zip.append(FileRef(file_dest, file_source))
    return to_zip
def zip_export(file_name, prefix, resources, project_files, inc_repos, notify):
"""Create a zip file from an exported project.
@ -188,32 +190,19 @@ def zip_export(file_name, prefix, resources, project_files, inc_repos, notify):
project_files - a list of extra files to be added to the root of the prefix
directory
"""
to_zip_list = list(_inner_zip_export(resources, inc_repos))
total_files = sum(len(to_zip) for _, to_zip in to_zip_list)
total_files += len(project_files)
to_zip_list = sorted(set(_inner_zip_export(
resources, project_files, inc_repos)))
total_files = len(to_zip_list)
zipped = 0
with zipfile.ZipFile(file_name, "w") as zip_file:
for prj_file in project_files:
zip_file.write(prj_file, join(prefix, basename(prj_file)))
for loc, to_zip in to_zip_list:
res = resources[loc]
for source in to_zip:
if source:
zip_file.write(
source,
join(prefix, loc,
relpath(source, res.file_basepath[source])))
notify.progress("Zipping", source,
100 * (zipped / total_files))
zipped += 1
for lib, res in resources.items():
for source in res.lib_builds:
target_dir, _ = splitext(source)
dest = join(prefix, loc,
relpath(target_dir, res.file_basepath[source]),
".bld", "bldrc")
zip_file.write(source, dest)
for dest, source in to_zip_list:
if source and isfile(source):
zip_file.write(source, join(prefix, dest))
zipped += 1
notify.progress("Zipping", source,
100 * (zipped / total_files))
else:
zipped += 1
def export_project(src_paths, export_path, target, ide, libraries_paths=None,
@ -275,26 +264,16 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
if name is None:
name = basename(normpath(abspath(src_paths[0])))
resource_dict = {}
resources = Resources(notify, collect_ignores=True)
for loc, path in src_paths.items():
res = Resources(collect_ignores=True)
res.add_toolchain_labels(toolchain)
resources.add_toolchain_labels(toolchain)
for p in path:
res.add_directory(p)
resource_dict[loc] = res
resources = Resources()
for loc, res in resource_dict.items():
temp = copy.deepcopy(res)
temp.subtract_basepath(".", loc)
resources.add(temp)
resources.add_directory(p, into_path=loc)
toolchain.build_dir = export_path
toolchain.config.load_resources(resources)
toolchain.set_config_data(toolchain.config.get_config_data())
config_header = toolchain.get_config_header()
resources.headers.append(config_header)
resources.file_basepath[config_header] = dirname(config_header)
resources.add_file_ref(FileType.HEADER, basename(config_header), config_header)
# Change linker script if specified
if linker_script is not None:
@ -303,16 +282,13 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
files, exporter = generate_project_files(resources, export_path,
target, name, toolchain, ide,
macros=macros)
files.append(config_header)
if zip_proj:
for resource in resource_dict.values():
for label, res in resource.features.items():
resource.add(res)
resources.add_features(ALLOWED_FEATURES)
if isinstance(zip_proj, basestring):
zip_export(join(export_path, zip_proj), name, resource_dict,
zip_export(join(export_path, zip_proj), name, resources,
files + list(exporter.static_files), inc_repos, notify)
else:
zip_export(zip_proj, name, resource_dict,
zip_export(zip_proj, name, resources,
files + list(exporter.static_files), inc_repos, notify)
else:
for static_file in exporter.static_files:

View File

@ -10,6 +10,7 @@ import copy
from tools.targets import TARGET_MAP
from tools.utils import mkdir
from tools.resources import FileType
class TargetNotSupportedException(Exception):
@ -87,12 +88,8 @@ class Exporter(object):
return self.TOOLCHAIN
def add_config(self):
"""Add the containgin directory of mbed_config.h to include dirs"""
config = self.toolchain.get_config_header()
if config:
self.resources.inc_dirs.append(
dirname(relpath(config,
self.resources.file_basepath[config])))
"""Add the containing directory of mbed_config.h to include dirs"""
pass
@property
def flags(self):
@ -116,11 +113,15 @@ class Exporter(object):
flags['c_flags'] += c_defines
flags['cxx_flags'] += c_defines
if config_header:
config_header = relpath(config_header,
self.resources.file_basepath[config_header])
flags['c_flags'] += self.toolchain.get_config_option(config_header)
def is_config_header(f):
return f.path == config_header
config_header= filter(
is_config_header, self.resources.get_file_refs(FileType.HEADER)
)[0]
flags['c_flags'] += self.toolchain.get_config_option(
config_header.name)
flags['cxx_flags'] += self.toolchain.get_config_option(
config_header)
config_header.name)
return flags
def get_source_paths(self):
@ -181,8 +182,7 @@ class Exporter(object):
Positional Arguments:
src - the src's location
"""
rel_path = relpath(src, self.resources.file_basepath[src])
path_list = os.path.normpath(rel_path).split(os.sep)
path_list = os.path.normpath(src).split(os.sep)
assert len(path_list) >= 1
if len(path_list) == 1:
key = self.project_name

View File

@ -2,7 +2,7 @@ from __future__ import print_function, absolute_import
from builtins import str
import os
from os.path import sep, normpath, join, exists
from os.path import sep, normpath, join, exists, dirname
import ntpath
import copy
from collections import namedtuple
@ -10,6 +10,7 @@ import shutil
from subprocess import Popen, PIPE
import re
from tools.resources import FileType
from tools.arm_pack_manager import Cache
from tools.targets import TARGET_MAP
from tools.export.exporters import Exporter, apply_supported_whitelist
@ -228,10 +229,10 @@ class Uvision(Exporter):
self.resources.inc_dirs).encode('utf-8'),
'device': DeviceUvision(self.target),
}
sct_file = self.resources.linker_script
sct_name, sct_path = self.resources.get_file_refs(FileType.LD_SCRIPT)[0]
ctx['linker_script'] = self.toolchain.correct_scatter_shebang(
sct_file, self.resources.file_basepath[sct_file])
if ctx['linker_script'] != sct_file:
sct_path, dirname(sct_name))
if ctx['linker_script'] != sct_path:
self.generated_files.append(ctx['linker_script'])
core = ctx['device'].core
ctx['cputype'] = core.rstrip("FD")

View File

@ -55,10 +55,11 @@ def resolve_exporter_alias(ide):
def setup_project(
ide,
target,
program=None,
source_dir=None,
build=None,
export_path=None
zip,
program,
source_dir,
build,
export_path,
):
"""Generate a name, if not provided, and find dependencies
@ -82,7 +83,10 @@ def setup_project(
project_name = TESTS[program]
else:
project_name = basename(normpath(realpath(source_dir[0])))
src_paths = {relpath(path, project_dir): [path] for path in source_dir}
if zip:
src_paths = {path.strip(".\\/"): [path] for path in source_dir}
else:
src_paths = {relpath(path, project_dir): [path] for path in source_dir}
lib_paths = None
else:
test = Test(program)
@ -124,6 +128,7 @@ def export(target, ide, build=None, src=None, macros=None, project_id=None,
project_dir, name, src, lib = setup_project(
ide,
target,
bool(zip_proj),
program=project_id,
source_dir=src,
build=build,
@ -289,6 +294,13 @@ def get_args(argv):
default=None
)
parser.add_argument(
"-z",
action="store_true",
default=None,
dest="zip",
)
parser.add_argument(
"--ignore",
dest="ignore",
@ -352,7 +364,7 @@ def main():
src=options.source_dir,
macros=options.macros,
project_id=options.program,
zip_proj=not bool(options.source_dir),
zip_proj=not bool(options.source_dir) or options.zip,
build_profile=profile,
app_config=options.app_config,
export_path=options.build_dir,

View File

@ -34,14 +34,13 @@ from __future__ import print_function, division, absolute_import
import fnmatch
import re
from collections import namedtuple, defaultdict
from copy import copy
from itertools import chain
from os import walk
from os.path import (join, splitext, dirname, relpath, basename, split, normcase,
abspath, exists)
from ..toolchains import TOOLCHAINS
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
@ -79,39 +78,53 @@ LEGACY_TOOLCHAIN_NAMES = {
}
FileRef = namedtuple("FileRef", "name path")
class FileType(object):
    """Namespace of identifiers used to tag scanned resource files.

    The string values double as short, human-readable type tags.  The
    class is a pure namespace and is not meant to be instantiated.
    """
    C_SRC = "c"
    CPP_SRC = "c++"
    ASM_SRC = "s"
    HEADER = "header"
    INC_DIR = "inc"
    LIB_DIR = "libdir"
    LIB = "lib"
    OBJECT = "o"
    HEX = "hex"
    BIN = "bin"
    JSON = "json"
    LD_SCRIPT = "ld"
    LIB_REF = "libref"
    BLD_REF = "bldref"
    REPO_DIR = "repodir"

    def __init__(self):
        # BUG FIX: `raise NotImplemented` raises a TypeError, because
        # NotImplemented is a singleton value, not an exception.  Raise
        # the intended exception type to signal "do not instantiate".
        raise NotImplementedError("FileType is a namespace, not a class")
class Resources(object):
def __init__(self, notify, base_path=None, collect_ignores=False):
# Every FileType bucket a Resources object tracks.  Callers iterate
# this to visit all stored FileRefs (e.g. the `add` merge logic and
# the zip-export path).
ALL_FILE_TYPES = [
FileType.C_SRC,
FileType.CPP_SRC,
FileType.ASM_SRC,
FileType.HEADER,
FileType.INC_DIR,
FileType.LIB_DIR,
FileType.LIB,
FileType.OBJECT,
FileType.HEX,
FileType.BIN,
FileType.JSON,
FileType.LD_SCRIPT,
FileType.LIB_REF,
FileType.BLD_REF,
FileType.REPO_DIR,
]
def __init__(self, notify, collect_ignores=False):
self.notify = notify
self.base_path = base_path
self.collect_ignores = collect_ignores
self._file_refs = defaultdict(list)
self._label_paths = []
self.file_basepath = {}
self.inc_dirs = []
self.headers = []
self.s_sources = []
self.c_sources = []
self.cpp_sources = []
self.lib_dirs = set([])
self.objects = []
self.libraries = []
# mbed special files
self.lib_builds = []
self.lib_refs = []
self.repo_dirs = []
self.repo_files = []
self.linker_script = None
# Other files
self.hex_files = []
self.bin_files = []
self.json_files = []
self.ignored_dirs = []
@ -122,12 +135,13 @@ class Resources(object):
}
# Pre-mbed 2.0 ignore dirs
self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS)
self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS)
# Ignore patterns from .mbedignore files
self.ignore_patterns = []
self._ignore_regex = re.compile("$^")
def __add__(self, resources):
if resources is None:
return self
@ -145,79 +159,15 @@ class Resources(object):
self.ignored_dirs.append(directory)
def add(self, resources):
for f,p in resources.file_basepath.items():
self.file_basepath[f] = p
self.inc_dirs += resources.inc_dirs
self.headers += resources.headers
self.s_sources += resources.s_sources
self.c_sources += resources.c_sources
self.cpp_sources += resources.cpp_sources
for file_type in self.ALL_FILE_TYPES:
self._file_refs[file_type].extend(resources._file_refs[file_type])
self.lib_dirs |= resources.lib_dirs
self.objects += resources.objects
self.libraries += resources.libraries
self.lib_builds += resources.lib_builds
self.lib_refs += resources.lib_refs
self.repo_dirs += resources.repo_dirs
self.repo_files += resources.repo_files
if resources.linker_script is not None:
self.linker_script = resources.linker_script
self.hex_files += resources.hex_files
self.bin_files += resources.bin_files
self.json_files += resources.json_files
self.ignored_dirs += resources.ignored_dirs
self._label_paths += resources._label_paths
return self
def rewrite_basepath(self, file_name, export_path, loc):
""" Replace the basepath of filename with export_path
Positional arguments:
file_name - the absolute path to a file
export_path - the final destination of the file after export
"""
new_f = join(loc, relpath(file_name, self.file_basepath[file_name]))
self.file_basepath[new_f] = export_path
return new_f
def subtract_basepath(self, export_path, loc=""):
""" Rewrite all of the basepaths with the export_path
Positional arguments:
export_path - the final destination of the resources with respect to the
generated project files
"""
keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script',
'lib_dirs']
for key in keys:
vals = getattr(self, key)
if isinstance(vals, set):
vals = list(vals)
if isinstance(vals, list):
new_vals = []
for val in vals:
new_vals.append(self.rewrite_basepath(
val, export_path, loc))
if isinstance(getattr(self, key), set):
setattr(self, key, set(new_vals))
else:
setattr(self, key, new_vals)
elif vals:
setattr(self, key, self.rewrite_basepath(
vals, export_path, loc))
def closure(res, export_path=export_path, loc=loc):
res.subtract_basepath(export_path, loc)
return res
def _collect_duplicates(self, dupe_dict, dupe_headers):
for filename in self.s_sources + self.c_sources + self.cpp_sources:
objname, _ = splitext(basename(filename))
@ -249,52 +199,41 @@ class Resources(object):
(headername, " ".join(locations)))
return count
def relative_to(self, base, dot=False):
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
'cpp_sources', 'lib_dirs', 'objects', 'libraries',
'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
'hex_files', 'bin_files', 'json_files']:
v = [rel_path(f, base, dot) for f in getattr(self, field)]
setattr(self, field, v)
if self.linker_script is not None:
self.linker_script = rel_path(self.linker_script, base, dot)
for file_type in self.ALL_FILE_TYPES:
v = [f._replace(name=rel_path(f, base, dot)) for
f in self.get_file_refs(file_type)]
self._file_refs[file_type] = v
def win_to_unix(self):
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
'cpp_sources', 'lib_dirs', 'objects', 'libraries',
'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
'hex_files', 'bin_files', 'json_files']:
v = [f.replace('\\', '/') for f in getattr(self, field)]
setattr(self, field, v)
if self.linker_script is not None:
self.linker_script = self.linker_script.replace('\\', '/')
for file_type in self.ALL_FILE_TYPES:
v = [f._replace(name=f.replace('\\', '/')) for
f in self.get_file_refs(file_type)]
self._file_refs[file_type] = v
def __str__(self):
s = []
for (label, resources) in (
('Include Directories', self.inc_dirs),
('Headers', self.headers),
for (label, file_type) in (
('Include Directories', FileType.INC_DIR),
('Headers', FileType.HEADER),
('Assembly sources', self.s_sources),
('C sources', self.c_sources),
('C++ sources', self.cpp_sources),
('Assembly sources', FileType.ASM_SRC),
('C sources', FileType.C_SRC),
('C++ sources', FileType.CPP_SRC),
('Library directories', self.lib_dirs),
('Objects', self.objects),
('Libraries', self.libraries),
('Library directories', FileType.LIB_DIR),
('Objects', FileType.OBJECT),
('Libraries', FileType.LIB),
('Hex files', self.hex_files),
('Bin files', self.bin_files),
('Hex files', FileType.HEX),
('Bin files', FileType.BIN),
('Linker script', FileType.LD_SCRIPT)
):
resources = self.get_file_refs(file_type)
if resources:
s.append('%s:\n ' % label + '\n '.join(resources))
if self.linker_script:
s.append('Linker Script: ' + self.linker_script)
s.append('%s:\n ' % label + '\n '.join(
"%s -> %s" % (name, path) for name, path in resources))
return '\n'.join(s)
@ -303,10 +242,10 @@ class Resources(object):
self.labels.setdefault(prefix, [])
self.labels[prefix].extend(labels)
prefixed_labels = set("%s_%s" % (prefix, label) for label in labels)
for path, base_path in self._label_paths:
for path, base_path, into_path in self._label_paths:
if basename(path) in prefixed_labels:
self.add_directory(path, base_path)
self._label_paths = [(p, b) for p, b in self._label_paths
self.add_directory(path, base_path, into_path)
self._label_paths = [(p, b, i) for p, b, i in self._label_paths
if basename(p) not in prefixed_labels]
def add_target_labels(self, target):
@ -345,7 +284,83 @@ class Resources(object):
return (dirname.startswith(label_type + "_") and
dirname[len(label_type) + 1:] not in self.labels[label_type])
def add_directory(self, path, base_path=None, exclude_paths=None):
# Record a single file: `file_name` is the name the file should carry
# in a generated project/zip; `file_path` is its on-disk location.
def add_file_ref(self, file_type, file_name, file_path):
ref = FileRef(file_name, file_path)
self._file_refs[file_type].append(ref)
def get_file_refs(self, file_type):
"""Return a list of FileRef for every file of the given type"""
return self._file_refs[file_type]
# Convenience accessor: just the name component of each FileRef.
def get_file_names(self, file_type):
return [f.name for f in self.get_file_refs(file_type)]
# Bulk-add files whose in-project name equals their on-disk path
# (e.g. the object files produced during a build).
def add_files_to_type(self, file_type, files):
self._file_refs[file_type].extend(FileRef(f, f) for f in files)
# Read-only compatibility accessors: each property returns the flat
# list of file names that the legacy attribute of the same name held.
@property
def inc_dirs(self):
return self.get_file_names(FileType.INC_DIR)
@property
def headers(self):
return self.get_file_names(FileType.HEADER)
@property
def s_sources(self):
return self.get_file_names(FileType.ASM_SRC)
@property
def c_sources(self):
return self.get_file_names(FileType.C_SRC)
@property
def cpp_sources(self):
return self.get_file_names(FileType.CPP_SRC)
# NOTE(review): the legacy attribute was a set; this returns a list,
# so duplicates are now possible — confirm callers tolerate that.
@property
def lib_dirs(self):
return self.get_file_names(FileType.LIB_DIR)
@property
def objects(self):
return self.get_file_names(FileType.OBJECT)
@property
def libraries(self):
return self.get_file_names(FileType.LIB)
@property
def lib_builds(self):
return self.get_file_names(FileType.BLD_REF)
@property
def lib_refs(self):
return self.get_file_names(FileType.LIB_REF)
@property
def linker_script(self):
    """Return the first linker script found, or None when none exists.

    BUG FIX: indexing [0] on an empty list raised IndexError when no
    .sct/.ld/.icf file had been scanned, whereas the legacy attribute
    defaulted to None and callers test `linker_script is not None`.
    """
    scripts = self.get_file_names(FileType.LD_SCRIPT)
    return scripts[0] if scripts else None
# Read-only accessors for binary images and for the JSON configuration
# files that Config consumes during scanning.
@property
def hex_files(self):
return self.get_file_names(FileType.HEX)
@property
def bin_files(self):
return self.get_file_names(FileType.BIN)
@property
def json_files(self):
return self.get_file_names(FileType.JSON)
def add_directory(
self,
path,
base_path=None,
into_path=None,
exclude_paths=None,
):
""" Scan a directory and include its resources in this resources obejct
Positional arguments:
@ -354,12 +369,16 @@ class Resources(object):
Keyword arguments
base_path - If this is part of an incremental scan, include the origin
directory root of the scan here
into_path - Pretend that scanned files are within the specified
directory within a project instead of using their actual path
exclude_paths - A list of paths that are to be excluded from a build
"""
self.notify.progress("scan", abspath(path))
if base_path is None:
base_path = path
if into_path is None:
into_path = path
if self.collect_ignores and path in self.ignored_dirs:
self.ignored_dirs.remove(path)
if exclude_paths:
@ -384,11 +403,12 @@ class Resources(object):
for d in copy(dirs):
dir_path = join(root, d)
if d == '.hg' or d == '.git':
self.repo_dirs.append(dir_path)
fake_path = join(into_path, relpath(dir_path, base_path))
self.add_file_ref(FileType.REPO_DIR, fake_path, dir_path)
if (any(self._not_current_label(d, t) for t
in ['TARGET', 'TOOLCHAIN', 'FEATURE'])):
self._label_paths.append((dir_path, base_path))
self._label_paths.append((dir_path, base_path, into_path))
self.ignore_dir(dir_path)
dirs.remove(d)
elif (d.startswith('.') or d in self.legacy_ignore_dirs or
@ -398,14 +418,35 @@ class Resources(object):
# Add root to include paths
root = root.rstrip("/")
self.inc_dirs.append(root)
self.file_basepath[root] = base_path
fake_root = join(into_path, relpath(root, base_path))
self.add_file_ref(FileType.INC_DIR, fake_root, root)
for file in files:
file_path = join(root, file)
self._add_file(file_path, base_path)
self._add_file(file_path, base_path, into_path)
def _add_file(self, file_path, base_path):
# Maps lower-cased filename extensions to the FileType bucket the file
# belongs in; files whose extension is absent from this table are
# silently ignored during scanning (KeyError is swallowed).
_EXT = {
".c": FileType.C_SRC,
".cc": FileType.CPP_SRC,
".cpp": FileType.CPP_SRC,
".s": FileType.ASM_SRC,
".h": FileType.HEADER,
".hh": FileType.HEADER,
".hpp": FileType.HEADER,
".o": FileType.OBJECT,
".hex": FileType.HEX,
".bin": FileType.BIN,
".json": FileType.JSON,
".a": FileType.LIB,
".ar": FileType.LIB,
".sct": FileType.LD_SCRIPT,
".ld": FileType.LD_SCRIPT,
".icf": FileType.LD_SCRIPT,
".lib": FileType.LIB_REF,
".bld": FileType.BLD_REF,
}
def _add_file(self, file_path, base_path, into_path):
""" Add a single file into the resources object that was found by
scanning starting as base_path
"""
@ -415,55 +456,13 @@ class Resources(object):
self.ignore_dir(relpath(file_path, base_path))
return
self.file_basepath[file_path] = base_path
fake_path = join(into_path, relpath(file_path, base_path))
_, ext = splitext(file_path)
ext = ext.lower()
if ext == '.s':
self.s_sources.append(file_path)
elif ext == '.c':
self.c_sources.append(file_path)
elif ext == '.cpp' or ext == '.cc':
self.cpp_sources.append(file_path)
elif ext == '.h' or ext == '.hpp' or ext == '.hh':
self.headers.append(file_path)
elif ext == '.o':
self.objects.append(file_path)
elif ext in ('.a', '.ar'):
self.libraries.append(file_path)
self.lib_dirs.add(dirname(file_path))
elif ext in ('.sct', '.icf', '.ld'):
if self.linker_script is not None:
self.notify.info("Warning: Multiple linker scripts detected: %s and %s" % (self.linker_script, file_path))
else:
self.linker_script = file_path
elif ext == '.lib':
self.lib_refs.append(file_path)
elif ext == '.bld':
self.lib_builds.append(file_path)
elif basename(file_path) == '.hgignore':
self.repo_files.append(file_path)
elif basename(file_path) == '.gitignore':
self.repo_files.append(file_path)
elif ext == '.hex':
self.hex_files.append(file_path)
elif ext == '.bin':
self.bin_files.append(file_path)
elif ext == '.json':
self.json_files.append(file_path)
try:
file_type = self._EXT[ext.lower()]
self.add_file_ref(file_type, fake_path, file_path)
except KeyError:
pass
def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None,

View File

@ -38,6 +38,7 @@ from ..utils import (run_cmd, mkdir, rel_path, ToolException,
from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK
from .. import hooks
from ..notifier.term import TerminalNotifier
from ..resources import FileType
from ..memap import MemapParser
from ..config import ConfigException
@ -284,7 +285,7 @@ class mbedToolchain:
return resources
def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
def copy_files(self, files_paths, trg_path, resources=None):
# Handle a single file
if not isinstance(files_paths, list):
files_paths = [files_paths]
@ -294,12 +295,7 @@ class mbedToolchain:
files_paths.remove(source)
for source in files_paths:
if resources is not None and source in resources.file_basepath:
relative_path = relpath(source, resources.file_basepath[source])
elif rel_path is not None:
relative_path = relpath(source, rel_path)
else:
_, relative_path = split(source)
_, relative_path = split(source)
target = join(trg_path, relative_path)
@ -310,10 +306,10 @@ class mbedToolchain:
# THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def relative_object_path(self, build_path, base_dir, source):
source_dir, name, _ = split_path(source)
def relative_object_path(self, build_path, file_ref):
source_dir, name, _ = split_path(file_ref.name)
obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
obj_dir = relpath(join(build_path, source_dir))
if obj_dir is not self.prev_dir:
self.prev_dir = obj_dir
mkdir(obj_dir)
@ -368,7 +364,11 @@ class mbedToolchain:
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
def compile_sources(self, resources, inc_dirs=None):
# Web IDE progress bar for project build
files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
files_to_compile = (
resources.get_file_refs(FileType.ASM_SRC) +
resources.get_file_refs(FileType.C_SRC) +
resources.get_file_refs(FileType.CPP_SRC)
)
self.to_be_compiled = len(files_to_compile)
self.compiled = 0
@ -399,11 +399,10 @@ class mbedToolchain:
# Sort compile queue for consistency
files_to_compile.sort()
for source in files_to_compile:
object = self.relative_object_path(
self.build_dir, resources.file_basepath[source], source)
object = self.relative_object_path(self.build_dir, source)
# Queue mode (multiprocessing)
commands = self.compile_command(source, object, inc_paths)
commands = self.compile_command(source.path, object, inc_paths)
if commands is not None:
queue.append({
'source': source,
@ -429,7 +428,7 @@ class mbedToolchain:
result = compile_worker(item)
self.compiled += 1
self.progress("compile", item['source'], build_update=True)
self.progress("compile", item['source'].name, build_update=True)
for res in result['results']:
self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
self.compile_output([
@ -467,7 +466,7 @@ class mbedToolchain:
results.remove(r)
self.compiled += 1
self.progress("compile", result['source'], build_update=True)
self.progress("compile", result['source'].name, build_update=True)
for res in result['results']:
self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
self.compile_output([
@ -628,15 +627,15 @@ class mbedToolchain:
bin = None if ext == 'elf' else full_path
map = join(tmp_path, name + '.map')
r.objects = sorted(set(r.objects))
objects = sorted(set(r.objects))
config_file = ([self.config.app_config_location]
if self.config.app_config_location else [])
dependencies = r.objects + r.libraries + [r.linker_script] + config_file
dependencies = objects + r.libraries + [r.linker_script] + config_file
dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
if self.need_update(elf, dependencies):
needed_update = True
self.progress("link", name)
self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
self.link(elf, objects, r.libraries, r.lib_dirs, r.linker_script)
if bin and self.need_update(bin, [elf]):
needed_update = True