mirror of https://github.com/ARMmbed/mbed-os.git

Add config system macros to exporters
Support various exporter features

pull/1893/head
parent 9f62d70fbf
commit 75a18ff1a3
@@ -57,7 +57,7 @@ def online_build_url_resolver(url):
 
 
 def export(project_path, project_name, ide, target, destination='/tmp/',
-           tempdir=None, clean=True, extra_symbols=None, build_url_resolver=online_build_url_resolver):
+           tempdir=None, clean=True, extra_symbols=None, zip=True, relative=False, build_url_resolver=online_build_url_resolver):
     # Convention: we are using capitals for toolchain and target names
     if target is not None:
         target = target.upper()
@@ -74,7 +74,7 @@ def export(project_path, project_name, ide, target, destination='/tmp/',
         try:
             ide = "zip"
             exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
-            exporter.scan_and_copy_resources(project_path, tempdir)
+            exporter.scan_and_copy_resources(project_path, tempdir, relative)
             exporter.generate()
             report['success'] = True
         except OldLibrariesException, e:
@@ -101,7 +101,7 @@ def export(project_path, project_name, ide, target, destination='/tmp/',
         # target checked, export
         try:
             exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
-            exporter.scan_and_copy_resources(project_path, tempdir)
+            exporter.scan_and_copy_resources(project_path, tempdir, relative)
             exporter.generate()
             report['success'] = True
         except OldLibrariesException, e:
@@ -133,8 +133,12 @@ def export(project_path, project_name, ide, target, destination='/tmp/',
     # add readme file to every offline export.
     open(os.path.join(tempdir, 'GettingStarted.htm'),'w').write('<meta http-equiv="refresh" content="0; url=http://mbed.org/handbook/Getting-Started-mbed-Exporters#%s"/>'% (ide))
     # copy .hgignore file to exported direcotry as well.
-    copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'),tempdir)
-    zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)
+    if exists(os.path.join(exporter.TEMPLATE_DIR,'.hgignore')):
+        copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'), tempdir)
+    if zip:
+        zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)
+    else:
+        zip_path = destination
 
     return zip_path, report
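For orientation, a minimal usage sketch of the two new keyword arguments added to export() above. The module path, project path, IDE and target names are placeholders, and a real call needs a populated mbed project tree and a configured toolchain environment.

    # Hypothetical invocation of the updated export() entry point; 'tools.export'
    # as the import path and the project/target values are assumptions.
    from tools.export import export

    zip_path, report = export('/path/to/my_project', 'my_project', 'uvision', 'K64F',
                              destination='/tmp/exports',
                              zip=False,       # new: leave the exported tree unzipped; the destination is returned
                              relative=True)   # new: scan resources from the source tree, keeping relative paths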
@@ -7,6 +7,7 @@ from jinja2 import Template, FileSystemLoader
 from jinja2.environment import Environment
 from contextlib import closing
 from zipfile import ZipFile, ZIP_DEFLATED
+from operator import add
 
 from tools.utils import mkdir
 from tools.toolchains import TOOLCHAIN_CLASSES
@@ -16,6 +17,8 @@ from project_generator.generate import Generator
 from project_generator.project import Project
 from project_generator.settings import ProjectSettings
 
+from tools.config import Config
+
 class OldLibrariesException(Exception): pass
 
 class Exporter(object):
@@ -31,6 +34,7 @@ class Exporter(object):
         jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
         self.jinja_environment = Environment(loader=jinja_loader)
         self.extra_symbols = extra_symbols
+        self.config_macros = []
 
     def get_toolchain(self):
         return self.TOOLCHAIN
@@ -46,24 +50,40 @@ class Exporter(object):
             self.toolchain.copy_files(r, trg_path, rel_path=src_path)
         return resources
 
+    @staticmethod
+    def _get_dir_grouped_files(files):
+        """ Get grouped files based on the dirname """
+        files_grouped = {}
+        for file in files:
+            rel_path = os.path.relpath(file, os.getcwd())
+            dir_path = os.path.dirname(rel_path)
+            if dir_path == '':
+                # all files within the current dir go into Source_Files
+                dir_path = 'Source_Files'
+            if not dir_path in files_grouped.keys():
+                files_grouped[dir_path] = []
+            files_grouped[dir_path].append(file)
+        return files_grouped
+
     def progen_get_project_data(self):
         """ Get ProGen project data """
         # provide default data, some tools don't require any additional
         # tool specific settings
-        sources = []
+        code_files = []
         for r_type in ['c_sources', 'cpp_sources', 's_sources']:
             for file in getattr(self.resources, r_type):
-                sources.append(file)
+                code_files.append(file)
 
+        sources_files = code_files + self.resources.hex_files + self.resources.objects + \
+            self.resources.libraries
+        sources_grouped = Exporter._get_dir_grouped_files(sources_files)
+        headers_grouped = Exporter._get_dir_grouped_files(self.resources.headers)
+
         project_data = {
             'common': {
-                'sources': {
-                    'Source Files': sources + self.resources.hex_files +
-                    self.resources.objects + self.resources.libraries,
-                },
-                'includes': {
-                    'Include Files': self.resources.headers,
-                },
+                'sources': sources_grouped,
+                'includes': headers_grouped,
+                'build_dir':'.build',
                 'target': [TARGET_MAP[self.target].progen['target']],
                 'macros': self.get_symbols(),
                 'export_dir': [self.inputDir],
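The new _get_dir_grouped_files helper drives the per-directory source and include groups fed to ProGen above. A simplified, standalone rendering of its grouping rule (using setdefault instead of the explicit key check) behaves like this:

    import os

    def group_by_dir(files):
        # Files in the current working directory land in 'Source_Files';
        # everything else is keyed by its relative directory name.
        grouped = {}
        for f in files:
            rel_path = os.path.relpath(f, os.getcwd())
            dir_path = os.path.dirname(rel_path) or 'Source_Files'
            grouped.setdefault(dir_path, []).append(f)
        return grouped

    print(group_by_dir(['main.cpp', 'drivers/spi.cpp', 'drivers/spi.h']))
    # -> {'Source_Files': ['main.cpp'], 'drivers': ['drivers/spi.cpp', 'drivers/spi.h']}  (key order may vary)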
@@ -73,7 +93,7 @@ class Exporter(object):
         return project_data
 
     def progen_gen_file(self, tool_name, project_data):
-        """ Generate project using ProGen Project API """
+        """" Generate project using ProGen Project API """
         settings = ProjectSettings()
         project = Project(self.program_name, [project_data], settings)
         # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen
@@ -95,17 +115,20 @@ class Exporter(object):
 
         return resources
 
-    def scan_and_copy_resources(self, prj_path, trg_path):
+    def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
         # Copy only the file for the required target and toolchain
         lib_builds = []
+        # Create the configuration object
+        cfg = Config(self.target, prj_paths)
         for src in ['lib', 'src']:
-            resources = self.__scan_and_copy(join(prj_path, src), trg_path)
+            resources = reduce(add, [self.__scan_and_copy(join(path, src), trg_path) for path in prj_paths])
             lib_builds.extend(resources.lib_builds)
 
             # The repository files
             for repo_dir in resources.repo_dirs:
                 repo_files = self.__scan_all(repo_dir)
-                self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))
+                for path in prj_paths:
+                    self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))
 
         # The libraries builds
         for bld in lib_builds:
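The reduce(add, ...) pattern above relies on the scanned resource objects supporting '+', which is why the hunk earlier adds 'from operator import add'. The real Resources class is not shown in this diff, so the toy stand-in below only models the merge step:

    from functools import reduce   # a builtin in Python 2, which this code base targets
    from operator import add

    class ToyResources(object):
        """Stand-in for the toolchain Resources object; only merging is modelled."""
        def __init__(self, c_sources):
            self.c_sources = c_sources
        def __add__(self, other):
            return ToyResources(self.c_sources + other.c_sources)

    per_path = [ToyResources(['proj_a/main.c']), ToyResources(['proj_b/board.c'])]
    merged = reduce(add, per_path)
    print(merged.c_sources)   # ['proj_a/main.c', 'proj_b/board.c']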
@@ -120,9 +143,17 @@ class Exporter(object):
                 fhandle = file(join(hgdir, 'keep.me'), 'a')
                 fhandle.close()
 
-        # Final scan of the actual exported resources
-        self.resources = self.toolchain.scan_resources(trg_path)
-        self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
+        if not relative:
+            # Final scan of the actual exported resources
+            self.resources = self.toolchain.scan_resources(trg_path)
+            self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
+        else:
+            # use the prj_dir (source, not destination)
+            self.resources = reduce(add, [self.toolchain.scan_resources(path) for path in prj_paths])
+        # Add all JSON files discovered during scanning to the configuration object
+        cfg.add_config_files(self.resources.json_files)
+        # Get data from the configuration system
+        self.config_macros = cfg.get_config_data_macros()
         # Check the existence of a binary build of the mbed library for the desired target
         # This prevents exporting the mbed libraries from source
         # if not self.toolchain.mbed_libs:
@@ -141,7 +172,7 @@ class Exporter(object):
         """ This function returns symbols which must be exported.
            Please add / overwrite symbols in each exporter separately
         """
-        symbols = self.toolchain.get_symbols()
+        symbols = self.toolchain.get_symbols() + self.config_macros
        # We have extra symbols from e.g. libraries, we want to have them also added to export
        if add_extra_symbols:
            if self.extra_symbols is not None:
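Taken together, the exporter hunks route configuration data into the exported symbols roughly as follows. This sketch reuses only the calls visible in the hunks above; it assumes an mbed-os checkout on the import path, and the target name and paths in the commented call are placeholders:

    from tools.config import Config

    def collect_config_macros(target, prj_paths, json_files):
        cfg = Config(target, prj_paths)        # created before scanning, as in scan_and_copy_resources()
        cfg.add_config_files(json_files)       # JSON config files found by the resource scan
        return cfg.get_config_data_macros()    # macro strings later appended in get_symbols()

    # e.g. macros = collect_config_macros('K64F', ['/path/to/project'], resources.json_files)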
@@ -67,6 +67,7 @@ class IAREmbeddedWorkbench(Exporter):
 
         project_data['tool_specific'] = {}
         project_data['tool_specific'].update(tool_specific)
+        project_data['common']['build_dir'] = os.path.join(project_data['common']['build_dir'], 'iar_arm')
         self.progen_gen_file('iar_arm', project_data)
 
     # Currently not used, we should reuse folder_name to create virtual folders
@@ -85,5 +85,6 @@ class Uvision4(Exporter):
                 project_data['common']['macros'].pop(i)
             i += 1
         project_data['common']['macros'].append('__ASSERT_MSG')
+        project_data['common']['build_dir'] = join(project_data['common']['build_dir'], 'uvision4')
         self.progen_gen_file('uvision', project_data)
 
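With the 'build_dir': '.build' default added to project_data earlier, these two tool-specific hunks give each generated project its own output subdirectory. The joined results (shown for a POSIX host) would be:

    import os

    build_dir = '.build'                       # default set in progen_get_project_data()
    print(os.path.join(build_dir, 'iar_arm'))  # .build/iar_arm   (IAR Embedded Workbench)
    print(os.path.join(build_dir, 'uvision4')) # .build/uvision4  (Keil uVision 4)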