diff --git a/requirements.txt b/requirements.txt
index 963bea2036..d01de683e9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,8 +3,8 @@ PySerial>=2.7
PrettyTable>=0.7.2
Jinja2>=2.7.3
IntelHex>=1.3
-project-generator>=0.9.7,<0.10.0
-project-generator-definitions>=0.2.26,<0.3.0
+project-generator==0.9.10
+project_generator_definitions>=0.2.26,<0.3.0
junit-xml
pyYAML
requests
diff --git a/tools/build_api.py b/tools/build_api.py
index 41688a2c87..f1980b72d8 100644
--- a/tools/build_api.py
+++ b/tools/build_api.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -326,7 +326,7 @@ def prepare_toolchain(src_paths, target, toolchain_name,
return toolchain
def scan_resources(src_paths, toolchain, dependencies_paths=None,
- inc_dirs=None):
+ inc_dirs=None, base_path=None):
""" Scan resources using initialized toolcain
Positional arguments
@@ -338,9 +338,9 @@ def scan_resources(src_paths, toolchain, dependencies_paths=None,
"""
# Scan src_path
- resources = toolchain.scan_resources(src_paths[0])
+ resources = toolchain.scan_resources(src_paths[0], base_path=base_path)
for path in src_paths[1:]:
- resources.add(toolchain.scan_resources(path))
+ resources.add(toolchain.scan_resources(path, base_path=base_path))
# Scan dependency paths for include dirs
if dependencies_paths is not None:
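Not part of the diff above: a minimal usage sketch of the new base_path keyword, assuming the mbed tools are importable, that prepare_toolchain's remaining parameters have defaults, and that "K64F"/"GCC_ARM" stand in for a valid target/toolchain pair. base_path appears to record the root that discovered files are later made relative to (see the file_basepath lookups in exporters.py further down).

    from tools.build_api import prepare_toolchain, scan_resources

    src_paths = ["./source", "./mbed-os"]            # hypothetical project layout
    toolchain = prepare_toolchain(src_paths, "K64F", "GCC_ARM")
    resources = scan_resources(src_paths, toolchain, base_path=src_paths[0])
    print(resources.inc_dirs)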
diff --git a/tools/export/__init__.py b/tools/export/__init__.py
index 8ae7c69070..301dc7117a 100644
--- a/tools/export/__init__.py
+++ b/tools/export/__init__.py
@@ -1,27 +1,28 @@
+"""The generic interface for all exporters.
"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# mbed SDK
+# Copyright (c) 2011-2016 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
import os, tempfile
from os.path import join, exists, basename
from shutil import copytree, rmtree, copy
import yaml
-from tools.utils import mkdir
-from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio
-from tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException, FailedBuildException
+from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar
+from tools.export import emblocks, coide, kds, simplicityv3, atmelstudio
+from tools.export import sw4stm32, e2studio, zip
+from tools.export.exporters import OldLibrariesException, FailedBuildException
from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP
from project_generator_definitions.definitions import ProGenDef
@@ -41,6 +42,7 @@ EXPORTERS = {
'atmelstudio' : atmelstudio.AtmelStudio,
'sw4stm32' : sw4stm32.Sw4STM32,
'e2studio' : e2studio.E2Studio,
+ 'zip' : zip.ZIP,
}
ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN = """
@@ -52,162 +54,25 @@ ERROR_MESSAGE_NOT_EXPORT_LIBS = """
To export this project please import the export version of the mbed library.
"""
-def online_build_url_resolver(url):
- # TODO: Retrieve the path and name of an online library build URL
- return {'path':'', 'name':''}
+def mcu_ide_matrix(verbose_html=False):
+ """Shows target map using prettytable
-
-def export(project_path, project_name, ide, target, destination='/tmp/',
- tempdir=None, pgen_build = False, clean=True, extra_symbols=None, make_zip=True, sources_relative=False,
- build_url_resolver=online_build_url_resolver, progen_build=False):
- # Convention: we are using capitals for toolchain and target names
- if target is not None:
- target = target.upper()
-
- if tempdir is None:
- tempdir = tempfile.mkdtemp()
-
- use_progen = False
- supported = True
- report = {'success': False, 'errormsg':'', 'skip': False}
-
- if ide is None or ide == "zip":
- # Simple ZIP exporter
- try:
- ide = "zip"
- exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
- exporter.scan_and_copy_resources(project_path, tempdir, sources_relative)
- exporter.generate()
- report['success'] = True
- except OldLibrariesException, e:
- report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
- else:
- if ide not in EXPORTERS:
- report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
- report['skip'] = True
- else:
- Exporter = EXPORTERS[ide]
- target = EXPORT_MAP.get(target, target)
- try:
- if Exporter.PROGEN_ACTIVE:
- use_progen = True
- except AttributeError:
- pass
-
- if target not in Exporter.TARGETS or Exporter.TOOLCHAIN not in TARGET_MAP[target].supported_toolchains:
- supported = False
-
- if use_progen:
- if not ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']):
- supported = False
-
- if supported:
- # target checked, export
- try:
- exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols, sources_relative=sources_relative)
- exporter.scan_and_copy_resources(project_path, tempdir, sources_relative)
- if progen_build:
- #try to build with pgen ide builders
- try:
- exporter.generate(progen_build=True)
- report['success'] = True
- except FailedBuildException, f:
- report['errormsg'] = "Build Failed"
- else:
- exporter.generate()
- report['success'] = True
- except OldLibrariesException, e:
- report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
-
- else:
- report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
- report['skip'] = True
-
- zip_path = None
- if report['success']:
- # readme.txt to contain more exported data
- exporter_yaml = {
- 'project_generator': {
- 'active' : False,
- }
- }
- if use_progen:
- try:
- import pkg_resources
- version = pkg_resources.get_distribution('project_generator').version
- exporter_yaml['project_generator']['version'] = version
- exporter_yaml['project_generator']['active'] = True;
- exporter_yaml['project_generator_definitions'] = {}
- version = pkg_resources.get_distribution('project_generator_definitions').version
- exporter_yaml['project_generator_definitions']['version'] = version
- except ImportError:
- pass
- with open(os.path.join(tempdir, 'exporter.yaml'), 'w') as outfile:
- yaml.dump(exporter_yaml, outfile, default_flow_style=False)
- # add readme file to every offline export.
- open(os.path.join(tempdir, 'GettingStarted.htm'),'w').write(''% (ide))
- # copy .hgignore file to exported direcotry as well.
- if exists(os.path.join(exporter.TEMPLATE_DIR,'.hgignore')):
- copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'), tempdir)
- if make_zip:
- zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)
- else:
- zip_path = destination
-
- return zip_path, report
-
-
-###############################################################################
-# Generate project folders following the online conventions
-###############################################################################
-def copy_tree(src, dst, clean=True):
- if exists(dst):
- if clean:
- rmtree(dst)
- else:
- return
-
- copytree(src, dst)
-
-
-def setup_user_prj(user_dir, prj_path, lib_paths=None):
+    Keyword arguments:
+ verbose_html - print the matrix in html format
"""
- Setup a project with the same directory structure of the mbed online IDE
- """
- mkdir(user_dir)
-
- # Project Path
- copy_tree(prj_path, join(user_dir, "src"))
-
- # Project Libraries
- user_lib = join(user_dir, "lib")
- mkdir(user_lib)
-
- if lib_paths is not None:
- for lib_path in lib_paths:
- copy_tree(lib_path, join(user_lib, basename(lib_path)))
-
-def mcu_ide_matrix(verbose_html=False, platform_filter=None):
- """ Shows target map using prettytable """
- supported_ides = []
- for key in EXPORTERS.iterkeys():
- supported_ides.append(key)
- supported_ides.sort()
- from prettytable import PrettyTable, ALL # Only use it in this function so building works without extra modules
+ supported_ides = sorted(EXPORTERS.keys())
+ # Only use it in this function so building works without extra modules
+ from prettytable import PrettyTable, ALL
# All tests status table print
- columns = ["Platform"] + supported_ides
- pt = PrettyTable(columns)
+ table_printer = PrettyTable(["Platform"] + supported_ides)
# Align table
- for col in columns:
- pt.align[col] = "c"
- pt.align["Platform"] = "l"
+ for col in supported_ides:
+ table_printer.align[col] = "c"
+ table_printer.align["Platform"] = "l"
perm_counter = 0
- target_counter = 0
for target in sorted(TARGET_NAMES):
- target_counter += 1
-
row = [target] # First column is platform name
for ide in supported_ides:
text = "-"
@@ -218,20 +83,24 @@ def mcu_ide_matrix(verbose_html=False, platform_filter=None):
text = "x"
perm_counter += 1
row.append(text)
- pt.add_row(row)
+ table_printer.add_row(row)
- pt.border = True
- pt.vrules = ALL
- pt.hrules = ALL
- # creates a html page suitable for a browser
- # result = pt.get_html_string(format=True) if verbose_html else pt.get_string()
+ table_printer.border = True
+ table_printer.vrules = ALL
+ table_printer.hrules = ALL
# creates a html page in a shorter format suitable for readme.md
- result = pt.get_html_string() if verbose_html else pt.get_string()
+ if verbose_html:
+ result = table_printer.get_html_string()
+ else:
+ result = table_printer.get_string()
result += "\n"
result += "Total IDEs: %d\n"% (len(supported_ides))
-    if verbose_html: result += "<br>"
-    result += "Total platforms: %d\n"% (target_counter)
-    if verbose_html: result += "<br>"
+    if verbose_html:
+        result += "<br>"
+    result += "Total platforms: %d\n"% (len(TARGET_NAMES))
+    if verbose_html:
+        result += "<br>"
     result += "Total permutations: %d"% (perm_counter)
-    if verbose_html: result = result.replace("&", "&amp;")
+    if verbose_html:
+        result = result.replace("&", "&amp;")
return result
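A hedged usage sketch (not part of the patch): with the platform_filter parameter gone, mcu_ide_matrix takes only verbose_html, and the HTML variant is what the -E option of tools/project.py writes into tools/export/README.md.

    from tools.export import mcu_ide_matrix

    print(mcu_ide_matrix())                    # plain-text table for the console
    html = mcu_ide_matrix(verbose_html=True)   # HTML-flavoured table for README.md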
diff --git a/tools/export/atmelstudio.py b/tools/export/atmelstudio.py
index f85a047b64..66c3c43020 100644
--- a/tools/export/atmelstudio.py
+++ b/tools/export/atmelstudio.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2015 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -61,7 +61,7 @@ class AtmelStudio(Exporter):
ctx = {
'target': self.target,
- 'name': self.program_name,
+ 'name': self.project_name,
'source_files': source_files,
'source_folders': source_folders,
'object_files': self.resources.objects,
@@ -69,11 +69,11 @@ class AtmelStudio(Exporter):
'library_paths': self.resources.lib_dirs,
'linker_script': self.resources.linker_script,
'libraries': libraries,
- 'symbols': self.get_symbols(),
+ 'symbols': self.toolchain.get_symbols(),
'solution_uuid': solution_uuid.upper(),
'project_uuid': project_uuid.upper()
}
- ctx.update(self.progen_flags)
+ ctx.update(self.flags)
target = self.target.lower()
- self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.program_name)
- self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.program_name)
+ self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.project_name)
+ self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.project_name)
diff --git a/tools/export/codered.py b/tools/export/codered.py
index d7f815a827..185e69a60d 100644
--- a/tools/export/codered.py
+++ b/tools/export/codered.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -48,13 +48,13 @@ class CodeRed(Exporter):
libraries.append(l[3:])
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'object_files': self.resources.objects,
'libraries': libraries,
- 'symbols': self.get_symbols()
+ 'symbols': self.toolchain.get_symbols()
}
- ctx.update(self.progen_flags)
+ ctx.update(self.flags)
self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
diff --git a/tools/export/coide.py b/tools/export/coide.py
index 77390afdd8..4af69986ef 100644
--- a/tools/export/coide.py
+++ b/tools/export/coide.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2014 ARM Limited
+Copyright (c) 2014-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -98,7 +98,7 @@ class CoIDE(Exporter):
self.resources.linker_script = ''
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'source_files': source_files,
'header_files': header_files,
'include_paths': self.resources.inc_dirs,
@@ -106,9 +106,9 @@ class CoIDE(Exporter):
'library_paths': self.resources.lib_dirs,
'object_files': self.resources.objects,
'libraries': libraries,
- 'symbols': self.get_symbols()
+ 'symbols': self.toolchain.get_symbols()
}
target = self.target.lower()
# Project file
- self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.program_name)
+ self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.project_name)
diff --git a/tools/export/ds5_5.py b/tools/export/ds5_5.py
index 71242efdd7..9be2535867 100644
--- a/tools/export/ds5_5.py
+++ b/tools/export/ds5_5.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -54,12 +54,12 @@ class DS5_5(Exporter):
})
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'scatter_file': self.resources.linker_script,
'object_files': self.resources.objects + self.resources.libraries,
'source_files': source_files,
- 'symbols': self.get_symbols()
+ 'symbols': self.toolchain.get_symbols()
}
target = self.target.lower()
diff --git a/tools/export/e2studio.py b/tools/export/e2studio.py
index 66cd9dec9b..205287089a 100644
--- a/tools/export/e2studio.py
+++ b/tools/export/e2studio.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -33,15 +33,15 @@ class E2Studio(Exporter):
libraries.append(l[3:])
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'object_files': self.resources.objects,
'libraries': libraries,
- 'symbols': self.get_symbols()
+ 'symbols': self.toolchain.get_symbols()
}
self.gen_file('e2studio_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('e2studio_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
self.gen_file('e2studio_%s_gdbinit.tmpl' % self.target.lower(), ctx, '.gdbinit')
- self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.program_name)
+ self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.project_name)
diff --git a/tools/export/emblocks.py b/tools/export/emblocks.py
index a5f20d2c9d..9e24199452 100644
--- a/tools/export/emblocks.py
+++ b/tools/export/emblocks.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2014 ARM Limited
+Copyright (c) 2014-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -60,7 +60,7 @@ class IntermediateFile(Exporter):
self.resources.linker_script = ''
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'target': self.target,
'toolchain': self.toolchain.name,
'source_files': source_files,
@@ -68,13 +68,13 @@ class IntermediateFile(Exporter):
'script_file': self.resources.linker_script,
'library_paths': self.resources.lib_dirs,
'libraries': libraries,
- 'symbols': self.get_symbols(),
+ 'symbols': self.toolchain.get_symbols(),
'object_files': self.resources.objects,
'sys_libs': self.toolchain.sys_libs,
- 'cc_org': self.flags['common'] + self.flags['c'],
- 'ld_org': self.flags['common'] + self.flags['ld'],
- 'cppc_org': self.flags['common'] + self.flags['cxx']
+ 'cc_org': self.flags['common_flags'] + self.flags['c_flags'],
+ 'ld_org': self.flags['common_flags'] + self.flags['ld_flags'],
+ 'cppc_org': self.flags['common_flags'] + self.flags['cxx_flags']
}
# EmBlocks intermediate file template
- self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.program_name)
+ self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.project_name)
diff --git a/tools/export/exporters.py b/tools/export/exporters.py
index d1846372e5..c3b381547c 100644
--- a/tools/export/exporters.py
+++ b/tools/export/exporters.py
@@ -1,256 +1,219 @@
"""Just a template for subclassing"""
-import uuid, shutil, os, logging, fnmatch
-from os import walk, remove
-from os.path import join, dirname, isdir, split
-from copy import copy
-from jinja2 import Template, FileSystemLoader
+import os
+import sys
+import logging
+from os.path import join, dirname, relpath
+from itertools import groupby
+from jinja2 import FileSystemLoader
from jinja2.environment import Environment
-from contextlib import closing
-from zipfile import ZipFile, ZIP_DEFLATED
-from operator import add
-from tools.utils import mkdir
-from tools.toolchains import TOOLCHAIN_CLASSES
from tools.targets import TARGET_MAP
-
-from project_generator.generate import Generator
-from project_generator.project import Project
+from project_generator.tools import tool
+from project_generator.tools_supported import ToolsSupported
from project_generator.settings import ProjectSettings
+from project_generator_definitions.definitions import ProGenDef
-from tools.config import Config
-class OldLibrariesException(Exception): pass
+class OldLibrariesException(Exception):
+ """Exception that indicates an export can not complete due to an out of date
+ library version.
+ """
+ pass
-class FailedBuildException(Exception) : pass
+class FailedBuildException(Exception):
+ """Exception that indicates that a build failed"""
+ pass
+
+class TargetNotSupportedException(Exception):
+ """Indicates that an IDE does not support a particular MCU"""
+ pass
-# Exporter descriptor for TARGETS
-# TARGETS as class attribute for backward compatibility (allows: if in Exporter.TARGETS)
class ExporterTargetsProperty(object):
+ """ Exporter descriptor for TARGETS
+ TARGETS as class attribute for backward compatibility
+ (allows: if in Exporter.TARGETS)
+ """
def __init__(self, func):
self.func = func
def __get__(self, inst, cls):
return self.func(cls)
class Exporter(object):
+ """Exporter base class
+
+ This class is meant to be extended by individual exporters, and provides a
+ few helper methods for implementing an exporter with either jinja2 or
+ progen.
+ """
TEMPLATE_DIR = dirname(__file__)
DOT_IN_RELATIVE_PATH = False
+ NAME = None
+ TARGETS = None
+ TOOLCHAIN = None
- def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None, sources_relative=True):
- self.inputDir = inputDir
+ def __init__(self, target, export_dir, project_name, toolchain,
+ extra_symbols=None, resources=None):
+ """Initialize an instance of class exporter
+ Positional arguments:
+ target - the target mcu/board for this project
+ export_dir - the directory of the exported project files
+ project_name - the name of the project
+ toolchain - an instance of class toolchain
+
+ Keyword arguments:
+ extra_symbols - a list of extra macros for the toolchain
+ resources - an instance of class Resources
+ """
+ self.export_dir = export_dir
self.target = target
- self.program_name = program_name
- self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target])
- self.build_url_resolver = build_url_resolver
+ self.project_name = project_name
+ self.toolchain = toolchain
jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
self.jinja_environment = Environment(loader=jinja_loader)
- self.extra_symbols = extra_symbols if extra_symbols else []
- self.config_macros = []
- self.sources_relative = sources_relative
- self.config_header = None
+ self.resources = resources
+ self.generated_files = []
+ self.builder_files_dict = {}
def get_toolchain(self):
+ """A helper getter function that we should probably eliminate"""
return self.TOOLCHAIN
@property
def flags(self):
- return self.toolchain.flags
+ """Returns a dictionary of toolchain flags.
+ Keys of the dictionary are:
+ cxx_flags - c++ flags
+ c_flags - c flags
+ ld_flags - linker flags
+ asm_flags - assembler flags
+ common_flags - common options
+ """
+ config_header = self.toolchain.get_config_header()
+ flags = {key + "_flags": value for key, value
+ in self.toolchain.flags.iteritems()}
+ asm_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols(True)]
+ c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()]
+ flags['asm_flags'] += asm_defines
+ flags['c_flags'] += c_defines
+ flags['cxx_flags'] += c_defines
+ if config_header:
+ config_header = relpath(config_header,
+ self.resources.file_basepath[config_header])
+ flags['c_flags'] += self.toolchain.get_config_option(config_header)
+ flags['cxx_flags'] += self.toolchain.get_config_option(
+ config_header)
+ return flags
- @property
- def progen_flags(self):
- if not hasattr(self, "_progen_flag_cache") :
- self._progen_flag_cache = dict([(key + "_flags", value) for key,value in self.flags.iteritems()])
- asm_defines = ["-D"+symbol for symbol in self.toolchain.get_symbols(True)]
- c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()]
- self._progen_flag_cache['asm_flags'] += asm_defines
- self._progen_flag_cache['c_flags'] += c_defines
- self._progen_flag_cache['cxx_flags'] += c_defines
- if self.config_header:
- self._progen_flag_cache['c_flags'] += self.toolchain.get_config_option(self.config_header)
- self._progen_flag_cache['cxx_flags'] += self.toolchain.get_config_option(self.config_header)
- return self._progen_flag_cache
-
- def __scan_and_copy(self, src_path, trg_path):
- resources = self.toolchain.scan_resources(src_path)
-
- for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
- 'objects', 'libraries', 'linker_script',
- 'lib_builds', 'lib_refs', 'hex_files', 'bin_files']:
- r = getattr(resources, r_type)
- if r:
- self.toolchain.copy_files(r, trg_path, resources=resources)
- return resources
-
- @staticmethod
- def _get_dir_grouped_files(files):
- """ Get grouped files based on the dirname """
- files_grouped = {}
- for file in files:
- rel_path = os.path.relpath(file, os.getcwd())
- dir_path = os.path.dirname(rel_path)
- if dir_path == '':
- # all files within the current dir go into Source_Files
- dir_path = 'Source_Files'
- if not dir_path in files_grouped.keys():
- files_grouped[dir_path] = []
- files_grouped[dir_path].append(file)
- return files_grouped
+ def get_source_paths(self):
+ """Returns a list of the directories where source files are contained"""
+ source_keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
+ 'objects', 'libraries']
+ source_files = []
+ for key in source_keys:
+ source_files.extend(getattr(self.resources, key))
+ return list(set([os.path.dirname(src) for src in source_files]))
def progen_get_project_data(self):
""" Get ProGen project data """
# provide default data, some tools don't require any additional
# tool specific settings
- code_files = []
- for r_type in ['c_sources', 'cpp_sources', 's_sources']:
- for file in getattr(self.resources, r_type):
- code_files.append(file)
- sources_files = code_files + self.resources.hex_files + self.resources.objects + \
- self.resources.libraries
- sources_grouped = Exporter._get_dir_grouped_files(sources_files)
- headers_grouped = Exporter._get_dir_grouped_files(self.resources.headers)
+ def make_key(src):
+ """turn a source file into it's group name"""
+ key = os.path.basename(os.path.dirname(src))
+ if not key:
+ key = os.path.basename(os.path.normpath(self.export_dir))
+ return key
- project_data = {
- 'common': {
- 'sources': sources_grouped,
- 'includes': headers_grouped,
- 'build_dir':'.build',
- 'target': [TARGET_MAP[self.target].progen['target']],
- 'macros': self.get_symbols(),
- 'export_dir': [self.inputDir],
- 'linker_file': [self.resources.linker_script],
- }
- }
+ def grouped(sources):
+ """Group the source files by their encompassing directory"""
+ data = sorted(sources, key=make_key)
+ return {k: list(g) for k, g in groupby(data, make_key)}
+
+ if self.toolchain.get_config_header():
+ config_header = self.toolchain.get_config_header()
+ config_header = relpath(config_header,
+ self.resources.file_basepath[config_header])
+ else:
+ config_header = None
+
+ # we want to add this to our include dirs
+ config_dir = os.path.dirname(config_header) if config_header else []
+
+ project_data = tool.get_tool_template()
+
+ project_data['target'] = TARGET_MAP[self.target].progen['target']
+ project_data['source_paths'] = self.get_source_paths()
+ project_data['include_paths'] = self.resources.inc_dirs + [config_dir]
+ project_data['include_files'] = grouped(self.resources.headers)
+ project_data['source_files_s'] = grouped(self.resources.s_sources)
+ project_data['source_files_c'] = grouped(self.resources.c_sources)
+ project_data['source_files_cpp'] = grouped(self.resources.cpp_sources)
+ project_data['source_files_obj'] = grouped(self.resources.objects)
+ project_data['source_files_lib'] = grouped(self.resources.libraries)
+ project_data['output_dir']['path'] = self.export_dir
+ project_data['linker_file'] = self.resources.linker_script
+ project_data['macros'] = []
+ project_data['build_dir'] = 'build'
+ project_data['template'] = None
+ project_data['name'] = self.project_name
+ project_data['output_type'] = 'exe'
+ project_data['debugger'] = None
return project_data
- def progen_gen_file(self, tool_name, project_data, progen_build=False):
- """ Generate project using ProGen Project API """
+ def progen_gen_file(self, project_data):
+ """ Generate project using ProGen Project API
+
+        Positional arguments:
+        project_data - a dict whose base keys and values are specified in
+                       progen_get_project_data; the items will have been
+                       modified by Exporter subclasses
+        """
+ if not self.check_supported(self.NAME):
+ raise TargetNotSupportedException("Target not supported")
settings = ProjectSettings()
- project = Project(self.program_name, [project_data], settings)
- # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen
- # thinks it is not dict but a file, and adds them to workspace.
- project.project['common']['include_paths'] = self.resources.inc_dirs
- project.generate(tool_name, copied=not self.sources_relative)
- if progen_build:
- print("Project exported, building...")
- result = project.build(tool_name)
- if result == -1:
- raise FailedBuildException("Build Failed")
+ exporter = ToolsSupported().get_tool(self.NAME)
+ self.builder_files_dict = {self.NAME:exporter(project_data, settings).export_project()}
+ for middle in self.builder_files_dict.values():
+ for field, thing in middle.iteritems():
+ if field == "files":
+ for filename in thing.values():
+ self.generated_files.append(filename)
- def __scan_all(self, path):
- resources = []
+ def progen_build(self):
+ """Build a project that was already generated by progen"""
+ print("Project {} exported, building for {}...".format(
+ self.project_name, self.NAME))
+ sys.stdout.flush()
+ builder = ToolsSupported().get_tool(self.NAME)
+ result = builder(self.builder_files_dict[self.NAME], ProjectSettings()).build_project()
+ if result == -1:
+ raise FailedBuildException("Build Failed")
- for root, dirs, files in walk(path):
- for d in copy(dirs):
- if d == '.' or d == '..':
- dirs.remove(d)
-
- for file in files:
- file_path = join(root, file)
- resources.append(file_path)
-
- return resources
-
- def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
- # Copy only the file for the required target and toolchain
- lib_builds = []
- # Create the configuration object
- if isinstance(prj_paths, basestring):
- prj_paths = [prj_paths]
- config = Config(self.target, prj_paths)
- for src in ['lib', 'src']:
- resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path)
- for path in prj_paths[1:]:
- resources.add(self.__scan_and_copy(join(path, src), trg_path))
-
- lib_builds.extend(resources.lib_builds)
-
- # The repository files
- #for repo_dir in resources.repo_dirs:
- # repo_files = self.__scan_all(repo_dir)
- # for path in prj_paths:
- # self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))
-
- # The libraries builds
- for bld in lib_builds:
- build_url = open(bld).read().strip()
- lib_data = self.build_url_resolver(build_url)
- lib_path = lib_data['path'].rstrip('\\/')
- self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))
-
- # Create .hg dir in mbed build dir so it's ignored when versioning
- hgdir = join(trg_path, lib_data['name'], '.hg')
- mkdir(hgdir)
- fhandle = file(join(hgdir, 'keep.me'), 'a')
- fhandle.close()
-
- if not relative:
- # Final scan of the actual exported resources
- resources = self.toolchain.scan_resources(trg_path)
- resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
- else:
- # use the prj_dir (source, not destination)
- resources = self.toolchain.scan_resources(prj_paths[0])
- for path in prj_paths[1:]:
- resources.add(toolchain.scan_resources(path))
-
- # Loads the resources into the config system which might expand/modify resources based on config data
- self.resources = config.load_resources(resources)
-
- if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED :
- # Add the configuration file to the target directory
- self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
- config.get_config_data_header(join(trg_path, self.config_header))
- self.config_macros = []
- self.resources.inc_dirs.append(".")
- else:
- # And add the configuration macros to the toolchain
- self.config_macros = config.get_config_data_macros()
+ def check_supported(self, ide):
+ """Indicated if this combination of IDE and MCU is supported"""
+ if self.target not in self.TARGETS or \
+ self.TOOLCHAIN not in TARGET_MAP[self.target].supported_toolchains:
+ return False
+ if not ProGenDef(ide).is_supported(
+ TARGET_MAP[self.target].progen['target']):
+ return False
+ return True
def gen_file(self, template_file, data, target_file):
- template_path = join(Exporter.TEMPLATE_DIR, template_file)
- template = self.jinja_environment.get_template(template_file)
+ """Generates a project file from a template using jinja"""
+ jinja_loader = FileSystemLoader(
+ os.path.dirname(os.path.abspath(__file__)))
+ jinja_environment = Environment(loader=jinja_loader)
+
+ template = jinja_environment.get_template(template_file)
target_text = template.render(data)
- target_path = join(self.inputDir, target_file)
- logging.debug("Generating: %s" % target_path)
+ target_path = join(self.export_dir, target_file)
+ logging.debug("Generating: %s", target_path)
open(target_path, "w").write(target_text)
-
- def get_symbols(self, add_extra_symbols=True):
- """ This function returns symbols which must be exported.
- Please add / overwrite symbols in each exporter separately
- """
-
- # We have extra symbols from e.g. libraries, we want to have them also added to export
- extra = self.extra_symbols if add_extra_symbols else []
- if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED:
- # If the config header is supported, we will preinclude it and do not not
- # need the macros as preprocessor flags
- return extra
-
- symbols = self.toolchain.get_symbols(True) + self.toolchain.get_symbols() \
- + self.config_macros + extra
- return symbols
-
-def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
- uid = str(uuid.uuid4())
- zipfilename = '%s.zip'%uid
-
- logging.debug("Zipping up %s to %s" % (tempdirectory, join(destination, zipfilename)))
- # make zip
- def zipdir(basedir, archivename):
- assert isdir(basedir)
- fakeroot = program_name + '/'
- with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
- for root, _, files in os.walk(basedir):
- # NOTE: ignore empty directories
- for fn in files:
- absfn = join(root, fn)
- zfn = fakeroot + '/' + absfn[len(basedir)+len(os.sep):]
- z.write(absfn, zfn)
-
- zipdir(tempdirectory, join(destination, zipfilename))
-
- if clean:
- shutil.rmtree(tempdirectory)
-
- return join(destination, zipfilename)
+ self.generated_files += [target_path]
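To make the new base-class contract concrete, here is an illustrative subclass (hypothetical, not from this patch): NAME/TARGETS/TOOLCHAIN identify the tool, progen_get_project_data() seeds the project dict, the flags property supplies the *_flags lists, and progen_gen_file() checks support and writes the files. The IAR and uVision exporters below follow the same shape.

    from tools.export.exporters import Exporter

    class MyIDE(Exporter):                    # hypothetical exporter, for illustration
        NAME = 'my_ide'                       # would need to be a progen-supported tool
        TOOLCHAIN = 'GCC_ARM'
        TARGETS = ['K64F']

        def generate(self):
            project_data = self.progen_get_project_data()
            project_data['misc'] = self.flags         # asm/c/cxx/ld/common *_flags
            self.progen_gen_file(project_data)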
diff --git a/tools/export/gccarm.py b/tools/export/gccarm.py
index 3cdb0477ff..feb46aa011 100644
--- a/tools/export/gccarm.py
+++ b/tools/export/gccarm.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -14,9 +14,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
-from exporters import Exporter
-from os.path import splitext, basename, relpath, join, abspath
+from os.path import splitext, basename, relpath, join, abspath, dirname
from os import curdir, getcwd
+from tools.export.exporters import Exporter
class GccArm(Exporter):
@@ -135,8 +135,6 @@ class GccArm(Exporter):
def generate(self):
# "make" wants Unix paths
- if self.sources_relative:
- self.resources.relative_to(self.prj_paths[0])
self.resources.win_to_unix()
to_be_compiled = []
@@ -152,19 +150,20 @@ class GccArm(Exporter):
l, _ = splitext(basename(lib))
libraries.append(l[3:])
- build_dir = abspath(join(self.inputDir, ".build"))
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'to_be_compiled': to_be_compiled,
'object_files': self.resources.objects,
'include_paths': self.resources.inc_dirs,
'library_paths': self.resources.lib_dirs,
'linker_script': self.resources.linker_script,
'libraries': libraries,
- 'symbols': self.get_symbols(),
+ 'symbols': self.toolchain.get_symbols(),
'cpu_flags': self.toolchain.cpu,
- 'vpath': [relpath(s, build_dir) for s in self.prj_paths] if self.sources_relative else [".."],
- 'hex_files': self.resources.hex_files
+ 'hex_files': self.resources.hex_files,
+ 'vpath': (["../../.."]
+ if basename(dirname(dirname(self.export_dir))) == "projectfiles"
+ else [".."])
}
for key in ['include_paths', 'library_paths', 'linker_script', 'hex_files']:
@@ -174,9 +173,5 @@ class GccArm(Exporter):
ctx[key] = ctx['vpath'][0] + "/" + ctx[key]
if "../." not in ctx["include_paths"]:
ctx["include_paths"] += ['../.']
- ctx.update(self.progen_flags)
+ ctx.update(self.flags)
self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile')
-
- def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
- self.prj_paths = prj_paths
- Exporter.scan_and_copy_resources(self, prj_paths, trg_path, relative)
diff --git a/tools/export/iar.py b/tools/export/iar.py
index 5c52a3d1ec..3ca488d666 100644
--- a/tools/export/iar.py
+++ b/tools/export/iar.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2015 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ class IAREmbeddedWorkbench(Exporter):
Exporter class for IAR Systems. This class uses project generator.
"""
# These 2 are currently for exporters backward compatibility
- NAME = 'IAR'
+ NAME = 'iar_arm'
TOOLCHAIN = 'IAR'
# PROGEN_ACTIVE contains information for exporter scripts that this is using progen
PROGEN_ACTIVE = True
@@ -50,39 +50,23 @@ class IAREmbeddedWorkbench(Exporter):
continue
return cls._targets_supported
- def generate(self, progen_build=False):
+ def generate(self):
""" Generates the project files """
project_data = self.progen_get_project_data()
- tool_specific = {}
- # Expand tool specific settings by IAR specific settings which are required
try:
if TARGET_MAP[self.target].progen['iar']['template']:
- tool_specific['iar'] = TARGET_MAP[self.target].progen['iar']
+ project_data['template']=TARGET_MAP[self.target].progen['iar']['template']
except KeyError:
# use default template
# by the mbed projects
- tool_specific['iar'] = {
- # We currently don't use misc, template sets those for us
- # 'misc': {
- # 'cxx_flags': ['--no_rtti', '--no_exceptions'],
- # 'c_flags': ['--diag_suppress=Pa050,Pa084,Pa093,Pa082'],
- # 'ld_flags': ['--skip_dynamic_initialization'],
- # },
- 'template': [os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')],
- }
+ project_data['template']=[os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')]
- project_data['tool_specific'] = {}
- project_data['tool_specific'].setdefault("iar", {})
- project_data['tool_specific']['iar'].setdefault("misc", {})
- project_data['tool_specific']['iar'].update(tool_specific['iar'])
- project_data['tool_specific']['iar']['misc'].update(self.progen_flags)
+ project_data['misc'] = self.flags
# VLA is enabled via template IccAllowVLA
- project_data['tool_specific']['iar']['misc']['c_flags'].remove("--vla")
- project_data['common']['build_dir'] = os.path.join(project_data['common']['build_dir'], 'iar_arm')
- if progen_build:
- self.progen_gen_file('iar_arm', project_data, True)
- else:
- self.progen_gen_file('iar_arm', project_data)
+ project_data['misc']['c_flags'].remove("--vla")
+ project_data['misc']['asm_flags'] = list(set(project_data['misc']['asm_flags']))
+ project_data['build_dir'] = os.path.join(project_data['build_dir'], 'iar_arm')
+ self.progen_gen_file(project_data)
# Currently not used, we should reuse folder_name to create virtual folders
class IarFolder():
diff --git a/tools/export/kds.py b/tools/export/kds.py
index 13c038deba..b77a507f17 100644
--- a/tools/export/kds.py
+++ b/tools/export/kds.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -35,13 +35,13 @@ class KDS(Exporter):
libraries.append(l[3:])
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'object_files': self.resources.objects,
'libraries': libraries,
- 'symbols': self.get_symbols()
+ 'symbols': self.toolchain.get_symbols()
}
self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
- self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.program_name)
+ self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.project_name)
diff --git a/tools/export/simplicityv3.py b/tools/export/simplicityv3.py
index 3254152127..ba6f6f185b 100644
--- a/tools/export/simplicityv3.py
+++ b/tools/export/simplicityv3.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2014 ARM Limited
+Copyright (c) 2014-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -147,7 +147,7 @@ class SimplicityV3(Exporter):
libraries.append(l[3:])
defines = []
- for define in self.get_symbols():
+ for define in self.toolchain.get_symbols():
if '=' in define:
keyval = define.split('=')
defines.append( (keyval[0], keyval[1]) )
@@ -157,7 +157,7 @@ class SimplicityV3(Exporter):
self.check_and_add_path(split(self.resources.linker_script)[0])
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'main_files': main_files,
'recursiveFolders': self.orderedPaths,
'object_files': self.resources.objects,
@@ -165,13 +165,12 @@ class SimplicityV3(Exporter):
'library_paths': self.resources.lib_dirs,
'linker_script': self.resources.linker_script,
'libraries': libraries,
- 'symbols': self.get_symbols(),
'defines': defines,
'part': self.PARTS[self.target],
'kit': self.KITS[self.target],
'loopcount': 0
}
- ctx.update(self.progen_flags)
+ ctx.update(self.flags)
## Strip main folder from include paths because ssproj is not capable of handling it
if '.' in ctx['include_paths']:
@@ -191,4 +190,4 @@ class SimplicityV3(Exporter):
print("\t" + bpath.name + "\n")
'''
- self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.program_name)
+ self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.project_name)
diff --git a/tools/export/sw4stm32.py b/tools/export/sw4stm32.py
index bacc02260c..160879f92e 100644
--- a/tools/export/sw4stm32.py
+++ b/tools/export/sw4stm32.py
@@ -65,7 +65,7 @@ class Sw4STM32(Exporter):
TARGETS = BOARDS.keys()
def __gen_dir(self, dirname):
- settings = join(self.inputDir, dirname)
+ settings = join(self.export_dir, dirname)
mkdir(settings)
def __generate_uid(self):
@@ -78,13 +78,13 @@ class Sw4STM32(Exporter):
libraries.append(l[3:])
ctx = {
- 'name': self.program_name,
+ 'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'library_paths': self.resources.lib_dirs,
'object_files': self.resources.objects,
'libraries': libraries,
- 'symbols': self.get_symbols(),
+ 'symbols': self.toolchain.get_symbols(),
'board_name': self.BOARDS[self.target.upper()]['name'],
'mcu_name': self.BOARDS[self.target.upper()]['mcuId'],
'debug_config_uid': self.__generate_uid(),
diff --git a/tools/export/uvision4.py b/tools/export/uvision4.py
index 0a76c89f82..5d3b548d74 100644
--- a/tools/export/uvision4.py
+++ b/tools/export/uvision4.py
@@ -1,6 +1,6 @@
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ class Uvision4(Exporter):
Exporter class for uvision. This class uses project generator.
"""
# These 2 are currently for exporters backward compatibility
- NAME = 'uVision4'
+ NAME = 'uvision'
TOOLCHAIN = 'ARM'
# PROGEN_ACTIVE contains information for exporter scripts that this is using progen
PROGEN_ACTIVE = True
@@ -53,7 +53,7 @@ class Uvision4(Exporter):
def get_toolchain(self):
return TARGET_MAP[self.target].default_toolchain
- def generate(self, progen_build=False):
+ def generate(self):
""" Generates the project files """
project_data = self.progen_get_project_data()
tool_specific = {}
@@ -72,25 +72,24 @@ class Uvision4(Exporter):
project_data['tool_specific'].update(tool_specific)
# get flags from toolchain and apply
- project_data['tool_specific']['uvision']['misc'] = {}
+ project_data['misc'] = {}
# need to make this a string for progen. Only adds preprocessor when "macros" set
asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(
- list(set(self.progen_flags['asm_flags'])))
- project_data['tool_specific']['uvision']['misc']['asm_flags'] = [asm_flag_string]
+ list(set(self.flags['asm_flags'])))
+ # asm flags only, common are not valid within uvision project, they are armcc specific
+ project_data['misc']['asm_flags'] = [asm_flag_string]
# cxx flags included, as uvision have them all in one tab
- project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set(
- ['-D__ASSERT_MSG'] + self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags[
- 'cxx_flags']))
+ project_data['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']
+ + self.flags['common_flags']
+ + self.flags['c_flags']
+ + self.flags['cxx_flags']))
# not compatible with c99 flag set in the template
- project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99")
+ project_data['misc']['c_flags'].remove("--c99")
# cpp is not required as it's implicit for cpp files
- project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--cpp")
+ project_data['misc']['c_flags'].remove("--cpp")
# we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it
- project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--no_vla")
- project_data['tool_specific']['uvision']['misc']['ld_flags'] = self.progen_flags['ld_flags']
+ project_data['misc']['c_flags'].remove("--no_vla")
+ project_data['misc']['ld_flags'] = self.flags['ld_flags']
- project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision4'
- if progen_build:
- self.progen_gen_file('uvision', project_data, True)
- else:
- self.progen_gen_file('uvision', project_data)
+ project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision4'
+ self.progen_gen_file(project_data)
diff --git a/tools/export/uvision5.py b/tools/export/uvision5.py
index f985b3ff88..5eb08a5803 100644
--- a/tools/export/uvision5.py
+++ b/tools/export/uvision5.py
@@ -28,7 +28,7 @@ class Uvision5(Exporter):
Exporter class for uvision5. This class uses project generator.
"""
# These 2 are currently for exporters backward compatibility
- NAME = 'uVision5'
+ NAME = 'uvision5'
TOOLCHAIN = 'ARM'
# PROGEN_ACTIVE contains information for exporter scripts that this is using progen
PROGEN_ACTIVE = True
@@ -53,7 +53,7 @@ class Uvision5(Exporter):
def get_toolchain(self):
return TARGET_MAP[self.target].default_toolchain
- def generate(self, progen_build=False):
+ def generate(self):
""" Generates the project files """
project_data = self.progen_get_project_data()
tool_specific = {}
@@ -68,27 +68,28 @@ class Uvision5(Exporter):
'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')],
}
+ #project_data['template'] = [tool_specific['uvision5']['template']]
project_data['tool_specific'] = {}
project_data['tool_specific'].update(tool_specific)
# get flags from toolchain and apply
- project_data['tool_specific']['uvision5']['misc'] = {}
-
- # need to make this a string got progen. Only adds preprocessor when "macros" set
- asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.progen_flags['asm_flags'])))
- project_data['tool_specific']['uvision5']['misc']['asm_flags'] = [asm_flag_string]
+ project_data['misc'] = {}
+ asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.flags['asm_flags'])))
+ # asm flags only, common are not valid within uvision project, they are armcc specific
+ project_data['misc']['asm_flags'] = [asm_flag_string]
# cxx flags included, as uvision have them all in one tab
- project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']+self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags']))
+ project_data['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']
+ + self.flags['common_flags']
+ + self.flags['c_flags']
+ + self.flags['cxx_flags']))
# not compatible with c99 flag set in the template
- project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99")
+ project_data['misc']['c_flags'].remove("--c99")
# cpp is not required as it's implicit for cpp files
- project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--cpp")
+ project_data['misc']['c_flags'].remove("--cpp")
# we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it
- project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--no_vla")
- project_data['tool_specific']['uvision5']['misc']['ld_flags'] = self.progen_flags['ld_flags']
+ project_data['misc']['c_flags'].remove("--no_vla")
+ project_data['misc']['ld_flags'] = self.flags['ld_flags']
- project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision5'
- if progen_build:
- self.progen_gen_file('uvision5', project_data, True)
- else:
- self.progen_gen_file('uvision5', project_data)
+ i = 0
+ project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision5'
+ self.progen_gen_file(project_data)
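A small worked example (not from the patch) of the assembler-flag munging both uVision exporters do: per-symbol -D options are de-duplicated and folded into the single --cpreproc_opts string that uVision expects (sorted here only to make the output deterministic).

    asm_flags = ['-DTARGET_K64F', '-DTOOLCHAIN_ARM', '-DTARGET_K64F']   # hypothetical symbols
    asm_flag_string = ('--cpreproc --cpreproc_opts=-D__ASSERT_MSG,'
                       + ",".join(sorted(set(asm_flags))))
    print(asm_flag_string)
    # --cpreproc --cpreproc_opts=-D__ASSERT_MSG,-DTARGET_K64F,-DTOOLCHAIN_ARM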
diff --git a/tools/export/zip.py b/tools/export/zip.py
index b9828a61a1..3961eb0622 100644
--- a/tools/export/zip.py
+++ b/tools/export/zip.py
@@ -33,9 +33,11 @@ class ZIP(Exporter):
's_sources':'2'
}
+ TOOLCHAIN = 'ARM'
+
def get_toolchain(self):
return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
def generate(self):
return True
-
\ No newline at end of file
+
diff --git a/tools/project.py b/tools/project.py
index 873e7fbe50..4fb905fbe1 100644
--- a/tools/project.py
+++ b/tools/project.py
@@ -1,3 +1,6 @@
+""" The CLI entry point for exporting projects from the mbed tools to any of the
+supported IDEs or project structures.
+"""
import sys
from os.path import join, abspath, dirname, exists, basename
ROOT = abspath(join(dirname(__file__), ".."))
@@ -5,21 +8,87 @@ sys.path.insert(0, ROOT)
from shutil import move, rmtree
from argparse import ArgumentParser
-from os import path
+from os.path import normpath, realpath
-from tools.paths import EXPORT_DIR
-from tools.export import export, EXPORTERS, mcu_ide_matrix
+from tools.paths import EXPORT_DIR, MBED_BASE, MBED_LIBRARIES
+from tools.export import EXPORTERS, mcu_ide_matrix
from tools.tests import TESTS, TEST_MAP
-from tools.tests import test_known, test_name_known
+from tools.tests import test_known, test_name_known, Test
from tools.targets import TARGET_NAMES
-from tools.libraries import LIBRARIES
-from utils import argparse_filestring_type, argparse_many, args_error
-from utils import argparse_force_lowercase_type, argparse_force_uppercase_type, argparse_dir_not_parent
-from project_api import setup_project, perform_export, print_results, get_lib_symbols
+from tools.utils import argparse_filestring_type, argparse_many, args_error
+from tools.utils import argparse_force_lowercase_type
+from tools.utils import argparse_force_uppercase_type
+from tools.project_api import export_project
+def setup_project(ide, target, program=None, source_dir=None, build=None):
+ """Generate a name, if not provided, and find dependencies
-if __name__ == '__main__':
+ Positional arguments:
+ ide - IDE or project structure that will soon be exported to
+ target - MCU that the project will build for
+
+ Keyword arguments:
+ program - the index of a test program
+ source_dir - the directory, or directories that contain all of the sources
+ build - a directory that will contain the result of the export
+ """
+    # Some libraries have extra macros (called exporter symbols) that we need
+    # to pass along to maintain compilation macro integrity between the
+    # compiled library and the header files we might use with it
+ if source_dir:
+ # --source is used to generate IDE files to toolchain directly
+ # in the source tree and doesn't generate zip file
+ project_dir = source_dir[0]
+ if program:
+ project_name = TESTS[program]
+ else:
+ project_name = basename(normpath(realpath(source_dir[0])))
+ src_paths = source_dir
+ lib_paths = None
+ else:
+ test = Test(program)
+ if not build:
+ # Substitute the mbed library builds with their sources
+ if MBED_LIBRARIES in test.dependencies:
+ test.dependencies.remove(MBED_LIBRARIES)
+ test.dependencies.append(MBED_BASE)
+
+ src_paths = [test.source_dir]
+ lib_paths = test.dependencies
+ project_name = "_".join([test.id, ide, target])
+ project_dir = join(EXPORT_DIR, project_name)
+
+ return project_dir, project_name, src_paths, lib_paths
+
+
+def export(target, ide, build=None, src=None, macros=None, project_id=None,
+ clean=False, zip_proj=False, options=None):
+ """Do an export of a project.
+
+ Positional arguments:
+ target - MCU that the project will compile for
+ ide - the IDE or project structure to export to
+
+ Keyword arguments:
+    build - whether to use the compiled mbed libraries or the sources
+ src - directory or directories that contain the source to export
+ macros - extra macros to add to the project
+ project_id - the name of the project
+ clean - start from a clean state before exporting
+    zip_proj - whether to create a zip file of the exported project
+ """
+ project_dir, name, src, lib = setup_project(ide, target, program=project_id,
+ source_dir=src, build=build)
+
+ zip_name = name+".zip" if zip_proj else None
+
+ export_project(src, project_dir, target, ide, clean=clean, name=name,
+ macros=macros, libraries_paths=lib, zip_proj=zip_name, options=options)
+
+
+def main():
+ """Entry point"""
# Parse Options
parser = ArgumentParser()
@@ -29,32 +98,35 @@ if __name__ == '__main__':
toolchainlist.sort()
parser.add_argument("-m", "--mcu",
- metavar="MCU",
- default='LPC1768',
- type=argparse_many(argparse_force_uppercase_type(targetnames, "MCU")),
- help="generate project for the given MCU (%s)"% ', '.join(targetnames))
+ metavar="MCU",
+ default='LPC1768',
+ type=argparse_force_uppercase_type(targetnames, "MCU"),
+ help="generate project for the given MCU ({})".format(
+ ', '.join(targetnames)))
parser.add_argument("-i",
- dest="ide",
- default='uvision',
- type=argparse_force_lowercase_type(toolchainlist, "toolchain"),
- help="The target IDE: %s"% str(toolchainlist))
+ dest="ide",
+ default='uvision',
+ type=argparse_force_lowercase_type(
+ toolchainlist, "toolchain"),
+ help="The target IDE: %s"% str(toolchainlist))
parser.add_argument("-c", "--clean",
- action="store_true",
- default=False,
- help="clean the export directory")
+ action="store_true",
+ default=False,
+ help="clean the export directory")
group = parser.add_mutually_exclusive_group(required=False)
- group.add_argument("-p",
- type=test_known,
- dest="program",
- help="The index of the desired test program: [0-%d]"% (len(TESTS)-1))
+ group.add_argument(
+ "-p",
+ type=test_known,
+ dest="program",
+ help="The index of the desired test program: [0-%s]"% (len(TESTS)-1))
group.add_argument("-n",
- type=test_name_known,
- dest="program",
- help="The name of the desired test program")
+ type=test_name_known,
+ dest="program",
+ help="The name of the desired test program")
parser.add_argument("-b",
dest="build",
@@ -63,40 +135,46 @@ if __name__ == '__main__':
help="use the mbed library build, instead of the sources")
group.add_argument("-L", "--list-tests",
- action="store_true",
- dest="list_tests",
- default=False,
- help="list available programs in order and exit")
+ action="store_true",
+ dest="list_tests",
+ default=False,
+ help="list available programs in order and exit")
group.add_argument("-S", "--list-matrix",
- action="store_true",
- dest="supported_ides",
- default=False,
- help="displays supported matrix of MCUs and IDEs")
+ action="store_true",
+ dest="supported_ides",
+ default=False,
+ help="displays supported matrix of MCUs and IDEs")
parser.add_argument("-E",
- action="store_true",
- dest="supported_ides_html",
- default=False,
- help="writes tools/export/README.md")
+ action="store_true",
+ dest="supported_ides_html",
+ default=False,
+ help="writes tools/export/README.md")
parser.add_argument("--source",
- action="append",
- type=argparse_filestring_type,
- dest="source_dir",
- default=[],
- help="The source (input) directory")
+ action="append",
+ type=argparse_filestring_type,
+ dest="source_dir",
+ default=[],
+ help="The source (input) directory")
parser.add_argument("-D",
- action="append",
- dest="macros",
- help="Add a macro definition")
+ action="append",
+ dest="macros",
+ help="Add a macro definition")
+
+ parser.add_argument("-o",
+ type=argparse_many(str),
+ dest="opts",
+ default=["debug-info"],
+ help="Toolchain options")
options = parser.parse_args()
# Print available tests in order and exit
if options.list_tests is True:
- print '\n'.join(map(str, sorted(TEST_MAP.values())))
+ print '\n'.join([str(test) for test in sorted(TEST_MAP.values())])
sys.exit()
# Only prints matrix of supported IDEs
@@ -108,13 +186,13 @@ if __name__ == '__main__':
if options.supported_ides_html:
html = mcu_ide_matrix(verbose_html=True)
try:
- with open("./export/README.md","w") as f:
- f.write("Exporter IDE/Platform Support\n")
- f.write("-----------------------------------\n")
- f.write("\n")
- f.write(html)
- except IOError as e:
- print "I/O error({0}): {1}".format(e.errno, e.strerror)
+ with open("./export/README.md", "w") as readme:
+ readme.write("Exporter IDE/Platform Support\n")
+ readme.write("-----------------------------------\n")
+ readme.write("\n")
+ readme.write(html)
+ except IOError as exc:
+ print "I/O error({0}): {1}".format(exc.errno, exc.strerror)
except:
print "Unexpected error:", sys.exc_info()[0]
raise
@@ -125,12 +203,9 @@ if __name__ == '__main__':
if exists(EXPORT_DIR):
rmtree(EXPORT_DIR)
- # Export results
- successes = []
- failures = []
+    zip_proj = not bool(options.source_dir)
- # source_dir = use relative paths, otherwise sources are copied
- sources_relative = True if options.source_dir else False
# Target
if not options.mcu:
args_error(parser, "argument -m/--mcu is required")
@@ -141,32 +216,12 @@ if __name__ == '__main__':
if (options.program is None) and (not options.source_dir):
args_error(parser, "one of -p, -n, or --source is required")
+ # Export to selected toolchain
+ export(options.mcu, options.ide, build=options.build,
+ src=options.source_dir, macros=options.macros,
+ project_id=options.program, clean=options.clean,
+ zip_proj=zip_proj, options=options.opts)
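+
+    # Example invocation (illustrative only; the target and IDE names depend
+    # on the installed targets and on EXPORTERS):
+    #   python tools/project.py -m K64F -i uvision -p 0 -o debug-info
+    # exports test program 0 for the K64F target as a uVision project and,
+    # since no --source is given, packages the result into a zip archive.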
- for mcu in options.mcu:
- # Program Number or name
- p, src, ide = options.program, options.source_dir, options.ide
- try:
- project_dir, project_name, project_temp = setup_project(mcu, ide, p, src, options.build)
- zip = not bool(src) # create zip when no src_dir provided
- clean = not bool(src) # don't clean when source is provided, use acrual source tree for IDE files
- # Export to selected toolchain
- lib_symbols = get_lib_symbols(options.macros, src, p)
- tmp_path, report = export(project_dir, project_name, ide, mcu, project_dir[0], project_temp, clean=clean, make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative)
- except OSError as e:
- if e.errno == 2:
- report = dict(success=False, errormsg="Library path '%s' does not exist. Ensure that the library is built." % (e.filename))
- else:
- report = dict(success=False, errormsg="An OS error occured: errno #{}".format(e.errno))
- if report['success']:
- if not zip:
- zip_path = join(project_temp, project_name)
- else:
- zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (project_name, ide, mcu))
- move(tmp_path, zip_path)
- successes.append("%s::%s\t%s"% (mcu, ide, zip_path))
- else:
- failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg']))
-
- # Prints export results
- print_results(successes, failures)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/project_api.py b/tools/project_api.py
index f0bfb04795..56ef18282e 100644
--- a/tools/project_api.py
+++ b/tools/project_api.py
@@ -1,110 +1,255 @@
+""" The new way of doing exports """
import sys
-from os.path import join, abspath, dirname, exists, basename
+from os.path import join, abspath, dirname, exists
+from os.path import basename, relpath, normpath
+from os import makedirs, walk
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
+import copy
+from shutil import rmtree
+import zipfile
-from tools.paths import EXPORT_WORKSPACE, EXPORT_TMP
-from tools.paths import MBED_BASE, MBED_LIBRARIES
-from tools.export import export, setup_user_prj
-from tools.utils import mkdir
-from tools.tests import Test, TEST_MAP, TESTS
-from tools.libraries import LIBRARIES
-
-try:
- import tools.private_settings as ps
-except:
- ps = object()
+from tools.build_api import prepare_toolchain
+from tools.build_api import scan_resources
+from tools.export import EXPORTERS
+from tools.toolchains import Resources
-def get_program(n):
- p = TEST_MAP[n].n
- return p
+def get_exporter_toolchain(ide):
+ """ Return the exporter class and the toolchain string as a tuple
+
+ Positional arguments:
+ ide - the ide name of an exporter
+ """
+ return EXPORTERS[ide], EXPORTERS[ide].TOOLCHAIN
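+
+# e.g. get_exporter_toolchain("iar") would return the IAR exporter class and
+# its TOOLCHAIN string -- illustrative only; the mapping is defined by EXPORTERS.
+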
-def get_test(p):
- return Test(p)
+def rewrite_basepath(file_name, resources, export_path, loc):
+ """ Replace the basepath of filename with export_path
+
+    Positional arguments:
+    file_name - the absolute path to a file
+    resources - the resources object that the file came from
+    export_path - the final destination of the file after export
+    loc - the prefix directory of the file within the exported project
+    """
+ new_f = join(loc, relpath(file_name, resources.file_basepath[file_name]))
+ resources.file_basepath[join(export_path, new_f)] = export_path
+ return new_f
-def get_test_from_name(n):
- if not n in TEST_MAP.keys():
- # Check if there is an alias for this in private_settings.py
- if getattr(ps, "test_alias", None) is not None:
- alias = ps.test_alias.get(n, "")
- if not alias in TEST_MAP.keys():
- return None
+def subtract_basepath(resources, export_path, loc=""):
+ """ Rewrite all of the basepaths with the export_path
+
+    Positional arguments:
+    resources - the resource object to rewrite the basepaths of
+    export_path - the final destination of the resources with respect to the
+      generated project files
+
+    Keyword arguments:
+    loc - the prefix directory under which the rewritten paths are placed
+    """
+ keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
+ 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script',
+ 'lib_dirs']
+ for key in keys:
+ vals = getattr(resources, key)
+ if isinstance(vals, set):
+ vals = list(vals)
+ if isinstance(vals, list):
+ new_vals = []
+ for val in vals:
+ new_vals.append(rewrite_basepath(val, resources, export_path,
+ loc))
+ if isinstance(getattr(resources, key), set):
+ setattr(resources, key, set(new_vals))
else:
- n = alias
- else:
- return None
- return get_program(n)
+ setattr(resources, key, new_vals)
+ elif vals:
+ setattr(resources, key, rewrite_basepath(vals, resources,
+ export_path, loc))
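+
+# Worked example (hypothetical paths, for illustration only):
+#   given resources.file_basepath["/work/lib/foo.c"] == "/work/lib" and
+#   loc == "lib", rewrite_basepath returns "lib/foo.c"; subtract_basepath
+#   therefore rewrites every entry of the resources object into a path
+#   relative to the generated project files instead of an absolute path.
+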
-def get_lib_symbols(macros, src, program):
- # Some libraries have extra macros (called by exporter symbols) to we need to pass
- # them to maintain compilation macros integrity between compiled library and
- # header files we might use with it
- lib_symbols = []
- if macros:
- lib_symbols += macros
- if src:
- return lib_symbols
- test = get_test(program)
- for lib in LIBRARIES:
- if lib['build_dir'] in test.dependencies:
- lib_macros = lib.get('macros', None)
- if lib_macros is not None:
- lib_symbols.extend(lib_macros)
+def generate_project_files(resources, export_path, target, name, toolchain, ide,
+ macros=None):
+ """Generate the project files for a project
+
+ Positional arguments:
+ resources - a Resources object containing all of the files needed to build
+ this project
+ export_path - location to place project files
+ name - name of the project
+ toolchain - a toolchain class that corresponds to the toolchain used by the
+ IDE or makefile
+ ide - IDE name to export to
+
+ Optional arguments:
+ macros - additional macros that should be defined within the exported
+ project
+ """
+ exporter_cls, _ = get_exporter_toolchain(ide)
+ exporter = exporter_cls(target, export_path, name, toolchain,
+ extra_symbols=macros, resources=resources)
+ exporter.generate()
+ files = exporter.generated_files
+ return files, exporter
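+
+# Usage sketch (illustrative; assumes `resources` and `toolchain` were produced
+# by scan_resources/prepare_toolchain as in export_project below):
+#   files, exporter = generate_project_files(resources, export_path, "K64F",
+#                                            "my_project", toolchain, "uvision")
+#   exporter.progen_build()  # some exporters can also build the generated project
+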
-def setup_project(mcu, ide, program=None, source_dir=None, build=None):
+def zip_export(file_name, prefix, resources, project_files):
+ """Create a zip file from an exported project.
- # Some libraries have extra macros (called by exporter symbols) to we need to pass
- # them to maintain compilation macros integrity between compiled library and
- # header files we might use with it
- if source_dir:
- # --source is used to generate IDE files to toolchain directly in the source tree and doesn't generate zip file
- project_dir = source_dir
- project_name = TESTS[program] if program else "Unnamed_Project"
- project_temp = join(source_dir[0], 'projectfiles', '%s_%s' % (ide, mcu))
- mkdir(project_temp)
+    Positional arguments:
+ file_name - the file name of the resulting zip file
+ prefix - a directory name that will prefix the entire zip file's contents
+    resources - a dict mapping location prefixes to resources objects whose
+      files must be included in the zip
+ project_files - a list of extra files to be added to the root of the prefix
+ directory
+ """
+ with zipfile.ZipFile(file_name, "w") as zip_file:
+ for prj_file in project_files:
+ zip_file.write(prj_file, join(prefix, basename(prj_file)))
+ for loc, resource in resources.iteritems():
+ for res in [resource] + resource.features.values():
+ extras = []
+ for directory in res.repo_dirs:
+ for root, _, files in walk(directory):
+ for repo_file in files:
+ source = join(root, repo_file)
+ extras.append(source)
+ res.file_basepath[source] = res.base_path
+ for source in \
+ res.headers + res.s_sources + res.c_sources +\
+ res.cpp_sources + res.libraries + res.hex_files + \
+ [res.linker_script] + res.bin_files + res.objects + \
+ res.json_files + res.lib_refs + res.lib_builds + \
+ res.repo_files + extras:
+ if source:
+ zip_file.write(
+ source,
+ join(prefix, loc,
+ relpath(source, res.file_basepath[source])))
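+
+# Resulting archive layout (from the calls above): each project file is written
+# to <prefix>/<file name>, while every source lands at
+# <prefix>/<loc>/<path relative to its recorded file_basepath>.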
+
+
+def export_project(src_paths, export_path, target, ide,
+ libraries_paths=None, options=None, linker_script=None,
+ clean=False, notify=None, verbose=False, name=None,
+ inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
+ config=None, macros=None, zip_proj=None):
+ """Generates a project file and creates a zip archive if specified
+
+ Positional Arguments:
+ src_paths - a list of paths from which to find source files
+ export_path - a path specifying the location of generated project files
+ target - the mbed board/mcu for which to generate the executable
+    ide - the ide for which to generate the project files
+
+ Keyword Arguments:
+ libraries_paths - paths to additional libraries
+ options - build options passed by -o flag
+ linker_script - path to the linker script for the specified target
+ clean - removes the export_path if it exists
+ notify - function is passed all events, and expected to handle notification
+ of the user, emit the events to a log, etc.
+ verbose - assigns the notify function to toolchains print_notify_verbose
+ name - project name
+ inc_dirs - additional include directories
+ jobs - number of threads
+ silent - silent build - no output
+ extra_verbose - assigns the notify function to toolchains
+ print_notify_verbose
+ config - toolchain's config object
+ macros - User-defined macros
+    zip_proj - string name of the zip archive you wish to create (exclude this
+      argument if you do not wish to create an archive)
+ """
+
+ # Convert src_path to a list if needed
+ if isinstance(src_paths, dict):
+ paths = sum(src_paths.values(), [])
+ elif isinstance(src_paths, list):
+ paths = src_paths[:]
else:
- test = get_test(program)
- if not build:
- # Substitute the library builds with the sources
- # TODO: Substitute also the other library build paths
- if MBED_LIBRARIES in test.dependencies:
- test.dependencies.remove(MBED_LIBRARIES)
- test.dependencies.append(MBED_BASE)
+ paths = [src_paths]
- # Build the project with the same directory structure of the mbed online IDE
- project_name = test.id
- project_dir = [join(EXPORT_WORKSPACE, project_name)]
- project_temp = EXPORT_TMP
- setup_user_prj(project_dir[0], test.source_dir, test.dependencies)
+    # Extend src_paths with libraries_paths
+ if libraries_paths is not None:
+ paths.extend(libraries_paths)
- return project_dir, project_name, project_temp
+ if not isinstance(src_paths, dict):
+ src_paths = {"": paths}
+
+ # Export Directory
+ if exists(export_path) and clean:
+ rmtree(export_path)
+ if not exists(export_path):
+ makedirs(export_path)
+
+ _, toolchain_name = get_exporter_toolchain(ide)
+
+    # Pass all params to the unified prepare_toolchain()
+ toolchain = prepare_toolchain(paths, target, toolchain_name,
+ macros=macros, options=options, clean=clean,
+ jobs=jobs, notify=notify, silent=silent,
+ verbose=verbose, extra_verbose=extra_verbose,
+ config=config)
+    # The first path will give the name to the project
+    if name is None:
+        name = basename(normpath(abspath(paths[0])))
+
+ # Call unified scan_resources
+ resource_dict = {loc: scan_resources(path, toolchain, inc_dirs=inc_dirs)
+ for loc, path in src_paths.iteritems()}
+ resources = Resources()
+ toolchain.build_dir = export_path
+ config_header = toolchain.get_config_header()
+ resources.headers.append(config_header)
+ resources.file_basepath[config_header] = dirname(config_header)
+
+ if zip_proj:
+ subtract_basepath(resources, export_path)
+ for loc, res in resource_dict.iteritems():
+ temp = copy.deepcopy(res)
+ subtract_basepath(temp, export_path, loc)
+ resources.add(temp)
+ else:
+ for _, res in resource_dict.iteritems():
+ resources.add(res)
+
+ # Change linker script if specified
+ if linker_script is not None:
+ resources.linker_script = linker_script
+
+ files, exporter = generate_project_files(resources, export_path,
+ target, name, toolchain, ide,
+ macros=macros)
+ files.append(config_header)
+ if zip_proj:
+ if isinstance(zip_proj, basestring):
+ zip_export(join(export_path, zip_proj), name, resource_dict, files)
+ else:
+ zip_export(zip_proj, name, resource_dict, files)
+
+ return exporter
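+
+# Usage sketch (illustrative values; "K64F" and "uvision" stand in for any
+# supported target/IDE pair):
+#   export_project(["."], "./projectfiles/uvision_K64F", "K64F", "uvision",
+#                  name="my_project", zip_proj="my_project.zip")
+# generates the uVision project files under the export path and archives them
+# together with the scanned sources into my_project.zip.
+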
-def perform_export(dir, name, ide, mcu, temp, clean=False, zip=False, lib_symbols='',
- sources_relative=False, progen_build=False):
+def print_results(successes, failures, skips=None):
+ """ Print out the results of an export process
- tmp_path, report = export(dir, name, ide, mcu, dir[0], temp, clean=clean,
- make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative,
- progen_build=progen_build)
- return tmp_path, report
+ Positional arguments:
+ successes - The list of exports that succeeded
+ failures - The list of exports that failed
-
-def print_results(successes, failures, skips = []):
+ Keyword arguments:
+ skips - The list of exports that were skipped
+ """
print
- if len(successes) > 0:
+ if successes:
print "Successful: "
for success in successes:
print " * %s" % success
- if len(failures) > 0:
+ if failures:
print "Failed: "
for failure in failures:
print " * %s" % failure
- if len(skips) > 0:
+ if skips:
print "Skipped: "
for skip in skips:
print " * %s" % skip
diff --git a/tools/test/export/build_test.py b/tools/test/export/build_test.py
index ec80fcfd00..e87b4875b1 100644
--- a/tools/test/export/build_test.py
+++ b/tools/test/export/build_test.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
"""
mbed SDK
-Copyright (c) 2011-2013 ARM Limited
+Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -16,154 +16,166 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
-
import sys
-import argparse
-import os
+from os import path, remove, rename
import shutil
-from os.path import join, abspath, dirname, exists, basename
-r=dirname(__file__)
-ROOT = abspath(join(r, "..","..",".."))
+ROOT = path.abspath(path.join(path.dirname(__file__), "..", "..", ".."))
sys.path.insert(0, ROOT)
+import argparse
from tools.export import EXPORTERS
-from tools.targets import TARGET_NAMES, TARGET_MAP
-from tools.project_api import setup_project, perform_export, print_results, get_test_from_name, get_lib_symbols
-from project_generator_definitions.definitions import ProGenDef
-from tools.utils import args_error
+from tools.targets import TARGET_NAMES
+from tools.tests import TESTS
+from tools.project import setup_project
+from tools.project_api import print_results, export_project
+from tools.tests import test_known, test_name_known, Test
+from tools.export.exporters import FailedBuildException, \
+ TargetNotSupportedException
+from tools.utils import argparse_force_lowercase_type, \
+ argparse_force_uppercase_type, argparse_many
-class ProgenBuildTest():
- def __init__(self, desired_ides, targets):
- #map of targets and the ides that can build programs for them
- self.target_ides = {}
- for target in targets:
- self.target_ides[target] =[]
- for ide in desired_ides:
- if target in EXPORTERS[ide].TARGETS:
- #target is supported by ide
- self.target_ides[target].append(ide)
- if len(self.target_ides[target]) == 0:
- del self.target_ides[target]
+class ProgenBuildTest(object):
+ """Object to encapsulate logic for progen build testing"""
+ def __init__(self, desired_ides, mcus, tests):
+ """
+ Initialize an instance of class ProgenBuildTest
+ Args:
+ desired_ides: the IDEs you wish to make/build project files for
+ mcus: the mcus to specify in project files
+ tests: the test projects to make/build project files from
+ """
+ self.ides = desired_ides
+ self.mcus = mcus
+        self.tests = tests
+
+    @property
+ def mcu_ide_pairs(self):
+ """Yields tuples of valid mcu, ide combinations"""
+ for mcu in self.mcus:
+ for ide in self.ides:
+ if mcu in EXPORTERS[ide].TARGETS:
+ yield mcu, ide
@staticmethod
- def get_pgen_targets(ides):
- #targets supported by pgen and desired ides for tests
- targs = []
- for ide in ides:
- for target in TARGET_NAMES:
- if target not in targs and hasattr(TARGET_MAP[target],'progen') \
- and ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']):
- targs.append(target)
- return targs
-
- @staticmethod
- def handle_project_files(project_dir, mcu, test, tool, clean=False):
+ def handle_log_files(project_dir, tool, name):
+ """
+ Renames/moves log files
+ Args:
+ project_dir: the directory that contains project files
+ tool: the ide that created the project files
+ name: the name of the project
+ """
log = ''
if tool == 'uvision' or tool == 'uvision5':
- log = os.path.join(project_dir,"build","build_log.txt")
+ log = path.join(project_dir, "build", "build_log.txt")
elif tool == 'iar':
- log = os.path.join(project_dir, 'build_log.txt')
+ log = path.join(project_dir, 'build_log.txt')
try:
- with open(log, 'r') as f:
- print f.read()
- except:
- return
+ with open(log, 'r') as in_log:
+ print in_log.read()
+ log_name = path.join(path.dirname(project_dir), name + "_log.txt")
- prefix = "_".join([test, mcu, tool])
- log_name = os.path.join(os.path.dirname(project_dir), prefix+"_log.txt")
+ # check if a log already exists for this platform+test+ide
+ if path.exists(log_name):
+ # delete it if so
+ remove(log_name)
+ rename(log, log_name)
+ except IOError:
+ pass
- #check if a log already exists for this platform+test+ide
- if os.path.exists(log_name):
- #delete it if so
- os.remove(log_name)
- os.rename(log, log_name)
+ def generate_and_build(self, clean=False):
+ """
+ Generate the project file and build the project
+ Args:
+ clean: a boolean value determining whether to remove the
+ created project files
- if clean:
- shutil.rmtree(project_dir, ignore_errors=True)
- return
+ Returns:
+ successes: a list of strings that contain the mcu, ide, test
+ properties of a successful build test
+            failures: a list of strings that contain the mcu, ide, test
+                properties of a failed build test
+            skips: a list of strings that contain the mcu, ide, test properties
+                of a skipped test (if the ide does not support mcu)
- def generate_and_build(self, tests, clean=False):
-
- #build results
+ """
successes = []
failures = []
skips = []
- for mcu, ides in self.target_ides.items():
- for test in tests:
- #resolve name alias
- test = get_test_from_name(test)
- for ide in ides:
- lib_symbols = get_lib_symbols(None, None, test)
- project_dir, project_name, project_temp = setup_project(mcu, ide, test)
+ for mcu, ide in self.mcu_ide_pairs:
+ for test in self.tests:
+ export_location, name, src, lib = setup_project(ide, mcu,
+ program=test)
+ test_name = Test(test).id
+ try:
+ exporter = export_project(src, export_location, mcu, ide,
+ clean=clean, name=name,
+ libraries_paths=lib)
+ exporter.progen_build()
+ successes.append("%s::%s\t%s" % (mcu, ide, test_name))
+ except FailedBuildException:
+ failures.append("%s::%s\t%s" % (mcu, ide, test_name))
+ except TargetNotSupportedException:
+ skips.append("%s::%s\t%s" % (mcu, ide, test_name))
- dest_dir = os.path.dirname(project_temp)
- destination = os.path.join(dest_dir,"_".join([project_name, mcu, ide]))
-
- tmp_path, report = perform_export(project_dir, project_name, ide, mcu, destination,
- lib_symbols=lib_symbols, progen_build = True)
-
- if report['success']:
- successes.append("build for %s::%s\t%s" % (mcu, ide, project_name))
- elif report['skip']:
- skips.append("%s::%s\t%s" % (mcu, ide, project_name))
- else:
- failures.append("%s::%s\t%s for %s" % (mcu, ide, report['errormsg'], project_name))
-
- ProgenBuildTest.handle_project_files(destination, mcu, project_name, ide, clean)
+ ProgenBuildTest.handle_log_files(export_location, ide, name)
+ if clean:
+ shutil.rmtree(export_location, ignore_errors=True)
return successes, failures, skips
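+
+# Usage sketch (illustrative values; mirrors what main() below does):
+#   test = ProgenBuildTest(["uvision"], ["K64F"],
+#                          [test_name_known("MBED_BLINKY")])
+#   successes, failures, skips = test.generate_and_build(clean=True)
+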
-if __name__ == '__main__':
- accepted_ides = ["iar", "uvision", "uvision5"]
- accepted_targets = sorted(ProgenBuildTest.get_pgen_targets(accepted_ides))
- default_tests = ["MBED_BLINKY"]
+def main():
+ """Entry point"""
+ toolchainlist = ["iar", "uvision", "uvision5"]
+ default_tests = [test_name_known("MBED_BLINKY")]
+ targetnames = TARGET_NAMES
+ targetnames.sort()
- parser = argparse.ArgumentParser(description = "Test progen builders. Leave any flag off to run with all possible options.")
- parser.add_argument("-i", "--IDEs",
- nargs = '+',
- dest="ides",
- help="tools you wish to perfrom build tests. (%s)" % ', '.join(accepted_ides),
- default = accepted_ides)
+ parser = argparse.ArgumentParser(description=
+ "Test progen builders. Leave any flag off"
+ " to run with all possible options.")
+ parser.add_argument("-i",
+ dest="ides",
+ default=toolchainlist,
+ type=argparse_many(argparse_force_lowercase_type(
+ toolchainlist, "toolchain")),
+ help="The target IDE: %s"% str(toolchainlist))
+
+ parser.add_argument(
+ "-p",
+        type=argparse_many(test_known),
+ dest="programs",
+ help="The index of the desired test program: [0-%d]" % (len(TESTS) - 1),
+ default=default_tests)
parser.add_argument("-n",
- nargs='+',
- dest="tests",
- help="names of desired test programs",
- default = default_tests)
+ type=argparse_many(test_name_known),
+ dest="programs",
+ help="The name of the desired test program",
+ default=default_tests)
- parser.add_argument("-m", "--mcus",
- nargs='+',
- dest ="targets",
- help="generate project for the given MCUs (%s)" % '\n '.join(accepted_targets),
- default = accepted_targets)
+ parser.add_argument(
+ "-m", "--mcu",
+ metavar="MCU",
+        default=['LPC1768'],
+ nargs="+",
+ type=argparse_force_uppercase_type(targetnames, "MCU"),
+ help="generate project for the given MCU (%s)" % ', '.join(targetnames))
parser.add_argument("-c", "--clean",
dest="clean",
- action = "store_true",
+ action="store_true",
help="clean up the exported project files",
default=False)
options = parser.parse_args()
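+
+    # Example invocation (illustrative): build MBED_BLINKY for K64F with IAR
+    # and clean up the generated project files afterwards:
+    #   python tools/test/export/build_test.py -m K64F -i iar -n MBED_BLINKY -c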
-
- tests = options.tests
- ides = [ide.lower() for ide in options.ides]
- targets = [target.upper() for target in options.targets]
-
- if any(get_test_from_name(test) is None for test in tests):
- args_error(parser, "[ERROR] test name not recognized")
-
- if any(target not in accepted_targets for target in targets):
- args_error(parser, "[ERROR] mcu must be one of the following:\n %s" % '\n '.join(accepted_targets))
-
- if any(ide not in accepted_ides for ide in ides):
- args_error(parser, "[ERROR] ide must be in %s" % ', '.join(accepted_ides))
-
- build_test = ProgenBuildTest(ides, targets)
- successes, failures, skips = build_test.generate_and_build(tests, options.clean)
+ test = ProgenBuildTest(options.ides, options.mcu, options.programs)
+ successes, failures, skips = test.generate_and_build(clean=options.clean)
print_results(successes, failures, skips)
sys.exit(len(failures))
-
-
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/tests.py b/tools/tests.py
index 66da1249ef..29c7101b8f 100644
--- a/tools/tests.py
+++ b/tools/tests.py
@@ -1253,7 +1253,7 @@ def test_known(string):
def test_name_known(string):
if string not in TEST_MAP.keys() and \
(getattr(ps, "test_alias", None) is None or \
- ps.test_alias.get(test_id, "") not in TEST_MAP.keys()):
+ ps.test_alias.get(string, "") not in TEST_MAP.keys()):
raise ArgumentTypeError("Program with name '{0}' not found. Supported tests are: \n{1}".format(string, columnate([t['id'] for t in TESTS])))
return TEST_MAP[string].n
diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py
index 1bb892cf8c..740a510d8e 100644
--- a/tools/toolchains/__init__.py
+++ b/tools/toolchains/__init__.py
@@ -564,6 +564,7 @@ class mbedToolchain:
# Add root to include paths
resources.inc_dirs.append(root)
+ resources.file_basepath[root] = base_path
for file in files:
file_path = join(root, file)