Refactor export subsystem

Makes several broad changes:
 - removes dead code that dealt with the online build system
 - replaces export function with a much simpler one that:
   - does not copy any sources
   - the zip file hits the disk
   - the mbed_config.h hits the disk
   - the project files hit the disk
   - nothing else hits the disk
 - exporters use Resource object scanned with a toolchain
 - progen exporters don't optionally build a project; instead they have a
   build function that may be called afterwards
 - much of the code passes pylint (has a score of 9 or above):
   - project.py
   - project_api.py
   - export/__init__.py
   - export/exporters.py
   - test/export/build_test.py
pull/2245/head
Sarah Marsh 2016-07-20 14:43:09 -05:00 committed by Jimmy Brisson
parent 5dcd546fd5
commit f6a15fd3c8
21 changed files with 805 additions and 753 deletions

View File

@ -326,7 +326,7 @@ def prepare_toolchain(src_paths, target, toolchain_name,
return toolchain return toolchain
def scan_resources(src_paths, toolchain, dependencies_paths=None, def scan_resources(src_paths, toolchain, dependencies_paths=None,
inc_dirs=None): inc_dirs=None, base_path=None):
""" Scan resources using initialized toolcain """ Scan resources using initialized toolcain
Positional arguments Positional arguments
@ -338,9 +338,9 @@ def scan_resources(src_paths, toolchain, dependencies_paths=None,
""" """
# Scan src_path # Scan src_path
resources = toolchain.scan_resources(src_paths[0]) resources = toolchain.scan_resources(src_paths[0], base_path=base_path)
for path in src_paths[1:]: for path in src_paths[1:]:
resources.add(toolchain.scan_resources(path)) resources.add(toolchain.scan_resources(path, base_path=base_path))
# Scan dependency paths for include dirs # Scan dependency paths for include dirs
if dependencies_paths is not None: if dependencies_paths is not None:

View File

@ -1,27 +1,28 @@
"""The generic interface for all exporters.
""" """
mbed SDK # mbed SDK
Copyright (c) 2011-2013 ARM Limited # Copyright (c) 2011-2013 ARM Limited
#
Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
You may obtain a copy of the License at # You may obtain a copy of the License at
#
http://www.apache.org/licenses/LICENSE-2.0 # http://www.apache.org/licenses/LICENSE-2.0
#
Unless required by applicable law or agreed to in writing, software # Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, # distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
limitations under the License. # limitations under the License.
"""
import os, tempfile import os, tempfile
from os.path import join, exists, basename from os.path import join, exists, basename
from shutil import copytree, rmtree, copy from shutil import copytree, rmtree, copy
import yaml import yaml
from tools.utils import mkdir from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar
from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio from tools.export import emblocks, coide, kds, simplicityv3, atmelstudio
from tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException, FailedBuildException from tools.export import sw4stm32, e2studio
from tools.export.exporters import OldLibrariesException, FailedBuildException
from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP
from project_generator_definitions.definitions import ProGenDef from project_generator_definitions.definitions import ProGenDef
@ -52,162 +53,25 @@ ERROR_MESSAGE_NOT_EXPORT_LIBS = """
To export this project please <a href='http://mbed.org/compiler/?import=http://mbed.org/users/mbed_official/code/mbed-export/k&mode=lib' target='_blank'>import the export version of the mbed library</a>. To export this project please <a href='http://mbed.org/compiler/?import=http://mbed.org/users/mbed_official/code/mbed-export/k&mode=lib' target='_blank'>import the export version of the mbed library</a>.
""" """
def online_build_url_resolver(url): def mcu_ide_matrix(verbose_html=False):
# TODO: Retrieve the path and name of an online library build URL """Shows target map using prettytable
return {'path':'', 'name':''}
Keyword argumets:
def export(project_path, project_name, ide, target, destination='/tmp/', verbose_html - print the matrix in html format
tempdir=None, pgen_build = False, clean=True, extra_symbols=None, make_zip=True, sources_relative=False,
build_url_resolver=online_build_url_resolver, progen_build=False):
# Convention: we are using capitals for toolchain and target names
if target is not None:
target = target.upper()
if tempdir is None:
tempdir = tempfile.mkdtemp()
use_progen = False
supported = True
report = {'success': False, 'errormsg':'', 'skip': False}
if ide is None or ide == "zip":
# Simple ZIP exporter
try:
ide = "zip"
exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
exporter.scan_and_copy_resources(project_path, tempdir, sources_relative)
exporter.generate()
report['success'] = True
except OldLibrariesException, e:
report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
else:
if ide not in EXPORTERS:
report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
report['skip'] = True
else:
Exporter = EXPORTERS[ide]
target = EXPORT_MAP.get(target, target)
try:
if Exporter.PROGEN_ACTIVE:
use_progen = True
except AttributeError:
pass
if target not in Exporter.TARGETS or Exporter.TOOLCHAIN not in TARGET_MAP[target].supported_toolchains:
supported = False
if use_progen:
if not ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']):
supported = False
if supported:
# target checked, export
try:
exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols, sources_relative=sources_relative)
exporter.scan_and_copy_resources(project_path, tempdir, sources_relative)
if progen_build:
#try to build with pgen ide builders
try:
exporter.generate(progen_build=True)
report['success'] = True
except FailedBuildException, f:
report['errormsg'] = "Build Failed"
else:
exporter.generate()
report['success'] = True
except OldLibrariesException, e:
report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
else:
report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
report['skip'] = True
zip_path = None
if report['success']:
# readme.txt to contain more exported data
exporter_yaml = {
'project_generator': {
'active' : False,
}
}
if use_progen:
try:
import pkg_resources
version = pkg_resources.get_distribution('project_generator').version
exporter_yaml['project_generator']['version'] = version
exporter_yaml['project_generator']['active'] = True;
exporter_yaml['project_generator_definitions'] = {}
version = pkg_resources.get_distribution('project_generator_definitions').version
exporter_yaml['project_generator_definitions']['version'] = version
except ImportError:
pass
with open(os.path.join(tempdir, 'exporter.yaml'), 'w') as outfile:
yaml.dump(exporter_yaml, outfile, default_flow_style=False)
# add readme file to every offline export.
open(os.path.join(tempdir, 'GettingStarted.htm'),'w').write('<meta http-equiv="refresh" content="0; url=http://mbed.org/handbook/Getting-Started-mbed-Exporters#%s"/>'% (ide))
# copy .hgignore file to exported direcotry as well.
if exists(os.path.join(exporter.TEMPLATE_DIR,'.hgignore')):
copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'), tempdir)
if make_zip:
zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)
else:
zip_path = destination
return zip_path, report
###############################################################################
# Generate project folders following the online conventions
###############################################################################
def copy_tree(src, dst, clean=True):
if exists(dst):
if clean:
rmtree(dst)
else:
return
copytree(src, dst)
def setup_user_prj(user_dir, prj_path, lib_paths=None):
""" """
Setup a project with the same directory structure of the mbed online IDE supported_ides = sorted(EXPORTERS.keys())
""" # Only use it in this function so building works without extra modules
mkdir(user_dir) from prettytable import PrettyTable, ALL
# Project Path
copy_tree(prj_path, join(user_dir, "src"))
# Project Libraries
user_lib = join(user_dir, "lib")
mkdir(user_lib)
if lib_paths is not None:
for lib_path in lib_paths:
copy_tree(lib_path, join(user_lib, basename(lib_path)))
def mcu_ide_matrix(verbose_html=False, platform_filter=None):
""" Shows target map using prettytable """
supported_ides = []
for key in EXPORTERS.iterkeys():
supported_ides.append(key)
supported_ides.sort()
from prettytable import PrettyTable, ALL # Only use it in this function so building works without extra modules
# All tests status table print # All tests status table print
columns = ["Platform"] + supported_ides table_printer = PrettyTable(["Platform"] + supported_ides)
pt = PrettyTable(columns)
# Align table # Align table
for col in columns: for col in supported_ides:
pt.align[col] = "c" table_printer.align[col] = "c"
pt.align["Platform"] = "l" table_printer.align["Platform"] = "l"
perm_counter = 0 perm_counter = 0
target_counter = 0
for target in sorted(TARGET_NAMES): for target in sorted(TARGET_NAMES):
target_counter += 1
row = [target] # First column is platform name row = [target] # First column is platform name
for ide in supported_ides: for ide in supported_ides:
text = "-" text = "-"
@ -218,20 +82,24 @@ def mcu_ide_matrix(verbose_html=False, platform_filter=None):
text = "x" text = "x"
perm_counter += 1 perm_counter += 1
row.append(text) row.append(text)
pt.add_row(row) table_printer.add_row(row)
pt.border = True table_printer.border = True
pt.vrules = ALL table_printer.vrules = ALL
pt.hrules = ALL table_printer.hrules = ALL
# creates a html page suitable for a browser
# result = pt.get_html_string(format=True) if verbose_html else pt.get_string()
# creates a html page in a shorter format suitable for readme.md # creates a html page in a shorter format suitable for readme.md
result = pt.get_html_string() if verbose_html else pt.get_string() if verbose_html:
result = table_printer.get_html_string()
else:
result = table_printer.get_string()
result += "\n" result += "\n"
result += "Total IDEs: %d\n"% (len(supported_ides)) result += "Total IDEs: %d\n"% (len(supported_ides))
if verbose_html: result += "<br>" if verbose_html:
result += "Total platforms: %d\n"% (target_counter) result += "<br>"
if verbose_html: result += "<br>" result += "Total platforms: %d\n"% (len(TARGET_NAMES))
if verbose_html:
result += "<br>"
result += "Total permutations: %d"% (perm_counter) result += "Total permutations: %d"% (perm_counter)
if verbose_html: result = result.replace("&amp;", "&") if verbose_html:
result = result.replace("&amp;", "&")
return result return result

View File

@ -61,7 +61,7 @@ class AtmelStudio(Exporter):
ctx = { ctx = {
'target': self.target, 'target': self.target,
'name': self.program_name, 'name': self.project_name,
'source_files': source_files, 'source_files': source_files,
'source_folders': source_folders, 'source_folders': source_folders,
'object_files': self.resources.objects, 'object_files': self.resources.objects,
@ -73,7 +73,7 @@ class AtmelStudio(Exporter):
'solution_uuid': solution_uuid.upper(), 'solution_uuid': solution_uuid.upper(),
'project_uuid': project_uuid.upper() 'project_uuid': project_uuid.upper()
} }
ctx.update(self.progen_flags) ctx.update(self.flags)
target = self.target.lower() target = self.target.lower()
self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.program_name) self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.project_name)
self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.program_name) self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.project_name)

View File

@ -48,13 +48,13 @@ class CodeRed(Exporter):
libraries.append(l[3:]) libraries.append(l[3:])
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script, 'linker_script': self.resources.linker_script,
'object_files': self.resources.objects, 'object_files': self.resources.objects,
'libraries': libraries, 'libraries': libraries,
'symbols': self.get_symbols() 'symbols': self.get_symbols()
} }
ctx.update(self.progen_flags) ctx.update(self.flags)
self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')

View File

@ -98,7 +98,7 @@ class CoIDE(Exporter):
self.resources.linker_script = '' self.resources.linker_script = ''
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'source_files': source_files, 'source_files': source_files,
'header_files': header_files, 'header_files': header_files,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
@ -111,4 +111,4 @@ class CoIDE(Exporter):
target = self.target.lower() target = self.target.lower()
# Project file # Project file
self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.program_name) self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.project_name)

View File

@ -54,7 +54,7 @@ class DS5_5(Exporter):
}) })
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
'scatter_file': self.resources.linker_script, 'scatter_file': self.resources.linker_script,
'object_files': self.resources.objects + self.resources.libraries, 'object_files': self.resources.objects + self.resources.libraries,

View File

@ -33,7 +33,7 @@ class E2Studio(Exporter):
libraries.append(l[3:]) libraries.append(l[3:])
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script, 'linker_script': self.resources.linker_script,
@ -44,4 +44,4 @@ class E2Studio(Exporter):
self.gen_file('e2studio_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('e2studio_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('e2studio_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') self.gen_file('e2studio_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
self.gen_file('e2studio_%s_gdbinit.tmpl' % self.target.lower(), ctx, '.gdbinit') self.gen_file('e2studio_%s_gdbinit.tmpl' % self.target.lower(), ctx, '.gdbinit')
self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.program_name) self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.project_name)

View File

@ -60,7 +60,7 @@ class IntermediateFile(Exporter):
self.resources.linker_script = '' self.resources.linker_script = ''
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'target': self.target, 'target': self.target,
'toolchain': self.toolchain.name, 'toolchain': self.toolchain.name,
'source_files': source_files, 'source_files': source_files,
@ -77,4 +77,4 @@ class IntermediateFile(Exporter):
} }
# EmBlocks intermediate file template # EmBlocks intermediate file template
self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.program_name) self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.project_name)

View File

@ -1,256 +1,227 @@
"""Just a template for subclassing""" """Just a template for subclassing"""
import uuid, shutil, os, logging, fnmatch import os
from os import walk, remove import sys
from os.path import join, dirname, isdir, split import logging
from copy import copy from os.path import join, dirname, relpath
from jinja2 import Template, FileSystemLoader from itertools import groupby
from jinja2 import FileSystemLoader
from jinja2.environment import Environment from jinja2.environment import Environment
from contextlib import closing
from zipfile import ZipFile, ZIP_DEFLATED
from operator import add
from tools.utils import mkdir
from tools.toolchains import TOOLCHAIN_CLASSES
from tools.targets import TARGET_MAP from tools.targets import TARGET_MAP
from project_generator.project import Project, ProjectTemplateInternal
from project_generator.generate import Generator
from project_generator.project import Project
from project_generator.settings import ProjectSettings from project_generator.settings import ProjectSettings
from project_generator_definitions.definitions import ProGenDef
from tools.config import Config
class OldLibrariesException(Exception): pass class OldLibrariesException(Exception):
"""Exception that indicates an export can not complete due to an out of date
library version.
"""
pass
class FailedBuildException(Exception) : pass class FailedBuildException(Exception):
"""Exception that indicates that a build failed"""
pass
class TargetNotSupportedException(Exception):
"""Indicates that an IDE does not support a particular MCU"""
pass
# Exporter descriptor for TARGETS
# TARGETS as class attribute for backward compatibility (allows: if in Exporter.TARGETS)
class ExporterTargetsProperty(object): class ExporterTargetsProperty(object):
""" Exporter descriptor for TARGETS
TARGETS as class attribute for backward compatibility
(allows: if in Exporter.TARGETS)
"""
def __init__(self, func): def __init__(self, func):
self.func = func self.func = func
def __get__(self, inst, cls): def __get__(self, inst, cls):
return self.func(cls) return self.func(cls)
class Exporter(object): class Exporter(object):
"""Exporter base class
This class is meant to be extended by individual exporters, and provides a
few helper methods for implementing an exporter with either jinja2 or
progen.
"""
TEMPLATE_DIR = dirname(__file__) TEMPLATE_DIR = dirname(__file__)
DOT_IN_RELATIVE_PATH = False DOT_IN_RELATIVE_PATH = False
NAME = None
TARGETS = None
TOOLCHAIN = None
def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None, sources_relative=True): def __init__(self, target, export_dir, project_name, toolchain,
self.inputDir = inputDir extra_symbols=None, resources=None):
"""Initialize an instance of class exporter
Positional arguments:
target - the target mcu/board for this project
export_dir - the directory of the exported project files
project_name - the name of the project
toolchain - an instance of class toolchain
extra_symbols - a list of extra macros for the toolchain
Keyword arguments:
resources - an instance of class Resources
"""
self.export_dir = export_dir
self.target = target self.target = target
self.program_name = program_name self.project_name = project_name
self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target]) self.toolchain = toolchain
self.build_url_resolver = build_url_resolver
jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__))) jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
self.jinja_environment = Environment(loader=jinja_loader) self.jinja_environment = Environment(loader=jinja_loader)
self.extra_symbols = extra_symbols if extra_symbols else [] self.resources = resources
self.config_macros = [] self.symbols = self.toolchain.get_symbols()
self.sources_relative = sources_relative self.generated_files = []
self.config_header = None self.project = None
# Add extra symbols and config file symbols to the Exporter's list of
# symbols.
config_macros = self.toolchain.config.get_config_data_macros()
if config_macros:
self.symbols.extend(config_macros)
if extra_symbols:
self.symbols.extend(extra_symbols)
def get_toolchain(self): def get_toolchain(self):
"""A helper getter function that we should probably eliminate"""
return self.TOOLCHAIN return self.TOOLCHAIN
@property @property
def flags(self): def flags(self):
return self.toolchain.flags """Returns a dictionary of toolchain flags.
Keys of the dictionary are:
cxx_flags - c++ flags
c_flags - c flags
ld_flags - linker flags
asm_flags - assembler flags
common_flags - common options
"""
config_header = self.toolchain.get_config_header()
config_header = relpath(config_header,
self.resources.file_basepath[config_header])
flags = {key + "_flags": value for key, value
in self.toolchain.flags.iteritems()}
asm_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols(True)]
c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()]
flags['asm_flags'] += asm_defines
flags['c_flags'] += c_defines
flags['cxx_flags'] += c_defines
if config_header:
flags['c_flags'] += self.toolchain.get_config_option(config_header)
flags['cxx_flags'] += self.toolchain.get_config_option(
config_header)
return flags
@property def get_source_paths(self):
def progen_flags(self): """Returns a list of the directories where source files are contained"""
if not hasattr(self, "_progen_flag_cache") : source_keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
self._progen_flag_cache = dict([(key + "_flags", value) for key,value in self.flags.iteritems()]) 'objects', 'libraries']
asm_defines = ["-D"+symbol for symbol in self.toolchain.get_symbols(True)] source_files = []
c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()] for key in source_keys:
self._progen_flag_cache['asm_flags'] += asm_defines source_files.extend(getattr(self.resources, key))
self._progen_flag_cache['c_flags'] += c_defines return list(set([os.path.dirname(src) for src in source_files]))
self._progen_flag_cache['cxx_flags'] += c_defines
if self.config_header:
self._progen_flag_cache['c_flags'] += self.toolchain.get_config_option(self.config_header)
self._progen_flag_cache['cxx_flags'] += self.toolchain.get_config_option(self.config_header)
return self._progen_flag_cache
def __scan_and_copy(self, src_path, trg_path):
resources = self.toolchain.scan_resources(src_path)
for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
'objects', 'libraries', 'linker_script',
'lib_builds', 'lib_refs', 'hex_files', 'bin_files']:
r = getattr(resources, r_type)
if r:
self.toolchain.copy_files(r, trg_path, resources=resources)
return resources
@staticmethod
def _get_dir_grouped_files(files):
""" Get grouped files based on the dirname """
files_grouped = {}
for file in files:
rel_path = os.path.relpath(file, os.getcwd())
dir_path = os.path.dirname(rel_path)
if dir_path == '':
# all files within the current dir go into Source_Files
dir_path = 'Source_Files'
if not dir_path in files_grouped.keys():
files_grouped[dir_path] = []
files_grouped[dir_path].append(file)
return files_grouped
def progen_get_project_data(self): def progen_get_project_data(self):
""" Get ProGen project data """ """ Get ProGen project data """
# provide default data, some tools don't require any additional # provide default data, some tools don't require any additional
# tool specific settings # tool specific settings
code_files = []
for r_type in ['c_sources', 'cpp_sources', 's_sources']:
for file in getattr(self.resources, r_type):
code_files.append(file)
sources_files = code_files + self.resources.hex_files + self.resources.objects + \ def make_key(src):
self.resources.libraries """turn a source file into it's group name"""
sources_grouped = Exporter._get_dir_grouped_files(sources_files) key = os.path.basename(os.path.dirname(src))
headers_grouped = Exporter._get_dir_grouped_files(self.resources.headers) if not key:
key = os.path.basename(os.path.normpath(self.export_dir))
return key
project_data = { def grouped(sources):
'common': { """Group the source files by their encompassing directory"""
'sources': sources_grouped, data = sorted(sources, key=make_key)
'includes': headers_grouped, return {k: list(g) for k, g in groupby(data, make_key)}
'build_dir':'.build',
'target': [TARGET_MAP[self.target].progen['target']], if self.toolchain.get_config_header():
'macros': self.get_symbols(), config_header = self.toolchain.get_config_header()
'export_dir': [self.inputDir], config_header = relpath(config_header,
'linker_file': [self.resources.linker_script], self.resources.file_basepath[config_header])
} else:
} config_header = None
# we want to add this to our include dirs
config_dir = os.path.dirname(config_header) if config_header else []
project_data = ProjectTemplateInternal._get_project_template()
project_data['target'] = TARGET_MAP[self.target].progen['target']
project_data['source_paths'] = self.get_source_paths()
project_data['include_paths'] = self.resources.inc_dirs + [config_dir]
project_data['include_files'] = grouped(self.resources.headers)
project_data['source_files_s'] = grouped(self.resources.s_sources)
project_data['source_files_c'] = grouped(self.resources.c_sources)
project_data['source_files_cpp'] = grouped(self.resources.cpp_sources)
project_data['source_files_obj'] = grouped(self.resources.objects)
project_data['source_files_lib'] = grouped(self.resources.libraries)
project_data['output_dir']['path'] = self.export_dir
project_data['linker_file'] = self.resources.linker_script
project_data['macros'] = self.symbols
project_data['build_dir'] = 'build'
project_data['template'] = None
project_data['name'] = self.project_name
project_data['output_type'] = 'exe'
project_data['debugger'] = None
return project_data return project_data
def progen_gen_file(self, tool_name, project_data, progen_build=False): def progen_gen_file(self, project_data):
""" Generate project using ProGen Project API """ """ Generate project using ProGen Project API
Positional arguments:
tool_name - the tool for which to generate project files
project_data - a dict whose base key, values are specified in
progen_get_project_data, the items will have been
modified by Exporter subclasses
Keyword arguments:
progen_build - A boolean that determines if the tool will build the
project
"""
if not self.check_supported(self.NAME):
raise TargetNotSupportedException("Target not supported")
settings = ProjectSettings() settings = ProjectSettings()
project = Project(self.program_name, [project_data], settings) self.project = Project(self.project_name, [project_data], settings)
# TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen self.project.project['export'] = project_data.copy()
# thinks it is not dict but a file, and adds them to workspace. self.project.generate(self.NAME, copied=False, fill=False)
project.project['common']['include_paths'] = self.resources.inc_dirs for middle in self.project.generated_files.values():
project.generate(tool_name, copied=not self.sources_relative) for field, thing in middle.iteritems():
if progen_build: if field == "files":
print("Project exported, building...") for filename in thing.values():
result = project.build(tool_name) self.generated_files.append(filename)
if result == -1:
raise FailedBuildException("Build Failed")
def __scan_all(self, path): def progen_build(self):
resources = [] """Build a project that was already generated by progen"""
print("Project {} exported, building for {}...".format(
self.project_name, self.NAME))
sys.stdout.flush()
result = self.project.build(self.NAME)
if result == -1:
raise FailedBuildException("Build Failed")
for root, dirs, files in walk(path): def check_supported(self, ide):
for d in copy(dirs): """Indicated if this combination of IDE and MCU is supported"""
if d == '.' or d == '..': if self.target not in self.TARGETS or \
dirs.remove(d) self.TOOLCHAIN not in TARGET_MAP[self.target].supported_toolchains:
return False
for file in files: if not ProGenDef(ide).is_supported(
file_path = join(root, file) TARGET_MAP[self.target].progen['target']):
resources.append(file_path) return False
return True
return resources
def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
# Copy only the file for the required target and toolchain
lib_builds = []
# Create the configuration object
if isinstance(prj_paths, basestring):
prj_paths = [prj_paths]
config = Config(self.target, prj_paths)
for src in ['lib', 'src']:
resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path)
for path in prj_paths[1:]:
resources.add(self.__scan_and_copy(join(path, src), trg_path))
lib_builds.extend(resources.lib_builds)
# The repository files
#for repo_dir in resources.repo_dirs:
# repo_files = self.__scan_all(repo_dir)
# for path in prj_paths:
# self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))
# The libraries builds
for bld in lib_builds:
build_url = open(bld).read().strip()
lib_data = self.build_url_resolver(build_url)
lib_path = lib_data['path'].rstrip('\\/')
self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))
# Create .hg dir in mbed build dir so it's ignored when versioning
hgdir = join(trg_path, lib_data['name'], '.hg')
mkdir(hgdir)
fhandle = file(join(hgdir, 'keep.me'), 'a')
fhandle.close()
if not relative:
# Final scan of the actual exported resources
resources = self.toolchain.scan_resources(trg_path)
resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
else:
# use the prj_dir (source, not destination)
resources = self.toolchain.scan_resources(prj_paths[0])
for path in prj_paths[1:]:
resources.add(toolchain.scan_resources(path))
# Loads the resources into the config system which might expand/modify resources based on config data
self.resources = config.load_resources(resources)
if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED :
# Add the configuration file to the target directory
self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
config.get_config_data_header(join(trg_path, self.config_header))
self.config_macros = []
self.resources.inc_dirs.append(".")
else:
# And add the configuration macros to the toolchain
self.config_macros = config.get_config_data_macros()
def gen_file(self, template_file, data, target_file): def gen_file(self, template_file, data, target_file):
template_path = join(Exporter.TEMPLATE_DIR, template_file) """Generates a project file from a template using jinja"""
template = self.jinja_environment.get_template(template_file) jinja_loader = FileSystemLoader(
os.path.dirname(os.path.abspath(__file__)))
jinja_environment = Environment(loader=jinja_loader)
template = jinja_environment.get_template(template_file)
target_text = template.render(data) target_text = template.render(data)
target_path = join(self.inputDir, target_file) target_path = join(self.export_dir, target_file)
logging.debug("Generating: %s" % target_path) logging.debug("Generating: %s", target_path)
open(target_path, "w").write(target_text) open(target_path, "w").write(target_text)
self.generated_files += [target_path]
def get_symbols(self, add_extra_symbols=True):
""" This function returns symbols which must be exported.
Please add / overwrite symbols in each exporter separately
"""
# We have extra symbols from e.g. libraries, we want to have them also added to export
extra = self.extra_symbols if add_extra_symbols else []
if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED:
# If the config header is supported, we will preinclude it and do not not
# need the macros as preprocessor flags
return extra
symbols = self.toolchain.get_symbols(True) + self.toolchain.get_symbols() \
+ self.config_macros + extra
return symbols
def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
uid = str(uuid.uuid4())
zipfilename = '%s.zip'%uid
logging.debug("Zipping up %s to %s" % (tempdirectory, join(destination, zipfilename)))
# make zip
def zipdir(basedir, archivename):
assert isdir(basedir)
fakeroot = program_name + '/'
with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
for root, _, files in os.walk(basedir):
# NOTE: ignore empty directories
for fn in files:
absfn = join(root, fn)
zfn = fakeroot + '/' + absfn[len(basedir)+len(os.sep):]
z.write(absfn, zfn)
zipdir(tempdirectory, join(destination, zipfilename))
if clean:
shutil.rmtree(tempdirectory)
return join(destination, zipfilename)

View File

@ -135,8 +135,6 @@ class GccArm(Exporter):
def generate(self): def generate(self):
# "make" wants Unix paths # "make" wants Unix paths
if self.sources_relative:
self.resources.relative_to(self.prj_paths[0])
self.resources.win_to_unix() self.resources.win_to_unix()
to_be_compiled = [] to_be_compiled = []
@ -152,19 +150,19 @@ class GccArm(Exporter):
l, _ = splitext(basename(lib)) l, _ = splitext(basename(lib))
libraries.append(l[3:]) libraries.append(l[3:])
build_dir = abspath(join(self.inputDir, ".build")) build_dir = abspath(join(self.export_dir, ".build"))
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'to_be_compiled': to_be_compiled, 'to_be_compiled': to_be_compiled,
'object_files': self.resources.objects, 'object_files': self.resources.objects,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
'library_paths': self.resources.lib_dirs, 'library_paths': self.resources.lib_dirs,
'linker_script': self.resources.linker_script, 'linker_script': self.resources.linker_script,
'libraries': libraries, 'libraries': libraries,
'symbols': self.get_symbols(), 'symbols': self.toolchain.get_symbols(),
'cpu_flags': self.toolchain.cpu, 'cpu_flags': self.toolchain.cpu,
'vpath': [relpath(s, build_dir) for s in self.prj_paths] if self.sources_relative else [".."], 'hex_files': self.resources.hex_files,
'hex_files': self.resources.hex_files 'vpath': [".."]
} }
for key in ['include_paths', 'library_paths', 'linker_script', 'hex_files']: for key in ['include_paths', 'library_paths', 'linker_script', 'hex_files']:
@ -174,7 +172,7 @@ class GccArm(Exporter):
ctx[key] = ctx['vpath'][0] + "/" + ctx[key] ctx[key] = ctx['vpath'][0] + "/" + ctx[key]
if "../." not in ctx["include_paths"]: if "../." not in ctx["include_paths"]:
ctx["include_paths"] += ['../.'] ctx["include_paths"] += ['../.']
ctx.update(self.progen_flags) ctx.update(self.flags)
self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile') self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile')
def scan_and_copy_resources(self, prj_paths, trg_path, relative=False): def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):

View File

@ -28,7 +28,7 @@ class IAREmbeddedWorkbench(Exporter):
Exporter class for IAR Systems. This class uses project generator. Exporter class for IAR Systems. This class uses project generator.
""" """
# These 2 are currently for exporters backward compatiblity # These 2 are currently for exporters backward compatiblity
NAME = 'IAR' NAME = 'iar_arm'
TOOLCHAIN = 'IAR' TOOLCHAIN = 'IAR'
# PROGEN_ACTIVE contains information for exporter scripts that this is using progen # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
PROGEN_ACTIVE = True PROGEN_ACTIVE = True
@ -50,39 +50,23 @@ class IAREmbeddedWorkbench(Exporter):
continue continue
return cls._targets_supported return cls._targets_supported
def generate(self, progen_build=False): def generate(self):
""" Generates the project files """ """ Generates the project files """
project_data = self.progen_get_project_data() project_data = self.progen_get_project_data()
tool_specific = {}
# Expand tool specific settings by IAR specific settings which are required
try: try:
if TARGET_MAP[self.target].progen['iar']['template']: if TARGET_MAP[self.target].progen['iar']['template']:
tool_specific['iar'] = TARGET_MAP[self.target].progen['iar'] project_data['template']=TARGET_MAP[self.target].progen['iar']['template']
except KeyError: except KeyError:
# use default template # use default template
# by the mbed projects # by the mbed projects
tool_specific['iar'] = { project_data['template']=[os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')]
# We currently don't use misc, template sets those for us
# 'misc': {
# 'cxx_flags': ['--no_rtti', '--no_exceptions'],
# 'c_flags': ['--diag_suppress=Pa050,Pa084,Pa093,Pa082'],
# 'ld_flags': ['--skip_dynamic_initialization'],
# },
'template': [os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')],
}
project_data['tool_specific'] = {} project_data['misc'] = self.flags
project_data['tool_specific'].setdefault("iar", {})
project_data['tool_specific']['iar'].setdefault("misc", {})
project_data['tool_specific']['iar'].update(tool_specific['iar'])
project_data['tool_specific']['iar']['misc'].update(self.progen_flags)
# VLA is enabled via template IccAllowVLA # VLA is enabled via template IccAllowVLA
project_data['tool_specific']['iar']['misc']['c_flags'].remove("--vla") project_data['misc']['c_flags'].remove("--vla")
project_data['common']['build_dir'] = os.path.join(project_data['common']['build_dir'], 'iar_arm') project_data['misc']['asm_flags'] = list(set(project_data['misc']['asm_flags']))
if progen_build: project_data['build_dir'] = os.path.join(project_data['build_dir'], 'iar_arm')
self.progen_gen_file('iar_arm', project_data, True) self.progen_gen_file(project_data)
else:
self.progen_gen_file('iar_arm', project_data)
# Currently not used, we should reuse folder_name to create virtual folders # Currently not used, we should reuse folder_name to create virtual folders
class IarFolder(): class IarFolder():

View File

@ -35,7 +35,7 @@ class KDS(Exporter):
libraries.append(l[3:]) libraries.append(l[3:])
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script, 'linker_script': self.resources.linker_script,
'object_files': self.resources.objects, 'object_files': self.resources.objects,
@ -44,4 +44,4 @@ class KDS(Exporter):
} }
self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.program_name) self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.project_name)

View File

@ -157,7 +157,7 @@ class SimplicityV3(Exporter):
self.check_and_add_path(split(self.resources.linker_script)[0]) self.check_and_add_path(split(self.resources.linker_script)[0])
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'main_files': main_files, 'main_files': main_files,
'recursiveFolders': self.orderedPaths, 'recursiveFolders': self.orderedPaths,
'object_files': self.resources.objects, 'object_files': self.resources.objects,
@ -171,7 +171,7 @@ class SimplicityV3(Exporter):
'kit': self.KITS[self.target], 'kit': self.KITS[self.target],
'loopcount': 0 'loopcount': 0
} }
ctx.update(self.progen_flags) ctx.update(self.flags)
## Strip main folder from include paths because ssproj is not capable of handling it ## Strip main folder from include paths because ssproj is not capable of handling it
if '.' in ctx['include_paths']: if '.' in ctx['include_paths']:
@ -191,4 +191,4 @@ class SimplicityV3(Exporter):
print("\t" + bpath.name + "\n") print("\t" + bpath.name + "\n")
''' '''
self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.program_name) self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.project_name)

View File

@ -65,7 +65,7 @@ class Sw4STM32(Exporter):
TARGETS = BOARDS.keys() TARGETS = BOARDS.keys()
def __gen_dir(self, dirname): def __gen_dir(self, dirname):
settings = join(self.inputDir, dirname) settings = join(self.export_dir, dirname)
mkdir(settings) mkdir(settings)
def __generate_uid(self): def __generate_uid(self):
@ -78,7 +78,7 @@ class Sw4STM32(Exporter):
libraries.append(l[3:]) libraries.append(l[3:])
ctx = { ctx = {
'name': self.program_name, 'name': self.project_name,
'include_paths': self.resources.inc_dirs, 'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script, 'linker_script': self.resources.linker_script,
'library_paths': self.resources.lib_dirs, 'library_paths': self.resources.lib_dirs,

View File

@ -28,7 +28,7 @@ class Uvision4(Exporter):
Exporter class for uvision. This class uses project generator. Exporter class for uvision. This class uses project generator.
""" """
# These 2 are currently for exporters backward compatiblity # These 2 are currently for exporters backward compatiblity
NAME = 'uVision4' NAME = 'uvision'
TOOLCHAIN = 'ARM' TOOLCHAIN = 'ARM'
# PROGEN_ACTIVE contains information for exporter scripts that this is using progen # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
PROGEN_ACTIVE = True PROGEN_ACTIVE = True
@ -53,7 +53,7 @@ class Uvision4(Exporter):
def get_toolchain(self): def get_toolchain(self):
return TARGET_MAP[self.target].default_toolchain return TARGET_MAP[self.target].default_toolchain
def generate(self, progen_build=False): def generate(self):
""" Generates the project files """ """ Generates the project files """
project_data = self.progen_get_project_data() project_data = self.progen_get_project_data()
tool_specific = {} tool_specific = {}
@ -72,25 +72,32 @@ class Uvision4(Exporter):
project_data['tool_specific'].update(tool_specific) project_data['tool_specific'].update(tool_specific)
# get flags from toolchain and apply # get flags from toolchain and apply
project_data['tool_specific']['uvision']['misc'] = {} project_data['misc'] = {}
# need to make this a string for progen. Only adds preprocessor when "macros" set # need to make this a string for progen. Only adds preprocessor when "macros" set
asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join( asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(
list(set(self.progen_flags['asm_flags']))) list(set(self.flags['asm_flags'])))
project_data['tool_specific']['uvision']['misc']['asm_flags'] = [asm_flag_string] # asm flags only, common are not valid within uvision project, they are armcc specific
project_data['misc']['asm_flags'] = [asm_flag_string]
# cxx flags included, as uvision have them all in one tab # cxx flags included, as uvision have them all in one tab
project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set( project_data['misc']['c_flags'] = list(set(self.flags['common_flags'] + self.flags['c_flags'] + self.flags['cxx_flags']))
['-D__ASSERT_MSG'] + self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags[
'cxx_flags']))
# not compatible with c99 flag set in the template # not compatible with c99 flag set in the template
project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99") project_data['misc']['c_flags'].remove("--c99")
# cpp is not required as it's implicit for cpp files # cpp is not required as it's implicit for cpp files
project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--cpp") project_data['misc']['c_flags'].remove("--cpp")
# we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it # we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it
project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--no_vla") project_data['misc']['c_flags'].remove("--no_vla")
project_data['tool_specific']['uvision']['misc']['ld_flags'] = self.progen_flags['ld_flags'] project_data['misc']['ld_flags'] = self.flags['ld_flags']
project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision4' i = 0
if progen_build: for macro in self.symbols:
self.progen_gen_file('uvision', project_data, True) # armasm does not like floating numbers in macros, timestamp to int
else: if macro.startswith('MBED_BUILD_TIMESTAMP'):
self.progen_gen_file('uvision', project_data) timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
project_data['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
# armasm does not even accept MACRO=string
if macro.startswith('MBED_USERNAME'):
project_data['macros'].pop(i)
i += 1
project_data['macros'].append('__ASSERT_MSG')
project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision4'
self.progen_gen_file(project_data)

View File

@ -28,7 +28,7 @@ class Uvision5(Exporter):
Exporter class for uvision5. This class uses project generator. Exporter class for uvision5. This class uses project generator.
""" """
# These 2 are currently for exporters backward compatiblity # These 2 are currently for exporters backward compatiblity
NAME = 'uVision5' NAME = 'uvision5'
TOOLCHAIN = 'ARM' TOOLCHAIN = 'ARM'
# PROGEN_ACTIVE contains information for exporter scripts that this is using progen # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
PROGEN_ACTIVE = True PROGEN_ACTIVE = True
@ -53,7 +53,7 @@ class Uvision5(Exporter):
def get_toolchain(self): def get_toolchain(self):
return TARGET_MAP[self.target].default_toolchain return TARGET_MAP[self.target].default_toolchain
def generate(self, progen_build=False): def generate(self):
""" Generates the project files """ """ Generates the project files """
project_data = self.progen_get_project_data() project_data = self.progen_get_project_data()
tool_specific = {} tool_specific = {}
@ -68,27 +68,35 @@ class Uvision5(Exporter):
'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')], 'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')],
} }
#project_data['template'] = [tool_specific['uvision5']['template']]
project_data['tool_specific'] = {} project_data['tool_specific'] = {}
project_data['tool_specific'].update(tool_specific) project_data['tool_specific'].update(tool_specific)
# get flags from toolchain and apply # get flags from toolchain and apply
project_data['tool_specific']['uvision5']['misc'] = {} project_data['misc'] = {}
asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.flags['asm_flags'])))
# need to make this a string got progen. Only adds preprocessor when "macros" set # asm flags only, common are not valid within uvision project, they are armcc specific
asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.progen_flags['asm_flags']))) project_data['misc']['asm_flags'] = [asm_flag_string]
project_data['tool_specific']['uvision5']['misc']['asm_flags'] = [asm_flag_string]
# cxx flags included, as uvision have them all in one tab # cxx flags included, as uvision have them all in one tab
project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']+self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags'])) project_data['misc']['c_flags'] = list(set(self.flags['common_flags'] + self.flags['c_flags'] + self.flags['cxx_flags']))
# not compatible with c99 flag set in the template # not compatible with c99 flag set in the template
project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99") project_data['misc']['c_flags'].remove("--c99")
# cpp is not required as it's implicit for cpp files # cpp is not required as it's implicit for cpp files
project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--cpp") project_data['misc']['c_flags'].remove("--cpp")
# we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it # we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it
project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--no_vla") project_data['misc']['c_flags'].remove("--no_vla")
project_data['tool_specific']['uvision5']['misc']['ld_flags'] = self.progen_flags['ld_flags'] project_data['misc']['ld_flags'] = self.flags['ld_flags']
project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision5' i = 0
if progen_build: for macro in self.symbols:
self.progen_gen_file('uvision5', project_data, True) # armasm does not like floating numbers in macros, timestamp to int
else: if macro.startswith('MBED_BUILD_TIMESTAMP'):
self.progen_gen_file('uvision5', project_data) timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
project_data['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
# armasm does not even accept MACRO=string
if macro.startswith('MBED_USERNAME'):
project_data['macros'].pop(i)
i += 1
project_data['macros'].append('__ASSERT_MSG')
project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision5'
self.progen_gen_file(project_data)

View File

@ -1,3 +1,6 @@
""" The CLI entry point for exporting projects from the mbed tools to any of the
supported IDEs or project structures.
"""
import sys import sys
from os.path import join, abspath, dirname, exists, basename from os.path import join, abspath, dirname, exists, basename
ROOT = abspath(join(dirname(__file__), "..")) ROOT = abspath(join(dirname(__file__), ".."))
@ -5,21 +8,87 @@ sys.path.insert(0, ROOT)
from shutil import move, rmtree from shutil import move, rmtree
from argparse import ArgumentParser from argparse import ArgumentParser
from os import path from os.path import normpath
from tools.paths import EXPORT_DIR from tools.paths import EXPORT_DIR, MBED_BASE, MBED_LIBRARIES
from tools.export import export, EXPORTERS, mcu_ide_matrix from tools.export import EXPORTERS, mcu_ide_matrix
from tools.tests import TESTS, TEST_MAP from tools.tests import TESTS, TEST_MAP
from tools.tests import test_known, test_name_known from tools.tests import test_known, test_name_known, Test
from tools.targets import TARGET_NAMES from tools.targets import TARGET_NAMES
from tools.libraries import LIBRARIES from tools.utils import argparse_filestring_type, argparse_many, args_error
from utils import argparse_filestring_type, argparse_many, args_error from tools.utils import argparse_force_lowercase_type
from utils import argparse_force_lowercase_type, argparse_force_uppercase_type, argparse_dir_not_parent from tools.utils import argparse_force_uppercase_type
from project_api import setup_project, perform_export, print_results, get_lib_symbols from tools.project_api import export_project
def setup_project(ide, target, program=None, source_dir=None, build=None):
"""Generate a name, if not provided, and find dependencies
if __name__ == '__main__': Positional arguments:
ide - IDE or project structure that will soon be exported to
target - MCU that the project will build for
Keyword arguments:
program - the index of a test program
source_dir - the directory, or directories that contain all of the sources
build - a directory that will contain the result of the export
"""
# Some libraries have extra macros (called by exporter symbols) to we need
# to pass them to maintain compilation macros integrity between compiled
# library and header files we might use with it
if source_dir:
# --source is used to generate IDE files to toolchain directly
# in the source tree and doesn't generate zip file
project_dir = source_dir[0]
if program:
project_name = TESTS[program]
else:
project_name = basename(normpath(source_dir[0]))
src_paths = source_dir
lib_paths = None
else:
test = Test(program)
if not build:
# Substitute the mbed library builds with their sources
if MBED_LIBRARIES in test.dependencies:
test.dependencies.remove(MBED_LIBRARIES)
test.dependencies.append(MBED_BASE)
src_paths = [test.source_dir]
lib_paths = test.dependencies
project_name = "_".join([test.id, ide, target])
project_dir = join(EXPORT_DIR, project_name)
return project_dir, project_name, src_paths, lib_paths
def export(target, ide, build=None, src=None, macros=None, project_id=None,
clean=False, zip_proj=False):
"""Do an export of a project.
Positional arguments:
target - MCU that the project will compile for
ide - the IDE or project structure to export to
Keyword arguments:
build - to use the compiled mbed libraries or not
src - directory or directories that contain the source to export
macros - extra macros to add to the project
project_id - the name of the project
clean - start from a clean state before exporting
zip_proj - create a zip file or not
"""
project_dir, name, src, lib = setup_project(ide, target, program=project_id,
source_dir=src, build=build)
zip_name = name+".zip" if zip_proj else None
export_project(src, project_dir, target, ide, clean=clean, name=name,
macros=macros, libraries_paths=lib, zip_proj=zip_name)
def main():
"""Entry point"""
# Parse Options # Parse Options
parser = ArgumentParser() parser = ArgumentParser()
@ -29,32 +98,36 @@ if __name__ == '__main__':
toolchainlist.sort() toolchainlist.sort()
parser.add_argument("-m", "--mcu", parser.add_argument("-m", "--mcu",
metavar="MCU", metavar="MCU",
default='LPC1768', default='LPC1768',
type=argparse_many(argparse_force_uppercase_type(targetnames, "MCU")), type=argparse_many(
help="generate project for the given MCU (%s)"% ', '.join(targetnames)) argparse_force_uppercase_type(targetnames, "MCU")),
help="generate project for the given MCU ({})".format(
', '.join(targetnames)))
parser.add_argument("-i", parser.add_argument("-i",
dest="ide", dest="ide",
default='uvision', default='uvision',
type=argparse_force_lowercase_type(toolchainlist, "toolchain"), type=argparse_force_lowercase_type(
help="The target IDE: %s"% str(toolchainlist)) toolchainlist, "toolchain"),
help="The target IDE: %s"% str(toolchainlist))
parser.add_argument("-c", "--clean", parser.add_argument("-c", "--clean",
action="store_true", action="store_true",
default=False, default=False,
help="clean the export directory") help="clean the export directory")
group = parser.add_mutually_exclusive_group(required=False) group = parser.add_mutually_exclusive_group(required=False)
group.add_argument("-p", group.add_argument(
type=test_known, "-p",
dest="program", type=test_known,
help="The index of the desired test program: [0-%d]"% (len(TESTS)-1)) dest="program",
help="The index of the desired test program: [0-%s]"% (len(TESTS)-1))
group.add_argument("-n", group.add_argument("-n",
type=test_name_known, type=test_name_known,
dest="program", dest="program",
help="The name of the desired test program") help="The name of the desired test program")
parser.add_argument("-b", parser.add_argument("-b",
dest="build", dest="build",
@ -63,40 +136,40 @@ if __name__ == '__main__':
help="use the mbed library build, instead of the sources") help="use the mbed library build, instead of the sources")
group.add_argument("-L", "--list-tests", group.add_argument("-L", "--list-tests",
action="store_true", action="store_true",
dest="list_tests", dest="list_tests",
default=False, default=False,
help="list available programs in order and exit") help="list available programs in order and exit")
group.add_argument("-S", "--list-matrix", group.add_argument("-S", "--list-matrix",
action="store_true", action="store_true",
dest="supported_ides", dest="supported_ides",
default=False, default=False,
help="displays supported matrix of MCUs and IDEs") help="displays supported matrix of MCUs and IDEs")
parser.add_argument("-E", parser.add_argument("-E",
action="store_true", action="store_true",
dest="supported_ides_html", dest="supported_ides_html",
default=False, default=False,
help="writes tools/export/README.md") help="writes tools/export/README.md")
parser.add_argument("--source", parser.add_argument("--source",
action="append", action="append",
type=argparse_filestring_type, type=argparse_filestring_type,
dest="source_dir", dest="source_dir",
default=[], default=[],
help="The source (input) directory") help="The source (input) directory")
parser.add_argument("-D", parser.add_argument("-D",
action="append", action="append",
dest="macros", dest="macros",
help="Add a macro definition") help="Add a macro definition")
options = parser.parse_args() options = parser.parse_args()
# Print available tests in order and exit # Print available tests in order and exit
if options.list_tests is True: if options.list_tests is True:
print '\n'.join(map(str, sorted(TEST_MAP.values()))) print '\n'.join([str(test) for test in sorted(TEST_MAP.values())])
sys.exit() sys.exit()
# Only prints matrix of supported IDEs # Only prints matrix of supported IDEs
@ -108,13 +181,13 @@ if __name__ == '__main__':
if options.supported_ides_html: if options.supported_ides_html:
html = mcu_ide_matrix(verbose_html=True) html = mcu_ide_matrix(verbose_html=True)
try: try:
with open("./export/README.md","w") as f: with open("./export/README.md", "w") as readme:
f.write("Exporter IDE/Platform Support\n") readme.write("Exporter IDE/Platform Support\n")
f.write("-----------------------------------\n") readme.write("-----------------------------------\n")
f.write("\n") readme.write("\n")
f.write(html) readme.write(html)
except IOError as e: except IOError as exc:
print "I/O error({0}): {1}".format(e.errno, e.strerror) print "I/O error({0}): {1}".format(exc.errno, exc.strerror)
except: except:
print "Unexpected error:", sys.exc_info()[0] print "Unexpected error:", sys.exc_info()[0]
raise raise
@ -125,12 +198,9 @@ if __name__ == '__main__':
if exists(EXPORT_DIR): if exists(EXPORT_DIR):
rmtree(EXPORT_DIR) rmtree(EXPORT_DIR)
# Export results for mcu in options.mcu:
successes = [] zip_proj = not bool(options.source_dir)
failures = []
# source_dir = use relative paths, otherwise sources are copied
sources_relative = True if options.source_dir else False
# Target # Target
if not options.mcu: if not options.mcu:
args_error(parser, "argument -m/--mcu is required") args_error(parser, "argument -m/--mcu is required")
@ -141,32 +211,12 @@ if __name__ == '__main__':
if (options.program is None) and (not options.source_dir): if (options.program is None) and (not options.source_dir):
args_error(parser, "one of -p, -n, or --source is required") args_error(parser, "one of -p, -n, or --source is required")
# Export to selected toolchain
for mcu in options.mcu: for mcu in options.mcu:
# Program Number or name export(mcu, options.ide, build=options.build, src=options.source_dir,
p, src, ide = options.program, options.source_dir, options.ide macros=options.macros, project_id=options.program,
try: clean=options.clean, zip_proj=zip_proj)
project_dir, project_name, project_temp = setup_project(mcu, ide, p, src, options.build)
zip = not bool(src) # create zip when no src_dir provided
clean = not bool(src) # don't clean when source is provided, use acrual source tree for IDE files
# Export to selected toolchain
lib_symbols = get_lib_symbols(options.macros, src, p)
tmp_path, report = export(project_dir, project_name, ide, mcu, project_dir[0], project_temp, clean=clean, make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative)
except OSError as e:
if e.errno == 2:
report = dict(success=False, errormsg="Library path '%s' does not exist. Ensure that the library is built." % (e.filename))
else:
report = dict(success=False, errormsg="An OS error occured: errno #{}".format(e.errno))
if report['success']:
if not zip:
zip_path = join(project_temp, project_name)
else:
zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (project_name, ide, mcu))
move(tmp_path, zip_path)
successes.append("%s::%s\t%s"% (mcu, ide, zip_path))
else:
failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg']))
# Prints export results if __name__ == "__main__":
print_results(successes, failures) main()

View File

@ -1,110 +1,263 @@
""" The new way of doing exports """
import sys import sys
from os.path import join, abspath, dirname, exists, basename from os.path import join, abspath, dirname, exists
from os.path import basename, relpath, normpath
from os import makedirs
ROOT = abspath(join(dirname(__file__), "..")) ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT) sys.path.insert(0, ROOT)
import copy
from shutil import rmtree
import zipfile
from tools.paths import EXPORT_WORKSPACE, EXPORT_TMP from tools.build_api import prepare_toolchain
from tools.paths import MBED_BASE, MBED_LIBRARIES from tools.build_api import scan_resources
from tools.export import export, setup_user_prj from tools.export import EXPORTERS
from tools.utils import mkdir
from tools.tests import Test, TEST_MAP, TESTS
from tools.libraries import LIBRARIES
try:
import tools.private_settings as ps
except:
ps = object()
def get_program(n): def get_exporter_toolchain(ide):
p = TEST_MAP[n].n """ Return the exporter class and the toolchain string as a tuple
return p
Positional arguments:
ide - the ide name of an exporter
"""
return EXPORTERS[ide], EXPORTERS[ide].TOOLCHAIN
def get_test(p): def rewrite_basepath(file_name, resources, export_path):
return Test(p) """ Replace the basepath of filename with export_path
Positional arguments:
file_name - the absolute path to a file
resources - the resources object that the file came from
export_path - the final destination of the file after export
"""
new_f = relpath(file_name, resources.file_basepath[file_name])
resources.file_basepath[join(export_path, new_f)] = export_path
return new_f
def get_test_from_name(n): def subtract_basepath(resources, export_path):
if not n in TEST_MAP.keys(): """ Rewrite all of the basepaths with the export_path
# Check if there is an alias for this in private_settings.py
if getattr(ps, "test_alias", None) is not None: Positional arguments:
alias = ps.test_alias.get(n, "") resources - the resource object to rewrite the basepaths of
if not alias in TEST_MAP.keys(): export_path - the final destination of the resources with respect to the
return None generated project files
else: """
n = alias keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script']
for key in keys:
vals = getattr(resources, key)
if type(vals) is list:
new_vals = []
for val in vals:
new_vals.append(rewrite_basepath(val, resources, export_path))
setattr(resources, key, new_vals)
else: else:
return None setattr(resources, key, rewrite_basepath(vals, resources,
return get_program(n) export_path))
def get_lib_symbols(macros, src, program): def prepare_project(src_paths, export_path, target, ide,
# Some libraries have extra macros (called by exporter symbols) to we need to pass libraries_paths=None, options=None, linker_script=None,
# them to maintain compilation macros integrity between compiled library and clean=False, notify=None, verbose=False, name=None,
# header files we might use with it inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
lib_symbols = [] config=None, macros=None):
if macros: """ This function normalizes the
lib_symbols += macros """
if src:
return lib_symbols # Convert src_path to a list if needed
test = get_test(program) if type(src_paths) != type([]):
for lib in LIBRARIES: src_paths = [src_paths]
if lib['build_dir'] in test.dependencies: # Extend src_paths wiht libraries_paths
lib_macros = lib.get('macros', None) if libraries_paths is not None:
if lib_macros is not None: src_paths.extend(libraries_paths)
lib_symbols.extend(lib_macros)
# Export Directory
if exists(export_path) and clean:
rmtree(export_path)
if not exists(export_path):
makedirs(export_path)
_, toolchain_name = get_exporter_toolchain(ide)
# Pass all params to the unified prepare_resources()
toolchain = prepare_toolchain(src_paths, export_path, target,
toolchain_name, macros=macros,
options=options, clean=clean, jobs=jobs,
notify=notify, silent=silent, verbose=verbose,
extra_verbose=extra_verbose, config=config)
def setup_project(mcu, ide, program=None, source_dir=None, build=None): # The first path will give the name to the library
if name is None:
name = basename(normpath(abspath(src_paths[0])))
# Some libraries have extra macros (called by exporter symbols) to we need to pass # Call unified scan_resources
# them to maintain compilation macros integrity between compiled library and resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
# header files we might use with it toolchain.build_dir = export_path
if source_dir: config_header = toolchain.get_config_header()
# --source is used to generate IDE files to toolchain directly in the source tree and doesn't generate zip file resources.headers.append(config_header)
project_dir = source_dir resources.file_basepath[config_header] = dirname(config_header)
project_name = TESTS[program] if program else "Unnamed_Project"
project_temp = join(source_dir[0], 'projectfiles', '%s_%s' % (ide, mcu))
mkdir(project_temp)
else:
test = get_test(program)
if not build:
# Substitute the library builds with the sources
# TODO: Substitute also the other library build paths
if MBED_LIBRARIES in test.dependencies:
test.dependencies.remove(MBED_LIBRARIES)
test.dependencies.append(MBED_BASE)
# Build the project with the same directory structure of the mbed online IDE # Change linker script if specified
project_name = test.id if linker_script is not None:
project_dir = [join(EXPORT_WORKSPACE, project_name)] resources.linker_script = linker_script
project_temp = EXPORT_TMP
setup_user_prj(project_dir[0], test.source_dir, test.dependencies)
return project_dir, project_name, project_temp return resources, toolchain
def perform_export(dir, name, ide, mcu, temp, clean=False, zip=False, lib_symbols='', def generate_project_files(resources, export_path, target, name, toolchain, ide,
sources_relative=False, progen_build=False): macros=None):
"""Generate the project files for a project
tmp_path, report = export(dir, name, ide, mcu, dir[0], temp, clean=clean, Positional arguments:
make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative, resources - a Resources object containing all of the files needed to build
progen_build=progen_build) this project
return tmp_path, report export_path - location to place project files
name - name of the project
toolchain - a toolchain class that corresponds to the toolchain used by the
IDE or makefile
ide - IDE name to export to
Optional arguments:
macros - additional macros that should be defined within the exported
project
"""
exporter_cls, _ = get_exporter_toolchain(ide)
exporter = exporter_cls(target, export_path, name, toolchain,
extra_symbols=macros, resources=resources)
exporter.generate()
files = exporter.generated_files
return files, exporter
def print_results(successes, failures, skips = []): def zip_export(file_name, prefix, resources, project_files):
"""Create a zip file from an exported project.
Positional Parameters:
file_name - the file name of the resulting zip file
prefix - a directory name that will prefix the entire zip file's contents
resources - a resources object with files that must be included in the zip
project_files - a list of extra files to be added to the root of the prefix
directory
"""
with zipfile.ZipFile(file_name, "w") as zip_file:
for prj_file in project_files:
zip_file.write(prj_file, join(prefix, basename(prj_file)))
for source in resources.headers + resources.s_sources + \
resources.c_sources + resources.cpp_sources + \
resources.libraries + resources.hex_files + \
[resources.linker_script] + resources.bin_files \
+ resources.objects + resources.json_files:
zip_file.write(source,
join(prefix, relpath(source,
resources.file_basepath[source])))
def export_project(src_paths, export_path, target, ide,
libraries_paths=None, options=None, linker_script=None,
clean=False, notify=None, verbose=False, name=None,
inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
config=None, macros=None, zip_proj=None):
"""Generates a project file and creates a zip archive if specified
Positional Arguments:
src_paths - a list of paths from which to find source files
export_path - a path specifying the location of generated project files
target - the mbed board/mcu for which to generate the executable
ide - the ide for which to generate the project fields
Keyword Arguments:
libraries_paths - paths to additional libraries
options - build options passed by -o flag
linker_script - path to the linker script for the specified target
clean - removes the export_path if it exists
notify - function is passed all events, and expected to handle notification
of the user, emit the events to a log, etc.
verbose - assigns the notify function to toolchains print_notify_verbose
name - project name
inc_dirs - additional include directories
jobs - number of threads
silent - silent build - no output
extra_verbose - assigns the notify function to toolchains
print_notify_verbose
config - toolchain's config object
macros - User-defined macros
zip_proj - string name of the zip archive you wish to creat (exclude arg
if you do not wish to create an archive
"""
# Convert src_path to a list if needed
if type(src_paths) != type([]):
src_paths = [src_paths]
# Extend src_paths wiht libraries_paths
if libraries_paths is not None:
src_paths.extend(libraries_paths)
# Export Directory
if exists(export_path) and clean:
rmtree(export_path)
if not exists(export_path):
makedirs(export_path)
_, toolchain_name = get_exporter_toolchain(ide)
# Pass all params to the unified prepare_resources()
toolchain = prepare_toolchain(src_paths, target, toolchain_name,
macros=macros, options=options, clean=clean,
jobs=jobs, notify=notify, silent=silent,
verbose=verbose, extra_verbose=extra_verbose,
config=config)
# The first path will give the name to the library
if name is None:
name = basename(normpath(abspath(src_paths[0])))
# Call unified scan_resources
resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
toolchain.build_dir = export_path
config_header = toolchain.get_config_header()
resources.headers.append(config_header)
resources.file_basepath[config_header] = dirname(config_header)
temp = copy.deepcopy(resources)
if zip_proj:
subtract_basepath(resources, export_path)
# Change linker script if specified
if linker_script is not None:
resources.linker_script = linker_script
files, exporter = generate_project_files(resources, export_path,
target, name, toolchain, ide,
macros=macros)
if zip_proj:
zip_export(join(export_path, zip_proj), name, temp, files)
return exporter
def print_results(successes, failures, skips=None):
""" Print out the results of an export process
Positional arguments:
successes - The list of exports that succeeded
failures - The list of exports that failed
Keyword arguments:
skips - The list of exports that were skipped
"""
print print
if len(successes) > 0: if successes:
print "Successful: " print "Successful: "
for success in successes: for success in successes:
print " * %s" % success print " * %s" % success
if len(failures) > 0: if failures:
print "Failed: " print "Failed: "
for failure in failures: for failure in failures:
print " * %s" % failure print " * %s" % failure
if len(skips) > 0: if skips:
print "Skipped: " print "Skipped: "
for skip in skips: for skip in skips:
print " * %s" % skip print " * %s" % skip

View File

@ -16,154 +16,166 @@ See the License for the specific language governing permissions and
limitations under the License. limitations under the License.
""" """
import sys import sys
import argparse from os import path, remove, rename
import os
import shutil import shutil
from os.path import join, abspath, dirname, exists, basename ROOT = path.abspath(path.join(path.dirname(__file__), "..", "..", ".."))
r=dirname(__file__)
ROOT = abspath(join(r, "..","..",".."))
sys.path.insert(0, ROOT) sys.path.insert(0, ROOT)
import argparse
from tools.export import EXPORTERS from tools.export import EXPORTERS
from tools.targets import TARGET_NAMES, TARGET_MAP from tools.targets import TARGET_NAMES
from tools.project_api import setup_project, perform_export, print_results, get_test_from_name, get_lib_symbols from tools.tests import TESTS
from project_generator_definitions.definitions import ProGenDef from tools.project import setup_project
from tools.utils import args_error from tools.project_api import print_results, export_project
from tools.tests import test_name_known, Test
from tools.export.exporters import FailedBuildException, \
TargetNotSupportedException
from tools.utils import argparse_force_lowercase_type, \
argparse_force_uppercase_type, argparse_many
class ProgenBuildTest(): class ProgenBuildTest(object):
def __init__(self, desired_ides, targets): """Object to encapsulate logic for progen build testing"""
#map of targets and the ides that can build programs for them def __init__(self, desired_ides, mcus, tests):
self.target_ides = {} """
for target in targets: Initialize an instance of class ProgenBuildTest
self.target_ides[target] =[] Args:
for ide in desired_ides: desired_ides: the IDEs you wish to make/build project files for
if target in EXPORTERS[ide].TARGETS: mcus: the mcus to specify in project files
#target is supported by ide tests: the test projects to make/build project files from
self.target_ides[target].append(ide) """
if len(self.target_ides[target]) == 0: self.ides = desired_ides
del self.target_ides[target] self.mcus = mcus
self.tests = tests
@property
def mcu_ide_pairs(self):
"""Yields tuples of valid mcu, ide combinations"""
for mcu in self.mcus:
for ide in self.ides:
if mcu in EXPORTERS[ide].TARGETS:
yield mcu, ide
@staticmethod @staticmethod
def get_pgen_targets(ides): def handle_log_files(project_dir, tool, name):
#targets supported by pgen and desired ides for tests """
targs = [] Renames/moves log files
for ide in ides: Args:
for target in TARGET_NAMES: project_dir: the directory that contains project files
if target not in targs and hasattr(TARGET_MAP[target],'progen') \ tool: the ide that created the project files
and ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']): name: the name of the project
targs.append(target) clean: a boolean value determining whether to remove the
return targs created project files
"""
@staticmethod
def handle_project_files(project_dir, mcu, test, tool, clean=False):
log = '' log = ''
if tool == 'uvision' or tool == 'uvision5': if tool == 'uvision' or tool == 'uvision5':
log = os.path.join(project_dir,"build","build_log.txt") log = path.join(project_dir, "build", "build_log.txt")
elif tool == 'iar': elif tool == 'iar':
log = os.path.join(project_dir, 'build_log.txt') log = path.join(project_dir, 'build_log.txt')
try: try:
with open(log, 'r') as f: with open(log, 'r') as in_log:
print f.read() print in_log.read()
except: log_name = path.join(path.dirname(project_dir), name + "_log.txt")
return
prefix = "_".join([test, mcu, tool]) # check if a log already exists for this platform+test+ide
log_name = os.path.join(os.path.dirname(project_dir), prefix+"_log.txt") if path.exists(log_name):
# delete it if so
remove(log_name)
rename(log, log_name)
except IOError:
pass
#check if a log already exists for this platform+test+ide def generate_and_build(self, clean=False):
if os.path.exists(log_name): """
#delete it if so Generate the project file and build the project
os.remove(log_name) Args:
os.rename(log, log_name) clean: a boolean value determining whether to remove the
created project files
if clean: Returns:
shutil.rmtree(project_dir, ignore_errors=True) successes: a list of strings that contain the mcu, ide, test
return properties of a successful build test
skips: a list of strings that contain the mcu, ide, test properties
of a skipped test (if the ide does not support mcu)
failures: a list of strings that contain the mcu, ide, test
properties of a failed build test
def generate_and_build(self, tests, clean=False): """
#build results
successes = [] successes = []
failures = [] failures = []
skips = [] skips = []
for mcu, ides in self.target_ides.items(): for mcu, ide in self.mcu_ide_pairs:
for test in tests: for test in self.tests:
#resolve name alias export_location, name, src, lib = setup_project(ide, mcu,
test = get_test_from_name(test) program=test)
for ide in ides: test_name = Test(test).id
lib_symbols = get_lib_symbols(None, None, test) try:
project_dir, project_name, project_temp = setup_project(mcu, ide, test) exporter = export_project(src, export_location, mcu, ide,
clean=clean, name=name,
libraries_paths=lib)
exporter.progen_build()
successes.append("%s::%s\t%s" % (mcu, ide, test_name))
except FailedBuildException:
failures.append("%s::%s\t%s" % (mcu, ide, test_name))
except TargetNotSupportedException:
skips.append("%s::%s\t%s" % (mcu, ide, test_name))
dest_dir = os.path.dirname(project_temp) ProgenBuildTest.handle_log_files(export_location, ide, name)
destination = os.path.join(dest_dir,"_".join([project_name, mcu, ide])) if clean:
shutil.rmtree(export_location, ignore_errors=True)
tmp_path, report = perform_export(project_dir, project_name, ide, mcu, destination,
lib_symbols=lib_symbols, progen_build = True)
if report['success']:
successes.append("build for %s::%s\t%s" % (mcu, ide, project_name))
elif report['skip']:
skips.append("%s::%s\t%s" % (mcu, ide, project_name))
else:
failures.append("%s::%s\t%s for %s" % (mcu, ide, report['errormsg'], project_name))
ProgenBuildTest.handle_project_files(destination, mcu, project_name, ide, clean)
return successes, failures, skips return successes, failures, skips
if __name__ == '__main__': def main():
accepted_ides = ["iar", "uvision", "uvision5"] """Entry point"""
accepted_targets = sorted(ProgenBuildTest.get_pgen_targets(accepted_ides)) toolchainlist = ["iar", "uvision", "uvision5"]
default_tests = ["MBED_BLINKY"] default_tests = [test_name_known("MBED_BLINKY")]
targetnames = TARGET_NAMES
targetnames.sort()
parser = argparse.ArgumentParser(description = "Test progen builders. Leave any flag off to run with all possible options.") parser = argparse.ArgumentParser(description=
parser.add_argument("-i", "--IDEs", "Test progen builders. Leave any flag off"
nargs = '+', " to run with all possible options.")
dest="ides", parser.add_argument("-i",
help="tools you wish to perfrom build tests. (%s)" % ', '.join(accepted_ides), dest="ides",
default = accepted_ides) default=toolchainlist,
type=argparse_many(argparse_force_lowercase_type(
toolchainlist, "toolchain")),
help="The target IDE: %s"% str(toolchainlist))
parser.add_argument(
"-p",
type=argparse_many(test_name_known),
dest="programs",
help="The index of the desired test program: [0-%d]" % (len(TESTS) - 1),
default=default_tests)
parser.add_argument("-n", parser.add_argument("-n",
nargs='+', type=argparse_many(test_name_known),
dest="tests", dest="programs",
help="names of desired test programs", help="The name of the desired test program",
default = default_tests) default=default_tests)
parser.add_argument("-m", "--mcus", parser.add_argument(
nargs='+', "-m", "--mcu",
dest ="targets", metavar="MCU",
help="generate project for the given MCUs (%s)" % '\n '.join(accepted_targets), default='LPC1768',
default = accepted_targets) nargs="+",
type=argparse_force_uppercase_type(targetnames, "MCU"),
help="generate project for the given MCU (%s)" % ', '.join(targetnames))
parser.add_argument("-c", "--clean", parser.add_argument("-c", "--clean",
dest="clean", dest="clean",
action = "store_true", action="store_true",
help="clean up the exported project files", help="clean up the exported project files",
default=False) default=False)
options = parser.parse_args() options = parser.parse_args()
test = ProgenBuildTest(options.ides, options.mcu, options.programs)
tests = options.tests successes, failures, skips = test.generate_and_build(clean=options.clean)
ides = [ide.lower() for ide in options.ides]
targets = [target.upper() for target in options.targets]
if any(get_test_from_name(test) is None for test in tests):
args_error(parser, "[ERROR] test name not recognized")
if any(target not in accepted_targets for target in targets):
args_error(parser, "[ERROR] mcu must be one of the following:\n %s" % '\n '.join(accepted_targets))
if any(ide not in accepted_ides for ide in ides):
args_error(parser, "[ERROR] ide must be in %s" % ', '.join(accepted_ides))
build_test = ProgenBuildTest(ides, targets)
successes, failures, skips = build_test.generate_and_build(tests, options.clean)
print_results(successes, failures, skips) print_results(successes, failures, skips)
sys.exit(len(failures)) sys.exit(len(failures))
if __name__ == "__main__":
main()

View File

@ -1253,7 +1253,7 @@ def test_known(string):
def test_name_known(string): def test_name_known(string):
if string not in TEST_MAP.keys() and \ if string not in TEST_MAP.keys() and \
(getattr(ps, "test_alias", None) is None or \ (getattr(ps, "test_alias", None) is None or \
ps.test_alias.get(test_id, "") not in TEST_MAP.keys()): ps.test_alias.get(string, "") not in TEST_MAP.keys()):
raise ArgumentTypeError("Program with name '{0}' not found. Supported tests are: \n{1}".format(string, columnate([t['id'] for t in TESTS]))) raise ArgumentTypeError("Program with name '{0}' not found. Supported tests are: \n{1}".format(string, columnate([t['id'] for t in TESTS])))
return TEST_MAP[string].n return TEST_MAP[string].n

View File

@ -564,6 +564,7 @@ class mbedToolchain:
# Add root to include paths # Add root to include paths
resources.inc_dirs.append(root) resources.inc_dirs.append(root)
resources.file_basepath[root] = base_path
for file in files: for file in files:
file_path = join(root, file) file_path = join(root, file)