mirror of https://github.com/ARMmbed/mbed-os.git
Merge pull request #3172 from sarahmarshy/export-test-rev
[Exporters] New export-build tests (pull/3182/head)
commit 2502a795b9

@@ -18,7 +18,6 @@
from tools.export import codered, ds5_5, iar, makefile
from tools.export import emblocks, coide, kds, simplicityv3, atmelstudio
from tools.export import sw4stm32, e2studio, zip, cmsis, uvision, cdt
from tools.export.exporters import OldLibrariesException, FailedBuildException
from tools.targets import TARGET_NAMES

EXPORTERS = {
@@ -30,38 +30,41 @@ class DeviceCMSIS():
    """CMSIS Device class

    Encapsulates target information retrieved by arm-pack-manager"""

    CACHE = Cache(True, False)
    def __init__(self, target):
        cache = Cache(True, False)

        t = TARGET_MAP[target]
        self.core = t.core
        try:
            cpu_name = t.device_name
            target_info = cache.index[cpu_name]
        # Target does not have device name or pdsc file
        except:
            try:
                # Try to find the core as a generic CMSIS target
                cpu_name = self.cpu_cmsis()
                target_info = cache.index[cpu_name]
            except:
                raise TargetNotSupportedException("Target not in CMSIS packs")

        self.target_info = target_info
        target_info = self.check_supported(target)
        if not target_info:
            raise TargetNotSupportedException("Target not supported in CMSIS pack")

        self.url = target_info['pdsc_file']
        self.pack_url, self.pack_id = ntpath.split(self.url)
        self.dname = cpu_name
        self.dname = target_info["_cpu_name"]
        self.core = target_info["_core"]
        self.dfpu = target_info['processor']['fpu']
        self.debug, self.dvendor = self.vendor_debug(target_info['vendor'])
        self.dendian = target_info['processor'].get('endianness','Little-endian')
        self.debug_svd = target_info.get('debug', '')
        self.compile_header = target_info['compile']['header']
        self.target_info = target_info

    def check_version(self, filename):
        with open(filename) as data_file:
            data = json.load(data_file)
        return data.get("version", "0") == "0.1.0"
    @staticmethod
    def check_supported(target):
        t = TARGET_MAP[target]
        try:
            cpu_name = t.device_name
            target_info = DeviceCMSIS.CACHE.index[cpu_name]
        # Target does not have device name or pdsc file
        except:
            try:
                # Try to find the core as a generic CMSIS target
                cpu_name = DeviceCMSIS.cpu_cmsis(t.core)
                target_info = DeviceCMSIS.index[cpu_name]
            except:
                return False
        target_info["_cpu_name"] = cpu_name
        target_info["_core"] = t.core
        return target_info
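
A minimal usage sketch of check_supported as a pre-filter, assuming the pack cache has been populated; the target name "K64F" is illustrative, not taken from this hunk.

# Hedged sketch: probe CMSIS-pack support without constructing a DeviceCMSIS object.
# "K64F" is an illustrative target name.
from tools.export.cmsis import DeviceCMSIS

info = DeviceCMSIS.check_supported("K64F")
if info:
    # On success, the pack index entry comes back annotated with the looked-up
    # CPU name and core (the "_cpu_name"/"_core" keys set above).
    print("%s / %s" % (info["_cpu_name"], info["_core"]))
else:
    # check_supported returns False when no PDSC entry can be found.
    print("Target not supported in CMSIS packs")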

    def vendor_debug(self, vendor):
        reg = "([\w\s]+):?\d*?"

@@ -74,9 +77,9 @@ class DeviceCMSIS():
        }
        return debug_map.get(vendor_match, "CMSIS-DAP"), vendor_match

    def cpu_cmsis(self):
    @staticmethod
    def cpu_cmsis(cpu):
        #Cortex-M4F => ARMCM4_FP, Cortex-M0+ => ARMCM0P
        cpu = self.core
        cpu = cpu.replace("Cortex-","ARMC")
        cpu = cpu.replace("+","P")
        cpu = cpu.replace("F","_FP")

@@ -11,16 +11,6 @@ import copy
from tools.targets import TARGET_MAP


class OldLibrariesException(Exception):
    """Exception that indicates an export can not complete due to an out of date
    library version.
    """
    pass

class FailedBuildException(Exception):
    """Exception that indicates that a build failed"""
    pass

class TargetNotSupportedException(Exception):
    """Indicates that an IDE does not support a particular MCU"""
    pass

@@ -119,13 +109,6 @@ class Exporter(object):
            source_files.extend(getattr(self.resources, key))
        return list(set([os.path.dirname(src) for src in source_files]))

    def check_supported(self):
        """Indicated if this combination of IDE and MCU is supported"""
        if self.target not in self.TARGETS or \
            self.TOOLCHAIN not in TARGET_MAP[self.target].supported_toolchains:
            raise TargetNotSupportedException()
        return True

    def gen_file(self, template_file, data, target_file):
        """Generates a project file from a template using jinja"""
        jinja_loader = FileSystemLoader(

@@ -153,9 +136,31 @@ class Exporter(object):
    def group_project_files(self, sources):
        """Group the source files by their encompassing directory
        Positional Arguments:
        sources - array of sourc locations
        sources - array of source locations

        Returns a dictionary of {group name: list of source locations}
        """
        data = sorted(sources, key=self.make_key)
        return {k: list(g) for k,g in groupby(data, self.make_key)}
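
A hedged illustration of the grouping described in the docstring, assuming make_key returns the directory portion of a source path (make_key itself is not shown in this hunk).

# Illustration only; assumes make_key maps a source path to its directory name.
from itertools import groupby
import os

def make_key(src):
    return os.path.dirname(src)

sources = ["lib/a.c", "lib/b.c", "src/main.cpp"]
data = sorted(sources, key=make_key)
groups = {k: list(g) for k, g in groupby(data, make_key)}
# groups == {"lib": ["lib/a.c", "lib/b.c"], "src": ["src/main.cpp"]}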

    @staticmethod
    def build(project_name, log_name='build_log.txt', cleanup=True):
        """Invoke exporter's build command within a subprocess.
        This method is assumed to be executed at the same level as exporter
        project files and project source code.
        See uvision/__init__.py, iar/__init__.py, and makefile/__init__.py for
        example implementations.

        Positional Arguments:
        project_name - the name of the project to build; often required by
        exporter's build command.

        Keyword Args:
        log_name - name of the build log to create. Written and printed out,
        deleted if cleanup = True
        cleanup - a boolean dictating whether exported project files and
        build log are removed after build

        Returns -1 on failure and 0 on success
        """
        raise NotImplementedError("Implement in derived Exporter class.")
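
A minimal sketch of the contract documented above for a derived exporter: run the IDE's command-line builder from the export directory, optionally clean up, and return -1 on failure or 0 on success. The class name and builder command are placeholders, not part of this diff.

# Hedged sketch of the build() contract; "some_ide_builder" is a placeholder command.
import os
import subprocess

class ExampleExporterSketch(object):
    @staticmethod
    def build(project_name, log_name='build_log.txt', cleanup=True):
        with open(log_name, 'w') as log:
            ret = subprocess.call(["some_ide_builder", project_name],
                                  stdout=log, stderr=log)
        if cleanup:
            os.remove(log_name)
        return -1 if ret != 0 else 0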

@@ -2,12 +2,12 @@ import os
from os.path import sep, join, exists
from collections import namedtuple
from subprocess import Popen, PIPE
from distutils.spawn import find_executable
import shutil
import re
import sys

from tools.targets import TARGET_MAP
from tools.export.exporters import Exporter, FailedBuildException
from tools.export.exporters import Exporter
import json
from tools.export.cmsis import DeviceCMSIS
from multiprocessing import cpu_count

@@ -29,7 +29,8 @@ class IAR(Exporter):
    #iar_definitions.json
    TARGETS = [target for target, obj in TARGET_MAP.iteritems()
               if hasattr(obj, 'device_name') and
               obj.device_name in IAR_DEFS.keys()]
               obj.device_name in IAR_DEFS.keys() and "IAR" in obj.supported_toolchains
               and DeviceCMSIS.check_supported(target)]

    SPECIAL_TEMPLATES = {
        'rz_a1h' : 'iar/iar_rz_a1h.ewp.tmpl',

@@ -120,22 +121,13 @@ class IAR(Exporter):
        self.gen_file('iar/ewd.tmpl', ctx, self.project_name + ".ewd")
        self.gen_file(self.get_ewp_template(), ctx, self.project_name + ".ewp")

    def build(self):
    @staticmethod
    def build(project_name, cleanup=True):
        """ Build IAR project """
        # > IarBuild [project_path] -build [project_name]
        proj_file = join(self.export_dir, self.project_name + ".ewp")

        if find_executable("IarBuild"):
            iar_exe = "IarBuild.exe"
        else:
            iar_exe = join('C:', sep,
                           'Program Files (x86)', 'IAR Systems',
                           'Embedded Workbench 7.5', 'common', 'bin',
                           'IarBuild.exe')
            if not exists(iar_exe):
                raise Exception("IarBuild.exe not found. Add to path.")

        cmd = [iar_exe, proj_file, '-build', self.project_name]
        proj_file = project_name + ".ewp"
        cmd = ["IarBuild.exe", proj_file, '-build', project_name]

        # IAR does not support a '0' option to automatically use all
        # available CPUs, so we use Python's multiprocessing library

@@ -156,7 +148,14 @@ class IAR(Exporter):
            m = re.match(error_re, line)
            if m is not None:
                num_errors = int(m.group(1))

        if cleanup:
            os.remove(project_name + ".ewp")
            os.remove(project_name + ".ewd")
            os.remove(project_name + ".eww")
            shutil.rmtree('.build')

        if num_errors !=0:
            # Seems like something went wrong.
            raise FailedBuildException("Project: %s build failed with %s erros" % (
                proj_file, num_errors))
            return -1
        return 0

@@ -16,7 +16,10 @@ limitations under the License.
"""
from os.path import splitext, basename, relpath, join, abspath, dirname,\
    exists
from os import curdir, getcwd
from os import remove
import sys
from subprocess import check_output, CalledProcessError, Popen, PIPE
import shutil
from jinja2.exceptions import TemplateNotFound
from tools.export.exporters import Exporter
from tools.utils import NotSupportedException

@@ -102,6 +105,38 @@ class Makefile(Exporter):
        else:
            raise NotSupportedException("This make tool is in development")

    @staticmethod
    def build(project_name, log_name="build_log.txt", cleanup=True):
        """ Build Make project """
        # > Make -j
        cmd = ["make", "-j"]
        p = Popen(cmd, stdout=PIPE, stderr=PIPE)
        ret = p.communicate()
        out, err = ret[0], ret[1]
        ret_code = p.returncode
        with open(log_name, 'w+') as f:
            f.write("=" * 10 + "OUT" + "=" * 10 + "\n")
            f.write(out)
            f.write("=" * 10 + "ERR" + "=" * 10 + "\n")
            f.write(err)
            if ret_code == 0:
                f.write("SUCCESS")
            else:
                f.write("FAILURE")
        with open(log_name, 'r') as f:
            print "\n".join(f.readlines())
            sys.stdout.flush()

        if cleanup:
            remove("Makefile")
            remove(log_name)
            if exists('.build'):
                shutil.rmtree('.build')
        if ret_code != 0:
            # Seems like something went wrong.
            return -1
        return 0
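
A hedged usage sketch: the static build methods assume they run next to the exported project files, so callers change into the export directory first (the same pattern the export-build test further down uses). The directory and project name are illustrative, and the "make_gcc_arm" key is assumed to match the exporter registered in EXPORTERS.

# Hedged usage sketch; paths and names are illustrative.
import os
from tools.export import EXPORTERS

cwd = os.getcwd()
os.chdir("exported_project_dir")     # directory containing the generated Makefile
ret = EXPORTERS["make_gcc_arm"].build("my_project", cleanup=False)
os.chdir(cwd)
print("FAILURE" if ret else "SUCCESS")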


class GccArm(Makefile):
    """GCC ARM specific makefile target"""

@@ -3,13 +3,13 @@ from os.path import sep, normpath, join, exists
import ntpath
import copy
from collections import namedtuple
from distutils.spawn import find_executable
import shutil
import subprocess
import re

from tools.arm_pack_manager import Cache
from tools.targets import TARGET_MAP
from tools.export.exporters import Exporter, FailedBuildException
from tools.export.exporters import Exporter
from tools.export.cmsis import DeviceCMSIS

cache_d = False

@@ -117,10 +117,15 @@ class Uvision(Exporter):
    project file (.uvprojx).
    The needed information can be viewed in uvision.tmpl
    """
    NAME = 'cmsis'
    NAME = 'uvision5'
    TOOLCHAIN = 'ARM'
    TARGETS = [target for target, obj in TARGET_MAP.iteritems()
               if "ARM" in obj.supported_toolchains]
    TARGETS = []
    for target, obj in TARGET_MAP.iteritems():
        if not ("ARM" in obj.supported_toolchains and hasattr(obj, "device_name")):
            continue
        if not DeviceCMSIS.check_supported(target):
            continue
        TARGETS.append(target)
    #File associations within .uvprojx file
    file_types = {'.cpp': 8, '.c': 1, '.s': 2,
                  '.obj': 3, '.o': 3, '.lib': 4,

@@ -200,35 +205,24 @@ class Uvision(Exporter):
        self.gen_file('uvision/uvision.tmpl', ctx, self.project_name+".uvprojx")
        self.gen_file('uvision/uvision_debug.tmpl', ctx, self.project_name + ".uvoptx")

    def build(self):
        ERRORLEVEL = {
            0: 'success (0 warnings, 0 errors)',
            1: 'warnings',
            2: 'errors',
            3: 'fatal errors',
            11: 'cant write to project file',
            12: 'device error',
            13: 'error writing',
            15: 'error reading xml file',
        }
    @staticmethod
    def build(project_name, log_name='build_log.txt', cleanup=True):
        """ Build Uvision project """
        # > UV4.exe -r -j0 -o [log_name] [project_name].uvprojx
        success = 0
        warn = 1
        if find_executable("UV4"):
            uv_exe = "UV4.exe"
        else:
            uv_exe = join('C:', sep,
                          'Keil_v5', 'UV4', 'UV4.exe')
            if not exists(uv_exe):
                raise Exception("UV4.exe not found. Add to path.")
        cmd = [uv_exe, '-r', '-j0', '-o', join(self.export_dir,'build_log.txt'), join(self.export_dir,self.project_name+".uvprojx")]
        cmd = ["UV4.exe", '-r', '-j0', '-o', log_name, project_name+".uvprojx"]
        ret_code = subprocess.call(cmd)
        with open(join(self.export_dir, 'build_log.txt'), 'r') as build_log:
        with open(log_name, 'r') as build_log:
            print build_log.read()
        if cleanup:
            os.remove(log_name)
            os.remove(project_name+".uvprojx")
            os.remove(project_name+".uvoptx")
            shutil.rmtree(".build")


        if ret_code != success and ret_code != warn:
            # Seems like something went wrong.
            raise FailedBuildException("Project: %s build failed with the status: %s" % (
                self.project_name, ERRORLEVEL.get(ret_code, "Unknown")))
        else:
            return "Project: %s build succeeded with the status: %s" % (
                self.project_name, ERRORLEVEL.get(ret_code, "Unknown"))
            return -1
        return 0

@@ -233,7 +233,9 @@ def main():
    if (options.program is None) and (not options.source_dir):
        args_error(parser, "one of -p, -n, or --source is required")
    # Export to selected toolchain
    _, toolchain_name = get_exporter_toolchain(options.ide)
    exporter, toolchain_name = get_exporter_toolchain(options.ide)
    if options.mcu not in exporter.TARGETS:
        args_error(parser, "%s not supported by %s"%(options.mcu,options.ide))
    profile = extract_profile(parser, options, toolchain_name)
    export(options.mcu, options.ide, build=options.build,
           src=options.source_dir, macros=options.macros,

@@ -86,7 +86,6 @@ def generate_project_files(resources, export_path, target, name, toolchain, ide,
    exporter_cls, _ = get_exporter_toolchain(ide)
    exporter = exporter_cls(target, export_path, name, toolchain,
                            extra_symbols=macros, resources=resources)
    exporter.check_supported()
    exporter.generate()
    files = exporter.generated_files
    return files, exporter

@@ -9,7 +9,9 @@
        "features" : [],
        "targets" : [],
        "toolchains" : [],
        "exporters": [],
        "compile" : true,
        "export": true,
        "auto-update" : true
    },
    {

@@ -24,7 +26,9 @@
        "features" : [],
        "targets" : ["K64F", "NUCLEO_F429ZI"],
        "toolchains" : ["GCC_ARM", "ARM"],
        "exporters": [],
        "compile" : true,
        "export": false,
        "auto-update" : true
    },
    {

@@ -36,7 +40,9 @@
        "features" : ["IPV6"],
        "targets" : [],
        "toolchains" : [],
        "exporters": [],
        "compile" : true,
        "export": false,
        "auto-update" : true
    },
    {

@@ -57,7 +63,9 @@
        "features" : ["BLE"],
        "targets" : ["NRF51_DK", "NRF52_DK", "K64F", "NUCLEO_F401RE"],
        "toolchains" : [],
        "exporters": [],
        "compile" : true,
        "export": false,
        "auto-update" : true
    },
    {

@@ -69,7 +77,9 @@
        "features" : ["IPV6"],
        "targets" : [],
        "toolchains" : [],
        "exporters": [],
        "compile" : true,
        "export": false,
        "auto-update" : true
    },
    {

@@ -80,7 +90,9 @@
        "features" : ["IPV6"],
        "targets" : [],
        "toolchains" : [],
        "exporters": [],
        "compile" : true,
        "export": false,
        "auto-update" : true
    },
    {

@@ -91,7 +103,9 @@
        "features" : [],
        "targets" : [],
        "toolchains" : [],
        "exporters": [],
        "compile" : false,
        "export": false,
        "auto-update" : true
    },
    {

@@ -102,7 +116,9 @@
        "features" : [],
        "targets" : ["K64F"],
        "toolchains" : ["GCC_ARM"],
        "exporters": [],
        "compile" : true,
        "export": false,
        "auto-update" : false
    }
]
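
For orientation, a sketch of how the per-example "export" flag added above is consumed by export_repos() later in this diff; the new "exporters" list is written into the config here but is not read by any code shown in this changeset.

# Hedged sketch; mirrors the gating logic in export_repos() below.
import json

config = json.load(open("examples.json"))     # loaded the same way examples.py does
for example in config['examples']:
    if not example['export']:
        continue  # example opted out of export-build testing
    # ... export and build each repo/target/IDE combination ...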

@@ -13,7 +13,10 @@ sys.path.insert(0, ROOT)

from tools.utils import argparse_force_uppercase_type
import examples_lib as lib
from examples_lib import SUPPORTED_TOOLCHAINS
from examples_lib import SUPPORTED_TOOLCHAINS, SUPPORTED_IDES

EXAMPLES = json.load(open(os.path.join(os.path.dirname(__file__),
                                       "examples.json")))


def main():

@@ -27,29 +30,46 @@ def main():
    version_cmd.add_argument("tag")
    version_cmd.set_defaults(fn=do_versionning)
    compile_cmd = subparsers.add_parser("compile")
    compile_cmd.set_defaults(fn=do_compile)
    compile_cmd.set_defaults(fn=do_compile),
    compile_cmd.add_argument(
        "toolchains", nargs="*", default=SUPPORTED_TOOLCHAINS,
        type=argparse_force_uppercase_type(SUPPORTED_TOOLCHAINS,
                                           "toolchain"))
                                           "toolchain")),
    export_cmd = subparsers.add_parser("export")
    export_cmd.set_defaults(fn=do_export),
    export_cmd.add_argument(
        "ide", nargs="*", default=SUPPORTED_IDES,
        type=argparse_force_uppercase_type(SUPPORTED_IDES,
                                           "ide"))
    args = parser.parse_args()
    config = json.load(open(os.path.join(os.path.dirname(__file__),
                                         args.config)))

    return args.fn(args, config)


def do_export(args, config):
    """Do export and build step"""
    results = {}
    results = lib.export_repos(config, args.ide)

    lib.print_summary(results, export=True)
    failures = lib.get_num_failures(results, export=True)
    print("Number of failures = %d" % failures)
    return failures


def do_import(_, config):
    """Do the import step of this process"""
    lib.source_repos(config)
    return 0


def do_compile(args, config):
    """Do the compile step"""
    results = {}
    results = lib.compile_repos(config, args.toolchains)

    lib.print_compilation_summary(results)
    lib.print_summary(results)
    failures = lib.get_num_failures(results)
    print("Number of failures = %d" % failures)
    return failures

@@ -16,8 +16,10 @@ sys.path.insert(0, ROOT)

from tools.build_api import get_mbed_official_release
from tools.targets import TARGET_MAP
from tools.export import EXPORTERS

SUPPORTED_TOOLCHAINS = ["ARM", "IAR", "GCC_ARM"]
SUPPORTED_IDES = ["iar", "uvision", "make_gcc_arm", "make_iar", "make_armc5"]

def print_list(lst):
    """Prints to screen the contents of a list

@@ -30,13 +32,13 @@ def print_list(lst):
    for thing in lst:
        print("# %s" % thing)

def print_compilation_summary(results):
    """Prints to screen the results of compiling combinations of example programs,
    targets and compile chains.
def print_summary(results, export=False):
    """Prints to screen the results of compiling/exporting combinations of example programs,
    targets and compile toolchains/IDEs.

    Args:
    results - results of the compilation stage. See compile_repos() for
              details of the format.
    results - results of the compilation stage. See compile_repos() and export_repos()
              for details of the format.

    """

@@ -48,12 +50,23 @@ def print_compilation_summary(results):
    print("#")
    for key, val in results.iteritems():
        print_list(val[2])

    second_result = "Failed example combinations" if not export else \
        "Failed export example combinations"

    print("#")
    print("# Failed example combinations")
    print("# %s"%second_result)
    print("#")
    for key, val in results.iteritems():
        print_list(val[3])

    if export:
        print("#")
        print("# Failed build example combinations")
        print("#")
        for key, val in results.iteritems():
            print_list(val[4])

    print("#")
    print("#"*80)

@@ -82,6 +95,31 @@ def target_cross_toolchain(allowed_toolchains,
            yield target, toolchain


def target_cross_ide(allowed_ides,
                     features=[], targets=[]):
    """Generate pairs of target and ides

    Args:
    allowed_ides - a list of all possible IDEs

    Kwargs:
    features - the features that must be in the features array of a
               target
    targets - a list of available targets
    """
    if len(targets) == 0:
        targets=TARGET_MAP.keys()

    for target, toolchains in get_mbed_official_release("5"):
        for ide in allowed_ides:
            if (EXPORTERS[ide].TOOLCHAIN in toolchains and
                target in EXPORTERS[ide].TARGETS and
                target in targets and
                all(feature in TARGET_MAP[target].features
                    for feature in features)):
                yield target, ide
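
A hedged usage sketch of the generator above; the IDE, feature, and target filters are illustrative values taken from elsewhere in this diff.

# Hedged usage sketch; filter values are illustrative.
for target, ide in target_cross_ide(["iar", "uvision"],
                                    features=["IPV6"],
                                    targets=["K64F"]):
    print("export %s to %s" % (target, ide))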


def get_repo_list(example):
    """ Returns a list of all the repos associated with the specific example in the json
    config file.

@@ -121,7 +159,7 @@ def source_repos(config):

        subprocess.call(["mbed-cli", "import", repo])

def get_num_failures(results):
def get_num_failures(results, export=False):
    """ Returns the number of failed compilations from the results summary
    Args:
    results - results summary of the compilation stage. See compile_repos() for

@@ -133,9 +171,68 @@ def get_num_failures(results):

    for key, val in results.iteritems():
        num_failures = num_failures + len(val[3])
        if export:
            num_failures += len(val[4])

    return num_failures


def export_repos(config, ides):
    def print_message(message, name):
        print(message+ " %s"%name)
        sys.stdout.flush()

    results = {}
    print("\nExporting example repos....\n")
    for example in config['examples']:
        export_failures = []
        build_failures = []
        successes = []
        exported = True
        pass_status = True
        if example['export']:
            for repo in get_repo_list(example):
                example_project_name = basename(repo)
                os.chdir(example_project_name)
                # Check that the target, IDE, and features combinations are valid and return a
                # list of valid combinations to work through
                for target, ide in target_cross_ide(ides,
                                                    example['features'],
                                                    example['targets']):
                    example_name = "{} {} {}".format(example_project_name, target,
                                                     ide)
                    def status(message):
                        print(message + " %s" % example_name)
                        sys.stdout.flush()

                    status("Exporting")
                    proc = subprocess.Popen(["mbed-cli", "export", "-i", ide,
                                             "-m", target])
                    proc.wait()
                    if proc.returncode:
                        export_failures.append(example_name)
                        status("FAILURE exporting")
                    else:
                        status("SUCCESS exporting")
                        status("Building")
                        if EXPORTERS[ide].build(example_project_name):
                            status("FAILURE building")
                            build_failures.append(example_name)
                        else:
                            status("SUCCESS building")
                            successes.append(example_name)
                os.chdir("..")

            if len(build_failures+export_failures) > 0:
                pass_status= False
        else:
            exported = False

        results[example['name']] = [exported, pass_status, successes, export_failures, build_failures]

    return results
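
A short reference for the per-example results layout returned above, which print_summary() and get_num_failures() index into.

# results[example_name] == [exported, pass_status, successes, export_failures, build_failures]
#                            val[0]     val[1]      val[2]        val[3]           val[4]
entry = results.get("an-example-name", [False, False, [], [], []])  # illustrative key
total_failures = len(entry[3]) + len(entry[4])   # export failures + build failures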


def compile_repos(config, toolchains):
    """Compiles combinations of example programs, targets and compile chains.

@@ -22,6 +22,7 @@ from os.path import join, dirname, exists, abspath
ROOT = abspath(join(dirname(__file__), "..", "..", ".."))
sys.path.insert(0, ROOT)
import argparse
import os
from argparse import ArgumentTypeError
import sys
from shutil import rmtree

@@ -37,9 +38,8 @@ from tools.project import export
from Queue import Queue
from threading import Thread, Lock
from tools.project_api import print_results, get_exporter_toolchain
from tools.tests import test_name_known, test_known, Test
from tools.export.exporters import FailedBuildException, \
    TargetNotSupportedException
from tools.tests import test_name_known, test_known
from tools.export import EXPORTERS
from tools.utils import argparse_force_lowercase_type, \
    argparse_many, columnate, args_error, \
    argparse_filestring_type

@@ -125,9 +125,12 @@ class ExportBuildTest(object):
                              % (test_case.mcu,
                                 test_case.ide,
                                 test_case.name))
        try:
            exporter.build()
        except FailedBuildException:

        cwd = os.getcwd()
        os.chdir(exporter.export_dir)
        res = EXPORTERS[exporter.NAME.lower()].build(exporter.project_name, cleanup=False)
        os.chdir(cwd)
        if res:
            self.failures.append("%s::%s\t%s" % (test_case.mcu,
                                                 test_case.ide,
                                                 test_case.name))

@@ -157,20 +160,19 @@ class ExportBuildTest(object):
        self.display_counter("Exporting test case %s::%s\t%s" % (test_case.mcu,
                                                                 test_case.ide,
                                                                 test_case.name))

        try:
            _, toolchain = get_exporter_toolchain(test_case.ide)
            profile = extract_profile(self.parser, self.options, toolchain)
            exporter = export(test_case.mcu, test_case.ide,
        exporter, toolchain = get_exporter_toolchain(test_case.ide)
        if test_case.mcu not in exporter.TARGETS:
            self.skips.append("%s::%s\t%s" % (test_case.mcu, test_case.ide,
                                              test_case.name))
            return
        profile = extract_profile(self.parser, self.options, toolchain)
        exporter = export(test_case.mcu, test_case.ide,
                          project_id=test_case.id, zip_proj=None,
                          clean=True, src=test_case.src,
                          export_path=join(EXPORT_DIR,name_str),
                          silent=True, build_profile=profile)
            exporter.generated_files.append(join(EXPORT_DIR,name_str,test_case.log))
            self.build_queue.put((exporter,test_case))
        except TargetNotSupportedException:
            self.skips.append("%s::%s\t%s" % (test_case.mcu, test_case.ide,
                                              test_case.name))
        exporter.generated_files.append(join(EXPORT_DIR,name_str,test_case.log))
        self.build_queue.put((exporter,test_case))
        # Check if the specified name is in all_os_tests

@@ -265,7 +267,7 @@ def main():
    test_targets = options.mcu or targetnames
    if not all([t in targetnames for t in test_targets]):
        args_error(parser, "Only specify targets in release %s:\n%s"
                   %(options.release, columnate(targetnames)))
                   %(options.release, columnate(sorted(targetnames))))

    v2_tests, v5_tests = [],[]
    if options.release == '5':