spaces removal - workspace tools (not tmpl)

pull/339/head
0xc0170 2014-05-29 15:42:03 +02:00
parent a5e0dea136
commit 00ddb5570b
40 changed files with 899 additions and 545 deletions

View File

@ -33,10 +33,10 @@ from workspace_tools.build_api import build_mbed_libs, build_lib
if __name__ == '__main__':
start = time()
# Parse Options
parser = get_default_options_parser()
# Extra libraries
parser.add_option("-r", "--rtos", action="store_true", dest="rtos",
default=False, help="Compile the rtos")
@ -55,7 +55,7 @@ if __name__ == '__main__':
parser.add_option("-D", "", action="append", dest="macros",
help="Add a macro definition")
(options, args) = parser.parse_args()
# Get target list
if options.mcu:
if options.mcu not in TARGET_NAMES:
@ -64,7 +64,7 @@ if __name__ == '__main__':
targets = [options.mcu]
else:
targets = TARGET_NAMES
# Get toolchains list
if options.tool:
if options.tool not in TOOLCHAINS:
@ -73,10 +73,10 @@ if __name__ == '__main__':
toolchains = [options.tool]
else:
toolchains = TOOLCHAINS
# Get libraries list
libraries = []
# Additional Libraries
if options.rtos:
libraries.extend(["rtx", "rtos"])
@ -90,7 +90,7 @@ if __name__ == '__main__':
libraries.extend(["cmsis_dsp", "dsp"])
if options.ublox:
libraries.extend(["rtx", "rtos", "usb_host", "ublox"])
# Build
failures = []
successes = []
@ -112,17 +112,17 @@ if __name__ == '__main__':
import sys, traceback
traceback.print_exc(file=sys.stdout)
sys.exit(1)
failures.append(id)
print e
# Write summary of the builds
print "\n\nCompleted in: (%.2f)s" % (time() - start)
if successes:
print "\n\nBuild successes:"
print "\n".join([" * %s" % s for s in successes])
if failures:
print "\n\nBuild failures:"
print "\n".join([" * %s" % f for f in failures])

View File

@ -36,7 +36,7 @@ def build_project(src_path, build_path, target, toolchain_name,
if name is None:
name = basename(src_paths[0])
toolchain.info("\n>>> BUILD PROJECT: %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
# Scan src_path and libraries_paths for resources
resources = toolchain.scan_resources(src_paths[0])
for path in src_paths[1:]:
@ -45,22 +45,22 @@ def build_project(src_path, build_path, target, toolchain_name,
src_paths.extend(libraries_paths)
for path in libraries_paths:
resources.add(toolchain.scan_resources(path))
if linker_script is not None:
resources.linker_script = linker_script
# Build Directory
if clean:
if exists(build_path):
rmtree(build_path)
mkdir(build_path)
# Compile Sources
for path in src_paths:
src = toolchain.scan_resources(path)
objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
resources.objects.extend(objects)
# Link Program
return toolchain.link_program(resources, build_path, name)
@ -79,48 +79,48 @@ def build_library(src_paths, build_path, target, toolchain_name,
dependencies_paths=None, options=None, name=None, clean=False,
notify=None, verbose=False, macros=None):
if type(src_paths) != ListType: src_paths = [src_paths]
for src_path in src_paths:
if not exists(src_path):
raise Exception("The library source folder does not exist: %s", src_path)
# Toolchain instance
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros)
toolchain.VERBOSE = verbose
toolchain.build_all = clean
# The first path will give the name to the library
name = basename(src_paths[0])
toolchain.info("\n>>> BUILD LIBRARY %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
# Scan Resources
resources = []
for src_path in src_paths:
resources.append(toolchain.scan_resources(src_path))
# Dependencies Include Paths
dependencies_include_dir = []
if dependencies_paths is not None:
for path in dependencies_paths:
lib_resources = toolchain.scan_resources(path)
dependencies_include_dir.extend(lib_resources.inc_dirs)
# Create the desired build directory structure
bin_path = join(build_path, toolchain.obj_path)
mkdir(bin_path)
tmp_path = join(build_path, '.temp', toolchain.obj_path)
mkdir(tmp_path)
# Copy Headers
for resource in resources:
toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
# Compile Sources
objects = []
for resource in resources:
objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
toolchain.build_library(objects, bin_path, name)
@ -140,38 +140,38 @@ def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=F
if toolchain_name not in target.supported_toolchains:
print '\n%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
return
# Toolchain
toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros)
toolchain.VERBOSE = verbose
toolchain.build_all = clean
# Source and Build Paths
BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
mkdir(BUILD_TOOLCHAIN)
TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
mkdir(TMP_PATH)
# CMSIS
toolchain.info("\n>>> BUILD LIBRARY %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
resources = toolchain.scan_resources(cmsis_src)
toolchain.copy_files(resources.headers, BUILD_TARGET)
toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
objects = toolchain.compile_sources(resources, TMP_PATH)
toolchain.copy_files(objects, BUILD_TOOLCHAIN)
# mbed
toolchain.info("\n>>> BUILD LIBRARY %s (%s, %s)" % ('MBED', target.name, toolchain_name))
# Common Headers
toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
# Target specific sources
HAL_SRC = join(MBED_TARGETS_PATH, "hal")
hal_implementation = toolchain.scan_resources(HAL_SRC)

View File

@ -38,7 +38,7 @@ OFFICIAL_MBED_LIBRARY_BUILD = (
('LPC11U35_401', ('ARM', 'uARM','GCC_ARM','GCC_CR')),
('LPC11U35_501', ('ARM', 'uARM','GCC_ARM','GCC_CR')),
('LPC1549', ('uARM',)),
('KL05Z', ('ARM', 'uARM', 'GCC_ARM')),
('KL25Z', ('ARM', 'GCC_ARM')),
('KL46Z', ('ARM', 'GCC_ARM')),
@ -81,14 +81,14 @@ if __name__ == '__main__':
except Exception, e:
failures.append(id)
print e
# Write summary of the builds
print "\n\nCompleted in: (%.2f)s" % (time() - start)
if successes:
print "\n\nBuild successes:"
print "\n".join([" * %s" % s for s in successes])
if failures:
print "\n\nBuild failures:"
print "\n".join([" * %s" % f for f in failures])

View File

@ -1,9 +1,9 @@
class Rpc{{name}} : public RPC {
public:
Rpc{{name}}({{cons_proto}}) : RPC(name), o({{cons_call}}) {}
{{methods}}
virtual const struct rpc_method *get_rpc_methods() {
static const rpc_method rpc_methods[] = {
{{rpc_methods}},

View File

@ -21,7 +21,7 @@ CORTEX_ARM_SUPPORT = {}
for target in TARGETS:
DEFAULT_SUPPORT[target.name] = target.supported_toolchains
if target.core.startswith('Cortex'):
CORTEX_ARM_SUPPORT[target.name] = [t for t in target.supported_toolchains
if (t=='ARM' or t=='uARM')]

View File

@ -8,13 +8,13 @@ def sections(h):
if last_address is None:
start, last_address = a, a
continue
if a > last_address + 1:
yield (start, last_address)
start = a
last_address = a
if start:
yield (start, last_address)

View File

@ -153,38 +153,38 @@ for c in RPC_CLASSES:
"cons_proto": get_args_proto(c_args, ["const char *name=NULL"]),
"cons_call": get_args_call(c_args)
}
c_name = "Rpc" + c['name']
methods = []
rpc_methods = []
for r, m, a in c['methods']:
ret_proto = r if r else "void"
args_proto = "void"
ret_defin = "return " if r else ""
args_defin = ""
if a:
args_proto = get_args_proto(a)
args_defin = get_args_call(a)
proto = "%s %s(%s)" % (ret_proto, m, args_proto)
defin = "{%so.%s(%s);}" % (ret_defin, m, args_defin)
methods.append("%s %s" % (proto, defin))
rpc_method_type = [r] if r else []
rpc_method_type.append(c_name)
rpc_method_type.extend(a)
rpc_methods.append('{"%s", rpc_method_caller<%s, &%s::%s>}' % (m, ', '.join(rpc_method_type), c_name, m))
data['methods'] = "\n ".join(methods)
data['rpc_methods'] = ",\n ".join(rpc_methods)
class_decl = class_template.render(data)
if 'required' in c:
class_decl = "#if DEVICE_%s\n%s\n#endif" % (c['required'], class_decl)
classes.append(class_decl)
write_rpc_classes('\n\n'.join(classes))

View File

@ -28,7 +28,7 @@ OBJ_EXT = ['.o', '.a', '.ar']
def find_sym_in_lib(sym, obj_path):
contain_symbol = False
out = Popen(["nm", "-C", obj_path], stdout=PIPE, stderr=PIPE).communicate()[0]
for line in out.splitlines():
tokens = line.split()
@ -41,24 +41,24 @@ def find_sym_in_lib(sym, obj_path):
sym_name = tokens[2]
else:
continue
if sym_type == "U":
# This object is using this symbol, not defining it
continue
if sym_name == sym:
contain_symbol = True
return contain_symbol
def find_sym_in_path(sym, dir_path):
for root, _, files in walk(dir_path):
for file in files:
_, ext = splitext(file)
if ext not in OBJ_EXT: continue
path = join(root, file)
if find_sym_in_lib(sym, path):
print path
@ -71,5 +71,5 @@ if __name__ == '__main__':
parser.add_argument('-p', '--path', required=True,
help='The path where to search')
args = parser.parse_args()
find_sym_in_path(args.sym, args.path)

View File

@ -21,7 +21,7 @@ from os.path import splitext, basename
class CodeRed(Exporter):
NAME = 'CodeRed'
TOOLCHAIN = 'GCC_CR'
TARGETS = [
'LPC1768',
'LPC4088',
@ -30,13 +30,13 @@ class CodeRed(Exporter):
'LPC11U35_501',
'UBLOX_C027',
]
def generate(self):
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.program_name,
'include_paths': self.resources.inc_dirs,

View File

@ -21,18 +21,18 @@ from os.path import splitext, basename
class CodeSourcery(Exporter):
NAME = 'CodeSourcery'
TOOLCHAIN = 'GCC_CS'
TARGETS = [
'LPC1768',
'UBLOX_C027',
]
DOT_IN_RELATIVE_PATH = True
def generate(self):
# "make" wants Unix paths
self.resources.win_to_unix()
to_be_compiled = []
for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
r = getattr(self.resources, r_type)
@ -40,12 +40,12 @@ class CodeSourcery(Exporter):
for source in r:
base, ext = splitext(source)
to_be_compiled.append(base + '.o')
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.program_name,
'to_be_compiled': to_be_compiled,

View File

@ -21,12 +21,12 @@ from os.path import splitext, basename
class CoIDE(Exporter):
NAME = 'CoIDE'
TOOLCHAIN = 'GCC_ARM'
TARGETS = [
'KL25Z',
'KL05Z',
]
# seems like CoIDE currently supports only one type
FILE_TYPES = {
'c_sources':'1',

View File

@ -20,18 +20,18 @@ from os.path import basename
class DS5_5(Exporter):
NAME = 'DS5'
TARGETS = [
'LPC1768',
'LPC11U24',
'LPC812',
'UBLOX_C027',
]
USING_MICROLIB = [
'LPC812',
]
FILE_TYPES = {
'c_sources':'1',
'cpp_sources':'8',
@ -40,7 +40,7 @@ class DS5_5(Exporter):
def get_toolchain(self):
return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
def generate(self):
source_files = []
for r_type, n in DS5_5.FILE_TYPES.iteritems():
@ -48,7 +48,7 @@ class DS5_5(Exporter):
source_files.append({
'name': basename(file), 'type': n, 'path': file
})
ctx = {
'name': self.program_name,
'include_paths': self.resources.inc_dirs,
@ -58,7 +58,7 @@ class DS5_5(Exporter):
'symbols': self.toolchain.get_symbols()
}
target = self.target.lower()
# Project file
self.gen_file('ds5_5_%s.project.tmpl' % target, ctx, '.project')
self.gen_file('ds5_5_%s.cproject.tmpl' % target, ctx, '.cproject')

View File

@ -16,20 +16,20 @@ class OldLibrariesException(Exception): pass
class Exporter():
TEMPLATE_DIR = dirname(__file__)
DOT_IN_RELATIVE_PATH = False
def __init__(self, target, inputDir, program_name, build_url_resolver):
self.inputDir = inputDir
self.target = target
self.program_name = program_name
self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target])
self.build_url_resolver = build_url_resolver
def get_toolchain(self):
return self.TOOLCHAIN
def __scan_and_copy(self, src_path, trg_path):
resources = self.toolchain.scan_resources(src_path)
for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
'objects', 'libraries', 'linker_script',
'lib_builds', 'lib_refs', 'repo_files', 'hex_files']:
@ -40,7 +40,7 @@ class Exporter():
def __scan_all(self, path):
resources = []
for root, dirs, files in walk(path):
for d in copy(dirs):
if d == '.' or d == '..':
@ -49,9 +49,9 @@ class Exporter():
for file in files:
file_path = join(root, file)
resources.append(file_path)
return resources
def scan_and_copy_resources(self, prj_path, trg_path):
# Copy only the file for the required target and toolchain
lib_builds = []
@ -63,7 +63,7 @@ class Exporter():
for repo_dir in resources.repo_dirs:
repo_files = self.__scan_all(repo_dir)
self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))
# The libraries builds
for bld in lib_builds:
build_url = open(bld).read().strip()
@ -90,7 +90,7 @@ class Exporter():
template_text = open(template_path).read()
template = Template(template_text)
target_text = template.render(data)
target_path = join(self.inputDir, target_file)
logging.debug("Generating: %s" % target_path)
open(target_path, "w").write(target_text)
@ -99,7 +99,7 @@ class Exporter():
def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
uid = str(uuid.uuid4())
zipfilename = '%s.zip'%uid
logging.debug("Zipping up %s to %s" % (tempdirectory, join(destination, zipfilename)))
# make zip
def zipdir(basedir, archivename):
@ -112,10 +112,10 @@ def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, pro
absfn = join(root, fn)
zfn = fakeroot + '/' + absfn[len(basedir)+len(os.sep):]
z.write(absfn, zfn)
zipdir(tempdirectory, join(destination, zipfilename))
if clean:
shutil.rmtree(tempdirectory)
return join(destination, zipfilename)

View File

@ -21,7 +21,7 @@ from os.path import splitext, basename
class GccArm(Exporter):
NAME = 'GccArm'
TOOLCHAIN = 'GCC_ARM'
TARGETS = [
'LPC1768',
'KL05Z',
@ -40,13 +40,13 @@ class GccArm(Exporter):
'DISCO_F303VC',
'UBLOX_C027',
]
DOT_IN_RELATIVE_PATH = True
def generate(self):
# "make" wants Unix paths
self.resources.win_to_unix()
to_be_compiled = []
for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
r = getattr(self.resources, r_type)
@ -54,12 +54,12 @@ class GccArm(Exporter):
for source in r:
base, ext = splitext(source)
to_be_compiled.append(base + '.o')
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.program_name,
'to_be_compiled': to_be_compiled,

View File

@ -0,0 +1,58 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from exporters import Exporter
from os.path import splitext, basename
class GccArm(Exporter):
NAME = 'GccArm'
TOOLCHAIN = 'GCC_ARM'
<<<<<<< HEAD
TARGETS = ['LPC1768','KL25Z','KL46Z','LPC4088']
=======
TARGETS = ['LPC1768','KL05Z','KL25Z','LPC4088']
>>>>>>> KL05Z GCC support, exporters for arm_gcc, uvision
DOT_IN_RELATIVE_PATH = True
def generate(self):
# "make" wants Unix paths
self.resources.win_to_unix()
to_be_compiled = []
for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
r = getattr(self.resources, r_type)
if r:
for source in r:
base, ext = splitext(source)
to_be_compiled.append(base + '.o')
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.program_name,
'to_be_compiled': to_be_compiled,
'object_files': self.resources.objects,
'include_paths': self.resources.inc_dirs,
'library_paths': self.resources.lib_dirs,
'linker_script': self.resources.linker_script,
'libraries': libraries,
'symbols': self.toolchain.get_symbols()
}
self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile')

View File

@ -20,12 +20,12 @@ from exporters import Exporter
class IAREmbeddedWorkbench(Exporter):
NAME = 'IAR'
TOOLCHAIN = 'IAR'
TARGETS = [
'LPC1768',
'UBLOX_C027',
]
def generate(self):
ctx = {
'name': self.program_name,

View File

@ -20,7 +20,7 @@ from os.path import basename
class Uvision4(Exporter):
NAME = 'uVision4'
TARGETS = [
'LPC1768',
'LPC11U24',
@ -76,10 +76,10 @@ class Uvision4(Exporter):
# By convention uVision projects do not show header files in the editor:
# 'headers':'5',
def get_toolchain(self):
return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
def get_flags(self):
return self.FLAGS

View File

@ -0,0 +1,71 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from exporters import Exporter
from os.path import basename
class Uvision4(Exporter):
NAME = 'uVision4'
<<<<<<< HEAD
TARGETS = ['LPC1768', 'LPC11U24', 'KL25Z', 'KL46Z', 'LPC1347', 'LPC1114', 'LPC11C24', 'LPC4088', 'LPC812', 'NUCLEO_F103RB']
=======
TARGETS = ['LPC1768', 'LPC11U24', 'KL05Z', 'KL25Z', 'LPC1347', 'LPC1114', 'LPC11C24', 'LPC4088', 'LPC812', 'NUCLEO_F103RB']
>>>>>>> KL05Z GCC support, exporters for arm_gcc, uvision
USING_MICROLIB = ['LPC11U24', 'LPC1114', 'LPC11C24', 'LPC812', 'NUCLEO_F103RB']
FILE_TYPES = {
'c_sources':'1',
'cpp_sources':'8',
's_sources':'2'
}
# By convention uVision projects do not show header files in the editor:
# 'headers':'5',
def get_toolchain(self):
return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
def generate(self):
source_files = {
'mbed': [],
'hal': [],
'src': []
}
for r_type, n in Uvision4.FILE_TYPES.iteritems():
for file in getattr(self.resources, r_type):
f = {'name': basename(file), 'type': n, 'path': file}
if file.startswith("mbed\\common"):
source_files['mbed'].append(f)
elif file.startswith("mbed\\targets"):
source_files['hal'].append(f)
else:
source_files['src'].append(f)
source_files = dict( [(k,v) for k,v in source_files.items() if len(v)>0])
ctx = {
'name': self.program_name,
'include_paths': self.resources.inc_dirs,
'scatter_file': self.resources.linker_script,
'object_files': self.resources.objects + self.resources.libraries,
'source_files': source_files.items(),
'symbols': self.toolchain.get_symbols()
}
target = self.target.lower()
# Project file
self.gen_file('uvision4_%s.uvproj.tmpl' % target, ctx, '%s.uvproj' % self.program_name)
self.gen_file('uvision4_%s.uvopt.tmpl' % target, ctx, '%s.uvopt' % self.program_name)

View File

@ -35,9 +35,9 @@ def setup_test_user_prj():
if exists(USER_PRJ):
print 'Test user project already generated...'
return
setup_user_prj(USER_PRJ, join(TEST_DIR, "rtos", "mbed", "basic"), [join(LIB_DIR, "rtos")])
# FAKE BUILD URL
open(join(USER_SRC, "mbed.bld"), 'w').write("http://mbed.org/users/mbed_official/code/mbed/builds/976df7c37ad5\n")
@ -54,9 +54,9 @@ def test_export(toolchain, target, expected_error=None):
base_dir = join(EXPORT_TMP, toolchain, target)
temp_dir = join(base_dir, "temp")
mkdir(temp_dir)
zip_path, report = export(USER_PRJ, USR_PRJ_NAME, toolchain, target, base_dir, temp_dir, False, fake_build_url_resolver)
if report['success']:
move(zip_path, join(EXPORT_DIR, "export_%s_%s.zip" % (toolchain, target)))
print "[OK]"
@ -74,7 +74,7 @@ def test_export(toolchain, target, expected_error=None):
if __name__ == '__main__':
setup_test_user_prj()
for toolchain, target in [
('uvision', 'LPC1768'), ('uvision', 'LPC11U24'), ('uvision', 'KL25Z'), ('uvision', 'LPC1347'), ('uvision', 'LPC1114'), ('uvision', 'LPC4088'),
@ -92,7 +92,7 @@ if __name__ == '__main__':
# Linux path: /home/emimon01/bin/gcc-cs/bin/
# Windows path: "C:/Program Files (x86)/CodeSourcery/Sourcery_CodeBench_Lite_for_ARM_EABI/bin/"
('codesourcery', 'LPC1768'),
# Linux path: /home/emimon01/bin/gcc-arm/bin/
# Windows path: C:/arm-none-eabi-gcc-4_7/bin/
('gcc_arm', 'LPC1768'),
@ -106,16 +106,16 @@ if __name__ == '__main__':
('gcc_arm', 'DISCO_F407VG'),
('gcc_arm', 'DISCO_F303VC'),
('ds5_5', 'LPC1768'), ('ds5_5', 'LPC11U24'),
('iar', 'LPC1768'),
(None, None)
]:
print '\n=== Exporting to "%s::%s" ===' % (toolchain, target)
test_export(toolchain, target)
print "\n=== Test error messages ==="
test_export('lpcxpresso', 'LPC11U24', expected_error='lpcxpresso')

View File

@ -22,7 +22,7 @@ class EchoTest(Test):
Test.__init__(self)
self.mbed.init_serial(115200)
self.mbed.reset()
def test(self):
self.mbed.flush()
self.notify("Starting the ECHO test")
@ -32,14 +32,14 @@ class EchoTest(Test):
self.mbed.serial.write(TEST + "\n")
l = self.mbed.serial.readline().strip()
if not l: continue
if l != TEST:
check = False
self.notify('"%s" != "%s"' % (l, TEST))
else:
if (i % 10) == 0:
self.notify('.')
return check

View File

@ -23,7 +23,7 @@ class EchoTest(Test):
self.mbed.init_serial()
self.mbed.extra_serial.rtscts = True
self.mbed.reset()
def test(self):
self.mbed.flush()
self.notify("Starting the ECHO test")
@ -33,14 +33,14 @@ class EchoTest(Test):
self.mbed.extra_serial.write(TEST + "\n")
l = self.mbed.extra_serial.readline().strip()
if not l: continue
if l != TEST:
check = False
self.notify('"%s" != "%s"' % (l, TEST))
else:
if (i % 10) == 0:
self.notify('.')
return check

View File

@ -15,7 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
import socket
ECHO_PORT = 7
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

View File

@ -66,7 +66,7 @@ p30 = pin("p30")
class mbed:
def __init__(self):
print("This will work as a demo but no transport mechanism has been selected")
def rpc(self, name, method, args):
print("Superclass method not overridden")
@ -84,18 +84,18 @@ class SerialRPC(mbed):
print "Reset mbed"
self.ser.sendBreak()
time.sleep(2)
def rpc(self, name, method, args):
request = "/" + name + "/" + method + " " + " ".join(args)
if self.debug:
print "[RPC::TX] %s" % request
self.ser.write(request + "\n")
while True:
response = self.ser.readline().strip()
if self.debug:
print "[RPC::RX] %s" % response
# Ignore comments
if not response.startswith('#'): break
return response
@ -104,7 +104,7 @@ class SerialRPC(mbed):
class HTTPRPC(mbed):
def __init__(self, ip):
self.host = "http://" + ip
def rpc(self, name, method, args):
response = urllib2.urlopen(self.host + "/rpc/" + name + "/" + method + "," + ",".join(args))
return response.read().strip()
@ -119,13 +119,13 @@ class DigitalOut():
self.name = mpin
elif isinstance(mpin, pin):
self.name = self.mbed.rpc("DigitalOut", "new", [mpin.name])
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def write(self, value):
r = self.mbed.rpc(self.name, "write", [str(value)])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return int(r)
@ -138,14 +138,14 @@ class AnalogIn():
self.name = mpin
elif isinstance(mpin, pin):
self.name = self.mbed.rpc("AnalogIn", "new", [mpin.name])
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return float(r)
def read_u16(self):
r = self.mbed.rpc(self.name, "read_u16", [])
return int(r)
@ -158,16 +158,16 @@ class AnalogOut():
self.name = mpin
elif isinstance(mpin, pin):
self.name = self.mbed.rpc("AnalogOut", "new", [mpin.name])
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def write(self, value):
r = self.mbed.rpc(self.name, "write", [str(value)])
def write_u16(self, value):
r = self.mbed.rpc(self.name, "write_u16", [str(value)])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return float(r)
@ -180,10 +180,10 @@ class DigitalIn():
self.name = mpin
elif isinstance(mpin, pin):
self.name = self.mbed.rpc("DigitalIn", "new", [mpin.name])
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return int(r)
@ -196,29 +196,29 @@ class PwmOut():
self.name = mpin
elif isinstance(mpin, pin):
self.name = self.mbed.rpc("PwmOut", "new", [mpin.name])
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def write(self, value):
r = self.mbed.rpc(self.name, "write", [str(value)])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return float(r)
def period(self, value):
r = self.mbed.rpc(self.name, "period", [str(value)])
def period_ms(self, value):
r = self.mbed.rpc(self.name, "period_ms", [str(value)])
def period_us(self, value):
r = self.mbed.rpc(self.name, "period_us", [str(value)])
def puslewidth(self, value):
r = self.mbed.rpc(self.name, "pulsewidth", [str(value)])
def puslewidth_ms(self, value):
r = self.mbed.rpc(self.name, "pulsewidth_ms", [str(value)])
@ -233,16 +233,16 @@ class Serial():
self.name = mpin
elif isinstance(mpin, pin):
self.name = self.mbed.rpc("Serial", "new", [tx.name, rx.name])
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def putc(self, value):
r = self.mbed.rpc(self.name, "putc", [str(value)])
def puts(self, value):
r = self.mbed.rpc(self.name, "puts", [ "\"" + str(value) + "\""])
def getc(self):
r = self.mbed.rpc(self.name, "getc", [])
return int(r)
@ -253,14 +253,14 @@ class RPCFunction():
self.mbed = this_mbed
if isinstance(name, str):
self.name = name
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return int(r)
def run(self, input):
r = self.mbed.rpc(self.name, "run", [input])
return r
@ -271,13 +271,13 @@ class RPCVariable():
self.mbed = this_mbed
if isinstance(name, str):
self.name = name
def __del__(self):
r = self.mbed.rpc(self.name, "delete", [])
def write(self, value):
self.mbed.rpc(self.name, "write", [str(value)])
def read(self):
r = self.mbed.rpc(self.name, "read", [])
return r

View File

@ -22,26 +22,26 @@ class RpcTest(Test):
def test(self):
self.notify("RPC Test")
s = SerialRPC(self.mbed.port, debug=True)
self.notify("Init remote objects")
p_out = pin("p10")
p_in = pin("p11")
if hasattr(self.mbed.options, 'micro'):
if self.mbed.options.micro == 'M0+':
print "Freedom Board: PTA12 <-> PTC4"
p_out = pin("PTA12")
p_in = pin("PTC4")
self.output = DigitalOut(s, p_out);
self.input = DigitalIn(s, p_in);
self.check = True
self.write_read_test(1)
self.write_read_test(0)
return self.check
def write_read_test(self, v):
self.notify("Check %d" % v)
self.output.write(v)

View File

@ -30,17 +30,17 @@ UPDATE_STEP = (N_PACKETS/10)
class TCP_EchoClient:
def __init__(self, host):
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.connect((host, ECHO_PORT))
self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
def __packet(self):
# Comment out the checks when measuring the throughput
# self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
self.s.send(self.packet)
data = self.s.recv(LEN_PACKET)
# assert self.packet == data, "packet error:\n%s\n%s\n" % (self.packet, data)
def test(self):
start = time()
for i in range(N_PACKETS):
@ -48,9 +48,9 @@ class TCP_EchoClient:
self.__packet()
t = time() - start
print 'Throughput: (%.2f)Mbits/s' % ((TOT_BITS / t)/MEGA)
def __del__(self):
self.s.close()
self.s.close()
while True:
e = TCP_EchoClient(SERVER_ADDRESS)

View File

@ -31,7 +31,7 @@ class TCP_EchoHandler(BaseRequestHandler):
while True:
data = self.request.recv(1024)
if not data: break
bytes += len(data)
for n in map(ord, data):
if n != index:
@ -39,7 +39,7 @@ class TCP_EchoHandler(BaseRequestHandler):
index += 1
if index > MAX_INDEX:
index = 0
self.request.sendall(data)
t = time() - start
b = float(bytes * 8) * 2

View File

@ -30,18 +30,18 @@ UPDATE_STEP = (N_PACKETS/10)
class UDP_EchoClient:
s = socket(AF_INET, SOCK_DGRAM)
def __init__(self, host):
self.host = host
self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
def __packet(self):
# Comment out the checks when measuring the throughput
# packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
UDP_EchoClient.s.sendto(packet, (self.host, ECHO_PORT))
data = UDP_EchoClient.s.recv(LEN_PACKET)
# assert packet == data, "packet error:\n%s\n%s\n" % (packet, data)
def test(self):
start = time()
for i in range(N_PACKETS):

View File

@ -32,7 +32,7 @@ LIBRARIES = [
"build_dir": RTOS_LIBRARIES,
"dependencies": [MBED_LIBRARIES, MBED_RTX],
},
# USB Device libraries
{
"id": "usb",
@ -40,7 +40,7 @@ LIBRARIES = [
"build_dir": USB_LIBRARIES,
"dependencies": [MBED_LIBRARIES],
},
# USB Host libraries
{
"id": "usb_host",
@ -48,7 +48,7 @@ LIBRARIES = [
"build_dir": USB_HOST_LIBRARIES,
"dependencies": [MBED_LIBRARIES, FAT_FS, MBED_RTX, RTOS_ABSTRACTION],
},
# DSP libraries
{
"id": "cmsis_dsp",
@ -62,7 +62,7 @@ LIBRARIES = [
"build_dir": DSP_LIBRARIES,
"dependencies": [MBED_LIBRARIES, DSP_CMSIS],
},
# Network libraries
{
"id": "eth",
@ -70,7 +70,7 @@ LIBRARIES = [
"build_dir": ETH_LIBRARY,
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES]
},
{
"id": "ublox",
"source_dir": [UBLOX_SOURCES, CELLULAR_SOURCES, CELLULAR_USB_SOURCES, LWIP_SOURCES],
@ -92,7 +92,7 @@ class Library:
def __init__(self, lib_id):
self.__dict__.update(Library.DEFAULTS)
self.__dict__.update(LIBRARY_MAP[lib_id])
def is_supported(self, target, toolchain):
if not hasattr(self, 'supported'):
return True

225
workspace_tools/mow.py Normal file
View File

@ -0,0 +1,225 @@
#!/usr/bin/env python -u
import subprocess
import time
import sys
import argparse
# == Script configuration ==
# TODO: Refactor this into a preferences file
# We want to process files with these extensions.
sourceFileExtensions = [ 'java', 'rb', 'php', 'js', 'scala', 'c', 'cpp', 'h', 'py']
# == Command line options ==
parser = argparse.ArgumentParser(description='Remove trailing whitespaces from source files', formatter_class=argparse.RawTextHelpFormatter)
# ==== Generic options ====
parser.add_argument('--debug', dest='debug', action='store_true',
help='Enable debug mode')
parser.add_argument('--ext', '-e', dest='extensions', action='append',
help='Add an additional file extension that should be processed')
# ==== File mode options ====
group = parser.add_argument_group('"Specific files"-mode')
group.add_argument('files', nargs='*', metavar='FILE',
help='Specifies files to be processed.')
# ==== Git mode options ====
group = parser.add_argument_group('Git-mode', 'NOTE: Always operates recursively due to a limitation in git ls-files!')
group.add_argument('--force-git', dest='forceGit', action='store_true')
group.add_argument('--not-only-modified', '-m', dest='onlyModified', action='store_false',
help='Process only files that have been modified')
# ==== Find mode options ====
group = parser.add_argument_group('Find-mode', 'Find files to process using the "find"-command')
group.add_argument('--force-find', dest='forceFind', action='store_true')
group.add_argument('--recursive', '-r', dest='recursive', action='store_true')
# == Initialization ==
args = parser.parse_args()
# Used to ignore subprocess output as explained
# [here](http://mail.python.org/pipermail/python-dev/2006-June/066111.html).
FNULL = open('/dev/null', 'w')
# Are we currently in a directory under Git version control?
# [(See Stackoverflow)](http://stackoverflow.com/a/2044677/124257)
gitRevParse = subprocess.Popen(["git", "rev-parse"], stdout=FNULL, stderr=FNULL)
gitRevParse.wait()
inGitDir = gitRevParse.returncode == 0
# == List command generation ==
# These functions generate the shell commands that will be used to gather
# a the list of files that should be processed. Only one of them is called,
# depending on the mode `mow.py` is running in.
def gitLsFilesCommand(wildcards, onlyModified=False):
    """Build a `git ls-files` command restricted to *wildcards*.

    When *onlyModified* is True, only files with local modifications are
    listed (`--modified`).
    """
    cmd = ["git", "ls-files"]
    if onlyModified:
        cmd.append('--modified')
    cmd.extend(wildcards)
    return cmd
def findFilesCommand(wildcards, paths=['.'], recursive=True):
    """Build a `find` command that lists files matching *wildcards*.

    Searches *paths* (default: current directory) for regular files; when
    *recursive* is False the search is limited to the top level via
    `-depth 1`. NOTE: *paths* is a mutable default but is never mutated here.

    BUG FIX: with an empty *wildcards* list the old code popped the opening
    '(' instead of a dangling '-or' and emitted an unbalanced ')'. The
    -name group is now emitted only when there is at least one wildcard.
    """
    command = ["find"]
    command.extend(paths)
    command.extend(['-type', 'f'])
    if not recursive:
        command.extend(['-depth', '1'])
    if wildcards:
        command.append('(')
        for wildcard in wildcards:
            command.extend(['-name', wildcard, '-or'])
        command.pop()  # Remove the dangling "-or" at the end
        command.append(')')
    return command
def specificFilesCommand(files):
    """Build a `printf` command that echoes *files*, one per line.

    BUG FIX: the newline-joined file list was built but never attached to
    the returned command, so the generated `printf` invocation had no
    arguments at all (printf without a format string is an error and, in
    any case, no file names would ever reach sed).

    NOTE(review): printf treats '%' and backslash escapes in its format
    argument specially, so file names containing them would be mangled —
    pre-existing limitation, left unchanged.
    """
    command = ['printf']
    filesString = ""
    for file in files:
        filesString += "%s\n" % file
    command.append(filesString)
    return command
# == Processing functions ==
# This is where the actual space trimming happens.
def processFiles(files, listFilesCommand, debug=False):
    # Strip trailing whitespace from *files* by feeding the file names to
    # `xargs sed -i`. In debug mode the names are piped to `cat` instead, so
    # the user can inspect exactly what would be handed to sed.
    sed = None
    if(debug):
        print "Data that would be passed to sed:"
        sed = subprocess.Popen([ 'cat' ], stdin=subprocess.PIPE)
    else:
        sys.stdout.write("Processing files")
        # Don't remove the empty argument after -i
        # [as Mac OS X doesn't allow -i without parameter](http://blog.mpdaugherty.com/2010/05/27/difference-with-sed-in-place-editing-on-mac-os-x-vs-linux/).
        sed = subprocess.Popen([ 'xargs', 'sed', '-i', '', '-e', 's/[[:space:]]*$//' ], stdin=subprocess.PIPE)
    # Drains *files* destructively; pop() takes from the end, so names are fed
    # to the pipe in reverse order (order is irrelevant to xargs/sed).
    # NOTE(review): each entry from readlines() keeps its trailing '\n', which
    # acts as the separator on the pipe.
    while len(files) > 0:
        sed.stdin.writelines(files.pop())
        if not debug:
            sys.stdout.write('.')  # one progress dot per file
    if not debug:
        print ' Done!'
# == User interaction functions ==
# These functions present the interactive dialogs to the user.
def askProcessFiles(files, listFilesCommand):
    """Interactively confirm, inspect or abort the whitespace cleanup.

    Presents a one-letter menu; dispatches the choice through the *options*
    callback table. Callbacks returning a truthy value re-enter the prompt
    loop, so inspection commands (f/c/d) can be chained before answering.
    Reads the module-level `args` for the debug flag.
    """
    options = {
        'y': lambda a, b: processFiles(a, b, args.debug),
        'n': lambda a, b: exit("Bye!"),
        # lambdaPrint returns None, so `or True` forces a re-prompt.
        'f': lambda a, b: lambdaPrint(a) or True,
        'c': lambda a, b: lambdaPrint(b) or True,
        'd': lambda a, b: printDebug(a, b) or True,
        'q': lambda a, b: exit("Bye!"),
        # The ? is handled in the eval-loop, and thus doesn't need a callback function.
        '?': None
    }
    choice = None
    while choice is None:
        # BUG FIX: the prompt now advertises the 'd' (debug) option, which was
        # always accepted (see the table above and printProcessHelp) but was
        # missing from the displayed choices.
        sys.stdout.write("\033[1;94m" + "About to process %d files! Continue? [y,n,f,c,d,q,?] \033[0m" % len(files))
        choice = raw_input()
        if choice not in options or choice == '?':
            choice = None
            printProcessHelp()
        else:
            result = options[choice](files, listFilesCommand)
            if result: # We continue the loop if the callback functions returns True.
                choice = None
def lambdaPrint(x):
    # Print helper usable from a lambda (in Python 2 `print` is a statement,
    # not an expression). Always returns None, which callers exploit with
    # `lambdaPrint(x) or True`.
    print x
def printDebug(files, listFilesCommand):
    # Dump the parsed command-line arguments (module-level `args`), the
    # resolved file list, and the shell command that produced that list.
    print "\nArguments as found by argparse:"
    print args
    print "\nFiles to be processed:"
    print files
    print "\nCommand that generated the file list:"
    print listFilesCommand
def printProcessHelp():
    # Print the interactive menu help in bright red (ANSI escape \033[1;91m,
    # reset with \033[0m). The triple-quoted string is left flush to preserve
    # the exact output layout.
    print "\033[1;91m" + """y - Start processing files
n - Abort execution, equivalent to "q"
f - List files to be processed
c - Show command used to generate the file list
d - Print debug information
q - Abort execution
? - Print help \033[0m"""
# == Script execution ==
# Generate wildcards from the extensions that should be processed.
if args.extensions is not None:
    sourceFileExtensions.extend(args.extensions)
sourceFileWildcards = ["*.%s" % x for x in sourceFileExtensions]

# Decide which list-command generator to invoke, by priority:
# explicit file arguments > forced git mode > find mode (the default).
listFilesCommand = None
if args.files:
    listFilesCommand = specificFilesCommand(args.files)
elif args.forceGit:
    # NOTE(review): `recursive` defaults to False, so git mode always
    # requires an explicit -r; kept as-is to preserve behaviour.
    if not args.recursive:
        exit("Git-mode does not currently support non-recursive processing! Exiting.")
    if not inGitDir:
        exit("Not a git repository (or any of the parent directories)! Exiting.")
    listFilesCommand = gitLsFilesCommand(sourceFileWildcards, onlyModified=args.onlyModified)
else:
    listFilesCommand = findFilesCommand(sourceFileWildcards, recursive=args.recursive)

# **Process those files!**
# Each line from the subprocess keeps its trailing newline, which serves as
# the separator when the names are later piped to xargs.
files = subprocess.Popen(listFilesCommand, stdout=subprocess.PIPE).stdout.readlines()
askProcessFiles(files, listFilesCommand)

View File

@ -22,19 +22,19 @@ from workspace_tools.targets import TARGET_NAMES
def get_default_options_parser():
    """Create the OptionParser shared by the workspace build scripts.

    Provides the common -m/--mcu, -t/--tool, -c/--clean and -o/--options
    flags; callers add their own script-specific options on top.
    """
    parser = OptionParser()
    mcu_help = "build for the given MCU (%s)" % ', '.join(TARGET_NAMES)
    parser.add_option("-m", "--mcu", metavar="MCU", help=mcu_help)
    tool_help = "build using the given TOOLCHAIN (%s)" % ', '.join(TOOLCHAINS)
    parser.add_option("-t", "--tool", metavar="TOOLCHAIN", help=tool_help)
    parser.add_option("-c", "--clean", default=False, action="store_true",
                      help="clean the build directory")
    parser.add_option("-o", "--options", action="append",
                      help='Add a build option ("save-asm": save the asm generated by the compiler, "debug-info": generate debugging information, "analyze": run static code analyzer")')
    return parser

View File

@ -31,7 +31,7 @@ def patch(bin_path):
with open(bin_path, 'r+b') as bin:
# Read entries 0 through 6 (Little Endian 32bits words)
vector = [unpack('<I', bin.read(4))[0] for _ in range(7)]
# location 7 (offset 0x1C in the vector table) should contain the 2's
# complement of the check-sum of table entries 0 through 6
bin.seek(0x1C)

View File

@ -17,50 +17,50 @@ from workspace_tools.targets import TARGET_NAMES
if __name__ == '__main__':
# Parse Options
parser = OptionParser()
parser.add_option("-m", "--mcu", metavar="MCU", default='LPC1768',
help="generate project for the given MCU (%s)" % ', '.join(TARGET_NAMES))
parser.add_option("-p", type="int", dest="program",
help="The index of the desired test program: [0-%d]" % (len(TESTS)-1))
parser.add_option("-i", dest="ide", default='uvision',
help="The target IDE: %s" % str(EXPORTERS.keys()))
parser.add_option("-b", dest="build", action="store_true", default=False,
help="Use the mbed library build, instead of the sources")
(options, args) = parser.parse_args()
# Target
if options.mcu is None :
args_error(parser, "[ERROR] You should specify an MCU")
mcu = options.mcu
# IDE
if options.ide is None:
args_error(parser, "[ERROR] You should specify an IDE")
ide = options.ide
# Project
if options.program is None or (options.program < 0) or (options.program > (len(TESTS)-1)):
message = "[ERROR] You have to specify one of the following tests:\n"
message += '\n'.join(map(str, sorted(TEST_MAP.values())))
args_error(parser, message)
test = Test(options.program)
if not options.build:
# Substitute the library builds with the sources
# TODO: Substitute also the other library build paths
if MBED_LIBRARIES in test.dependencies:
test.dependencies.remove(MBED_LIBRARIES)
test.dependencies.append(MBED_BASE)
# Build the projectwith the same directory structure of the mbed online IDE
project_dir = join(EXPORT_WORKSPACE, test.id)
setup_user_prj(project_dir, test.source_dir, test.dependencies)
# Export to selected toolchain
# Export to selected toolchain
tmp_path, report = export(project_dir, test.id, ide, mcu, EXPORT_WORKSPACE, EXPORT_TMP)
if report['success']:
zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (test.id, ide, mcu))

View File

@ -61,13 +61,13 @@ def benchmarks():
# CSV Data
csv_data = csv.writer(open(BENCHMARK_DATA_PATH, 'wb'))
csv_data.writerow(['Toolchain', "Target", "Benchmark", "code", "data", "bss", "flash"])
# Build
for toolchain in ['ARM', 'uARM', 'GCC_CR', 'GCC_CS', 'GCC_ARM']:
for mcu in ["LPC1768", "LPC11U24"]:
# Build Libraries
build_mbed_libs(mcu, toolchain)
# Build benchmarks
build_dir = join(BUILD_DIR, "benchmarks", mcu, toolchain)
for test_id, title in BENCHMARKS:
@ -90,23 +90,23 @@ def compare(t1, t2, target):
benchmarks()
else:
print "Loading: %s" % BENCHMARK_DATA_PATH
data = csv.reader(open(BENCHMARK_DATA_PATH, 'rb'))
benchmarks_data = defaultdict(dict)
for (toolchain, mcu, name, code, data, bss, flash) in data:
if target == mcu:
for t in [t1, t2]:
if toolchain == t:
benchmarks_data[name][t] = map(int, (code, data, bss, flash))
print "%s vs %s for %s" % (t1, t2, target)
for name, data in benchmarks_data.iteritems():
try:
# Check Size
code_a, data_a, bss_a, flash_a = data[t1]
code_u, data_u, bss_u, flash_u = data[t2]
print "\n=== %s ===" % name
print_diff("code", code_a , code_u)
print_diff("data", data_a , data_u)

View File

@ -46,19 +46,19 @@ commit_msg = ''
# Code that does have a mirror in the mbed SDK
# Tuple data: (repo_name, list_of_code_dirs, [team])
# team is optional - if not specified, the code is published under mbed_official
OFFICIAL_CODE = (
OFFICIAL_CODE = (
("mbed-src" , "mbed"),
("mbed-rtos", "rtos"),
("mbed-dsp" , "dsp"),
("mbed-rpc" , "rpc"),
("lwip" , "net/lwip/lwip"),
("lwip-sys", "net/lwip/lwip-sys"),
("Socket" , "net/lwip/Socket"),
("lwip-eth" , "net/eth/lwip-eth"),
("EthernetInterface", "net/eth/EthernetInterface"),
("USBDevice", "USBDevice"),
("USBHost" , "USBHost"),
@ -75,7 +75,7 @@ OFFICIAL_CODE = (
CODE_WITH_DEPENDENCIES = (
# Libraries
"EthernetInterface",
# RTOS Examples
"rtos_basic",
"rtos_isr",
@ -85,7 +85,7 @@ CODE_WITH_DEPENDENCIES = (
"rtos_semaphore",
"rtos_signals",
"rtos_timer",
# Net Examples
"TCPEchoClient",
"TCPEchoServer",
@ -95,7 +95,7 @@ CODE_WITH_DEPENDENCIES = (
"UDPEchoServer",
"BroadcastReceive",
"BroadcastSend",
# mbed sources
"mbed-src-program",
)
@ -123,7 +123,7 @@ class MbedRepository:
def run_and_print(command, cwd):
stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
print(stdout)
def __init__(self, name, team = None):
self.name = name
self.path = join(MBED_ORG_PATH, name)
@ -135,14 +135,14 @@ class MbedRepository:
# Checkout code
if not exists(MBED_ORG_PATH):
makedirs(MBED_ORG_PATH)
self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)
else:
# Update
self.run_and_print(['hg', 'pull'], cwd=self.path)
self.run_and_print(['hg', 'update'], cwd=self.path)
def publish(self):
# The maintainer has to evaluate the changes first and explicitly accept them
self.run_and_print(['hg', 'addremove'], cwd=self.path)
@ -205,7 +205,7 @@ def get_line_endings(f):
return 'cr'
# Copy file to destination, but preserve destination line endings if possible
# This prevents very annoying issues with huge diffs that appear because of
# This prevents very annoying issues with huge diffs that appear because of
# differences in line endings
def copy_with_line_endings(sdk_file, repo_file):
if not isfile(repo_file):
@ -237,11 +237,11 @@ def visit_files(path, visit):
if ignore_path(full, IGNORE_DIRS):
print "Skipping '%s'" % full
dirs.remove(d)
for file in files:
if ignore_path(file, IGNORE_FILES):
continue
visit(join(root, file))
@ -250,15 +250,15 @@ def update_repo(repo_name, sdk_paths, team_name):
# copy files from mbed SDK to mbed_official repository
def visit_mbed_sdk(sdk_file):
repo_file = join(repo.path, relpath(sdk_file, sdk_path))
repo_dir = dirname(repo_file)
if not exists(repo_dir):
makedirs(repo_dir)
copy_with_line_endings(sdk_file, repo_file)
for sdk_path in sdk_paths:
visit_files(sdk_path, visit_mbed_sdk)
# remove repository files that do not exist in the mbed SDK
def visit_repo(repo_file):
for sdk_path in sdk_paths:
@ -269,7 +269,7 @@ def update_repo(repo_name, sdk_paths, team_name):
remove(repo_file)
print "remove: %s" % repo_file
visit_files(repo.path, visit_repo)
if repo.publish():
changed.append(repo_name)
@ -294,7 +294,7 @@ def update_dependencies(repositories):
for repo_name in repositories:
print '\n=== Updating "%s" ===' % repo_name
repo = MbedRepository(repo_name)
# point to the latest libraries
def visit_repo(repo_file):
with open(repo_file, "r") as f:
@ -302,7 +302,7 @@ def update_dependencies(repositories):
with open(repo_file, "w") as f:
f.write(url[:(url.rindex('/')+1)])
visit_files(repo.path, visit_repo, None, MBED_REPO_EXT)
if repo.publish():
changed.append(repo_name)
@ -317,13 +317,13 @@ def do_sync(options):
quiet = options.quiet
commit_msg = options.msg
chnaged = []
if options.code:
update_code(OFFICIAL_CODE)
if options.dependencies:
update_dependencies(CODE_WITH_DEPENDENCIES)
if options.mbed:
update_mbed()
@ -337,19 +337,19 @@ def do_sync(options):
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("-c", "--code",
action="store_true", default=False,
help="Update the mbed_official code")
parser.add_option("-d", "--dependencies",
action="store_true", default=False,
help="Update the mbed_official code dependencies")
parser.add_option("-m", "--mbed",
action="store_true", default=False,
help="Release a build of the mbed library")
parser.add_option("-n", "--nopush",
action="store_true", default=False,
help="Commit the changes locally only, don't push them")
@ -361,12 +361,12 @@ if __name__ == '__main__':
parser.add_option("-r", "--repository",
action="store", type="string", default='', dest='repo',
help="Synchronize only the given repository")
parser.add_option("-q", "--quiet",
action="store_true", default=False,
help="Don't ask for confirmation before commiting or pushing")
(options, args) = parser.parse_args()
do_sync(options)

View File

@ -32,13 +32,13 @@ class Target:
def __init__(self):
# ARM Core
self.core = None
# Is the disk provided by the interface chip of this board virtual?
self.is_disk_virtual = False
# list of toolchains that are supported by the mbed SDK for this target
self.supported_toolchains = None
# list of extra specific labels
self.extra_labels = []
@ -46,13 +46,13 @@ class Target:
self.macros = []
self.name = self.__class__.__name__
def program_cycle_s(self):
return 4 if self.is_disk_virtual else 1.5
def get_labels(self):
return [self.name, CORE_LABELS[self.core]] + self.extra_labels
def init_hooks(self, hook, toolchain_name):
pass
@ -60,22 +60,22 @@ class Target:
class LPC2368(Target):
def __init__(self):
Target.__init__(self)
self.core = "ARM7TDMI-S"
self.extra_labels = ['NXP', 'LPC23XX']
self.supported_toolchains = ["ARM","GCC_ARM","GCC_CR"]
class LPC1768(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['NXP', 'LPC176X', 'MBED_LPC1768']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CS", "GCC_CR", "IAR"]
@ -84,22 +84,22 @@ class LPC11U24(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC11UXX', 'LPC11U24_401']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
class LPC11U24_301(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC11UXX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
@ -108,58 +108,58 @@ class KL05Z(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['Freescale', 'KLXX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
self.supported_form_factors = ["ARDUINO"]
self.is_disk_virtual = True
class KL25Z(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['Freescale', 'KLXX']
self.supported_toolchains = ["ARM", "GCC_CW_EWL", "GCC_CW_NEWLIB", "GCC_ARM"]
self.supported_form_factors = ["ARDUINO"]
self.is_disk_virtual = True
class KL46Z(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['Freescale', 'KLXX']
self.supported_toolchains = ["GCC_ARM", "ARM"]
self.supported_form_factors = ["ARDUINO"]
self.is_disk_virtual = True
class K20D5M(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4"
self.extra_labels = ['Freescale']
self.supported_toolchains = ["GCC_ARM", "ARM"]
self.is_disk_virtual = True
@ -174,7 +174,7 @@ class K64F(Target):
self.macros = ["CPU_MK64FN1M0VMD12", "FSL_RTOS_MBED"]
self.supported_toolchains = ["ARM"]
self.supported_form_factors = ["ARDUINO"]
self.is_disk_virtual = True
@ -185,15 +185,15 @@ class LPC812(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['NXP', 'LPC81X']
self.supported_toolchains = ["uARM"]
self.supported_form_factors = ["ARDUINO"]
self.is_disk_virtual = True
@ -202,32 +202,32 @@ class LPC810(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['NXP', 'LPC81X']
self.supported_toolchains = ["uARM"]
self.is_disk_virtual = True
class LPC4088(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['NXP', 'LPC408X']
self.supported_toolchains = ["ARM", "GCC_CR", "GCC_ARM"]
self.is_disk_virtual = True
def init_hooks(self, hook, toolchain_name):
if toolchain_name in ['ARM_STD', 'ARM_MICRO']:
hook.hook_add_binary("post", self.binary_hook)
@staticmethod
def binary_hook(t_self, resources, elf, binf):
if not os.path.isdir(binf):
@ -260,44 +260,44 @@ class LPC4088(Target):
class LPC4330_M4(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['NXP', 'LPC43XX']
self.supported_toolchains = ["ARM", "GCC_CR", "IAR", "GCC_ARM"]
class LPC4330_M0(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC43XX']
self.supported_toolchains = ["ARM", "GCC_CR", "IAR"]
class LPC1800(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['NXP', 'LPC43XX']
self.supported_toolchains = ["ARM", "GCC_CR", "IAR"]
class STM32F407(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['STM', 'STM32F4XX']
self.supported_toolchains = ["ARM", "GCC_ARM"]
@ -307,13 +307,13 @@ class NUCLEO_F030R8(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['STM', 'STM32F0', 'STM32F030R8']
self.supported_toolchains = ["ARM", "uARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -323,13 +323,13 @@ class NUCLEO_F072RB(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['STM', 'STM32F0', 'STM32F072RB']
self.supported_toolchains = ["ARM", "uARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -339,13 +339,13 @@ class NUCLEO_F103RB(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['STM', 'STM32F1', 'STM32F103RB']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -355,13 +355,13 @@ class NUCLEO_F302R8(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['STM', 'STM32F3', 'STM32F302R8']
self.supported_toolchains = ["ARM", "uARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -371,13 +371,13 @@ class NUCLEO_F401RE(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['STM', 'STM32F4', 'STM32F401RE']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -387,13 +387,13 @@ class NUCLEO_L053R8(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['STM', 'STM32L0', 'STM32L053R8']
self.supported_toolchains = ["ARM", "uARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -403,13 +403,13 @@ class NUCLEO_L152RE(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['STM', 'STM32L1', 'STM32L152RE']
self.supported_toolchains = ["ARM", "uARM"]
self.supported_form_factors = ["ARDUINO", "MORPHO"]
@ -419,23 +419,23 @@ class STM32F3XX(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4"
self.extra_labels = ['STM', 'STM32F3XX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
self.core = "Cortex-M4"
self.extra_labels = ['STM', 'STM32F3XX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
class LPC1347(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['NXP', 'LPC13XX']
self.supported_toolchains = ["ARM", "GCC_ARM"]
@ -444,22 +444,22 @@ class LPC1114(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC11XX_11CXX', 'LPC11XX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM","GCC_CR"]
class LPC11C24(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC11XX_11CXX', 'LPC11CXX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM"]
@ -468,11 +468,11 @@ class LPC11U35_401(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC11UXX']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM","GCC_CR"]
@ -481,50 +481,50 @@ class LPC11U35_501(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['NXP', 'LPC11UXX']
self.supported_toolchains = ["ARM", "uARM","GCC_ARM","GCC_CR"]
class UBLOX_C027(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['NXP', 'LPC176X']
self.supported_toolchains = ["ARM", "uARM", "GCC_ARM", "GCC_CS", "GCC_CR", "IAR"]
self.macros = ['TARGET_LPC1768']
self.supported_form_factors = ["ARDUINO"]
class NRF51822(Target):
EXPECTED_SOFTDEVICE = 's110_nrf51822_6.0.0_softdevice.hex'
OUTPUT_EXT = '.hex'
APPCODE_OFFSET = 0x14000
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ["NORDIC"]
self.supported_toolchains = ["ARM"]
self.is_disk_virtual = True
def init_hooks(self, hook, toolchain_name):
if toolchain_name in ['ARM_STD', 'ARM_MICRO']:
hook.hook_add_binary("post", self.binary_hook)
@staticmethod
def binary_hook(t_self, resources, elf, binf):
for hexf in resources.hex_files:
@ -533,46 +533,46 @@ class NRF51822(Target):
else:
t_self.debug("Hex file not found. Aborting.")
return
# Merge user code with softdevice
from intelhex import IntelHex
binh = IntelHex()
binh.loadbin(binf, offset = NRF51822.APPCODE_OFFSET)
sdh = IntelHex(hexf)
sdh.merge(binh)
with open(binf.replace(".bin", ".hex"), "w") as f:
sdh.tofile(f, format = 'hex')
class LPC1549(Target):
ONLINE_TOOLCHAIN = "uARM"
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['NXP', 'LPC15XX']
self.supported_toolchains = ["uARM"]
self.supported_form_factors = ["ARDUINO"]
class LPC11U68(Target):
ONLINE_TOOLCHAIN = "uARM"
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0+"
self.extra_labels = ['NXP', 'LPC11U6X']
self.supported_toolchains = ["uARM"]
self.supported_form_factors = ["ARDUINO"]
@ -582,39 +582,39 @@ class DISCO_F100RB(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M3"
self.extra_labels = ['STM', 'STM32F1', 'STM32F100RB']
self.supported_toolchains = ["GCC_ARM"]
class DISCO_F051R8(Target):
ONLINE_TOOLCHAIN = "uARM"
OUTPUT_NAMING = "8.3"
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M0"
self.extra_labels = ['STM', 'STM32F0', 'STM32F051','STM32F051R8']
self.supported_toolchains = ["GCC_ARM"]
class DISCO_F407VG(Target):
ONLINE_TOOLCHAIN = "uARM"
OUTPUT_NAMING = "8.3"
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['STM', 'STM32F4', 'STM32F407','STM32F407VG']
self.supported_toolchains = ["GCC_ARM"]
@ -624,13 +624,13 @@ class DISCO_F303VC(Target):
def __init__(self):
Target.__init__(self)
self.core = "Cortex-M4F"
self.extra_labels = ['STM', 'STM32F3', 'STM32F303','STM32F303VC']
self.supported_toolchains = ["GCC_ARM"]
class XADOW_M0(LPC11U35_501):
def __init__(self):
LPC11U35_501.__init__(self)

View File

@ -33,12 +33,12 @@ def print_notify(event):
# Default command line notification
if event['type'] in ['info', 'debug']:
print event['message']
elif event['type'] == 'cc':
event['severity'] = event['severity'].title()
event['file'] = basename(event['file'])
print '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
elif event['type'] == 'progress':
print '%s: %s' % (event['action'].title(), basename(event['file']))
@ -46,53 +46,53 @@ def print_notify(event):
class Resources:
def __init__(self, base_path=None):
self.base_path = base_path
self.inc_dirs = []
self.headers = []
self.s_sources = []
self.c_sources = []
self.cpp_sources = []
self.lib_dirs = set([])
self.objects = []
self.libraries = []
# mbed special files
self.lib_builds = []
self.lib_refs = []
self.repo_dirs = []
self.repo_files = []
self.linker_script = None
# Other files
self.hex_files = []
def add(self, resources):
self.inc_dirs += resources.inc_dirs
self.headers += resources.headers
self.s_sources += resources.s_sources
self.c_sources += resources.c_sources
self.cpp_sources += resources.cpp_sources
self.lib_dirs |= resources.lib_dirs
self.objects += resources.objects
self.libraries += resources.libraries
self.lib_builds += resources.lib_builds
self.lib_refs += resources.lib_refs
self.repo_dirs += resources.repo_dirs
self.repo_files += resources.repo_files
if resources.linker_script is not None:
self.linker_script = resources.linker_script
self.hex_files += resources.hex_files
def relative_to(self, base, dot=False):
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
'cpp_sources', 'lib_dirs', 'objects', 'libraries',
@ -101,7 +101,7 @@ class Resources:
setattr(self, field, v)
if self.linker_script is not None:
self.linker_script = rel_path(self.linker_script, base, dot)
def win_to_unix(self):
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
'cpp_sources', 'lib_dirs', 'objects', 'libraries',
@ -110,18 +110,18 @@ class Resources:
setattr(self, field, v)
if self.linker_script is not None:
self.linker_script = self.linker_script.replace('\\', '/')
def __str__(self):
s = []
for (label, resources) in (
('Include Directories', self.inc_dirs),
('Headers', self.headers),
('Assembly sources', self.s_sources),
('C sources', self.c_sources),
('C++ sources', self.cpp_sources),
('Library directories', self.lib_dirs),
('Objects', self.objects),
('Libraries', self.libraries),
@ -130,10 +130,10 @@ class Resources:
):
if resources:
s.append('%s:\n ' % label + '\n '.join(resources))
if self.linker_script:
s.append('Linker Script: ' + self.linker_script)
return '\n'.join(s)
@ -153,7 +153,7 @@ LEGACY_TOOLCHAIN_NAMES = {
class mbedToolchain:
VERBOSE = True
CORTEX_SYMBOLS = {
"Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3"],
"Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0"],
@ -169,14 +169,14 @@ class mbedToolchain:
self.target = target
self.name = self.__class__.__name__
self.hook = hooks.Hook(target, self)
self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
if notify is not None:
self.notify = notify
else:
self.notify = print_notify
if options is None:
self.options = []
else:
@ -185,13 +185,13 @@ class mbedToolchain:
self.options.extend(BUILD_OPTIONS)
if self.options:
self.info("Build Options: %s" % (', '.join(self.options)))
self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
self.symbols = None
self.labels = None
self.has_config = False
self.build_all = False
self.timestamp = time()
@ -200,18 +200,18 @@ class mbedToolchain:
return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
else:
return None
def get_symbols(self):
if self.symbols is None:
# Target and Toolchain symbols
labels = self.get_labels()
self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
# Config support
if self.has_config:
self.symbols.append('HAVE_MBED_CONFIG_H')
# Cortex CPU symbols
if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
@ -220,7 +220,7 @@ class mbedToolchain:
self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, '__MBED__=1'])
if MBED_ORG_USER:
self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
# Add target's symbols
for macro in self.target.macros:
self.symbols.append(macro)
@ -228,9 +228,9 @@ class mbedToolchain:
# Form factor variables
if hasattr(self.target, 'supported_form_factors'):
self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
return self.symbols
def get_labels(self):
if self.labels is None:
toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
@ -240,33 +240,33 @@ class mbedToolchain:
'TOOLCHAIN': toolchain_labels
}
return self.labels
def need_update(self, target, dependencies):
if self.build_all:
return True
if not exists(target):
return True
target_mod_time = stat(target).st_mtime
for d in dependencies:
# Some objects are not provided with full path and here we do not have
# information about the library paths. Safe option: assume an update
if not d or not exists(d):
return True
if stat(d).st_mtime >= target_mod_time:
return True
return False
def scan_resources(self, path):
labels = self.get_labels()
resources = Resources(path)
self.has_config = False
""" os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
When topdown is True, the caller can modify the dirnames list in-place
(perhaps using del or slice assignment), and walk() will only recurse into
@ -289,36 +289,36 @@ class mbedToolchain:
(d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
(d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])):
dirs.remove(d)
# Add root to include paths
resources.inc_dirs.append(root)
for file in files:
file_path = join(root, file)
_, ext = splitext(file)
ext = ext.lower()
if ext == '.s':
resources.s_sources.append(file_path)
elif ext == '.c':
resources.c_sources.append(file_path)
elif ext == '.cpp':
resources.cpp_sources.append(file_path)
elif ext == '.h':
if basename(file_path) == "mbed_config.h":
self.has_config = True
resources.headers.append(file_path)
elif ext == '.o':
resources.objects.append(file_path)
elif ext == self.LIBRARY_EXT:
resources.libraries.append(file_path)
resources.lib_dirs.add(root)
elif ext == self.LINKER_EXT:
resources.linker_script = file_path
@ -338,17 +338,17 @@ class mbedToolchain:
def scan_repository(self, path):
resources = []
for root, dirs, files in walk(path):
# Remove ignored directories
for d in copy(dirs):
if d == '.' or d == '..':
dirs.remove(d)
for file in files:
file_path = join(root, file)
resources.append(file_path)
return resources
def copy_files(self, files_paths, trg_path, rel_path=None):
@ -364,30 +364,30 @@ class mbedToolchain:
relative_path = relpath(source, rel_path)
else:
_, relative_path = split(source)
target = join(trg_path, relative_path)
if (target != source) and (self.need_update(target, [source])):
self.progress("copy", relative_path)
mkdir(dirname(target))
copyfile(source, target)
def relative_object_path(self, build_path, base_dir, source):
source_dir, name, _ = split_path(source)
obj_dir = join(build_path, relpath(source_dir, base_dir))
mkdir(obj_dir)
return join(obj_dir, name + '.o')
def compile_sources(self, resources, build_path, inc_dirs=None):
# Web IDE progress bar for project build
self.to_be_compiled = len(resources.s_sources) + len(resources.c_sources) + len(resources.cpp_sources)
self.compiled = 0
objects = []
inc_paths = resources.inc_dirs
if inc_dirs is not None:
inc_paths.extend(inc_dirs)
base_path = resources.base_path
for source in resources.s_sources:
self.compiled += 1
@ -396,97 +396,97 @@ class mbedToolchain:
self.progress("assemble", source, build_update=True)
self.assemble(source, object, inc_paths)
objects.append(object)
# The dependency checking for C/C++ is delegated to the specific compiler
for source in resources.c_sources:
object = self.relative_object_path(build_path, base_path, source)
self.compile_c(source, object, inc_paths)
objects.append(object)
for source in resources.cpp_sources:
object = self.relative_object_path(build_path, base_path, source)
self.compile_cpp(source, object, inc_paths)
objects.append(object)
return objects
def compile(self, cc, source, object, includes):
# Check dependencies
base, _ = splitext(object)
dep_path = base + '.d'
self.compiled += 1
if (not exists(dep_path) or
self.need_update(object, self.parse_dependencies(dep_path))):
self.progress("compile", source, build_update=True)
# Compile
command = cc + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source]
if hasattr(self, "get_dep_opt"):
command.extend(self.get_dep_opt(dep_path))
if hasattr(self, "cc_extra"):
command.extend(self.cc_extra(base))
self.debug(command)
_, stderr, rc = run_cmd(self.hook.get_cmdline_compiler(command), dirname(object))
# Parse output for Warnings and Errors
self.parse_output(stderr)
# Check return code
if rc != 0:
raise ToolException(stderr)
def compile_c(self, source, object, includes):
self.compile(self.cc, source, object, includes)
def compile_cpp(self, source, object, includes):
self.compile(self.cppc, source, object, includes)
def build_library(self, objects, dir, name):
    """Archive *objects* into a static library inside *dir*.

    The library file name is derived from *name* via the toolchain's
    STD_LIB_NAME pattern; the archiver only runs when the library is
    out of date with respect to the object files.
    """
    lib_name = self.STD_LIB_NAME % name
    out_path = join(dir, lib_name)
    if not self.need_update(out_path, objects):
        return
    self.info("Library: %s" % lib_name)
    self.archive(objects, out_path)
def link_program(self, r, tmp_path, name):
    """Link the Resources *r* into an ELF and convert it to the final
    binary inside *tmp_path*; returns the path of the produced binary.
    """
    ext = 'bin'
    # Targets that declare OUTPUT_NAMING may require 8.3 (DOS-style)
    # file names for their on-board flash file system.
    if hasattr(self.target, 'OUTPUT_NAMING'):
        self.var("binary_naming", self.target.OUTPUT_NAMING)
        if self.target.OUTPUT_NAMING == "8.3":
            name = name[0:8]
            ext = ext[0:3]

    filename = name+'.'+ext
    elf = join(tmp_path, name + '.elf')
    bin = join(tmp_path, filename)

    # Relink only when an object, library or the linker script changed.
    if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
        self.progress("link", name)
        self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

    # Regenerate the binary only when the ELF is newer.
    if self.need_update(bin, [elf]):
        self.progress("elf2bin", name)

        self.binary(r, elf, bin)

        # LPC targets need their binary patched in place after conversion.
        if self.target.name.startswith('LPC'):
            self.debug("LPC Patch %s" % filename)
            patch(bin)

    # NOTE: "compile_succeded" spelling is part of the notification
    # protocol; consumers may rely on it, so it is left as-is.
    self.var("compile_succeded", True)
    self.var("binary", filename)

    # Targets can override the reported output extension (e.g. hex).
    if hasattr(self.target, 'OUTPUT_EXT'):
        bin = bin.replace('.bin', self.target.OUTPUT_EXT)

    return bin
def default_cmd(self, command):
self.debug(command)
stdout, stderr, rc = run_cmd(command)
@ -495,29 +495,29 @@ class mbedToolchain:
for line in stderr.splitlines():
self.tool_error(line)
raise ToolException(stderr)
### NOTIFICATIONS ###
def info(self, message):
    """Emit an informational message through the notification channel."""
    event = {'type': 'info', 'message': message}
    self.notify(event)
def debug(self, message):
    """Emit a debug message, but only when verbose mode is enabled.

    *message* may be a command line given as a list of argv tokens, in
    which case it is joined into a single space-separated string before
    being sent to the notifier.
    """
    if self.VERBOSE:
        # isinstance() replaces the Python-2-only `type(...) is ListType`
        # exact-type check: identical for plain lists, additionally
        # correct for list subclasses, and works on Python 3.
        if isinstance(message, list):
            message = ' '.join(message)
        self.notify({'type': 'debug', 'message': message})
def cc_info(self, severity, file, line, message):
    """Forward a single compiler diagnostic (warning/error) to the notifier."""
    diagnostic = {
        'type': 'cc',
        'severity': severity,
        'file': file,
        'line': line,
        'message': message,
    }
    self.notify(diagnostic)
def progress(self, action, file, build_update=False):
    """Report build progress for *file*.

    When *build_update* is True a completion percentage, derived from
    the compiled/to_be_compiled counters, is attached to the event.
    """
    event = {'type': 'progress', 'action': action, 'file': file}
    if build_update:
        percent = 100. * float(self.compiled) / float(self.to_be_compiled)
        event['percent'] = percent
    self.notify(event)
def tool_error(self, message):
    """Report a toolchain-level error message through the notifier."""
    event = {'type': 'tool_error', 'message': message}
    self.notify(event)
def var(self, key, value):
    """Publish a build variable as a key/value notification.

    Note: the payload field is named 'val' (not 'value'); consumers of
    the notification stream depend on that key.
    """
    payload = {'type': 'var', 'key': key, 'val': value}
    self.notify(payload)

View File

@ -25,28 +25,28 @@ from workspace_tools.settings import GOANNA_PATH
class ARM(mbedToolchain):
LINKER_EXT = '.sct'
LIBRARY_EXT = '.ar'
STD_LIB_NAME = "%s.ar"
DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+): (?P<severity>Warning|Error): (?P<message>.+)')
DEP_PATTERN = re.compile('\S+:\s(?P<file>.+)\n')
def __init__(self, target, options=None, notify=None, macros=None):
mbedToolchain.__init__(self, target, options, notify, macros)
if target.core == "Cortex-M0+":
cpu = "Cortex-M0"
elif target.core == "Cortex-M4F":
cpu = "Cortex-M4.fp"
else:
cpu = target.core
main_cc = join(ARM_BIN, "armcc")
common = ["-c",
"--cpu=%s" % cpu, "--gnu",
"-Otime", "--split_sections", "--apcs=interwork",
"--brief_diagnostics", "--restrict"
]
if "save-asm" in self.options:
common.extend(["--asm", "--interleave"])
@ -55,12 +55,12 @@ class ARM(mbedToolchain):
common.append("-O0")
else:
common.append("-O3")
common_c = [
"--md", "--no_depend_system_headers",
'-I%s' % ARM_INC
]
self.asm = [main_cc] + common + ['-I%s' % ARM_INC]
if not "analyze" in self.options:
self.cc = [main_cc] + common + common_c + ["--c99"]
@ -68,24 +68,24 @@ class ARM(mbedToolchain):
else:
self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + common + common_c + ["--c99"]
self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + common + common_c + ["--cpp", "--no_rtti"]
self.ld = [join(ARM_BIN, "armlink")]
self.sys_libs = []
self.ar = join(ARM_BIN, "armar")
self.elf2bin = join(ARM_BIN, "fromelf")
def remove_option(self, option):
    """Strip *option* from the assembler, C and C++ command lines.

    Only the first occurrence in each command line is removed, and
    command lines that do not contain the option are left untouched.
    """
    for cmdline in (self.asm, self.cc, self.cppc):
        if option in cmdline:
            cmdline.remove(option)
def assemble(self, source, object, includes):
    """Assemble *source* into *object* with armasm in two passes.

    Pass 1 preprocesses the source (-E) into a temporary .E.s file;
    pass 2 assembles that preprocessed file into the object.
    """
    preprocessed = object + '.E.s'
    defines = ['-D%s' % symbol for symbol in self.get_symbols() + self.macros]
    include_args = ['-I%s' % directory for directory in includes]
    # Preprocess first, then assemble
    self.default_cmd(self.asm + defines + include_args + ['-E', '-o', preprocessed, source])
    self.default_cmd(self.hook.get_cmdline_assembler(self.asm + ['-o', object, preprocessed]))
def parse_dependencies(self, dep_path):
dependencies = []
for line in open(dep_path).readlines():
@ -93,7 +93,7 @@ class ARM(mbedToolchain):
if match is not None:
dependencies.append(match.group('file'))
return dependencies
def parse_output(self, output):
for line in output.splitlines():
match = ARM.DIAGNOSTIC_PATTERN.match(line)
@ -115,7 +115,7 @@ class ARM(mbedToolchain):
def archive(self, objects, lib_path):
    """Create/update the static library *lib_path* from *objects* via armar."""
    command = [self.ar, '-r', lib_path]
    command.extend(objects)
    self.default_cmd(command)
def link(self, output, objects, libraries, lib_dirs, mem_map):
if len(lib_dirs):
args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--list=.link_totals.txt"]
@ -129,14 +129,14 @@ class ARM(mbedToolchain):
args = self.target.link_cmdline_hook(self.__class__.__name__, args)
self.default_cmd(self.ld + args + objects + libraries + self.sys_libs)
@hook_tool
def binary(self, resources, elf, bin):
    """Convert the linked *elf* image into a raw binary at *bin* via fromelf.

    Targets may rewrite the command line through an optional
    ``binary_cmdline_hook`` attribute.
    """
    cmdline = [self.elf2bin, '--bin', '-o', bin, elf]
    if hasattr(self.target, "binary_cmdline_hook"):
        cmdline = self.target.binary_cmdline_hook(self.__class__.__name__, cmdline)
    self.default_cmd(cmdline)
class ARM_STD(ARM):
@ -147,29 +147,29 @@ class ARM_STD(ARM):
class ARM_MICRO(ARM):
PATCHED_LIBRARY = False
def __init__(self, target, options=None, notify=None, macros=None):
ARM.__init__(self, target, options, notify, macros)
# Compiler
self.asm += ["-D__MICROLIB"]
self.cc += ["--library_type=microlib", "-D__MICROLIB"]
self.cppc += ["--library_type=microlib", "-D__MICROLIB"]
# Linker
self.ld.append("--library_type=microlib")
# We had to patch microlib to add C++ support
# In later releases this patch should have entered mainline
if ARM_MICRO.PATCHED_LIBRARY:
self.ld.append("--noscanlib")
# System Libraries
self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])
if target.core == "Cortex-M3":
self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ws", "cpprt_w"]])
elif target.core in ["Cortex-M0", "Cortex-M0+"]:
self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
else:

View File

@ -24,29 +24,29 @@ from workspace_tools.settings import GOANNA_PATH
class GCC(mbedToolchain):
LINKER_EXT = '.ld'
LIBRARY_EXT = '.a'
STD_LIB_NAME = "lib%s.a"
CIRCULAR_DEPENDENCIES = True
DIAGNOSTIC_PATTERN = re.compile('((?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')
def __init__(self, target, options=None, notify=None, macros=None, tool_path=""):
mbedToolchain.__init__(self, target, options, notify, macros)
if target.core == "Cortex-M0+":
cpu = "cortex-m0"
elif target.core == "Cortex-M4F":
cpu = "cortex-m4"
else:
cpu = target.core.lower()
self.cpu = ["-mcpu=%s" % cpu]
if target.core.startswith("Cortex"):
self.cpu.append("-mthumb")
if target.core == "Cortex-M4F":
self.cpu.append("-mfpu=fpv4-sp-d16")
self.cpu.append("-mfloat-abi=softfp")
# Note: We are using "-O2" instead of "-Os" to avoid this known GCC bug:
# http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46762
common_flags = ["-c", "-Wall", "-Wextra",
@ -55,7 +55,7 @@ class GCC(mbedToolchain):
"-ffunction-sections", "-fdata-sections",
"-MMD", "-fno-delete-null-pointer-checks",
] + self.cpu
if "save-asm" in self.options:
common_flags.append("-save-temps")
@ -74,16 +74,16 @@ class GCC(mbedToolchain):
else:
self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "-std=gnu99", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags
self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cppc.replace('\\', '/'), "-std=gnu++98", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags
self.ld = [join(tool_path, "arm-none-eabi-gcc"), "-Wl,--gc-sections", "-Wl,--wrap,main"] + self.cpu
self.sys_libs = ["stdc++", "supc++", "m", "c", "gcc"]
self.ar = join(tool_path, "arm-none-eabi-ar")
self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")
def assemble(self, source, object, includes):
    """Assemble *source* into *object* in a single invocation.

    Symbols/macros are passed as -D options and include directories as
    -I options; the final command line goes through the assembler hook.
    """
    defines = ['-D%s' % symbol for symbol in self.get_symbols() + self.macros]
    include_args = ['-I%s' % directory for directory in includes]
    cmdline = self.asm + defines + include_args + ['-o', object, source]
    self.default_cmd(self.hook.get_cmdline_assembler(cmdline))
def parse_dependencies(self, dep_path):
dependencies = []
for line in open(dep_path).readlines()[1:]:
@ -102,7 +102,7 @@ class GCC(mbedToolchain):
else:
dependencies = dependencies + [f.replace('\a', ' ') for f in file.split(" ")]
return dependencies
def parse_output(self, output):
# The warning/error notification is multiline
WHERE, WHAT = 0, 1
@ -117,50 +117,50 @@ class GCC(mbedToolchain):
match.group('message')
)
continue
# Each line should start with the file information: "filepath: ..."
# i should point past the file path ^
# avoid the first column in Windows (C:\)
i = line.find(':', 2)
if i == -1: continue
if state == WHERE:
file = line[:i]
message = line[i+1:].strip() + ' '
state = WHAT
elif state == WHAT:
match = GCC.DIAGNOSTIC_PATTERN.match(line[i+1:])
if match is None:
state = WHERE
continue
self.cc_info(
match.group('severity'),
file, match.group('line'),
message + match.group('message')
)
def archive(self, objects, lib_path):
    """Create the static library *lib_path* from *objects* with `ar rcs`."""
    command = [self.ar, "rcs", lib_path]
    command += objects
    self.default_cmd(command)
def link(self, output, objects, libraries, lib_dirs, mem_map):
    """Link *objects* and *libraries* into *output*, using *mem_map* as
    the GNU linker script (-T) and *lib_dirs* as library search paths.
    """
    # Turn library paths into -l options; name[3:] strips the "lib"
    # prefix from the file name (e.g. "libfoo.a" -> "-lfoo").
    libs = []
    for l in libraries:
        name, _ = splitext(basename(l))
        libs.append("-l%s" % name[3:])
    libs.extend(["-l%s" % l for l in self.sys_libs])

    # NOTE: There is a circular dependency between the mbed library and the clib
    # We could define a set of weak symbols to satisfy the clib dependencies in "sys.o",
    # but if an application uses only clib symbols and not mbed symbols, then the final
    # image is not correctly retargeted
    if self.CIRCULAR_DEPENDENCIES:
        # Listing every library twice makes the linker scan them a
        # second pass, resolving the mutual references.
        libs.extend(libs)

    self.default_cmd(self.hook.get_cmdline_linker(self.ld + ["-T%s" % mem_map, "-o", output] +
        objects + ["-L%s" % L for L in lib_dirs] + libs))
def binary(self, resources, elf, bin):
    """Convert the linked *elf* to a raw binary at *bin* using objcopy."""
    cmdline = [self.elf2bin, "-O", "binary", elf, bin]
    self.default_cmd(self.hook.get_cmdline_binary(cmdline))
@ -168,25 +168,25 @@ class GCC(mbedToolchain):
class GCC_ARM(GCC):
    """GCC toolchain variant for the GNU Tools for ARM Embedded release."""

    def __init__(self, target, options=None, notify=None, macros=None):
        GCC.__init__(self, target, options, notify, macros, GCC_ARM_PATH)

        # Use latest gcc nanolib
        self.ld.append("--specs=nano.specs")

        # These targets need the float-capable printf/scanf stubs from
        # newlib-nano force-linked into the image.
        float_io_targets = ("LPC1768", "LPC4088", "LPC4330", "UBLOX_C027")
        if target.name in float_io_targets:
            self.ld.extend(["-u", "_printf_float", "-u", "_scanf_float"])

        self.sys_libs.append("nosys")
class GCC_CR(GCC):
def __init__(self, target, options=None, notify=None, macros=None):
GCC.__init__(self, target, options, notify, macros, GCC_CR_PATH)
additional_compiler_flags = [
"-D__NEWLIB__", "-D__CODE_RED", "-D__USE_CMSIS", "-DCPP_USE_HEAP",
]
self.cc += additional_compiler_flags
self.cppc += additional_compiler_flags
self.ld += ["-nostdlib"]
@ -199,7 +199,7 @@ class GCC_CW(GCC):
ARCH_LIB = {
"Cortex-M0+": "armv6-m",
}
def __init__(self, target, options=None, notify=None, macros=None):
GCC.__init__(self, target, options, notify, macros, CW_GCC_PATH)
@ -207,7 +207,7 @@ class GCC_CW(GCC):
class GCC_CW_EWL(GCC_CW):
def __init__(self, target, options=None, notify=None, macros=None):
GCC_CW.__init__(self, target, options, notify, macros)
# Compiler
common = [
'-mfloat-abi=soft',
@ -220,7 +220,7 @@ class GCC_CW_EWL(GCC_CW):
'-nostdinc++', '-I%s' % join(CW_EWL_PATH, "EWL_C++", "include"),
'-include', join(CW_EWL_PATH, "EWL_C++", "include", 'lib_ewl_c++.prefix')
]
# Linker
self.sys_libs = []
self.CIRCULAR_DEPENDENCIES = False

View File

@ -26,12 +26,12 @@ class IAR(mbedToolchain):
LIBRARY_EXT = '.a'
LINKER_EXT = '.icf'
STD_LIB_NAME = "%s.a"
DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)",(?P<line>[\d]+)\s+(?P<severity>Warning|Error)(?P<message>.+)')
def __init__(self, target, options=None, notify=None, macros=None):
mbedToolchain.__init__(self, target, options, notify, macros)
c_flags = [
"--cpu=%s" % target.core, "--thumb",
"--dlib_config", join(IAR_PATH, "inc", "c", "DLib_Config_Full.h"),
@ -49,7 +49,7 @@ class IAR(mbedToolchain):
c_flags.append("-On")
else:
c_flags.append("-Oh")
IAR_BIN = join(IAR_PATH, "bin")
main_cc = join(IAR_BIN, "iccarm")
self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", target.core]
@ -62,7 +62,7 @@ class IAR(mbedToolchain):
self.ld = join(IAR_BIN, "ilinkarm")
self.ar = join(IAR_BIN, "iarchive")
self.elf2bin = join(IAR_BIN, "ielftool")
def parse_output(self, output):
for line in output.splitlines():
match = IAR.DIAGNOSTIC_PATTERN.match(line)
@ -81,28 +81,28 @@ class IAR(mbedToolchain):
match.group('line'),
match.group('message')
)
def get_dep_opt(self, dep_path):
    """Return the iccarm options that write dependency info to *dep_path*."""
    options = ["--dependencies"]
    options.append(dep_path)
    return options
def cc_extra(self, base):
    """Extra compile options: emit an assembly listing next to the object."""
    listing = base + '.s'
    return ["-l", listing]
def parse_dependencies(self, dep_path):
    """Read the iccarm dependency file at *dep_path* and return a list of
    dependency paths, stripped of surrounding whitespace, skipping
    blank/whitespace-only lines.
    """
    deps = []
    for line in open(dep_path).readlines():
        if line and not line.isspace():
            deps.append(line.strip())
    return deps
def assemble(self, source, object, includes):
    """Assemble *source* into *object* with iasmarm in a single invocation."""
    cmdline = list(self.asm)
    cmdline += ['-D%s' % symbol for symbol in self.get_symbols() + self.macros]
    cmdline += ['-I%s' % directory for directory in includes]
    cmdline += ['-o', object, source]
    self.default_cmd(self.hook.get_cmdline_assembler(cmdline))
def archive(self, objects, lib_path):
    """Build the static library *lib_path* from *objects* with iarchive.

    Any pre-existing archive is deleted first so the result contains
    exactly these objects (presumably iarchive would otherwise update
    the old archive in place — behaviour inherited from the original).
    """
    if exists(lib_path):
        remove(lib_path)
    command = [self.ar, lib_path]
    command.extend(objects)
    self.default_cmd(command)
def link(self, output, objects, libraries, lib_dirs, mem_map):
    """Link with ilinkarm, using *mem_map* as the .icf linker configuration.

    *lib_dirs* is accepted for interface parity with the other
    toolchains but is not passed to this linker invocation.
    """
    cmdline = [self.ld, "-o", output, "--config", mem_map]
    cmdline += objects
    cmdline += libraries
    self.default_cmd(self.hook.get_cmdline_linker(cmdline))
def binary(self, resources, elf, bin):
    """Convert *elf* to a raw binary image at *bin* using ielftool."""
    cmdline = [self.elf2bin, '--bin', elf, bin]
    self.default_cmd(self.hook.get_cmdline_binary(cmdline))

View File

@ -24,9 +24,9 @@ from shutil import copyfile
def cmd(l, check=True, verbose=False, shell=False, cwd=None):
    """Run command *l* and optionally fail on a non-zero exit code.

    l       -- the command: an argv list, or a string when shell=True
    check   -- when True, raise Exception on a non-zero exit code
    verbose -- when True, echo the command before running it
    shell   -- passed through to the underlying call()
    cwd     -- working directory for the command
    """
    # NOTE: Python 2 source (print statement below).
    text = l if shell else ' '.join(l)
    if verbose: print text
    rc = call(l, shell=shell, cwd=cwd)
    if check and rc != 0:
        raise Exception('ERROR %d: "%s"' % (rc, text))
@ -34,7 +34,7 @@ def cmd(l, check=True, verbose=False, shell=False, cwd=None):
def run_cmd(command, wd=None, redirect=False):
    """Execute *command*, capturing its output.

    command  -- argv list to run
    wd       -- optional working directory
    redirect -- when True, merge stderr into the stdout stream

    Returns a (stdout, stderr, returncode) tuple.
    """
    err_stream = STDOUT if redirect else PIPE
    process = Popen(command, stdout=PIPE, stderr=err_stream, cwd=wd)
    out, err = process.communicate()
    return out, err, process.returncode
@ -57,7 +57,7 @@ def copy_file(src, dst):
def delete_dir_files(dir):
if not exists(dir):
return
for f in listdir(dir):
file = join(dir, f)
if not isdir(file):