"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
2018-01-12 22:34:32 +00:00
from __future__ import print_function , division , absolute_import
2014-06-11 13:47:54 +00:00
2014-07-29 13:48:48 +00:00
import re
2017-04-04 16:35:00 +00:00
import datetime
2017-04-07 16:03:02 +00:00
import uuid
2018-01-25 21:57:48 +00:00
import struct
import zlib
import hashlib
2014-08-15 10:17:33 +00:00
from shutil import rmtree
2017-02-01 22:24:39 +00:00
from os . path import join , exists , dirname , basename , abspath , normpath , splitext
2017-04-06 16:17:54 +00:00
from os . path import relpath
2017-02-01 22:24:39 +00:00
from os import linesep , remove , makedirs
2015-11-05 20:42:45 +00:00
from time import time
2017-02-01 22:24:39 +00:00
from intelhex import IntelHex
2017-04-06 16:17:54 +00:00
from json import load , dump
2015-03-31 22:56:00 +00:00
from jinja2 import FileSystemLoader
from jinja2 . environment import Environment
2018-01-12 22:34:32 +00:00
from . arm_pack_manager import Cache
from . utils import ( mkdir , run_cmd , run_cmd_ext , NotSupportedException ,
ToolException , InvalidReleaseTargetException ,
2018-11-13 18:33:05 +00:00
intelhex_offset , integer , generate_update_filename , copy_when_different )
2018-01-12 22:34:32 +00:00
from . paths import ( MBED_CMSIS_PATH , MBED_TARGETS_PATH , MBED_LIBRARIES ,
MBED_HEADER , MBED_DRIVERS , MBED_PLATFORM , MBED_HAL ,
MBED_CONFIG_FILE , MBED_LIBRARIES_DRIVERS ,
MBED_LIBRARIES_PLATFORM , MBED_LIBRARIES_HAL ,
BUILD_DIR )
2018-07-05 17:19:39 +00:00
from . resources import Resources , FileType , FileRef
2018-05-04 18:06:49 +00:00
from . notifier . mock import MockNotifier
2018-12-06 15:43:34 +00:00
from . targets import TARGET_NAMES , TARGET_MAP , CORE_ARCH , Target
2018-01-12 22:34:32 +00:00
from . libraries import Library
from . toolchains import TOOLCHAIN_CLASSES
from . config import Config
2013-06-10 14:44:08 +00:00
2016-07-27 01:40:51 +00:00
# Release versions of mbed that targets may declare support for.
RELEASE_VERSIONS = ['2', '5']
2015-11-05 20:42:45 +00:00
def prep_report(report, target_name, toolchain_name, id_name):
    """Ensure the nested report structure exists for one build.

    Creates report[target_name][toolchain_name][id_name] = [] for any
    missing level of the nesting, leaving already-recorded results intact.

    Positional arguments:
    report - the report to fill
    target_name - the target being used
    toolchain_name - the toolchain being used
    id_name - the name of the executable or library being built
    """
    # setdefault creates each level only when missing, so results that
    # were appended earlier for this combination are preserved.
    report.setdefault(target_name, {}) \
          .setdefault(toolchain_name, {}) \
          .setdefault(id_name, [])
2015-11-24 23:39:20 +00:00
def prep_properties(properties, target_name, toolchain_name, vendor_label):
    """Set up the test property dictionary for one target/toolchain pair.

    Positional arguments:
    properties - the dict to fill
    target_name - the target the test is targeting
    toolchain_name - the toolchain that will compile the test
    vendor_label - the vendor
    """
    # Create the nested levels only when missing, then (re)write the
    # descriptive entries for this combination.
    entry = properties.setdefault(target_name, {}) \
                      .setdefault(toolchain_name, {})
    entry["target"] = target_name
    entry["vendor"] = vendor_label
    entry["toolchain"] = toolchain_name
def create_result(target_name, toolchain_name, id_name, description):
    """Create a fresh result dictionary for one build.

    Positional arguments:
    target_name - the target being built for
    toolchain_name - the toolchain doing the building
    id_name - the name of the executable or library being built
    description - a human readable description of what's going on
    """
    return {
        "target_name": target_name,
        "toolchain_name": toolchain_name,
        "id": id_name,
        "description": description,
        "elapsed_time": 0,
        "output": "",
    }
def add_result_to_report(report, result):
    """Append a single result to a report dictionary.

    Stamps the result with the current UTC time and a unique id, wraps it,
    and appends it under report[target][toolchain][id].

    Positional arguments:
    report - the report to append to
    result - the result to append
    """
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())
    bucket = report[result["target_name"]][result["toolchain_name"]]
    bucket[result['id']].append({0: result})
2019-01-08 23:51:31 +00:00
def get_toolchain_name(target, toolchain_name):
    """Resolve the concrete toolchain name for a target/toolchain pair.

    Maps the user-facing "ARM"/"ARMC6"/"uARM" names onto the ARMC5/ARMC6
    family according to the target's advertised support.  Any name with no
    special mapping is returned unchanged.
    """
    if int(target.build_tools_metadata["version"]) > 0:
        # New-style targets list ARMC5/ARMC6 support explicitly.
        supported = target.supported_toolchains
        if toolchain_name in ("ARM", "ARMC6"):
            if "ARM" in supported or "ARMC6" in supported:
                return "ARMC6"
            if "ARMC5" in supported:
                # "ARM" selects the ARMC5 toolchain here; an explicit ARMC6
                # request is tried anyway, since ARMC6 is the default ARM
                # toolchain even when the target does not list it.
                return "ARM" if toolchain_name == "ARM" else "ARMC6"
        elif toolchain_name == "uARM":
            # AC5 + microlib when available, otherwise AC6 + microlib.
            return "uARM" if "ARMC5" in supported else "ARMC6"
    elif toolchain_name == "ARM":
        # Legacy targets: Armv8-M cores require ARMC6; a uARM default
        # toolchain selects the microlib variant.
        if CORE_ARCH[target.core] == 8:
            return "ARMC6"
        if getattr(target, "default_toolchain", None) == "uARM":
            return "uARM"
    return toolchain_name
2018-05-04 18:40:40 +00:00
def get_config(src_paths, target, toolchain_name=None, app_config=None):
    """Get the configuration object for a target-toolchain combination.

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools
    app_config - location of a chosen mbed_app.json file

    Returns a tuple (config data, macros, features, resources).
    """
    # Normalize src_paths to a list
    src_paths = src_paths if isinstance(src_paths, list) else [src_paths]

    resources = Resources(MockNotifier())
    if toolchain_name:
        toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
                                      app_config=app_config)
        config = toolchain.config
        resources.scan_with_toolchain(src_paths, toolchain, exclude=False)
    else:
        config = Config(target, src_paths, app_config=app_config)
        resources.scan_with_config(src_paths, config)
    # Force region evaluation for its validation side effects.
    if config.has_regions:
        _ = list(config.regions)
    cfg, macros = config.get_config_data()
    features = config.get_features()
    return cfg, macros, features, resources
2016-06-09 22:50:03 +00:00
2016-07-26 15:22:02 +00:00
def is_official_target(target_name, version):
    """Returns True, None if a target is part of the official release for the
    given version. Return False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name if the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the folowing toolchains to be included in the ") + \
                    (("mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = [
                set(['ARM', 'GCC_ARM']),
                set(['ARMC6'])
            ]
            supported_toolchains = set(target.supported_toolchains)

            if not any(r.issubset(supported_toolchains)
                       for r in required_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the folowing toolchains to be included in the ") + \
                    (("mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(sorted(required_toolchains[0]))) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(sorted(supported_toolchains)))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            # BUGFIX: the format string has two placeholders; previously only
            # `version` was supplied, raising TypeError when this path ran.
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif version not in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason
2016-07-27 01:40:51 +00:00
def transform_release_toolchains(toolchains, version):
    """Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release.

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    # mbed OS 5 pins the official toolchain set; other versions pass through.
    return ['ARM', 'GCC_ARM', 'IAR'] if version == '5' else toolchains
2016-07-26 15:22:02 +00:00
def get_mbed_official_release(version):
    """Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS

    Raises InvalidReleaseTargetException when a listed target does not meet
    the official release requirements.
    """
    def _in_release(name):
        # A target is in the release when it declares the version and is not
        # a PSA secure target.
        tgt = TARGET_MAP[name]
        return (hasattr(tgt, 'release_versions')
                and version in tgt.release_versions) \
            and not Target.get_target(name).is_PSA_secure_target

    mbed_official_release = tuple(
        tuple([
            TARGET_MAP[name].name,
            tuple(transform_release_toolchains(
                TARGET_MAP[name].supported_toolchains, version))
        ])
        for name in TARGET_NAMES if _in_release(name)
    )

    # Validate every entry before handing the release list back.
    for entry in mbed_official_release:
        is_official, reason = is_official_target(entry[0], version)
        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release
2016-07-26 15:22:02 +00:00
2018-04-04 19:02:13 +00:00
def target_supports_toolchain(target, toolchain_name):
    """Return True when the target can be built with the given toolchain.

    For new-style targets (build_tools_metadata version > 0) the ARM names
    are cross-matched against ARMC5/ARMC6/uARM; for legacy targets any ARM
    compiler name matches any other ARM compiler name in the support list.
    """
    supported = target.supported_toolchains
    if int(target.build_tools_metadata["version"]) > 0:
        if toolchain_name in supported:
            return True
        if toolchain_name == "ARM":
            # "ARM" is satisfied by any of the concrete ARM family entries.
            return any(tc in supported for tc in ("ARMC5", "ARMC6", "uARM"))
        if toolchain_name == "ARMC6":
            # ARMC6 request is satisfied by a generic "ARM" entry.
            return "ARM" in supported
        return False
    # Legacy targets: treat the whole ARM compiler family as equivalent.
    arm_family = ("ARM", "ARMC6", "uARM")
    if toolchain_name in arm_family:
        return any(tc in supported for tc in arm_family)
    return toolchain_name in supported
2016-07-26 15:22:02 +00:00
2017-02-28 20:04:54 +00:00
def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, config=None, app_config=None,
                      build_profile=None, ignore=None):
    """Prepares resource related objects - toolchain, target, config.

    Positional arguments:
    src_paths - the paths to source directories
    build_dir - the directory to place build artifacts in
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'IAR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a list of mergeable build profiles
    ignore - list of paths to add to mbedignore
    """
    # Drop duplicated paths (keeping the first entry in place) so the same
    # objects are never compiled or linked twice.
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # Create the configuration object if the caller did not supply one.
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target

    if not target_supports_toolchain(target, toolchain_name):
        raise NotSupportedException(
            "Target {} is not supported by toolchain {}".format(
                target.name, toolchain_name))

    selected = get_toolchain_name(target, toolchain_name)
    # When a target supports ARMC6 and uARM was requested, switch the
    # default toolchain to uARM so the AC6 microlib gets linked.
    if selected == "ARMC6" and toolchain_name == "uARM":
        target.default_toolchain = "uARM"
    toolchain_name = selected

    try:
        toolchain_class = TOOLCHAIN_CLASSES[toolchain_name]
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    # Merge every supplied build profile into a single flag dictionary.
    profile = {key: [] for key in ('c', 'cxx', 'common', 'asm', 'ld')}
    for contents in build_profile or []:
        for key, flags in profile.items():
            flags.extend(contents[toolchain_name].get(key, []))

    toolchain = toolchain_class(
        target, notify, macros, build_dir=build_dir, build_profile=profile)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean

    if ignore:
        toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)
    return toolchain
2018-01-25 21:57:48 +00:00
def _printihex ( ihex ) :
import pprint
pprint . PrettyPrinter ( ) . pprint ( ihex . todict ( ) )
def _real_region_size(region):
    """Return the byte span actually occupied by a region's contents.

    Falls back to the declared region size when the file's hex object has
    no addresses (maxaddr/minaddr raise AttributeError).
    """
    try:
        contents = intelhex_offset(region.filename, offset=region.start)
        span = contents.maxaddr() - contents.minaddr()
        return span + 1
    except AttributeError:
        return region.size
2018-02-05 16:10:43 +00:00
2018-01-25 21:57:48 +00:00
def _fill_header(region_list, current_region):
    """Fill an application header region.

    This is done in three steps:
     * Fill the whole region with zeros
     * Fill const, timestamp and size entries with their data
     * Fill the digests using this header as the header region

    Positional arguments:
    region_list - all regions of the image (digest/size members refer to
                  other regions by name)
    current_region - the header region; its `filename` is the list of
                     (name, type, subtype, data) header members

    Returns the populated IntelHex object.
    """
    region_dict = {r.name: r for r in region_list}
    header = IntelHex()
    header.puts(current_region.start, b'\x00' * current_region.size)
    start = current_region.start
    for member in current_region.filename:
        # Avoid shadowing the builtins `type`/`hash` with member fields.
        _, member_type, subtype, data = member
        if member_type == "const":
            fmt = {
                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
            }[subtype]
            header.puts(start, struct.pack(fmt, integer(data, 0)))
        elif member_type == "timestamp":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            # time() returns a float; struct.pack requires an int (Python 3).
            header.puts(start, struct.pack(fmt, int(time())))
        elif member_type == "size":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            size = sum(_real_region_size(region_dict[r]) for r in data)
            header.puts(start, struct.pack(fmt, size))
        elif member_type == "digest":
            if data == "header":
                # Digest everything written so far (the header up to here).
                ih = header[:start]
            else:
                ih = intelhex_offset(region_dict[data].filename,
                                     offset=region_dict[data].start)
            if subtype.startswith("CRCITT32"):
                fmt = {"CRCITT32be": ">L", "CRCITT32le": "<L"}[subtype]
                crc_val = zlib.crc32(ih.tobinarray()) & 0xffffffff
                header.puts(start, struct.pack(fmt, crc_val))
            elif subtype.startswith("SHA"):
                if subtype == "SHA256":
                    hasher = hashlib.sha256()
                elif subtype == "SHA512":
                    hasher = hashlib.sha512()
                hasher.update(ih.tobinarray())
                header.puts(start, hasher.digest())
        start += Config.header_member_size(member)
    return header
2018-10-29 21:36:56 +00:00
2019-01-07 20:29:37 +00:00
def merge_region_list(region_list, destination, notify, config, padding=b'\xFF'):
    """Merge the region_list into a single image.

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    notify - notifier used for progress messages
    config - the build configuration (checked for restrict_size)
    padding - bytes to fill gaps with
    """
    merged = IntelHex()
    _, format = splitext(destination)
    notify.info("Merging Regions")
    # Track files already merged: a bootloader may be split into multiple
    # regions that all refer to the same underlying file.
    seen_files = set()
    for region in region_list:
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if isinstance(region.filename, list):
            # A list of header members: render them into a header hex file
            # and point the region at it.
            header_basename, _ = splitext(destination)
            header_filename = header_basename + "_header.hex"
            _fill_header(region_list, region).tofile(header_filename,
                                                     format='hex')
            region = region._replace(filename=header_filename)
        if region.filename and region.filename not in seen_files:
            notify.info("Filling region %s with %s"
                        % (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part.start_addr = None
            # part.maxaddr() is normally allowed past the end of rom, but
            # when config restricts the size the contents must fit.
            if config.target.restrict_size is not None:
                part_size = (part.maxaddr() - part.minaddr()) + 1
                if part_size > region.size:
                    raise ToolException("Contents of region %s does not fit"
                                        % region.name)
            seen_files.add(region.filename)
            merged.merge(part)
        elif region.filename in seen_files:
            notify.info("Skipping %s as it is merged previously"
                        % (region.name))

    # Hex files can have gaps; any other output format needs the gaps
    # between segments filled with padding bytes.
    if format != ".hex":
        # begin patching from the end of the first segment
        _, begin = merged.segments()[0]
        for seg_start, seg_stop in merged.segments()[1:]:
            merged.puts(begin, padding * (seg_start - begin))
            begin = seg_stop + 1

    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    notify.info("Space used after regions merged: 0x%x" %
                (merged.maxaddr() - merged.minaddr() + 1))
    merged.tofile(destination, format=format.strip("."))
2017-02-01 22:24:39 +00:00
2014-06-02 14:44:45 +00:00
2018-07-20 15:53:00 +00:00
# Region names whose contents are also emitted as a separate update image.
UPDATE_WHITELIST = (
    "application",
)
2016-07-18 18:57:59 +00:00
def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None, clean=False,
                  notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
                  report=None, properties=None, project_id=None,
                  project_description=None, config=None,
                  app_config=None, build_profile=None, stats_depth=None,
                  ignore=None, spe_build=False):
    """Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do it's job
    clean - Rebuild everything if True
    notify - Notify function for logs
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    stats_depth - depth level for memap to display file/dirs
    ignore - list of paths to add to mbedignore
    spe_build - restrict resources to the secure (SPE) subset when True
    """
    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        # BUGFIX: inc_dirs defaults to None; guard before extending so
        # passing libraries_paths alone no longer raises AttributeError.
        if inc_dirs is None:
            inc_dirs = []
        inc_dirs.extend(map(dirname, libraries_paths))

    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, config=config,
        app_config=app_config, build_profile=build_profile, ignore=ignore)
    toolchain.version_check()

    # The first path will give the name to the library
    name = (name or toolchain.config.name or
            basename(normpath(abspath(src_paths[0]))))
    notify.info("Building project %s (%s, %s)" %
                (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        resources = Resources(notify).scan_with_toolchain(
            src_paths, toolchain, inc_dirs=inc_dirs)
        if spe_build:
            resources.filter_spe()
        # Change linker script if specified
        if linker_script is not None:
            resources.add_file_ref(FileType.LD_SCRIPT, linker_script,
                                   linker_script)
        if not resources.get_file_refs(FileType.LD_SCRIPT):
            raise NotSupportedException("No Linker Script found")

        # Compile Sources
        objects = toolchain.compile_sources(
            resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
        resources.add_files_to_type(FileType.OBJECT, objects)

        # Link Program
        if toolchain.config.has_regions:
            # Regioned build: link the application, then merge all regions
            # into the final image.
            binary, _ = toolchain.link_program(resources, build_path,
                                               name + "_application")
            region_list = list(toolchain.config.regions)
            region_list = [r._replace(filename=binary) if r.active else r
                           for r in region_list]
            res = "%s.%s" % (join(build_path, name),
                             getattr(toolchain.target, "OUTPUT_EXT", "bin"))
            merge_region_list(region_list, res, notify, toolchain.config)
            update_regions = [
                r for r in region_list if r.name in UPDATE_WHITELIST
            ]
            if update_regions:
                update_res = join(build_path,
                                  generate_update_filename(name,
                                                           toolchain.target))
                merge_region_list(update_regions, update_res, notify,
                                  toolchain.config)
                res = (res, update_res)
            else:
                res = (res, None)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)
            res = (res, None)

        into_dir, extra_artifacts = toolchain.config.deliver_into()
        if into_dir:
            copy_when_different(res[0], into_dir)
            if not extra_artifacts:
                # Armv8-M secure builds also deliver the CMSE import library.
                if (
                    CORE_ARCH[toolchain.target.core] == 8 and
                    not toolchain.target.core.endswith("NS")
                ):
                    cmse_lib = join(dirname(res[0]), "cmse_lib.o")
                    copy_when_different(cmse_lib, into_dir)
            else:
                for tc, art in extra_artifacts:
                    if toolchain_name == tc:
                        copy_when_different(join(build_path, art), into_dir)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table', stats_depth)
            notify.info(memap_table)
            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', stats_depth, map_out)
            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', stats_depth, map_csv)
            map_html = join(build_path, name + "_map.html")
            memap_instance.generate_output('html', stats_depth, map_html)

        resources.detect_duplicates()

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = (memap_instance.mem_report
                                          if memap_instance is not None
                                          else None)
            cur_result["bin"] = res[0]
            cur_result["elf"] = splitext(res[0])[0] + ".elf"
            cur_result.update(toolchain.report)

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report is not None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
2013-06-10 14:44:08 +00:00
2013-04-18 14:43:29 +00:00
def build_library(src_paths, build_path, target, toolchain_name,
                  dependencies_paths=None, name=None, clean=False,
                  archive=True, notify=None, macros=None, inc_dirs=None,
                  jobs=1, report=None, properties=None, project_id=None,
                  remove_config_header_file=False, app_config=None,
                  build_profile=None, ignore=None):
    """ Build a library

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the library
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    dependencies_paths - The location of libraries to include when linking
    name - the name of the library
    clean - Rebuild everything if True
    archive - whether the library will create an archive file
    notify - Notify function for logs
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    project_id - the name that goes in the report
    remove_config_header_file - delete config header file when done building
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore

    Returns True on success; raises on failure (after recording it in
    `report`, when one was supplied).
    """
    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]
    src_paths = [relpath(s) for s in src_paths]

    # Build path
    if archive:
        # Use temp path when building archive
        tmp_path = join(build_path, '.temp')
        mkdir(tmp_path)
    else:
        tmp_path = build_path

    # Clean the build directory
    if clean and exists(tmp_path):
        rmtree(tmp_path)
    mkdir(tmp_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, app_config=app_config,
        build_profile=build_profile, ignore=ignore)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))
    notify.info("Building library %s (%s, %s)" %
                (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        cur_result['type'] = 'library'
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # BUG FIX: previously built a tuple ("...%s", src_path) instead
            # of interpolating the offending path into the message.
            error_msg = ("The library source folder does not exist: %s"
                         % src_path)
            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        res = Resources(notify).scan_with_toolchain(
            src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)

        # Copy headers, objects and static libraries - all files needed for
        # static lib
        to_copy = (
            res.get_file_refs(FileType.HEADER) +
            res.get_file_refs(FileType.OBJECT) +
            res.get_file_refs(FileType.LIB) +
            res.get_file_refs(FileType.JSON) +
            res.get_file_refs(FileType.LD_SCRIPT) +
            res.get_file_refs(FileType.HEX) +
            res.get_file_refs(FileType.BIN)
        )
        toolchain.copy_files(to_copy, build_path)

        # Compile Sources
        objects = toolchain.compile_sources(
            res, res.get_file_paths(FileType.INC_DIR))
        res.add_files_to_type(FileType.OBJECT, objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if remove_config_header_file:
            config_header_path = toolchain.get_config_header()
            if config_header_path:
                remove(config_header_path)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report is not None:
            end = time()
            if isinstance(exc, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            cur_result["elapsed_time"] = end - start
            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
2015-11-05 20:42:45 +00:00
2016-06-09 22:51:26 +00:00
######################
### Legacy methods ###
######################
2017-03-07 00:23:16 +00:00
def mbed2_obj_path(target_name, toolchain_name):
    """Return the legacy (mbed 2) object subdirectory for a target/toolchain.

    The toolchain component uses the class name of the toolchain rather than
    the user-supplied alias, so aliases map onto the same directory.
    """
    tc_class_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
    return join("TARGET_%s" % target_name, "TOOLCHAIN_%s" % tc_class_name)
2017-03-07 00:23:16 +00:00
2018-04-25 19:21:25 +00:00
def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
              notify=None, jobs=1, report=None, properties=None,
              build_profile=None, ignore=None):
    """ Legacy method for building mbed libraries

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros.extend(lib_macros)
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs

    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)
        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # BUG FIX: previously built a tuple ("...%s", src_path) instead
            # of interpolating the offending path into the message.
            error_msg = ("The library source folder does not exist: %s"
                         % src_path)
            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Toolchain instance
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)
        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
            ignore=ignore)

        notify.info("Building library %s (%s, %s)" %
                    (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = Resources(notify).scan_with_toolchain(
            src_paths + (lib.inc_dirs_ext or []), toolchain,
            inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)

        # Copy Headers
        toolchain.copy_files(
            resources.get_file_refs(FileType.HEADER), build_path)

        # BUG FIX: was 'sacn_with_toolchain' (typo), which raises
        # AttributeError at runtime.
        dependencies_include_dir = Resources(notify).scan_with_toolchain(
            [build_path], toolchain).inc_dirs

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(
                toolchain.compile_sources(resource, dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)
        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start
            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
2013-02-18 15:32:11 +00:00
2018-06-15 19:40:17 +00:00
# A number of compiled files need to be copied as objects as the linker
# will not search for weak symbol overrides in archives. These are:
# - mbed_retarget.o: to make sure that the C standard lib symbols get
#   overridden
# - mbed_board.o: `mbed_die` is weak
# - mbed_overrides.o: this contains platform overrides of various
#   weak SDK functions
# - mbed_main.o: this contains main redirection
# - mbed_sdk_boot.o: this contains the main boot code
# - PeripheralPins.o: PinMap can be weak
SEPARATE_NAMES = [
    'PeripheralPins.o',
    'mbed_retarget.o',
    'mbed_board.o',
    'mbed_overrides.o',
    'mbed_main.o',
    'mbed_sdk_boot.o',
]
2018-07-18 19:10:42 +00:00
2018-06-15 19:40:17 +00:00
2018-04-25 19:21:25 +00:00
def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
                    notify=None, jobs=1, report=None, properties=None,
                    build_profile=None, ignore=None):
    """ Build legacy libraries for a target and toolchain pair

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore

    Return - True if target + toolchain built correctly, False if not supported
    """
    selected_toolchain_name = get_toolchain_name(target, toolchain_name)
    # If a target supports ARMC6 and we want to build UARM with it,
    # then set the default_toolchain to uARM to link AC6 microlib.
    if selected_toolchain_name == "ARMC6" and toolchain_name == "uARM":
        target.default_toolchain = "uARM"
    toolchain_name = selected_toolchain_name

    if report is not None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(
            target.name, toolchain_name, id_name, description)
        if properties is not None:
            prep_properties(
                properties, target.name, toolchain_name, vendor_label)

    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        notify.info('The target {} does not support the toolchain {}'.format(
            target.name,
            toolchain_name
        ))
        notify.info('{} supports {} toolchain{}'.format(
            target.name,
            supported_toolchains_text,
            's' if len(target.supported_toolchains) > 1 else ''
        ))
        if report is not None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_toolchain = join(
            MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        tmp_path = join(
            MBED_LIBRARIES,
            '.temp',
            mbed2_obj_path(target.name, toolchain_name)
        )
        mkdir(tmp_path)

        # Toolchain and config
        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs,
            clean=clean, ignore=ignore)

        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # distribute header files
        toolchain.copy_files(
            [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        # NOTE: loop variable renamed from `dir` to avoid shadowing the
        # `dir` builtin.
        for src_dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                              (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                              (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = Resources(notify).scan_with_toolchain(
                [src_dir], toolchain)
            toolchain.copy_files(
                [FileRef(basename(p), p) for p
                 in resources.get_file_paths(FileType.HEADER)],
                dest)
            library_incdirs.append(dest)

        # collect resources of the libs to compile
        cmsis_res = Resources(notify).scan_with_toolchain(
            [MBED_CMSIS_PATH], toolchain)
        hal_res = Resources(notify).scan_with_toolchain(
            [MBED_TARGETS_PATH], toolchain)
        mbed_resources = Resources(notify).scan_with_toolchain(
            [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain)

        incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs

        # Build Things
        notify.info("Building library %s (%s, %s)" %
                    ('MBED', target.name, toolchain_name))
        objects = toolchain.compile_sources(mbed_resources, incdirs)
        separate_objects = []

        # Objects for weak-symbol overrides must stay out of the archive
        # (see SEPARATE_NAMES above).
        for obj in objects:
            for name in SEPARATE_NAMES:
                if obj.endswith(name):
                    separate_objects.append(obj)

        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")
        notify.info("Building library %s (%s, %s)" %
                    ('CMSIS', target.name, toolchain_name))
        cmsis_objects = toolchain.compile_sources(cmsis_res, incdirs + [tmp_path])
        notify.info("Building library %s (%s, %s)" %
                    ('HAL', target.name, toolchain_name))
        hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])

        # Copy everything into the build directory
        to_copy_paths = [
            hal_res.get_file_paths(FileType.HEADER),
            hal_res.get_file_paths(FileType.HEX),
            hal_res.get_file_paths(FileType.BIN),
            hal_res.get_file_paths(FileType.LIB),
            cmsis_res.get_file_paths(FileType.HEADER),
            cmsis_res.get_file_paths(FileType.BIN),
            cmsis_res.get_file_paths(FileType.LD_SCRIPT),
            hal_res.get_file_paths(FileType.LD_SCRIPT),
            [MBED_CONFIG_FILE],
            cmsis_objects,
            hal_objects,
            separate_objects,
        ]
        to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
        toolchain.copy_files(to_copy, build_toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start
            # ROBUSTNESS: create_result may not have set "output"; avoid a
            # KeyError masking the real exception.
            cur_result["output"] = cur_result.get("output", "") + str(exc)
            add_result_to_report(report, cur_result)
        raise
2014-08-04 13:29:20 +00:00
2016-06-09 22:51:26 +00:00
2016-07-27 01:40:51 +00:00
def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    # NOTE: only officially supported toolchains are reported; the
    # release_targets argument is retained for API compatibility but does
    # not affect the result.
    supported = []
    for tc_name, tc_class in TOOLCHAIN_CLASSES.items():
        if tc_class.OFFICIALLY_SUPPORTED:
            supported.append(tc_name)
    return supported
2014-06-09 15:10:47 +00:00
2018-07-11 14:42:16 +00:00
def _lowercase_release_version ( release_version ) :
try :
return release_version . lower ( )
except AttributeError :
return ' all '
2017-03-22 18:06:57 +00:00
def mcu_toolchain_list(release_version='5'):
    """ Shows list of toolchains

    Keyword arguments:
    release_version - major release whose official targets select the
                      toolchain list; anything else means all targets
    """
    release_version = _lowercase_release_version(release_version)
    # IMPROVED: only query the release actually requested instead of building
    # (and discarding) the official release data for every known version.
    if release_version in RELEASE_VERSIONS:
        release_targets = get_mbed_official_release(release_version)
    else:
        release_targets = None
    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    columns = (["mbed OS %s" % x for x in RELEASE_VERSIONS] +
               unique_supported_toolchains)
    return "\n".join(columns)
def mcu_target_list(release_version='5'):
    """ Shows target list

    Keyword arguments:
    release_version - major release whose official target names are listed;
                      anything else lists all known targets
    """
    release_version = _lowercase_release_version(release_version)
    # IMPROVED: only query the release actually requested instead of building
    # (and discarding) the official release data for every known version.
    release_targets = None
    if release_version in RELEASE_VERSIONS:
        release_targets = get_mbed_official_release(release_version)
    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES
    return "\n".join(target_names)
2014-06-09 15:10:47 +00:00
2016-08-12 16:27:39 +00:00
def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """ Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - remove results that match the string
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable, HEADER

    release_version = _lowercase_release_version(release_version)
    version_release_targets = {}
    version_release_target_names = {}
    for version in RELEASE_VERSIONS:
        official = get_mbed_official_release(version)
        version_release_targets[version] = official
        version_release_target_names[version] = [tgt[0] for tgt in official]

    # None when the requested version is unknown -> matrix covers all targets
    release_targets = version_release_targets.get(release_version)

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = (["Target"] +
                       ["mbed OS %s" % ver for ver in RELEASE_VERSIONS])

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns, junction_char="|", hrules=HEADER)
    # Align table
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    if release_targets:
        target_names = [tgt[0] for tgt in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        # Filter out platforms using regex
        if platform_filter is not None and \
                re.search(platform_filter, target) is None:
            continue
        target_counter += 1

        row = [target]  # First column is platform name
        for version in RELEASE_VERSIONS:
            row.append("Supported"
                       if target in version_release_target_names[version]
                       else "-")

        for unique_toolchain in unique_supported_toolchains:
            tgt_obj = TARGET_MAP[target]
            # ARMC6 counts as supported when plain ARM is; plain ARM counts
            # as supported on v8 cores that list ARMC6.
            is_supported = (
                unique_toolchain in tgt_obj.supported_toolchains or
                (unique_toolchain == "ARMC6" and
                 "ARM" in tgt_obj.supported_toolchains) or
                (unique_toolchain == "ARM" and
                 "ARMC6" in tgt_obj.supported_toolchains and
                 CORE_ARCH[tgt_obj.core] == 8))
            if is_supported:
                row.append("Supported")
                perm_counter += 1
            else:
                row.append("-")

        table_printer.add_row(row)

    result = (table_printer.get_html_string() if verbose_html
              else table_printer.get_string())
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    # NOTE(review): the toolchain count is only appended when exactly one
    # target matched — behavior preserved from the original implementation.
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result
2014-06-10 14:29:15 +00:00
2014-07-10 09:56:14 +00:00
def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    if target in TARGET_MAP:
        return TARGET_MAP[target].supported_toolchains
    # Unknown target name
    return None
2014-07-10 09:56:14 +00:00
2014-06-23 12:36:55 +00:00
def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for
    """
    if not result_list:
        return ""
    # Header line, then one bullet per result, trailing newline
    lines = [build_name] + ["* %s" % entry for entry in result_list]
    return "\n".join(lines) + "\n"
2015-03-27 23:55:50 +00:00
2016-08-12 16:27:39 +00:00
def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results

    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable, HEADER

    columns_text = ['name', 'target', 'toolchain']
    columns_int = ['static_ram', 'total_flash']
    table = PrettyTable(columns_text + columns_int,
                        junction_char="|", hrules=HEADER)
    for col in columns_text:
        table.align[col] = 'l'
    for col in columns_int:
        table.align[col] = 'r'

    for target in report:
        for toolchain in report[target]:
            for name in report[target][toolchain]:
                for dlist in report[target][toolchain][name]:
                    for dlistelem in dlist:
                        # Get 'memory_usage' record and build table with
                        # statistics
                        record = dlist[dlistelem]
                        if 'memory_usage' in record and record['memory_usage']:
                            # Note that summary should be in the last record
                            # of the 'memory_usage' section. This is why we
                            # are grabbing the last "[-1]" record.
                            summary = record['memory_usage'][-1]['summary']
                            table.add_row([
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                summary['static_ram'],
                                summary['total_flash'],
                            ])

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result
2015-04-01 18:15:15 +00:00
def write_build_report(build_report, template_filename, filename):
    """ Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of
                        build report
    filename - the location on disk to write the file to
    """
    # Partition reports into failing / passing buckets for the template
    build_report_failing = [rep for rep in build_report
                            if len(rep["failing"]) > 0]
    build_report_passing = [rep for rep in build_report
                            if len(rep["failing"]) == 0]

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(
            failing_builds=build_report_failing,
            passing_builds=build_report_passing))
2017-04-06 16:17:54 +00:00
2017-04-07 17:38:49 +00:00
def merge_build_data(filename, toolchain_report, app_type):
    """Merge per-build metadata into a JSON build-data file on disk.

    Positional arguments:
    filename - path of the JSON file to read and rewrite
    toolchain_report - nested dict: target -> toolchain -> project -> list of
                       builds, where each build's first element is a dict of
                       build results
    app_type - value stamped into a build's 'type' field when it has none

    'bin'/'elf' paths in each build record are rewritten relative to the JSON
    file's directory; the original paths are preserved in 'bin_fullpath' /
    'elf_fullpath'. New builds are inserted at the front of 'builds'.
    """
    path_to_file = dirname(abspath(filename))
    try:
        # BUG FIX: close the input handle (was load(open(filename)))
        with open(filename) as in_fd:
            build_data = load(in_fd)
    except (IOError, ValueError):
        # Missing or corrupt file: start a fresh build-data structure
        build_data = {'builds': []}
    for tgt in toolchain_report.values():
        for tc in tgt.values():
            for project in tc.values():
                for build in project:
                    try:
                        build[0]['bin_fullpath'] = build[0]['bin']
                        build[0]['elf_fullpath'] = build[0]['elf']
                        build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
                        build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
                    except KeyError:
                        # Build without artifacts (e.g. failed) - keep as-is
                        pass
                    if 'type' not in build[0]:
                        build[0]['type'] = app_type
                    build_data['builds'].insert(0, build[0])
    # BUG FIX: close the output handle (was dump(..., open(filename, "w")))
    with open(filename, "w") as out_fd:
        dump(build_data, out_fd, indent=4, separators=(',', ': '))