""" mbed SDK Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import print_function, division, absolute_import import re import datetime import uuid import struct import zlib import hashlib from shutil import rmtree from os.path import join, exists, dirname, basename, abspath, normpath, splitext from os.path import relpath from os import linesep, remove, makedirs from time import time from intelhex import IntelHex from json import load, dump from jinja2 import FileSystemLoader from jinja2.environment import Environment from .arm_pack_manager import Cache from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException, InvalidReleaseTargetException, intelhex_offset, integer, generate_update_filename, copy_when_different) from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, BUILD_DIR) from .resources import Resources, FileType, FileRef from .notifier.mock import MockNotifier from .targets import TARGET_NAMES, TARGET_MAP, CORE_ARCH, Target from .libraries import Library from .toolchains import TOOLCHAIN_CLASSES from .config import Config RELEASE_VERSIONS = ['2', '5'] def prep_report(report, target_name, toolchain_name, id_name): """Setup report keys Positional arguments: report - the report to fill target_name - the target being used toolchain_name - the toolchain being used id_name - the name of the executable or library being built """ if not target_name in report: report[target_name] = {} if not toolchain_name in report[target_name]: report[target_name][toolchain_name] = {} if not id_name in report[target_name][toolchain_name]: report[target_name][toolchain_name][id_name] = [] def prep_properties(properties, target_name, toolchain_name, vendor_label): """Setup test properties Positional arguments: properties - the dict to fill target_name - the target the test is targeting toolchain_name - the toolchain that will compile the test vendor_label - the vendor """ if not target_name in properties: properties[target_name] = {} if not toolchain_name in properties[target_name]: properties[target_name][toolchain_name] = {} properties[target_name][toolchain_name]["target"] = target_name properties[target_name][toolchain_name]["vendor"] = vendor_label properties[target_name][toolchain_name]["toolchain"] = toolchain_name def create_result(target_name, toolchain_name, id_name, description): """Create a result dictionary Positional arguments: target_name - the target being built for toolchain_name - the toolchain doing the building id_name - the name of the executable or library being built description - a human readable description of what's going on """ cur_result = {} cur_result["target_name"] = target_name cur_result["toolchain_name"] = toolchain_name cur_result["id"] = id_name cur_result["description"] = description cur_result["elapsed_time"] = 0 cur_result["output"] = "" return cur_result def 
def add_result_to_report(report, result):
    """Add a single result to a report dictionary

    Positional arguments:
    report - the report to append to
    result - the result to append
    """
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())
    target = result["target_name"]
    toolchain = result["toolchain_name"]
    id_name = result['id']
    result_wrap = {0: result}
    report[target][toolchain][id_name].append(result_wrap)


def get_config(src_paths, target, toolchain_name=None, app_config=None):
    """Get the configuration object for a target-toolchain combination

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools

    Keyword arguments:
    app_config - location of a chosen mbed_app.json file
    """
    # Convert src_paths to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    res = Resources(MockNotifier())
    if toolchain_name:
        toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
                                      app_config=app_config)
        config = toolchain.config
        res.scan_with_toolchain(src_paths, toolchain, exclude=False)
    else:
        config = Config(target, src_paths, app_config=app_config)
        res.scan_with_config(src_paths, config)
    if config.has_regions:
        _ = list(config.regions)

    cfg, macros = config.get_config_data()
    features = config.get_features()
    return cfg, macros, features


def is_official_target(target_name, version):
    """ Returns True, None if a target is part of the official release for the
    given version. Returns False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name of the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the following toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is
            # required
            required_toolchains = [
                set(['ARM', 'GCC_ARM', 'IAR']),
                set(['ARMC6'])
            ]
            supported_toolchains = set(target.supported_toolchains)

            if not any(r.issubset(supported_toolchains)
                       for r in required_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the following toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(sorted(required_toolchains[0]))) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(sorted(supported_toolchains)))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            reason = ("Target '%s' has set an invalid release version of "
                      "'%s'. " % (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif not version in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason
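# Usage sketch for the check above ("K64F" is just an example target name):
#
#     ok, reason = is_official_target("K64F", "5")
#     if not ok:
#         print(reason)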
def transform_release_toolchains(toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    if version == '5':
        return ['ARM', 'GCC_ARM', 'IAR']
    else:
        return toolchains


def get_mbed_official_release(version):
    """ Given a release version string, return a tuple of (target name,
    supported toolchains) pairs for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    mbed_official_release = (
        tuple(
            tuple(
                [
                    TARGET_MAP[target].name,
                    tuple(transform_release_toolchains(
                        TARGET_MAP[target].supported_toolchains, version))
                ]
            ) for target in TARGET_NAMES
            if (hasattr(TARGET_MAP[target], 'release_versions')
                and version in TARGET_MAP[target].release_versions)
            and not Target.get_target(target).is_PSA_secure_target
        )
    )

    for target in mbed_official_release:
        is_official, reason = is_official_target(target[0], version)

        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release


ARM_COMPILERS = ("ARM", "ARMC6", "uARM")


def target_supports_toolchain(target, toolchain_name):
    if toolchain_name in ARM_COMPILERS:
        return any(tc in target.supported_toolchains for tc in ARM_COMPILERS)
    else:
        return toolchain_name in target.supported_toolchains
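# Quick sketch of the helper above (the target lookup is illustrative):
#
#     k64f = TARGET_MAP["K64F"]
#     target_supports_toolchain(k64f, "ARMC6")  # True if any of ARM/ARMC6/uARM
#                                               # is listed, since the ARM
#                                               # compilers are interchangeable
#     target_supports_toolchain(k64f, "IAR")    # plain membership test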
def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, config=None, app_config=None,
                      build_profile=None, ignore=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'IAR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a list of mergeable build profiles
    ignore - list of paths to add to mbedignore
    """
    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target

    if not target_supports_toolchain(target, toolchain_name):
        raise NotSupportedException(
            "Target {} is not supported by toolchain {}".format(
                target.name, toolchain_name))

    if (toolchain_name == "ARM" and CORE_ARCH[target.core] == 8):
        toolchain_name = "ARMC6"

    try:
        cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
    for contents in build_profile or []:
        for key in profile:
            profile[key].extend(contents[toolchain_name].get(key, []))

    toolchain = cur_tc(
        target, notify, macros, build_dir=build_dir, build_profile=profile)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean

    if ignore:
        toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)

    return toolchain


def _printihex(ihex):
    import pprint
    pprint.PrettyPrinter().pprint(ihex.todict())


def _real_region_size(region):
    try:
        part = intelhex_offset(region.filename, offset=region.start)
        return (part.maxaddr() - part.minaddr()) + 1
    except AttributeError:
        return region.size


def _fill_header(region_list, current_region):
    """Fill an application header region

    This is done in three steps:
     * Fill the whole region with zeros
     * Fill const, timestamp and size entries with their data
     * Fill the digests using this header as the header region
    """
    region_dict = {r.name: r for r in region_list}
    header = IntelHex()
    header.puts(current_region.start, b'\x00' * current_region.size)
    start = current_region.start
    for member in current_region.filename:
        _, type, subtype, data = member
        member_size = Config.header_member_size(member)
        if type == "const":
            fmt = {
                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
                "8be": ">B", "16be": ">H", "32be": ">L", "64be": ">Q"
            }[subtype]
            header.puts(start, struct.pack(fmt, integer(data, 0)))
        elif type == "timestamp":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            header.puts(start, struct.pack(fmt, int(time())))
        elif type == "size":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            size = sum(_real_region_size(region_dict[r]) for r in data)
            header.puts(start, struct.pack(fmt, size))
        elif type == "digest":
            # Digest members cover either the header built so far or the
            # contents of another named region
            if data == "header":
                ih = header[:start]
            else:
                ih = intelhex_offset(region_dict[data].filename,
                                     offset=region_dict[data].start)
            if subtype.startswith("CRCITT32"):
                fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
                header.puts(start,
                            struct.pack(fmt, zlib.crc32(ih.tobinarray())))
            elif subtype.startswith("SHA"):
                if subtype == "SHA256":
                    hash_alg = hashlib.sha256()
                elif subtype == "SHA512":
                    hash_alg = hashlib.sha512()
                hash_alg.update(ih.tobinarray())
                header.puts(start, hash_alg.digest())
        start += member_size
    return header
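# Sketch of the header-member tuples _fill_header() iterates over. The member
# names and values are made up; the real list is produced by Config and stored
# in the header region's `filename` attribute:
#
#     [("magic", "const", "32be", "0x5a51b3d4"),
#      ("build_time", "timestamp", "64le", None),
#      ("app_size", "size", "32le", ["application"]),
#      ("crc", "digest", "CRCITT32be", "header")]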
def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
                    notify=None, jobs=1, report=None, properties=None,
                    build_profile=None, ignore=None):
    """ Build the legacy mbed 2 libraries (mbed, CMSIS and HAL) for a target
    and toolchain pair and install them into MBED_LIBRARIES.

    Returns True on success, False if the toolchain is not supported.
    """
    if report is not None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(
            target.name, toolchain_name, id_name, description)
        if properties is not None:
            prep_properties(
                properties, target.name, toolchain_name, vendor_label)

    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        notify.info('The target {} does not support the toolchain {}'.format(
            target.name, toolchain_name))
        notify.info('{} supports {} toolchain{}'.format(
            target.name, supported_toolchains_text,
            's' if len(target.supported_toolchains) > 1 else ''))

        if report is not None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_toolchain = join(
            MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        tmp_path = join(
            MBED_LIBRARIES,
            '.temp',
            mbed2_obj_path(target.name, toolchain_name)
        )
        mkdir(tmp_path)

        # Toolchain and config
        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs,
            clean=clean, ignore=ignore)

        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # distribute header files
        toolchain.copy_files(
            [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = Resources(notify).scan_with_toolchain([dir], toolchain)
            toolchain.copy_files(
                [FileRef(basename(p), p)
                 for p in resources.get_file_paths(FileType.HEADER)],
                dest)
            library_incdirs.append(dest)

        # collect resources of the libs to compile
        cmsis_res = Resources(notify).scan_with_toolchain(
            [MBED_CMSIS_PATH], toolchain)
        hal_res = Resources(notify).scan_with_toolchain(
            [MBED_TARGETS_PATH], toolchain)
        mbed_resources = Resources(notify).scan_with_toolchain(
            [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain)

        incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs

        # Build Things
        notify.info("Building library %s (%s, %s)" %
                    ('MBED', target.name, toolchain_name))
        objects = toolchain.compile_sources(mbed_resources, incdirs)
        separate_objects = []

        for obj in objects:
            for name in SEPARATE_NAMES:
                if obj.endswith(name):
                    separate_objects.append(obj)

        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")
        notify.info("Building library %s (%s, %s)" %
                    ('CMSIS', target.name, toolchain_name))
        cmsis_objects = toolchain.compile_sources(
            cmsis_res, incdirs + [tmp_path])
        notify.info("Building library %s (%s, %s)" %
                    ('HAL', target.name, toolchain_name))
        hal_objects = toolchain.compile_sources(
            hal_res, incdirs + [tmp_path])

        # Copy everything into the build directory
        to_copy_paths = [
            hal_res.get_file_paths(FileType.HEADER),
            hal_res.get_file_paths(FileType.HEX),
            hal_res.get_file_paths(FileType.BIN),
            hal_res.get_file_paths(FileType.LIB),
            cmsis_res.get_file_paths(FileType.HEADER),
            cmsis_res.get_file_paths(FileType.BIN),
            cmsis_res.get_file_paths(FileType.LD_SCRIPT),
            hal_res.get_file_paths(FileType.LD_SCRIPT),
            [MBED_CONFIG_FILE],
            cmsis_objects,
            hal_objects,
            separate_objects,
        ]
        to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
        toolchain.copy_files(to_copy, build_toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start
            cur_result["output"] += str(exc)
            add_result_to_report(report, cur_result)
        raise
def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from
                      get_mbed_official_release(). If release_targets is not
                      specified, then it queries all known targets
    """
    return [
        name for name, cls in TOOLCHAIN_CLASSES.items()
        if cls.OFFICIALLY_SUPPORTED
    ]


def _lowercase_release_version(release_version):
    try:
        return release_version.lower()
    except AttributeError:
        return 'all'


def mcu_toolchain_list(release_version='5'):
    """ Shows list of toolchains
    """
    release_version = _lowercase_release_version(release_version)
    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[
                                                     version]]
    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + \
        unique_supported_toolchains
    return "\n".join(columns)


def mcu_target_list(release_version='5'):
    """ Shows target list
    """
    release_version = _lowercase_release_version(release_version)
    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[
                                                     version]]
    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    target_names = []

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    return "\n".join(target_names)
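# For reference, mcu_toolchain_list('5') returns one entry per line; the
# toolchain names depend on which TOOLCHAIN_CLASSES are officially supported,
# e.g.:
#
#     mbed OS 2
#     mbed OS 5
#     ARM
#     GCC_ARM
#     IAR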
def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """ Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - remove results that match the string
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable, HEADER
    release_version = _lowercase_release_version(release_version)
    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[
                                                     version]]
    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns, junction_char="|", hrules=HEADER)
    # Align table
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    target_names = []

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        for unique_toolchain in unique_supported_toolchains:
            tgt_obj = TARGET_MAP[target]
            if (unique_toolchain in tgt_obj.supported_toolchains or
                    (unique_toolchain == "ARMC6" and
                     "ARM" in tgt_obj.supported_toolchains) or
                    (unique_toolchain == "ARM" and
                     "ARMC6" in tgt_obj.supported_toolchains and
                     CORE_ARCH[tgt_obj.core] == 8)):
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
        else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result


def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
        else None


def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for
    """
    result = ""
    if len(result_list) > 0:
        result += build_name + "\n"
        result += "\n".join([" * %s" % f for f in result_list])
        result += "\n"
    return result
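# Example of the string print_build_results() produces (the build name and
# entries are illustrative):
#
#     Build failures:
#      * K64F::GCC_ARM::MBED-OS-EXAMPLE
#      * LPC1768::ARM::MBED-OS-EXAMPLE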
def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results
    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable, HEADER
    columns_text = ['name', 'target', 'toolchain']
    columns_int = ['static_ram', 'total_flash']
    table = PrettyTable(columns_text + columns_int,
                        junction_char="|", hrules=HEADER)

    for col in columns_text:
        table.align[col] = 'l'

    for col in columns_int:
        table.align[col] = 'r'

    for target in report:
        for toolchain in report[target]:
            for name in report[target][toolchain]:
                for dlist in report[target][toolchain][name]:
                    for dlistelem in dlist:
                        # Get 'memory_usage' record and build table with
                        # statistics
                        record = dlist[dlistelem]
                        if 'memory_usage' in record and record['memory_usage']:
                            # Note that summary should be in the last record of
                            # 'memory_usage' section. This is why we are
                            # grabbing last "[-1]" record.
                            row = [
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                record['memory_usage'][-1]['summary'][
                                    'static_ram'],
                                record['memory_usage'][-1]['summary'][
                                    'total_flash'],
                            ]
                            table.add_row(row)

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result


def write_build_report(build_report, template_filename, filename):
    """Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of
                        build report
    filename - the location on disk to write the file to
    """
    build_report_failing = []
    build_report_passing = []

    for report in build_report:
        if len(report["failing"]) > 0:
            build_report_failing.append(report)
        else:
            build_report_passing.append(report)

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(
            failing_builds=build_report_failing,
            passing_builds=build_report_passing))


def merge_build_data(filename, toolchain_report, app_type):
    path_to_file = dirname(abspath(filename))
    try:
        build_data = load(open(filename))
    except (IOError, ValueError):
        build_data = {'builds': []}
    for tgt in toolchain_report.values():
        for tc in tgt.values():
            for project in tc.values():
                for build in project:
                    try:
                        build[0]['bin_fullpath'] = build[0]['bin']
                        build[0]['elf_fullpath'] = build[0]['elf']
                        build[0]['elf'] = relpath(
                            build[0]['elf'], path_to_file)
                        build[0]['bin'] = relpath(
                            build[0]['bin'], path_to_file)
                    except KeyError:
                        pass
                    if 'type' not in build[0]:
                        build[0]['type'] = app_type
                    build_data['builds'].insert(0, build[0])
    dump(build_data, open(filename, "w"), indent=4, separators=(',', ': '))
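# Sketch of the build data file that merge_build_data() maintains (values are
# illustrative; 'bin'/'elf' are rewritten relative to the report location and
# the original locations preserved in the '*_fullpath' keys):
#
#     {
#         "builds": [
#             {
#                 "id": "mbed-os-example",
#                 "target_name": "K64F",
#                 "toolchain_name": "GCC_ARM",
#                 "result": "OK",
#                 "type": "application",
#                 "bin": "mbed-os-example.bin",
#                 "bin_fullpath": "/abs/path/to/mbed-os-example.bin"
#             }
#         ]
#     }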