mirror of https://github.com/ARMmbed/mbed-os.git
Merge pull request #10254 from theotherjimmy/remove-some-deadcode
Remove unused tools modules and document the used ones. (pull/10382/head)

commit dc1198b5c8
@ -0,0 +1,45 @@

# Mbed OS Build Tools

This directory contains the Python tools used for building Mbed OS and
Mbed 2.

Quick navigation:

| file/directory        | Purpose/function                                |
|-----------------------|-------------------------------------------------|
| `build.py`            | implementation of `mbed compile --library`      |
| `build_api.py`        | full-project build operations                   |
| `build_release.py`    | CLI for making an mbed 2 release                |
| `config`              | implementation of the Config System             |
| `debug_tools`         | crash log parsing                               |
| `default_settings.py` | default version of the project-local `settings.py` |
| `detect_targets.py`   | implementation of `mbed detect`                 |
| `device_management.py`| implementation of `mbed device-management`      |
| `export`              | export plugins and the API for working with them |
| `flash_algo`          | CMSIS flash algorithm parser                    |
| `get_config.py`       | implementation of `mbed compile --config`       |
| `host_tests`          | location of pre-htrun host tests                |
| `importer`            | code importer for use with CMSIS, TF-M/PSA, etc. |
| `libraries.py`        | constants for building mbed 2 libraries         |
| `make.py`             | implementation of `mbed compile`                |
| `memap.py`            | map file parser and summary generator           |
| `notifier`            | API for sending compile status to a frontend    |
| `options.py`          | default option parser and option utilities      |
| `paths.py`            | constants for the many paths used by the tools  |
| `profiles`            | location of the default build profiles          |
| `project.py`          | implementation of `mbed export`                 |
| `psa`                 | PSA-related build support                       |
| `regions.py`          | region merging for the managed bootloader mode  |
| `resources`           | scans directories for files used in a project   |
| `run_icetea.py`       | implementation of `mbed test --icetea`          |
| `settings.py`         | project-specific settings from environment variables |
| `singletest.py`       | the test runner that predates greentea          |
| `targets`             | target description reader and post-build steps  |
| `test`                | unit tests for the tools                        |
| `test_api.py`         | part of the pre-greentea test infrastructure    |
| `test_configs`        | configuration files used by `mbed test`         |
| `test_exporters.py`   | part of the pre-greentea test infrastructure    |
| `tests.py`            | implementation of `mbed test --greentea`        |
| `toolchains`          | API for invoking the selected compiler          |
| `utils.py`            | general-purpose utilities such as file moving   |
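These entry points are plain Python scripts and can be driven directly, which
is how the CI scripts below use them. A minimal sketch (the target and
toolchain names are examples, mirroring the invocation built by the deleted
Travis script further down):

```python
import subprocess
import sys

# Build the mbed 2 library plus the DSP library for one target/toolchain,
# exactly as the Travis driver composes it:
#   tools/build.py -m <target> -t <toolchain> -c --silent --dsp
subprocess.check_call([
    sys.executable, "tools/build.py",
    "-m", "K64F",       # example target
    "-t", "GCC_ARM",    # example toolchain
    "-c", "--silent",
    "--dsp",            # optional library switch
])
```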
@ -1,408 +0,0 @@

#!/usr/bin/env python2
"""
Travis-CI build script

mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function, division, absolute_import

import os
import sys

from argparse import ArgumentParser

################################################################################
# Configure builds here
# "libs" can contain "dsp"

build_list = [
    {
        "STM":
        (
            { "target": "B96B_F446VE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L053R8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "MTB_RAK811", "toolchains": "GCC_ARM" },
            { "target": "NUCLEO_L152RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F030R8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F031K6", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F042K6", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F070RB", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F072RB", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F091RC", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F103RB", "toolchains": "GCC_ARM" },
            { "target": "NUCLEO_F207ZG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F302R8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F303K8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F303RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F303ZE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F334R8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F401RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "STEVAL_3DP001V1", "toolchains": "GCC_ARM", "libs": ["dsp", "usb"] },
            { "target": "NUCLEO_F410RB", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F412ZG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F413ZH", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L432KC", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "MTB_ADV_WISE_1510", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L476RG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L011K4", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L031K6", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L073RZ", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F429ZI", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F446RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F446ZE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F746ZG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_F767ZI", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_L496ZG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUCLEO_WB55RG", "toolchains": "GCC_ARM", "libs": ["dsp"] },

            { "target": "MOTE_L152RC", "toolchains": "GCC_ARM", "libs": ["dsp"] },

            { "target": "ELMO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },

            { "target": "MTS_MDOT_F405RG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "MTS_MDOT_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "MTS_DRAGONFLY_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "ARCH_MAX", "toolchains": "GCC_ARM", "libs": ["dsp"] },

            { "target": "DISCO_F051R8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F303VC", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F334C8", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F401VC", "toolchains": "GCC_ARM", "libs": ["dsp"] },

            { "target": "DISCO_F407VG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F413ZH", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F429ZI", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F469NI", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F746NG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_F769NI", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_L475VG_IOT01A", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_L476VG", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DISCO_L072CZ_LRWAN1", "toolchains": "GCC_ARM", "libs": ["dsp"] },

            # module manufacturer : muRata
            { "target": "MTB_MURATA_ABZ", "toolchains": "GCC_ARM", "libs": [] },
        ),
    },

    {
        "NXP":
        (
            { "target": "LPC1768", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC11U24", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "OC_MBUINO", "toolchains": "GCC_ARM", "libs": [] },

            { "target": "LPC11U24_301", "toolchains": "GCC_ARM", "libs": [] },
            { "target": "LPC1114", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC11U35_401", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "UBLOX_C027", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC11U35_501", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC11U68", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC11U37H_401", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC1549", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "KL05Z", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "KL25Z", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "KL27Z", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "KL43Z", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "KL46Z", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "K20D50M", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "TEENSY3_1", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "K64F", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "K22F", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "LPC4088", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "ARCH_PRO", "toolchains": "GCC_ARM", "libs": ["dsp"] },
        )
    },

    {
        "NORDIC":
        (
            { "target": "NRF51822", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "DELTA_DFCM_NNN40", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NRF51_DK", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NRF51_MICROBIT", "toolchains": "GCC_ARM", "libs": ["dsp"] },
        )
    },

    {
        "SILICON_LABS":
        (
            { "target": "EFM32ZG_STK3200", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "EFM32HG_STK3400", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "EFM32LG_STK3600", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "EFM32GG_STK3700", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "EFM32WG_STK3800", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "EFM32PG_STK3401", "toolchains": "GCC_ARM", "libs": ["dsp"] },
        )
    },

    {
        "ATMEL":
        (
            { "target": "SAMR21G18A", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "SAMD21J18A", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "SAMD21G18A", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "SAML21J18A", "toolchains": "GCC_ARM", "libs": ["dsp"] },
        )
    },

    {
        "NUVOTON":
        (
            { "target": "NUMAKER_PFM_NUC472", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUMAKER_PFM_M453", "toolchains": "GCC_ARM", "libs": ["dsp"] },
            { "target": "NUMAKER_PFM_M487", "toolchains": "GCC_ARM", "libs": ["dsp"] },
        )
    },

    {
        "RENESAS":
        (
            { "target": "RZ_A1H", "toolchains": "GCC_ARM" },
            { "target": "GR_LYCHEE", "toolchains": "GCC_ARM" },
        )
    }
]

################################################################################
# Configure example test building (linking against external mbed SDK libraries)

linking_list = [
    {
        "NXP": (
            {"target": "LPC1768", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_15", "MBED_16", "MBED_17"]}},
            {"target": "K64F", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "K22F", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "KL43Z", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
        )
    },

    {
        "STM": (
            {"target": "NUCLEO_F446RE", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F446ZE", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F401RE", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F411RE", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F412ZG", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F413ZH", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F429ZI", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F207ZG", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F746ZG", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F767ZI", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_L476RG", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "DISCO_F429ZI", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "DISCO_F407VG", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "DISCO_F413ZH", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUCLEO_F303ZE", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "DISCO_L475VG_IOT01A", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "DISCO_L476VG", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "DISCO_L072CZ_LRWAN1", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "MTB_MURATA_ABZ", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
        )
    },

    {
        "NUVOTON": (
            {"target": "NUMAKER_PFM_NUC472", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUMAKER_PFM_M453", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "NUMAKER_PFM_M487", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}}
        )
    },

    {
        "RENESAS": (
            {"target": "RZ_A1H", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
            {"target": "GR_LYCHEE", "toolchains": "GCC_ARM",
             "tests": {"": ["MBED_2", "MBED_10", "MBED_11", "MBED_16"]}},
        )
    }
]

################################################################################

# Driver

def run_builds(dry_run, vendor):
    for vendor_list in build_list:
        if vendor in vendor_list:
            for build in vendor_list[vendor]:
                toolchain_list = build["toolchains"]
                if not isinstance(toolchain_list, list):
                    toolchain_list = [toolchain_list]
                for toolchain in toolchain_list:
                    cmdline = ("%s tools/build.py -m %s -t %s -c --silent " %
                               (sys.executable, build["target"], toolchain))
                    libs = build.get("libs", [])
                    if libs:
                        cmdline = cmdline + " ".join(["--" + l for l in libs])
                    print("Executing: %s" % cmdline)
                    if not dry_run:
                        if os.system(cmdline) != 0:
                            sys.exit(1)

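# Illustrative example (not part of the original script): for the entry
# {"target": "K64F", "toolchains": "GCC_ARM", "libs": ["dsp"]}, run_builds()
# above constructs and executes:
#   <python> tools/build.py -m K64F -t GCC_ARM -c --silent --dsp
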
def run_test_linking(dry_run, vendor):
    """ Runs make.py commands to build and link simple mbed SDK tests against
    a few libraries, to make sure there are no basic linking errors.
    """
    for vendor_list in linking_list:
        if vendor in vendor_list:
            for link in vendor_list[vendor]:
                toolchain_list = link["toolchains"]
                if not isinstance(toolchain_list, list):
                    toolchain_list = [toolchain_list]
                for toolchain in toolchain_list:
                    tests = link["tests"]
                    # Call make.py once per test group for the particular library
                    for test_lib in tests:
                        test_names = tests[test_lib]
                        test_lib_switch = "--" + test_lib if test_lib else ""
                        cmdline = ("%s tools/make.py -m %s -t %s -c --silent %s "
                                   "-n %s" % (sys.executable, link["target"],
                                              toolchain, test_lib_switch,
                                              ",".join(test_names)))
                        print("Executing: %s" % cmdline)
                        if not dry_run:
                            if os.system(cmdline) != 0:
                                sys.exit(1)


if __name__ == "__main__":
    parser = ArgumentParser()

    parser.add_argument("--vendor",
                        metavar="vendor",
                        type=str.upper,
                        help="Select a vendor to run travis tests")

    # The original checked for a bare "-s" in sys.argv, but argparse rejects
    # unrecognised arguments, so the flag is declared explicitly here.
    parser.add_argument("-s", "--dry-run",
                        action="store_true",
                        help="Print the build commands without executing them")

    options = parser.parse_args()

    run_builds(options.dry_run, options.vendor)
    run_test_linking(options.dry_run, options.vendor)
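
# Illustrative usage (this diff does not preserve the script's file name;
# "build_travis.py" is an assumption):
#   > python tools/build_travis.py --vendor STM --dry-run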
@ -1,18 +0,0 @@

{
    "config" : {
        "mbed_repo_path" : "C:/Users/annbri01/Work/Mercurial"
    },
    "test_list" : [
        {
            "name" : "test_compile_mbed_lib",
            "lib" : "mbed"
        },
        {
            "name" : "test_compile_mbed_dev",
            "lib" : "mbed-dev"
        }
    ],
    "target_list" : [],
    "ignore_list" : []
}
@ -1,548 +0,0 @@

"""
Copyright (c) 2016-2019 ARM Limited. All rights reserved.

SPDX-License-Identifier: Apache-2.0

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

# Script to check a new mbed 2 release by compiling a set of specified test apps
# for all currently supported platforms. Each test app must include an mbed library.
# This can either be the pre-compiled version 'mbed' or the source version 'mbed-dev'.
#
# Setup:
# 1. Set up your global .hgrc file
#
#    If you don't already have a .hgrc file in your $HOME directory, create one there.
#    Then add the following section:
#
#    [auth]
#    x.prefix = *
#    x.username = <put your mbed org username here>
#    x.password = <put your mbed org password here>
#
#    This serves two purposes: first, you aren't prompted for your password
#    whenever you run hg commands on the command line; second, this script
#    reads these details in order to fully automate the Mercurial commands.
#
# 2. Edit "check_release.json". This has the following structure:
#    {
#        "config" : {
#            "mbed_repo_path" : "C:/Users/annbri01/Work/Mercurial"
#        },
#        "test_list" : [
#            {
#                "name" : "test_compile_mbed_lib",
#                "lib" : "mbed"
#            },
#            {
#                "name" : "test_compile_mbed_dev",
#                "lib" : "mbed-dev"
#            }
#        ],
#        "target_list" : []
#    }
#
#    The mbed_repo_path field should be changed to point to where your local
#    working directory is for Mercurial repositories.
#    For each test app you wish to run, add an entry to the test list. The example
#    above has two test apps, "test_compile_mbed_lib" and "test_compile_mbed_dev".
#    The lib field in each says which type of mbed 2 library the app contains.
#    These test apps MUST be available as repos in the user's online Mercurial area.
#    The target_list allows the user to override the set of targets/platforms used
#    for the compilation.
#    E.g. to just compile for 2 targets, K64F and K22F:
#        "target_list" : ["K64F", "K22F"]
#
# Run the script from the mbed-os directory as follows:
# > python tools/check_release.py
#
# It will look for local clones of the test app repos. If they don't exist
# it will clone them. It will then read the latest versions of mbed and mbed-dev
# (an assumption is made that both of these are already cloned in your Mercurial area).
# The lib files within the test apps are then updated to the corresponding version in
# the associated lib itself. The test apps are then committed and pushed back to the
# user's fork.
# The test apps will then be compiled for all supported targets and a % result output
# at the end.
#
# Uses the online compiler API at https://mbed.org/handbook/Compile-API
# Based on the example from https://mbed.org/teams/mbed/code/mbed-API-helper/


import os, getpass, sys, json, time, requests, logging
from os.path import dirname, abspath, basename, join
import argparse
import subprocess
import re
import hglib

# Be sure that the tools directory is in the search path
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)

from tools.build_api import get_mbed_official_release

OFFICIAL_MBED_LIBRARY_BUILD = get_mbed_official_release('2')

def get_compilation_failure(messages):
    """ Reads the json formatted 'messages' and checks for compilation errors.
        If there is a genuine compilation error then there should be a
        message with a type field of 'error' or 'tool_error' and an
        accompanying message with the compile error text. Any other
        combination is considered an internal compile engine failure.
    Args:
    messages - json formatted text returned by the online compiler API.

    Returns:
    Either "Error" or "Internal" to indicate an actual compilation error or an
    internal IDE API fault.
    """
    for m in messages:
        # Get the message text if it exists
        try:
            message = m['message']
            message = message + "\n"
        except KeyError:
            # Skip this message as it has no 'message' field
            continue

        # Get the type of the message text
        try:
            msg_type = m['type']
        except KeyError:
            # Skip this message as it has no 'type' field
            continue

        if msg_type in ('error', 'tool_error'):
            rel_log.error(message)
            return "Error"
        else:
            rel_log.debug(message)

    return "Internal"

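# Illustrative message shape (an assumption inferred from the fields read
# above, not captured from the real API):
#   {"type": "error", "message": "#error directive: ..."}
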
def invoke_api(payload, url, auth, polls, begin="start/"):
    """ Sends an API command request to the online IDE. Waits for a task completed
        response before returning the results.

    Args:
    payload - Configuration parameters to be passed to the API
    url - The URL for the online compiler API
    auth - Tuple containing authentication credentials
    polls - Number of times to poll for results
    begin - Default value = "start/", start command to be appended to URL

    Returns:
    result - True/False indicating the success/failure of the compilation
    fail_type - the failure text if the compilation failed, else None
    """

    # send task to api
    rel_log.debug(url + begin + "| data: " + str(payload))
    r = requests.post(url + begin, data=payload, auth=auth)
    rel_log.debug(r.request.body)

    if r.status_code != 200:
        rel_log.error("HTTP code %d reported.", r.status_code)
        return False, "Internal"

    response = r.json()
    rel_log.debug(response)
    uuid = response['result']['data']['task_id']
    rel_log.debug("Task accepted and given ID: %s", uuid)
    result = False
    fail_type = None

    # It currently seems to take the online IDE API ~30s to process the compile
    # request and provide a response. Set the poll time to half that in case it
    # does manage to compile quicker.
    poll_delay = 15
    rel_log.debug("Running with a poll for response delay of: %ss", poll_delay)

    # poll for output
    for check in range(polls):
        time.sleep(poll_delay)

        try:
            r = requests.get(url + "output/%s" % uuid, auth=auth)
        except requests.exceptions.ConnectionError:
            # Match the function's documented return type (the original
            # returned a bare string here).
            return False, "Internal"

        response = r.json()

        data = response['result']['data']
        if data['task_complete']:
            # Task completed. Now determine the result. Should be one of:
            # 1) Successful compilation
            # 2) Failed compilation with an error message
            # 3) Internal failure of the online compiler
            result = bool(data['compilation_success'])
            if result:
                rel_log.info("COMPILATION SUCCESSFUL\n")
            else:
                # Did this fail due to a genuine compilation error or a
                # failure of the api itself?
                rel_log.info("COMPILATION FAILURE\n")
                fail_type = get_compilation_failure(data['new_messages'])
            break
        else:
            rel_log.debug("Task not yet complete, polling again...")

    if not result and fail_type is None:
        fail_type = "Internal"

    return result, fail_type

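# Request flow, as implemented above: POST <url>start/ submits the compile
# task and returns a task_id; GET <url>output/<task_id> is then polled until
# the response reports task_complete.
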
def build_repo(target, program, user, pw, polls=25,
               url="https://developer.mbed.org/api/v2/tasks/compiler/"):
    """ Wrapper for sending an API command request to the online IDE. Sends a
        build request.

    Args:
    target - Target to be built
    program - Test program to build
    user - mbed username
    pw - mbed password
    polls - Number of times to poll for results
    url - The URL for the online compiler API

    Returns:
    result - True/False indicating the success/failure of the compilation
    fail_type - the failure text if the compilation failed, else None
    """
    payload = {'clean': True, 'target': target, 'program': program}
    auth = (user, pw)
    return invoke_api(payload, url, auth, polls)

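# Illustrative call (mirroring how __main__ drives it below; the variable
# names are assumptions):
#   result, fail_type = build_repo("K64F", "test_compile_mbed_lib", user, password)
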
def run_cmd(command, exit_on_failure=False):
    """ Passes a command to the system and returns the process return code
        (0 on success) once the command has been executed. Commands are
        passed as a list of tokens.
        E.g. The command 'git remote -v' would be passed in as ['git', 'remote', '-v']

    Args:
    command - system command as a list of tokens
    exit_on_failure - If True exit the program on failure (default = False)

    Returns:
    return_code - the process return code, 0 indicating success
    """
    rel_log.debug('[Exec] %s', ' '.join(command))
    # Note: shell=True with a token list matches the Windows-hosted usage this
    # script targets; on Windows the list is joined into a single command line.
    return_code = subprocess.call(command, shell=True)

    if return_code:
        rel_log.warning("The command '%s' failed with return code: %s",
                        ' '.join(command), return_code)
        if exit_on_failure:
            sys.exit(1)

    return return_code


def run_cmd_with_output(command, exit_on_failure=False):
    """ Passes a command to the system and returns the process return code
        (0 on success) once the command has been executed. If the command was
        successful then the output from the command is returned to the caller.
        Commands are passed as a list of tokens.
        E.g. The command 'git remote -v' would be passed in as ['git', 'remote', '-v']

    Args:
    command - system command as a list of tokens
    exit_on_failure - If True exit the program on failure (default = False)

    Returns:
    returncode - the process return code, 0 indicating success
    output - The output of the command if it was successful, else empty string
    """
    rel_log.debug('[Exec] %s', ' '.join(command))
    returncode = 0
    output = ""
    try:
        output = subprocess.check_output(command, shell=True)
    except subprocess.CalledProcessError as e:
        rel_log.warning("The command '%s' failed with return code: %s",
                        ' '.join(command), e.returncode)
        returncode = e.returncode
        if exit_on_failure:
            sys.exit(1)
    return returncode, output

def upgrade_test_repo(test, user, library, ref, repo_path):
    """ Upgrades a local version of a test repo to the latest version of its
        embedded library.
        If the test repo is not present in the user area specified in the json
        config file, then it will first be cloned.
    Args:
    test - Mercurial test repo name
    user - Mercurial user name
    library - library name
    ref - SHA corresponding to the latest version of the library
    repo_path - path to the user's repo area

    Returns:
    updated - True if the library was updated, False otherwise
    """
    rel_log.info("Updating test repo: '%s' to SHA: %s", test, ref)
    cwd = os.getcwd()

    repo = "https://" + user + '@developer.mbed.org/users/' + user + '/code/' + test

    # Clone the repo if it doesn't already exist
    path = abspath(repo_path + '/' + test)
    if not os.path.exists(path):
        rel_log.info("Test repo doesn't exist, cloning...")
        os.chdir(abspath(repo_path))
        clone_cmd = ['hg', 'clone', repo]
        run_cmd(clone_cmd, exit_on_failure=True)

    os.chdir(path)

    client = hglib.open(path)

    lib_file = library + '.lib'
    if os.path.isfile(lib_file):
        # The rename will fail on some OSes if the target file already
        # exists, so ensure that if it does, it is deleted first.
        bak_file = library + '_bak'
        if os.path.isfile(bak_file):
            os.remove(bak_file)

        os.rename(lib_file, bak_file)
    else:
        rel_log.error("Failure to backup lib file prior to updating.")
        return False

    # An mbed 2 style lib file contains one line with the following format,
    # e.g. https://developer.mbed.org/users/<user>/code/mbed-dev/#156823d33999
    exp = 'https://developer.mbed.org/users/' + user + '/code/' + library + '/#[A-Za-z0-9]+'
    lib_re = re.compile(exp)
    updated = False

    # Scan through the backed-up lib file line by line, looking for the lib
    # version and updating it if found
    with open(bak_file, 'r') as ip, open(lib_file, 'w') as op:
        for line in ip:

            opline = line

            regexp = lib_re.match(line)
            if regexp:
                opline = 'https://developer.mbed.org/users/' + user + '/code/' + library + '/#' + ref
                updated = True

            op.write(opline)

    if updated:

        # Set up the default commit message
        commit_message = '"Updating ' + library + ' to ' + ref + '"'

        # Set up and run the commit command. rawcommand from hglib is needed
        # here in order to pass the string value to the -m option; run_cmd
        # using subprocess does not like this syntax.
        try:
            client.rawcommand(['commit', '-m ' + commit_message, lib_file])

            cmd = ['hg', 'push', '-f', repo]
            run_cmd(cmd, exit_on_failure=True)

        except Exception:
            rel_log.info("Lib file already up to date and thus nothing to commit")

    os.chdir(cwd)
    return updated

def get_sha(repo_path, library):
    """ Gets the latest SHA for the specified library. The library is assumed
        to be located at repo_path. If a SHA cannot be obtained this script
        will exit.

    Args:
    library - library name
    repo_path - path to the user's repo area

    Returns:
    sha - last commit SHA
    """
    cwd = os.getcwd()
    sha = None
    os.chdir(abspath(repo_path + '/' + library))

    cmd = ['hg', 'log', '-l', '1']
    ret, output = run_cmd_with_output(cmd, exit_on_failure=True)

    # Output should contain a header of the form:
    # changeset:   135:176b8275d35d
    # tag:         tip
    # user:        <>
    # date:        Thu Feb 02 16:02:30 2017 +0000
    # summary:     Release 135 of the mbed library
    # All we want is the changeset hash after the version number

    lines = output.split('\n')
    fields = lines[0].split(':')
    sha = fields[2]

    os.chdir(cwd)
    return sha


def get_latest_library_versions(repo_path):
    """ Returns the latest library versions (SHAs) for 'mbed' and 'mbed-dev'.
        If the SHAs cannot be obtained this script will exit.

    Args:
    repo_path - path to the user's repo area

    Returns:
    mbed - last commit SHA for the mbed library
    mbed_dev - last commit SHA for the mbed-dev library
    """

    mbed = get_sha(repo_path, 'mbed')
    mbed_dev = get_sha(repo_path, 'mbed-dev')

    return mbed, mbed_dev


def log_results(lst, title):
    if len(lst) == 0:
        rel_log.info("%s - None", title)
    else:
        for entry in lst:
            rel_log.info("%s - Test: %s, Target: %s", title, entry[0], entry[1])

if __name__ == '__main__':

    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-l', '--log-level',
                        help="Level for providing logging output",
                        default='INFO')
    args = parser.parse_args()

    level = getattr(logging, args.log_level.upper(), logging.INFO)

    # Set logging level
    logging.basicConfig(level=level)
    rel_log = logging.getLogger("check-release")

    # Read configuration data
    with open(os.path.join(os.path.dirname(__file__), "check_release.json")) as config:
        json_data = json.load(config)

    supported_targets = []

    if len(json_data["target_list"]) > 0:
        # Compile the user-supplied subset of targets
        supported_targets = json_data["target_list"]
    else:
        # Get a list of the officially supported mbed-os 2 targets
        for tgt in OFFICIAL_MBED_LIBRARY_BUILD:
            supported_targets.append(tgt[0])

    ignore_list = []

    if len(json_data["ignore_list"]) > 0:
        # List of (test, target) pairs to be ignored in this test
        ignore_list = json_data["ignore_list"]

    config = json_data["config"]
    test_list = json_data["test_list"]
    repo_path = config["mbed_repo_path"]
    tests = []

    # get username
    cmd = ['hg', 'config', 'auth.x.username']
    ret, output = run_cmd_with_output(cmd, exit_on_failure=True)
    output = output.split('\n')
    user = output[0]

    # get password
    cmd = ['hg', 'config', 'auth.x.password']
    ret, output = run_cmd_with_output(cmd, exit_on_failure=True)
    output = output.split('\n')
    password = output[0]

    mbed, mbed_dev = get_latest_library_versions(repo_path)

    if not mbed or not mbed_dev:
        rel_log.error("Could not obtain latest versions of library files!")
        exit(1)

    rel_log.info("Latest mbed lib version = %s", mbed)
    rel_log.info("Latest mbed-dev lib version = %s", mbed_dev)

    # First update test repos to the latest versions of their embedded libraries
    for test in test_list:
        tests.append(test['name'])
        upgrade_test_repo(test['name'], user, test['lib'],
                          mbed if test['lib'] == "mbed" else mbed_dev,
                          repo_path)

    total = len(supported_targets) * len(tests)
    current = 0
    retries = 10
    passes = 0
    failures = []
    skipped = []

    # Compile each test for each supported target
    for test in tests:
        for target in supported_targets:

            combo = [test, target]

            if combo in ignore_list:
                rel_log.info("SKIPPING TEST: %s, TARGET: %s", test, target)
                total -= 1
                skipped.append(combo)
                continue

            current += 1
            for retry in range(0, retries):
                rel_log.info("COMPILING (%d/%d): TEST %s, TARGET: %s, attempt %u\n",
                             current, total, test, target, retry)
                result, mesg = build_repo(target, test, user, password)
                if not result:
                    if mesg == 'Internal':
                        # Internal compiler error, thus retry
                        continue
                    else:
                        # Actual error, thus move on to the next compilation
                        failures.append(combo)
                        break

                passes += int(result)
                break
            else:
                # for/else: this branch runs only if every retry ended in an
                # internal error (i.e. the loop never hit a break)
                rel_log.error("Compilation failed due to internal errors.")
                rel_log.error("Skipping test/target combination.")
                total -= 1
                skipped.append(combo)

    rel_log.info(" SUMMARY OF COMPILATION RESULTS")
    rel_log.info(" ------------------------------")
    rel_log.info(" NUMBER OF TEST APPS: %d, NUMBER OF TARGETS: %d",
                 len(tests), len(supported_targets))
    log_results(failures, " FAILED")
    log_results(skipped, " SKIPPED")

    # Output a % pass rate; exit non-zero unless 100% successful
    pass_rate = (float(passes) / float(total)) * 100.0
    rel_log.info(" PASS RATE %.1f %%\n", pass_rate)
    sys.exit(pass_rate != 100)
@ -1,16 +0,0 @@

"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
@ -1,69 +0,0 @@

"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>

"""

import sys

try:
    from colorama import Fore
except ImportError:
    pass

COLORAMA = 'colorama' in sys.modules


class IOperTestCaseBase():
    """ Interoperability test case base class
        test() returns a list of result tuples, e.g.:
        result.append((self.INFO, "TEST_NAME", self.scope, "description"))
    """

    def __init__(self, scope=None):
        self.PASS = 'PASS'
        self.INFO = 'INFO'
        self.ERROR = 'ERROR'
        self.WARN = 'WARN'

        self.scope = scope  # Default test scope (basic, pedantic, mbed-enabled etc...)

    def test(self, param=None):
        result = []
        return result

    def RED(self, text):
        return self.color_text(text, color=Fore.RED, delim=Fore.RESET) if COLORAMA else text

    def GREEN(self, text):
        return self.color_text(text, color=Fore.GREEN, delim=Fore.RESET) if COLORAMA else text

    def YELLOW(self, text):
        return self.color_text(text, color=Fore.YELLOW, delim=Fore.RESET) if COLORAMA else text

    def color_text(self, text, color='', delim=''):
        # The original repeated the colour code before the delimiter
        # (color + text + color + delim); the trailing reset is all that is needed.
        return color + text + delim

    def COLOR(self, severity, text):
        colors = {
            self.PASS: self.GREEN,
            self.ERROR: self.RED,
            self.WARN: self.YELLOW
        }
        if severity in colors:
            return colors[severity](text)
        return text
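
# Illustrative use (assumed): IOperTestCaseBase().COLOR('ERROR', 'text')
# returns 'text' wrapped in Fore.RED ... Fore.RESET when colorama is
# installed, and the unmodified string otherwise.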
@ -1,125 +0,0 @@

#!/usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>

"""

import sys
import mbed_lstools
from prettytable import PrettyTable

try:
    from colorama import init
except ImportError:
    pass

COLORAMA = 'colorama' in sys.modules

from ioper_base import IOperTestCaseBase
from ioper_test_fs import IOperTest_FileStructure_Basic
from ioper_test_fs import IOperTest_FileStructure_MbedEnabled
from ioper_test_target_id import IOperTest_TargetID_Basic
from ioper_test_target_id import IOperTest_TargetID_MbedEnabled


TEST_LIST = [IOperTest_TargetID_Basic('basic'),
             IOperTest_TargetID_MbedEnabled('mbed-enabled'),
             IOperTest_FileStructure_Basic('basic'),
             IOperTest_FileStructure_MbedEnabled('mbed-enabled'),
             IOperTestCaseBase('all'),  # Dummy used to add 'all' option
             ]


class IOperTestRunner():
    """ Calls all i/face interoperability tests
    """

    def __init__(self, scope=None):
        """ Test scope:
            'pedantic' - all tests
            'mbed-enabled' - check whether the device is mbed-enabled
            'basic' - just simple, passive tests (no device flashing)
        """
        self.requested_scope = scope  # Test scope given by user
        self.raw_test_results = {}  # Raw test results, can be used by exporters: { Platform: [test results]}

        # Test scope definitions
        self.SCOPE_BASIC = 'basic'  # Basic tests, sanity checks
        self.SCOPE_MBED_ENABLED = 'mbed-enabled'  # Check whether the device is mbed-enabled
        self.SCOPE_PEDANTIC = 'pedantic'  # Extensive tests
        self.SCOPE_ALL = 'all'  # All tests, equal to the highest scope level

        # This structure will help us sort test scopes so we can include them;
        # e.g. pedantic also includes basic and mbed-enabled tests
        self.scopes = {self.SCOPE_BASIC: 0,
                       self.SCOPE_MBED_ENABLED: 1,
                       self.SCOPE_PEDANTIC: 2,
                       self.SCOPE_ALL: 99,
                       }
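
        # Illustrative consequence of the ordering above (not in the original):
        # a requested scope of 'pedantic' (2) also runs 'basic' (0) and
        # 'mbed-enabled' (1) test cases, via the >= comparison in run() below.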
        if COLORAMA:
            init()  # colorama.init()

    def run(self):
        """ Run tests, calculate overall score and print test results
        """
        mbeds = mbed_lstools.create()
        muts_list = mbeds.list_mbeds()
        test_base = IOperTestCaseBase()

        self.raw_test_results = {}
        for i, mut in enumerate(muts_list):
            result = []
            self.raw_test_results[mut['platform_name']] = []

            print "MBEDLS: Detected %s, port: %s, mounted: %s" % (mut['platform_name'],
                                                                  mut['serial_port'],
                                                                  mut['mount_point'])
            print "Running interoperability test suite, scope '%s'" % (self.requested_scope)
            for test_case in TEST_LIST:
                if self.scopes[self.requested_scope] >= self.scopes[test_case.scope]:
                    res = test_case.test(param=mut)
                    result.extend(res)
                    self.raw_test_results[mut['platform_name']].extend(res)

            columns = ['Platform', 'Test Case', 'Result', 'Scope', 'Description']
            pt = PrettyTable(columns)
            for col in columns:
                pt.align[col] = 'l'

            for tr in result:
                severity, tr_name, tr_scope, text = tr
                tr = (test_base.COLOR(severity, mut['platform_name']),
                      test_base.COLOR(severity, tr_name),
                      test_base.COLOR(severity, severity),
                      test_base.COLOR(severity, tr_scope),
                      test_base.COLOR(severity, text))
                pt.add_row(list(tr))
            print pt.get_string(border=True, sortby='Result')
            if i + 1 < len(muts_list):
                print
        return self.raw_test_results


def get_available_oper_test_scopes():
    """ Get the list of available test scopes
    """
    scopes = set()
    for oper_test in TEST_LIST:
        if oper_test.scope is not None:
            scopes.add(oper_test.scope)
    return list(scopes)
@ -1,69 +0,0 @@

"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>

"""

import os.path
from ioper_base import IOperTestCaseBase


class IOperTest_FileStructure(IOperTestCaseBase):

    def __init__(self, scope=None):
        IOperTestCaseBase.__init__(self, scope)

    def if_file_exist(self, fname, fail_severity=None):
        file_path = os.path.join(self.param['mount_point'], fname)
        exist = os.path.isfile(file_path)
        tr_name = "FILE_EXIST(%s)" % fname.upper()
        if exist:
            self.result.append((self.PASS, tr_name, self.scope, "File '%s' exists" % file_path))
        else:
            self.result.append((fail_severity if fail_severity else self.ERROR, tr_name, self.scope, "File '%s' not found" % file_path))

    def test(self, param=None):
        self.result = []
        if param:
            pass
        return self.result


class IOperTest_FileStructure_Basic(IOperTest_FileStructure):
    def __init__(self, scope=None):
        IOperTest_FileStructure.__init__(self, scope)

    def test(self, param=None):
        self.param = param
        self.result = []
        if param:
            self.if_file_exist('mbed.htm', self.ERROR)
        return self.result


class IOperTest_FileStructure_MbedEnabled(IOperTest_FileStructure):
    def __init__(self, scope=None):
        IOperTest_FileStructure.__init__(self, scope)

    def test(self, param=None):
        self.param = param
        self.result = []
        if param:
            self.if_file_exist('mbed.htm', self.ERROR)
            self.if_file_exist('DETAILS.TXT', self.ERROR)
            self.if_file_exist('FAIL.TXT', self.INFO)
        return self.result
@ -1,111 +0,0 @@

"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>

"""

from ioper_base import IOperTestCaseBase


class IOperTest_TargetID(IOperTestCaseBase):
    """ Tests related to the target_id value
    """

    def __init__(self, scope=None):
        IOperTestCaseBase.__init__(self, scope)
        self.TARGET_ID_LEN = 24

    def test_target_id_format(self, target_id, target_id_name):
        # Expected length == 24, e.g. "02400203D94B0E7724B7F3CF"
        result = []
        target_id_len = len(target_id) if target_id else 0
        if target_id_len == self.TARGET_ID_LEN:
            result.append((self.PASS, "TARGET_ID_LEN", self.scope,
                           "%s '%s' is %d chars long " % (target_id_name, target_id, target_id_len)))
            result.append((self.INFO, "FW_VER_STR", self.scope,
                           "%s Version String is %s.%s.%s " % (target_id_name,
                                                               target_id[0:4],
                                                               target_id[4:8],
                                                               target_id[8:24])))
        else:
            result.append((self.ERROR, "TARGET_ID_LEN", self.scope,
                           "%s '%s' is %d chars long. Expected %d chars" % (target_id_name, target_id, target_id_len, self.TARGET_ID_LEN)))
        return result

    def test_decode_target_id(self, target_id, target_id_name):
        result = []
        target_id_len = len(target_id) if target_id else 0
        if target_id_len >= 4:
            result.append((self.INFO, "FW_VEN_CODE", self.scope, "%s Vendor Code is '%s'" % (target_id_name, target_id[0:2])))
            result.append((self.INFO, "FW_PLAT_CODE", self.scope, "%s Platform Code is '%s'" % (target_id_name, target_id[2:4])))
            result.append((self.INFO, "FW_VER", self.scope, "%s Firmware Version is '%s'" % (target_id_name, target_id[4:8])))
            result.append((self.INFO, "FW_HASH_SEC", self.scope, "%s Hash of secret is '%s'" % (target_id_name, target_id[8:24])))
        return result
def test(self, param=None):
|
|
||||||
result = []
|
|
||||||
if param:
|
|
||||||
pass
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class IOperTest_TargetID_Basic(IOperTest_TargetID):
|
|
||||||
""" Basic interoperability tests checking TargetID compliance
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, scope=None):
|
|
||||||
IOperTest_TargetID.__init__(self, scope)
|
|
||||||
|
|
||||||
def test(self, param=None):
|
|
||||||
result = []
|
|
||||||
|
|
||||||
if param:
|
|
||||||
result.append((self.PASS, "TARGET_ID", self.scope, "TargetID '%s' found" % param['target_id']))
|
|
||||||
|
|
||||||
# Check if target name can be decoded with mbed-ls
|
|
||||||
if param['platform_name']:
|
|
||||||
result.append((self.PASS, "TARGET_ID_DECODE", self.scope, "TargetID '%s' decoded as '%s'" % (param['target_id'][0:4], param['platform_name'])))
|
|
||||||
else:
|
|
||||||
result.append((self.ERROR, "TARGET_ID_DECODE", self.scope, "TargetID '%s'... not decoded" % (param['target_id'] if param['target_id'] else '')))
|
|
||||||
|
|
||||||
# Test for USBID and mbed.htm consistency
|
|
||||||
if param['target_id_mbed_htm'] == param['target_id_usb_id']:
|
|
||||||
result.append((self.PASS, "TARGET_ID_MATCH", self.scope, "TargetID (USBID) and TargetID (mbed.htm) match"))
|
|
||||||
else:
|
|
||||||
text = "TargetID (USBID) and TargetID (mbed.htm) don't match: '%s' != '%s'" % (param['target_id_usb_id'], param['target_id_mbed_htm'])
|
|
||||||
result.append((self.WARN, "TARGET_ID_MATCH", self.scope, text))
|
|
||||||
else:
|
|
||||||
result.append((self.ERROR, "TARGET_ID", self.scope, "TargetID not found"))
|
|
||||||
return result
|
|
||||||
|
|
||||||
class IOperTest_TargetID_MbedEnabled(IOperTest_TargetID):
|
|
||||||
""" Basic interoperability tests checking TargetID compliance
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, scope=None):
|
|
||||||
IOperTest_TargetID.__init__(self, scope)
|
|
||||||
|
|
||||||
def test(self, param=None):
|
|
||||||
result = []
|
|
||||||
|
|
||||||
if param:
|
|
||||||
# Target ID tests:
|
|
||||||
result += self.test_target_id_format(param['target_id_usb_id'], "TargetId (USBID)")
|
|
||||||
result += self.test_target_id_format(param['target_id_mbed_htm'], "TargetId (mbed.htm)")
|
|
||||||
|
|
||||||
# Some extra info about TargetID itself
|
|
||||||
result += self.test_decode_target_id(param['target_id_usb_id'], "TargetId (USBID)")
|
|
||||||
result += self.test_decode_target_id(param['target_id_mbed_htm'], "TargetId (mbed.htm)")
|
|
||||||
return result
|
|
|
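For quick reference, this is the 24-character TargetID layout the checks above assume. The helper below is a hypothetical standalone sketch (not part of the tools); the sample ID is the one from the comment in `test_target_id_format`.

```python
# Hypothetical helper restating the TargetID layout checked above.
def decode_target_id(target_id):
    assert len(target_id) == 24, "TargetID is expected to be 24 chars"
    return {
        "vendor_code": target_id[0:2],
        "platform_code": target_id[2:4],
        "firmware_version": target_id[4:8],
        "hash_of_secret": target_id[8:24],
    }

print(decode_target_id("02400203D94B0E7724B7F3CF"))
# {'vendor_code': '02', 'platform_code': '40',
#  'firmware_version': '0203', 'hash_of_secret': 'D94B0E7724B7F3CF'}
```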
@ -1,16 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
@ -1,34 +0,0 @@
/* mbed Microcontroller Library
 * Copyright (c) 2006-2012 ARM Limited
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef MBED_CLASSES_H
#define MBED_CLASSES_H

#include "rpc.h"

namespace mbed {

{{classes}}

}

#endif
@ -1,24 +0,0 @@
class Rpc{{name}} : public RPC {
public:
    Rpc{{name}}({{cons_proto}}) : RPC(name), o({{cons_call}}) {}

    {{methods}}

    virtual const struct rpc_method *get_rpc_methods() {
        static const rpc_method rpc_methods[] = {
            {{rpc_methods}},
            RPC_METHOD_SUPER(RPC)
        };
        return rpc_methods;
    }
    static struct rpc_class *get_rpc_class() {
        static const rpc_function funcs[] = {
            {"new", rpc_function_caller<const char*, {{cons_type}}, &RPC::construct<Rpc{{name}}, {{cons_type}}> >},
            RPC_METHOD_END
        };
        static rpc_class c = {"{{name}}", funcs, NULL};
        return &c;
    }
private:
    {{name}} o;
};
@ -1,27 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from tools.targets import TARGETS

DEFAULT_SUPPORT = {}
CORTEX_ARM_SUPPORT = {}

for target in TARGETS:
    DEFAULT_SUPPORT[target.name] = target.supported_toolchains

    if target.core.startswith('Cortex'):
        CORTEX_ARM_SUPPORT[target.name] = [t for t in target.supported_toolchains
                                           if (t == 'ARM' or t == 'uARM')]
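For context, the two maps were consumed as plain dictionaries keyed by target name. A sketch of pre-removal usage; 'LPC1768' and the toolchain lists are illustrative, since the real values depend on the target definitions in your tree.

```python
# Illustrative only: how this module was consumed before its removal.
from tools.data.support import DEFAULT_SUPPORT, CORTEX_ARM_SUPPORT

print(DEFAULT_SUPPORT.get('LPC1768'))     # e.g. ['ARM', 'uARM', 'GCC_ARM', 'IAR']
print(CORTEX_ARM_SUPPORT.get('LPC1768'))  # e.g. ['ARM', 'uARM'] -- ARM toolchains only
```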
@ -1,16 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
@ -1,89 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from numpy import sin, arange, pi
from scipy.signal import lfilter, firwin
from pylab import figure, plot, grid, show

#------------------------------------------------
# Create a signal for demonstration.
#------------------------------------------------
# 320 samples of (1 kHz + 15 kHz) at 48 kHz
sample_rate = 48000.
nsamples = 320

F_1KHz = 1000.
A_1KHz = 1.0

F_15KHz = 15000.
A_15KHz = 0.5

t = arange(nsamples) / sample_rate
signal = A_1KHz * sin(2*pi*F_1KHz*t) + A_15KHz*sin(2*pi*F_15KHz*t)

#------------------------------------------------
# Create a FIR filter and apply it to the signal.
#------------------------------------------------
# The Nyquist rate of the signal.
nyq_rate = sample_rate / 2.

# The cutoff frequency of the filter: 6 kHz
cutoff_hz = 6000.0

# Length of the filter (number of coefficients, i.e. the filter order + 1)
numtaps = 29

# Use firwin to create a lowpass FIR filter
fir_coeff = firwin(numtaps, cutoff_hz/nyq_rate)

# Use lfilter to filter the signal with the FIR filter
filtered_signal = lfilter(fir_coeff, 1.0, signal)

#------------------------------------------------
# Plot the original and filtered signals.
#------------------------------------------------

# The first N-1 samples are "corrupted" by the initial conditions
warmup = numtaps - 1

# The phase delay of the filtered signal
delay = (warmup / 2) / sample_rate

figure(1)
# Plot the original signal
plot(t, signal)

# Plot the filtered signal, shifted to compensate for the phase delay
plot(t-delay, filtered_signal, 'r-')

# Plot just the "good" part of the filtered signal. The first N-1
# samples are "corrupted" by the initial conditions.
plot(t[warmup:]-delay, filtered_signal[warmup:], 'g', linewidth=4)

grid(True)

show()

#------------------------------------------------
# Print values
#------------------------------------------------
def print_values(label, values):
    var = "float32_t %s[%d]" % (label, len(values))
    print "%-30s = {%s}" % (var, ', '.join(["%+.10f" % x for x in values]))

print_values('signal', signal)
print_values('fir_coeff', fir_coeff)
print_values('filtered_signal', filtered_signal)
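The delay compensation in the plot section follows from a standard fact: a linear-phase FIR with N taps delays the signal by (N - 1)/2 samples. A quick check of the numbers this script uses:

```python
# Group delay of a linear-phase FIR: (numtaps - 1) / 2 samples.
numtaps = 29
sample_rate = 48000.0
delay_samples = (numtaps - 1) / 2.0          # 14.0 samples
delay_seconds = delay_samples / sample_rate  # ~0.000292 s (~0.29 ms)
print("%.1f samples -> %.6f s" % (delay_samples, delay_seconds))
```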
@ -1,49 +0,0 @@
"""
Copyright (c) 2014-2019 ARM Limited. All rights reserved.

SPDX-License-Identifier: Apache-2.0

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from intelhex import IntelHex
from cStringIO import StringIO


def sections(h):
    start, last_address = None, None
    for a in h.addresses():
        if last_address is None:
            start, last_address = a, a
            continue

        if a > last_address + 1:
            yield (start, last_address)
            start = a

        last_address = a

    # Compare against None explicitly: a section starting at address 0
    # is valid but falsy.
    if start is not None:
        yield (start, last_address)


def print_sections(h):
    for s in sections(h):
        print "[0x%08X - 0x%08X]" % s


def decode(record):
    h = IntelHex()
    f = StringIO(record)
    h.loadhex(f)
    h.dump()
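A minimal usage sketch for sections()/print_sections(), assuming the helpers above are in scope; the addresses are made up to show one gap:

```python
# Illustrative only: a two-region image listed with print_sections().
from intelhex import IntelHex

h = IntelHex()
h[0x0000] = 0xAA   # first contiguous region: 0x0000-0x0001
h[0x0001] = 0xBB
h[0x1000] = 0xCC   # second region, after a gap
print_sections(h)
# [0x00000000 - 0x00000001]
# [0x00001000 - 0x00001000]
```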
@ -1,190 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import join
from jinja2 import Template

from tools.paths import TOOLS_DATA, MBED_RPC

RPC_TEMPLATES_PATH = join(TOOLS_DATA, "rpc")

RPC_TEMPLATE = "RPCClasses.h"
CLASS_TEMPLATE = "class.cpp"
RPC_CLASSES_PATH = join(MBED_RPC, RPC_TEMPLATE)


def get_template(name):
    return Template(open(join(RPC_TEMPLATES_PATH, name)).read())


def write_rpc_classes(classes):
    template = get_template(RPC_TEMPLATE)
    open(RPC_CLASSES_PATH, "w").write(template.render({"classes": classes}))


RPC_CLASSES = (
    {
        "name": "DigitalOut",
        "cons_args": ["PinName"],
        "methods": [
            (None , "write", ["int"]),
            ("int", "read" , []),
        ]
    },
    {
        "name": "DigitalIn",
        "cons_args": ["PinName"],
        "methods": [
            ("int", "read" , []),
        ]
    },
    {
        "name": "DigitalInOut",
        "cons_args": ["PinName"],
        "methods": [
            ("int", "read"  , []),
            (None , "write" , ["int"]),
            (None , "input" , []),
            (None , "output", []),
        ]
    },
    {
        "name": "AnalogIn",
        "required": "ANALOGIN",
        "cons_args": ["PinName"],
        "methods": [
            ("float"         , "read"    , []),
            ("unsigned short", "read_u16", []),
        ]
    },
    {
        "name": "AnalogOut",
        "required": "ANALOGOUT",
        "cons_args": ["PinName"],
        "methods": [
            ("float", "read"     , []),
            (None   , "write"    , ["float"]),
            (None   , "write_u16", ["unsigned short"]),
        ]
    },
    {
        "name": "PwmOut",
        "required": "PWMOUT",
        "cons_args": ["PinName"],
        "methods": [
            ("float", "read"         , []),
            (None   , "write"        , ["float"]),
            (None   , "period"       , ["float"]),
            (None   , "period_ms"    , ["int"]),
            (None   , "pulsewidth"   , ["float"]),
            (None   , "pulsewidth_ms", ["int"]),
        ]
    },
    {
        "name": "SPI",
        "required": "SPI",
        "cons_args": ["PinName", "PinName", "PinName"],
        "methods": [
            (None , "format"   , ["int", "int"]),
            (None , "frequency", ["int"]),
            ("int", "write"    , ["int"]),
        ]
    },
    {
        "name": "Serial",
        "required": "SERIAL",
        "cons_args": ["PinName", "PinName"],
        "methods": [
            (None , "baud"     , ["int"]),
            ("int", "readable" , []),
            ("int", "writeable", []),
            ("int", "putc"     , ["int"]),
            ("int", "getc"     , []),
            ("int", "puts"     , ["const char *"]),
        ]
    },
    {
        "name": "Timer",
        "cons_args": [],
        "methods": [
            (None   , "start"  , []),
            (None   , "stop"   , []),
            (None   , "reset"  , []),
            ("float", "read"   , []),
            ("int"  , "read_ms", []),
            ("int"  , "read_us", []),
        ]
    }
)


def get_args_proto(args_types, extra=None):
    args = ["%s a%d" % (s, n) for n, s in enumerate(args_types)]
    if extra:
        args.extend(extra)
    return ', '.join(args)


def get_args_call(args):
    return ', '.join(["a%d" % (n) for n in range(len(args))])


classes = []
class_template = get_template(CLASS_TEMPLATE)

for c in RPC_CLASSES:
    c_args = c['cons_args']
    data = {
        'name': c['name'],
        'cons_type': ', '.join(c_args + ['const char*']),
        "cons_proto": get_args_proto(c_args, ["const char *name=NULL"]),
        "cons_call": get_args_call(c_args)
    }

    c_name = "Rpc" + c['name']

    methods = []
    rpc_methods = []
    for r, m, a in c['methods']:
        ret_proto = r if r else "void"
        args_proto = "void"

        ret_defin = "return " if r else ""
        args_defin = ""

        if a:
            args_proto = get_args_proto(a)
            args_defin = get_args_call(a)

        proto = "%s %s(%s)" % (ret_proto, m, args_proto)
        defin = "{%so.%s(%s);}" % (ret_defin, m, args_defin)
        methods.append("%s %s" % (proto, defin))

        rpc_method_type = [r] if r else []
        rpc_method_type.append(c_name)
        rpc_method_type.extend(a)
        rpc_methods.append('{"%s", rpc_method_caller<%s, &%s::%s>}' % (m, ', '.join(rpc_method_type), c_name, m))

    data['methods'] = "\n    ".join(methods)
    data['rpc_methods'] = ",\n            ".join(rpc_methods)

    class_decl = class_template.render(data)
    if 'required' in c:
        class_decl = "#if DEVICE_%s\n%s\n#endif" % (c['required'], class_decl)

    classes.append(class_decl)

write_rpc_classes('\n\n'.join(classes))
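To make the glue concrete, here is what the two argument helpers return for a DigitalOut-style constructor; the outputs were traced by hand from the definitions above, not captured from a tool run.

```python
# Traced by hand from get_args_proto/get_args_call above.
print(get_args_proto(["PinName", "int"], ["const char *name=NULL"]))
# -> PinName a0, int a1, const char *name=NULL
print(get_args_call(["PinName", "int"]))
# -> a0, a1
```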
@ -1,75 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


Utility to find which libraries could define a given symbol
"""
from argparse import ArgumentParser
from os.path import join, splitext
from os import walk
from subprocess import Popen, PIPE


OBJ_EXT = ['.o', '.a', '.ar']


def find_sym_in_lib(sym, obj_path):
    contain_symbol = False

    out = Popen(["nm", "-C", obj_path], stdout=PIPE, stderr=PIPE).communicate()[0]
    for line in out.splitlines():
        tokens = line.split()
        n = len(tokens)
        if n == 2:
            sym_type = tokens[0]
            sym_name = tokens[1]
        elif n == 3:
            sym_type = tokens[1]
            sym_name = tokens[2]
        else:
            continue

        if sym_type == "U":
            # This object is using this symbol, not defining it
            continue

        if sym_name == sym:
            contain_symbol = True

    return contain_symbol


def find_sym_in_path(sym, dir_path):
    for root, _, files in walk(dir_path):
        for file in files:
            _, ext = splitext(file)
            if ext not in OBJ_EXT:
                continue

            path = join(root, file)
            if find_sym_in_lib(sym, path):
                print path


if __name__ == '__main__':
    parser = ArgumentParser(description='Find Symbol')
    parser.add_argument('-s', '--sym', required=True,
                        help='The symbol to be searched')
    parser.add_argument('-p', '--path', required=True,
                        help='The path where to search')
    args = parser.parse_args()

    find_sym_in_path(args.sym, args.path)
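The two token layouts accepted by find_sym_in_lib() correspond to typical `nm -C` output: defined symbols carry an address column, undefined references do not. A sketch with made-up output:

```python
# Illustrative nm -C output; addresses and names are invented.
sample = """\
00000100 T my_function
         U printf
0000020c D my_table"""

for line in sample.splitlines():
    tokens = line.split()
    if len(tokens) == 3:    # address, type, name -> defined in this object
        print("defines %s (type %s)" % (tokens[2], tokens[1]))
    elif len(tokens) == 2:  # type, name -> 'U' means referenced, not defined
        print("references %s" % tokens[1])
```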
@ -21,8 +21,7 @@ from tools.paths import MBED_LIBRARIES,\
     CPPUTEST_PLATFORM_SRC, CPPUTEST_TESTRUNNER_SCR, CPPUTEST_LIBRARY,\
     CPPUTEST_INC, CPPUTEST_PLATFORM_INC, CPPUTEST_TESTRUNNER_INC,\
     CPPUTEST_INC_EXT
-from tools.data.support import DEFAULT_SUPPORT
-from tools.tests import TEST_MBED_LIB
+from tools.tests import TEST_MBED_LIB, DEFAULT_SUPPORT


 LIBRARIES = [
@ -1,39 +0,0 @@
#!/usr/bin/env python

"""
Copyright (c) 2016-2019 ARM Limited. All rights reserved.

SPDX-License-Identifier: Apache-2.0

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import os
import re

def main(path='.', pattern=r'#include\s+"([^"]*\.(?:c|cpp))"'):
    pattern = re.compile(pattern)

    for root, dirs, files in os.walk(path, followlinks=True):
        for file in files:
            with open(os.path.join(root, file)) as f:
                for line in f.read().splitlines():
                    m = re.search(pattern, line)
                    if m:
                        print os.path.relpath(os.path.join(root, m.group(1)))


if __name__ == "__main__":
    import sys
    main(*sys.argv[1:])
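Note the default pattern only matches includes of source files (.c/.cpp), the way some vendor code pulls implementation files in directly; header includes are ignored. For example (the filename is invented):

```python
import re

# Same default pattern as main() above.
pattern = re.compile(r'#include\s+"([^"]*\.(?:c|cpp))"')

print(pattern.search('#include "port/vendor_hal.c"').group(1))  # port/vendor_hal.c
print(pattern.search('#include "mbed.h"'))                      # None: headers ignored
```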
@ -1,234 +0,0 @@
"""
Copyright (c) 2016-2019 ARM Limited. All rights reserved.

SPDX-License-Identifier: Apache-2.0

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import json
import os
import stat
import re
from collections import OrderedDict
from subprocess import Popen

git_processes = []

class MyJSONEncoder(json.JSONEncoder):
    def __init__(self, *args, **kwargs):
        super(MyJSONEncoder, self).__init__(*args, **kwargs)
        self.current_indent = 0
        self.current_indent_str = ""

    def encode(self, o):
        # Special processing for lists
        if isinstance(o, (list, tuple)):
            primitives_only = True
            for item in o:
                if isinstance(item, (list, tuple, dict)):
                    primitives_only = False
                    break
            output = []
            if primitives_only:
                for item in o:
                    output.append(json.dumps(item))
                return "[" + ", ".join(output) + "]"
            else:
                self.current_indent += self.indent
                self.current_indent_str = " " * self.current_indent
                for item in o:
                    output.append(self.current_indent_str + self.encode(item))
                self.current_indent -= self.indent
                self.current_indent_str = " " * self.current_indent
                return "[\n" + ",\n".join(output) + "\n" + self.current_indent_str + "]"
        elif isinstance(o, dict):
            primitives_only = True
            for item in o.values():
                if isinstance(item, (list, tuple, dict)):
                    primitives_only = False
                    break
            output = []
            if primitives_only and len(o) < 3:
                for key, value in o.items():
                    output.append(json.dumps(key) + ": " + self.encode(value))
                return "{" + ", ".join(output) + "}"
            else:
                self.current_indent += self.indent
                self.current_indent_str = " " * self.current_indent
                for key, value in o.items():
                    output.append(self.current_indent_str + json.dumps(key) + ": " + self.encode(value))
                self.current_indent -= self.indent
                self.current_indent_str = " " * self.current_indent
                return "{\n" + ",\n".join(output) + "\n" + self.current_indent_str + "}"
        else:
            return json.dumps(o)

def load(path):
    with open(path, 'r') as f:
        return json.load(f, object_pairs_hook=OrderedDict)

def dump(path, obj):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        f.write(MyJSONEncoder(indent=4).encode(obj))
        f.write(u'\n')
        f.truncate()

def find(stem, path):
    for root, directories, files in os.walk(path, followlinks=True):
        # Prune hidden directories in place so os.walk skips them
        directories[:] = [dir for dir in directories if dir[0] != '.']
        if (stem_match(stem, os.path.basename(os.path.normpath(root))) and
                "device.h" in files):
            return os.path.join(root, "device.h")

def find_all_devices(path, verbose=False):
    for root, directories, files in os.walk(path, followlinks=True):
        # Prune hidden directories in place so os.walk skips them
        directories[:] = [dir for dir in directories if dir[0] != '.']
        if "device.h" in files:
            if verbose: print("[VERBOSE] found a device.h file in {}".format(root))
            yield os.path.join(root, "device.h")

mbed_matcher = re.compile('mbed', re.IGNORECASE)
def stem_match(stem, thing):
    return (stem in thing or
            re.sub(mbed_matcher, '', stem) in thing)

attr_matcher = re.compile(r'^#define\W+DEVICE_(\w+)\W+1.*$')
def parse_attributes(path):
    with open(path) as input:
        for line in input:
            m = re.match(attr_matcher, line)
            if m: yield m.group(1)

remove_matcher = re.compile(r'^#define\W+DEVICE_(\w+)\W+[10].*$')
def remove_attributes(path):
    with open(path) as input:
        remainder = filter(lambda l: not re.match(remove_matcher, l), input)
    with open(path, "wb") as output:
        output.truncate(0)
        output.write("// The 'provides' section in 'target.json' is now used" +
                     " to create the device's hardware preprocessor switches.\n")
        output.write("// Check the 'provides' section of the target description" +
                     " in 'targets.json' for more details.\n")
        output.writelines(remainder)

def user_select(things, message):
    print(message)
    for thing, number in zip(things, range(len(things))):
        print("{} : {}".format(number, thing))
    selection = None
    while selection is None:
        print("please select an integer [0..{}] or specify all".format(len(things) - 1))
        try:
            i = raw_input()
            if i == "all":
                selection = "all"
            else:
                selection = int(i)
                # >= here: valid indices run 0..len(things)-1
                if (selection >= len(things) or
                        selection < 0):
                    print("selection {} out of range".format(selection))
                    selection = None
        except (ValueError, SyntaxError):
            print("selection not understood")
    if selection == "all":
        return things
    else:
        return [things[selection]]

target_matcher = re.compile("TARGET_")
def strip_target(str):
    return re.sub(target_matcher, "", str)

def add_to_targets(targets, device_file, verbose=False, remove=False):
    if verbose: print("[VERBOSE] trying target {}".format(device_file))
    device = strip_target(os.path.basename(os.path.normpath(os.path.dirname(device_file))))
    if not device:
        print("[WARNING] device {} did not have an associated device.h".format(device))
    else:
        possible_matches = set([key for key in targets.keys() if stem_match(device, key)])
        for key, value in targets.items():
            for alt in value['extra_labels'] if 'extra_labels' in value else []:
                if stem_match(device, alt): possible_matches.add(key)
            for alt in value['extra_labels_add'] if 'extra_labels_add' in value else []:
                if stem_match(device, alt): possible_matches.add(key)
        possible_matches = list(possible_matches)
        for match in possible_matches:
            if device == match: possible_matches = [match]
        if not possible_matches:
            print("[WARNING] device {} did not have an associated entry in targets.json".format(device))
            return None
        elif len(possible_matches) > 1:
            message = ("possible matches for file {}".format(device_file))
            target = user_select(possible_matches, message)
        else:
            target = possible_matches
        attrs = list(parse_attributes(device_file))
        if attrs:
            for t in target:
                targets[t]["device_has"] = sorted(list(set(targets[t].setdefault("device_has", []) + attrs)))
                if verbose: print("[VERBOSE] target {} now device_has {}".format(t, attrs))
            if remove is True:
                remove_attributes(device_file)

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='A helpful little script for converting' +
                                     ' device.h files to parts of the targets.json file')
    parser.add_argument('-a', '--all', action='store_true',
                        help='find and convert all available device.h files in the' +
                        ' directory tree starting at the current directory')
    parser.add_argument('-f', '--file', nargs='+', help='specify an individual file to ' +
                        'convert from device.h format to a piece of targets.json')
    parser.add_argument('-t', '--target', nargs='+', help='specify an individual target' +
                        ' to convert from device.h format to a piece of targets.json')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help="print out every target that is updated in the targets.json")
    parser.add_argument('-r', '--rm', action='store_true',
                        help="remove the used attributes from a device.h file")
    args = parser.parse_args()
    if not args.target and not args.file and not args.all:
        print("[WARNING] no action specified; auto-formatting targets.json")

    targets_file_name = os.path.join(os.curdir, "hal", "targets.json")
    try:
        targets = load(targets_file_name)
    # Catch IOError too: a missing file raises IOError on Python 2
    except (IOError, OSError):
        print("[ERROR] did not find targets.json where I expected it {}".format(targets_file_name))
        exit(1)
    except ValueError:
        print("[ERROR] invalid json found in {}".format(targets_file_name))
        exit(2)

    if args.target:
        for target in args.target:
            device_file = find(target, os.curdir)
            if device_file:
                add_to_targets(targets, device_file, verbose=args.verbose, remove=args.rm)
            else:
                print("[WARNING] could not locate a device file for target {}".format(target))

    if args.file:
        for file in args.file:
            add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)

    if args.all:
        for file in find_all_devices(os.curdir, verbose=args.verbose):
            add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)

    dump(targets_file_name, targets)

    for process in git_processes:
        process.wait()
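To make the conversion concrete: given a device.h fragment like the invented one below, parse_attributes() yields exactly the names that end up in a target's device_has list (only the `1` defines count):

```python
import re

# Same matcher as attr_matcher above; the device.h lines are invented.
attr_matcher = re.compile(r'^#define\W+DEVICE_(\w+)\W+1.*$')

device_h = """\
#define DEVICE_ANALOGIN       1
#define DEVICE_ANALOGOUT      0
#define DEVICE_SERIAL         1"""

attrs = [m.group(1) for m in (attr_matcher.match(l) for l in device_h.splitlines()) if m]
print(attrs)  # ['ANALOGIN', 'SERIAL']
```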
@ -65,7 +65,6 @@ from tools.build_api import mcu_toolchain_matrix
 # Imports from TEST API
 from tools.test_api import SingleTestRunner
 from tools.test_api import singletest_in_cli_mode
-from tools.test_api import detect_database_verbose
 from tools.test_api import get_json_data_from_file
 from tools.test_api import get_avail_tests_summary_table
 from tools.test_api import get_default_test_options_parser
@ -109,10 +108,6 @@ if __name__ == '__main__':
         print "Version %d.%d"% get_version()
         exit(0)
 
-    if opts.db_url and opts.verbose_test_configuration_only:
-        detect_database_verbose(opts.db_url)
-        exit(0)
-
     # Print summary / information about automation test status
     if opts.test_automation_report:
         print get_avail_tests_summary_table(platform_filter=opts.general_filter_regex)
@ -227,7 +222,6 @@ if __name__ == '__main__':
         _clean=opts.clean,
         _parser=parser,
         _opts=opts,
-        _opts_db_url=opts.db_url,
         _opts_log_file_name=opts.log_file_name,
         _opts_report_html_file_name=opts.report_html_file_name,
         _opts_report_junit_file_name=opts.report_junit_file_name,

tools/size.py (121 lines removed)
@ -1,121 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from os.path import join, abspath, dirname, exists, splitext
from subprocess import Popen, PIPE
import csv
from collections import defaultdict

ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)

from tools.paths import BUILD_DIR, TOOLS_DATA
from tools.settings import GCC_ARM_PATH
from tools.tests import TEST_MAP
from tools.build_api import build_mbed_libs, build_project

SIZE = join(GCC_ARM_PATH, 'arm-none-eabi-size')

def get_size(path):
    out = Popen([SIZE, path], stdout=PIPE).communicate()[0]
    return map(int, out.splitlines()[1].split()[:4])

def get_percentage(before, after):
    if before == 0:
        return 0 if after == 0 else 100.0
    return float(after - before) / float(before) * 100.0

def human_size(val):
    if val > 1024:
        return "%.0fKb" % (float(val)/1024.0)
    return "%d" % val

def print_diff(name, before, after):
    print "%s: (%s -> %s) %.2f%%" % (name, human_size(before), human_size(after), get_percentage(before, after))

BENCHMARKS = [
    ("BENCHMARK_1", "CENV"),
    ("BENCHMARK_2", "PRINTF"),
    ("BENCHMARK_3", "FP"),
    ("BENCHMARK_4", "MBED"),
    ("BENCHMARK_5", "ALL"),
]
BENCHMARK_DATA_PATH = join(TOOLS_DATA, 'benchmarks.csv')


def benchmarks():
    # CSV Data
    csv_data = csv.writer(open(BENCHMARK_DATA_PATH, 'wb'))
    csv_data.writerow(['Toolchain', "Target", "Benchmark", "code", "data", "bss", "flash"])

    # Build
    for toolchain in ['ARM', 'uARM', 'GCC_ARM']:
        for mcu in ["LPC1768", "LPC11U24"]:
            # Build Libraries
            build_mbed_libs(mcu, toolchain)

            # Build benchmarks
            build_dir = join(BUILD_DIR, "benchmarks", mcu, toolchain)
            for test_id, title in BENCHMARKS:
                # Build Benchmark
                try:
                    test = TEST_MAP[test_id]
                    path = build_project(test.source_dir, join(build_dir, test_id),
                                         mcu, toolchain, test.dependencies)
                    base, ext = splitext(path)
                    # Check Size
                    code, data, bss, flash = get_size(base+'.elf')
                    csv_data.writerow([toolchain, mcu, title, code, data, bss, flash])
                except Exception, e:
                    print "Unable to build %s for toolchain %s targeting %s" % (test_id, toolchain, mcu)
                    print e


def compare(t1, t2, target):
    if not exists(BENCHMARK_DATA_PATH):
        benchmarks()
    else:
        print "Loading: %s" % BENCHMARK_DATA_PATH

    rows = csv.reader(open(BENCHMARK_DATA_PATH, 'rb'))

    benchmarks_data = defaultdict(dict)
    # Renamed loop variables to avoid shadowing the reader ('data' in the
    # original shadowed both the row source and the size column).
    for (toolchain, mcu, name, code, data_size, bss, flash) in rows:
        if target == mcu:
            for t in [t1, t2]:
                if toolchain == t:
                    benchmarks_data[name][t] = map(int, (code, data_size, bss, flash))

    print "%s vs %s for %s" % (t1, t2, target)
    for name, data in benchmarks_data.items():
        try:
            # Check Size
            code_a, data_a, bss_a, flash_a = data[t1]
            code_u, data_u, bss_u, flash_u = data[t2]

            print "\n=== %s ===" % name
            print_diff("code", code_a, code_u)
            print_diff("data", data_a, data_u)
            print_diff("bss", bss_a, bss_u)
            print_diff("flash", flash_a, flash_u)
        except Exception, e:
            print "No data for benchmark %s" % (name)
            print e


if __name__ == '__main__':
    compare("ARM", "GCC_ARM", "LPC1768")
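get_size() assumes the Berkeley output format of arm-none-eabi-size: one header row, then text/data/bss/dec columns. A sketch of the same parsing step on invented output:

```python
# Invented Berkeley-format `size` output; the parse mirrors get_size().
out = """\
   text    data     bss     dec     hex filename
  23408     120    1832   25360    6310 build/test.elf"""

code, data, bss, flash = map(int, out.splitlines()[1].split()[:4])
print("%d %d %d %d" % (code, data, bss, flash))  # 23408 120 1832 25360
```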
344
tools/synch.py
344
tools/synch.py
|
@ -1,344 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


One repository to update them all
On mbed.org the mbed SDK is split up in multiple repositories; this script takes
care of updating them all.
"""
import sys
from copy import copy
from os import walk, remove, makedirs, getcwd, rmdir, listdir
from os.path import join, abspath, dirname, relpath, exists, isfile, normpath, isdir
from shutil import copyfile
from optparse import OptionParser
import re
import string

ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)

from tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR
from tools.paths import *
from tools.utils import run_cmd

MBED_URL = "mbed.org"
MBED_USER = "mbed_official"

changed = []
push_remote = True
quiet = False
commit_msg = ''

# Code that does have a mirror in the mbed SDK
# Tuple data: (repo_name, list_of_code_dirs, [team])
# team is optional - if not specified, the code is published under mbed_official
OFFICIAL_CODE = {"mbed-dev": ["cmsis", "drivers", "hal", "platform", "targets", "mbed.h"]}


# A list of regular expressions that will be checked against each directory
# name and skipped if they match.
IGNORE_DIRS = (
)

IGNORE_FILES = (
    'COPYING',
    '\.md',
    "\.lib",
    "\.bld"
)

def ignore_path(name, reg_exps):
    for r in reg_exps:
        if re.search(r, name):
            return True
    return False

class MbedRepository:
    @staticmethod
    def run_and_print(command, cwd):
        stdout, _, _ = run_cmd(command, work_dir=cwd, redirect=True)
        print(stdout)

    def __init__(self, name):
        self.name = name
        self.path = join(MBED_ORG_PATH, name)
        self.url = "http://" + MBED_URL + "/users/" + MBED_ORG_USER + "/code/%s/"

        if not exists(self.path):
            # Checkout code
            if not exists(MBED_ORG_PATH):
                makedirs(MBED_ORG_PATH)

            self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)

        else:
            # Update
            self.run_and_print(['hg', 'pull'], cwd=self.path)
            self.run_and_print(['hg', 'update'], cwd=self.path)

    def publish(self):
        # The maintainer has to evaluate the changes first and explicitly accept them
        self.run_and_print(['hg', 'addremove'], cwd=self.path)
        stdout, _, _ = run_cmd(['hg', 'status'], work_dir=self.path)
        if stdout == '':
            print "No changes"
            return False
        print stdout
        if quiet:
            commit = 'Y'
        else:
            commit = raw_input(push_remote and "Do you want to commit and push? Y/N: " or "Do you want to commit? Y/N: ")
        if commit == 'Y':
            args = ['hg', 'commit', '-u', MBED_ORG_USER]

            # NOTE commit_msg should always come from the relevant mbed 2 release text
            if commit_msg:
                args = args + ['-m', commit_msg]
            self.run_and_print(args, cwd=self.path)
            if push_remote:
                self.run_and_print(['hg', 'push'], cwd=self.path)
        return True

# Check if a file is a text file or a binary file
# Taken from http://code.activestate.com/recipes/173220/
text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
_null_trans = string.maketrans("", "")
def is_text_file(filename):
    block_size = 1024
    def istext(s):
        if "\0" in s:
            return 0

        if not s:  # Empty files are considered text
            return 1

        # Get the non-text characters (maps a character to itself then
        # use the 'remove' option to get rid of the text characters.)
        t = s.translate(_null_trans, text_characters)

        # If more than 30% non-text characters, then
        # this is considered a binary file
        if float(len(t))/len(s) > 0.30:
            return 0
        return 1
    with open(filename) as f:
        res = istext(f.read(block_size))
    return res

# Return the line ending type for the given file ('cr' or 'crlf')
def get_line_endings(f):
    examine_size = 1024
    try:
        tf = open(f, "rb")
        lines, ncrlf = tf.readlines(examine_size), 0
        tf.close()
        for l in lines:
            if l.endswith("\r\n"):
                ncrlf = ncrlf + 1
        return 'crlf' if ncrlf > len(lines) >> 1 else 'cr'
    except:
        return 'cr'

# Copy file to destination, but preserve destination line endings if possible.
# This prevents very annoying issues with huge diffs that appear because of
# differences in line endings
def copy_with_line_endings(sdk_file, repo_file):
    if not isfile(repo_file):
        copyfile(sdk_file, repo_file)
        return
    is_text = is_text_file(repo_file)
    if is_text:
        sdk_le = get_line_endings(sdk_file)
        repo_le = get_line_endings(repo_file)
    if not is_text or sdk_le == repo_le:
        copyfile(sdk_file, repo_file)
    else:
        print "Converting line endings in '%s' to '%s'" % (abspath(repo_file), repo_le)
        f = open(sdk_file, "rb")
        data = f.read()
        f.close()
        f = open(repo_file, "wb")
        data = data.replace("\r\n", "\n") if repo_le == 'cr' else data.replace('\n', '\r\n')
        f.write(data)
        f.close()

def visit_files(path, visit):
    for root, dirs, files in walk(path):
        # Ignore hidden directories (elif avoids removing the same entry twice)
        for d in copy(dirs):
            full = join(root, d)
            if d.startswith('.'):
                dirs.remove(d)
            elif ignore_path(full, IGNORE_DIRS):
                print "Skipping '%s'" % full
                dirs.remove(d)

        for file in files:
            if ignore_path(file, IGNORE_FILES):
                continue

            visit(join(root, file))

def visit_dirs(path, visit):
    for root, dirs, files in walk(path, topdown=False):
        for d in dirs:
            full = join(root, d)

            # We don't want to remove the .hg directory
            if not '.hg' in full:
                visit(full)


def update_repo(repo_name, sdk_paths, lib=False):
    repo = MbedRepository(repo_name)

    # copy files from mbed SDK to mbed_official repository
    def visit_mbed_sdk(sdk_file):
        # Source files structure is different for the compiled binary lib
        # compared to the mbed-dev sources
        if lib:
            repo_file = join(repo.path, relpath(sdk_file, sdk_path))
        else:
            repo_file = join(repo.path, sdk_file)
        repo_dir = dirname(repo_file)
        if not exists(repo_dir):
            print("CREATING: %s" % repo_dir)
            makedirs(repo_dir)

        copy_with_line_endings(sdk_file, repo_file)

    # Go through each path specified in the mbed structure
    for sdk_path in sdk_paths:
        if isfile(sdk_path):
            # Single file so just copy directly across
            visit_mbed_sdk(sdk_path)
        else:
            visit_files(sdk_path, visit_mbed_sdk)

    def sdk_remove(repo_path):
        print("REMOVING: %s" % repo_path)

        # Check if this is an empty directory or a file before determining how to
        # delete it. As this function should only be called with a directory list
        # after being called with a file list, the directory should automatically
        # be either valid or empty.
        if isfile(repo_path):
            remove(repo_path)
        elif isdir(repo_path) and not listdir(repo_path):
            rmdir(repo_path)
        else:
            print("ERROR: %s is not empty, please remove manually." % repo_path)
            print listdir(repo_path)
            exit(1)

    # remove repository files that do not exist in the mbed SDK
    def visit_lib_repo(repo_path):
        for sdk_path in sdk_paths:
            sdk_file = join(sdk_path, relpath(repo_path, repo.path))
            if not exists(sdk_file):
                sdk_remove(repo_path)

    # remove repository files that do not exist in the mbed SDK source
    def visit_repo(repo_path):
        # work out equivalent sdk path from repo file
        sdk_path = join(getcwd(), relpath(repo_path, repo.path))

        if not exists(sdk_path):
            sdk_remove(repo_path)

    # Go through each path specified in the mbed structure.
    # Check if there are any files in any of those paths that are no longer part of the SDK
    if lib:
        visit_files(repo.path, visit_lib_repo)
        # Now do the same for directories that may need to be removed. This needs to be done
        # bottom up to ensure any lower nested directories can be deleted first
        visit_dirs(repo.path, visit_lib_repo)
    else:
        visit_files(repo.path, visit_repo)
        # Now do the same for directories that may need to be removed. This needs to be done
        # bottom up to ensure any lower nested directories can be deleted first
        visit_dirs(repo.path, visit_repo)

    if repo.publish():
        changed.append(repo_name)


def update_code(repositories):
    for repo_name in repositories.keys():
        sdk_dirs = repositories[repo_name]
        print '\n=== Updating "%s" ===' % repo_name
        update_repo(repo_name, sdk_dirs)


def update_mbed():
    update_repo("mbed", [join(BUILD_DIR, "mbed")], lib=True)

def do_sync(options):
    global push_remote, quiet, commit_msg, changed

    push_remote = not options.nopush
    quiet = options.quiet
    commit_msg = options.msg
    changed = []

    if options.code:
        update_code(OFFICIAL_CODE)

    if options.mbed:
        update_mbed()

    if changed:
        print "Repositories with changes:", changed

    return changed

if __name__ == '__main__':
    parser = OptionParser()

    parser.add_option("-c", "--code",
                      action="store_true", default=False,
                      help="Update the mbed_official code")

    parser.add_option("-m", "--mbed",
                      action="store_true", default=False,
                      help="Release a build of the mbed library")

    parser.add_option("-n", "--nopush",
                      action="store_true", default=False,
                      help="Commit the changes locally only, don't push them")

    parser.add_option("", "--commit_message",
                      action="store", type="string", default='', dest='msg',
                      help="Commit message to use for all the commits")

    parser.add_option("-q", "--quiet",
                      action="store_true", default=False,
                      help="Don't ask for confirmation before committing or pushing")

    (options, args) = parser.parse_args()

    do_sync(options)
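The 30% heuristic in is_text_file() is easiest to see on toy inputs. A rough restatement of the same rule, written portably rather than with string.translate:

```python
# Same heuristic as is_text_file() above, restated for illustration.
def looks_binary(s, threshold=0.30):
    if "\0" in s:
        return True
    if not s:  # empty counts as text
        return False
    printable = set(map(chr, range(32, 127))) | set("\n\r\t\b")
    non_text = [ch for ch in s if ch not in printable]
    return float(len(non_text)) / len(s) > threshold

print(looks_binary("hello world\n"))   # False
print(looks_binary("\x01\x02\x03ab"))  # True: 3/5 chars are non-text
```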
@ -57,7 +57,6 @@ from tools.memap import MemapParser
 from tools.targets import TARGET_MAP, Target
 from tools.config import Config
 import tools.test_configs as TestConfig
-from tools.test_db import BaseDBAccess
 from tools.build_api import build_project, build_mbed_libs, build_lib
 from tools.build_api import get_target_supported_toolchains
 from tools.build_api import write_build_report
@ -185,7 +184,6 @@ class SingleTestRunner(object):
                  _clean=False,
                  _parser=None,
                  _opts=None,
-                 _opts_db_url=None,
                  _opts_log_file_name=None,
                  _opts_report_html_file_name=None,
                  _opts_report_junit_file_name=None,
@ -244,7 +242,6 @@ class SingleTestRunner(object):
         self.test_spec = _test_spec
 
         # Settings passed e.g. from command line
-        self.opts_db_url = _opts_db_url
         self.opts_log_file_name = _opts_log_file_name
         self.opts_report_html_file_name = _opts_report_html_file_name
         self.opts_report_junit_file_name = _opts_report_junit_file_name
@@ -284,21 +281,6 @@ class SingleTestRunner(object):
         # File / screen logger initialization
         self.logger = CLITestLogger(file_name=self.opts_log_file_name)  # Default test logger

-        # Database related initializations
-        self.db_logger = factory_db_logger(self.opts_db_url)
-        self.db_logger_build_id = None  # Build ID (database index of build_id table)
-        # Let's connect to database to set up credentials and confirm database is ready
-        if self.db_logger:
-            self.db_logger.connect_url(self.opts_db_url)  # Save db access info inside db_logger object
-            if self.db_logger.is_connected():
-                # Get hostname and uname so we can use it as build description
-                # when creating new build_id in external database
-                (_hostname, _uname) = self.db_logger.get_hostname()
-                _host_location = os.path.dirname(os.path.abspath(__file__))
-                build_id_type = None if self.opts_only_build_tests is None else self.db_logger.BUILD_ID_TYPE_BUILD_ONLY
-                self.db_logger_build_id = self.db_logger.get_next_build_id(_hostname, desc=_uname, location=_host_location, type=build_id_type)
-                self.db_logger.disconnect()
-
     def dump_options(self):
         """ Function returns data structure with common settings passed to SingleTestRunner
         It can be used for example to fill _extra fields in database storing test suite single run data
@@ -307,8 +289,7 @@ class SingleTestRunner(object):
         or
             data_str = json.dumps(self.dump_options())
         """
-        result = {"db_url" : str(self.opts_db_url),
-                  "log_file_name" : str(self.opts_log_file_name),
+        result = {"log_file_name" : str(self.opts_log_file_name),
                   "shuffle_test_order" : str(self.opts_shuffle_test_order),
                   "shuffle_test_seed" : str(self.opts_shuffle_test_seed),
                   "test_by_names" : str(self.opts_test_by_names),
@@ -416,27 +397,6 @@ class SingleTestRunner(object):

         if self.opts_shuffle_test_order:
             random.shuffle(test_map_keys, self.shuffle_random_func)
-            # Update database with shuffle seed if applicable
-            if self.db_logger:
-                self.db_logger.reconnect()
-                if self.db_logger.is_connected():
-                    self.db_logger.update_build_id_info(
-                        self.db_logger_build_id,
-                        _shuffle_seed=self.shuffle_random_func())
-                    self.db_logger.disconnect()
-
-        if self.db_logger:
-            self.db_logger.reconnect()
-            if self.db_logger.is_connected():
-                # Update MUTs and Test Specification in database
-                self.db_logger.update_build_id_info(
-                    self.db_logger_build_id,
-                    _muts=self.muts, _test_spec=self.test_spec)
-                # Update Extra information in database (some options passed to test suite)
-                self.db_logger.update_build_id_info(
-                    self.db_logger_build_id,
-                    _extra=json.dumps(self.dump_options()))
-                self.db_logger.disconnect()

         valid_test_map_keys = self.get_valid_tests(test_map_keys, target, toolchain, test_ids, self.opts_include_non_automated)
         skipped_test_map_keys = self.get_skipped_tests(test_map_keys, valid_test_map_keys)
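The shuffle above passes self.shuffle_random_func as the random source so a run's test order can be reproduced from its seed; a minimal sketch of the same idea (make_shuffle_func is a hypothetical helper, not part of the tools):

    import random

    def make_shuffle_func(seed):
        # Hypothetical helper: returns a callable yielding floats in [0.0, 1.0),
        # usable as the 'random' argument of Python 2's random.shuffle()
        rng = random.Random(seed)
        return rng.random

    order = list(range(10))
    random.shuffle(order, make_shuffle_func(0.1234))  # same seed -> same order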
@@ -656,12 +616,6 @@ class SingleTestRunner(object):
             self.execute_thread_slice(q, target, toolchains, clean, test_ids, self.build_report, self.build_properties)
             q.get()

-        if self.db_logger:
-            self.db_logger.reconnect()
-            if self.db_logger.is_connected():
-                self.db_logger.update_build_id_info(self.db_logger_build_id, _status_fk=self.db_logger.BUILD_ID_STATUS_COMPLETED)
-                self.db_logger.disconnect()
-
         return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, self.build_report, self.build_properties

     def get_valid_tests(self, test_map_keys, target, toolchain, test_ids, include_non_automated):
@@ -885,9 +839,6 @@ class SingleTestRunner(object):
             mcu = mut['mcu']
             copy_method = mut.get('copy_method')  # Available board configuration selection e.g. core selection etc.

-            if self.db_logger:
-                self.db_logger.reconnect()
-
             selected_copy_method = self.opts_copy_method if copy_method is None else copy_method

             # Tests can be looped so test results must be stored for the same test
@@ -986,27 +937,10 @@ class SingleTestRunner(object):
                     single_test_result, target_name_unique, toolchain_name, test_id,
                     test_description, elapsed_time, single_timeout))

-                # Update database entries for ongoing test
-                if self.db_logger and self.db_logger.is_connected():
-                    test_type = 'SingleTest'
-                    self.db_logger.insert_test_entry(self.db_logger_build_id,
-                                                     target_name,
-                                                     toolchain_name,
-                                                     test_type,
-                                                     test_id,
-                                                     single_test_result,
-                                                     single_test_output,
-                                                     elapsed_time,
-                                                     single_timeout,
-                                                     test_index)
-
                 # If we perform waterfall test we test until we get OK and we stop testing
                 if self.opts_waterfall_test and single_test_result == self.TEST_RESULT_OK:
                     break

-        if self.db_logger:
-            self.db_logger.disconnect()
-
         return (self.shape_global_test_loop_result(test_all_result, self.opts_waterfall_test and self.opts_consolidate_waterfall_test),
                 target_name_unique,
                 toolchain_name,
@@ -1658,46 +1592,6 @@ class CLITestLogger(TestLogger):
             pass
         return log_line_str

-
-def factory_db_logger(db_url):
-    """ Factory database driver depending on database type supplied in database connection string db_url
-    """
-    if db_url is not None:
-        from tools.test_mysql import MySQLDBAccess
-        connection_info = BaseDBAccess().parse_db_connection_string(db_url)
-        if connection_info is not None:
-            (db_type, username, password, host, db_name) = connection_info
-            if db_type == 'mysql':
-                return MySQLDBAccess()
-    return None
-
-
-def detect_database_verbose(db_url):
-    """ Uses verbose mode (prints) database detection sequence to check if a database connection string is valid
-    """
-    result = BaseDBAccess().parse_db_connection_string(db_url)
-    if result is not None:
-        # Parsing passed
-        (db_type, username, password, host, db_name) = result
-        #print "DB type '%s', user name '%s', password '%s', host '%s', db name '%s'"% result
-        # Let's try to connect
-        db_ = factory_db_logger(db_url)
-        if db_ is not None:
-            print("Connecting to database '%s'..." % db_url)
-            db_.connect(host, username, password, db_name)
-            if db_.is_connected():
-                print("ok")
-                print("Detecting database...")
-                print(db_.detect_database(verbose=True))
-                print("Disconnecting...")
-                db_.disconnect()
-                print("done")
-        else:
-            print("Database type '%s' unknown" % db_type)
-    else:
-        print("Parse error: '%s' - DB Url error" % db_url)
-
-
 def get_module_avail(module_name):
     """ This function returns True if module_name is already imported module
     """
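The removed factory dispatched purely on the scheme of the connection string; illustratively (the URLs are made up):

    db = factory_db_logger('mysql://tester:secret@127.0.0.1/mbed_tests')
    # -> a MySQLDBAccess instance; any other scheme, or db_url=None, yields None
    assert factory_db_logger('sqlite://tester:secret@127.0.0.1/mbed_tests') is None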
@@ -1987,10 +1881,6 @@ def get_default_test_options_parser():
                         type=int,
                         help='You can increase global timeout for each test by specifying additional test timeout in seconds')

-    parser.add_argument('--db',
-                        dest='db_url',
-                        help='This specifies what database test suite uses to store its state. To pass DB connection info use database connection string. Example: \'mysql://username:password@127.0.0.1/db_name\'')
-
     parser.add_argument('-l', '--log',
                         dest='log_file_name',
                         help='Log events to external file (note not all console entries may be visible in log file)')
 165  tools/test_db.py
@@ -1,165 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2014 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>
"""

import re
import json


class BaseDBAccess():
    """ Class used to connect with test database and store test results
    """
    def __init__(self):
        self.db_object = None
        self.db_type = None
        # Connection credentials
        self.host = None
        self.user = None
        self.passwd = None
        self.db = None

        # Test Suite DB scheme (table names)
        self.TABLE_BUILD_ID = 'mtest_build_id'
        self.TABLE_BUILD_ID_STATUS = 'mtest_build_id_status'
        self.TABLE_BUILD_ID_TYPE = 'mtest_build_id_type'
        self.TABLE_TARGET = 'mtest_target'
        self.TABLE_TEST_ENTRY = 'mtest_test_entry'
        self.TABLE_TEST_ID = 'mtest_test_id'
        self.TABLE_TEST_RESULT = 'mtest_test_result'
        self.TABLE_TEST_TYPE = 'mtest_test_type'
        self.TABLE_TOOLCHAIN = 'mtest_toolchain'
        # Build ID status PKs
        self.BUILD_ID_STATUS_STARTED = 1      # Started
        self.BUILD_ID_STATUS_IN_PROGRESS = 2  # In Progress
        self.BUILD_ID_STATUS_COMPLETED = 3    # Completed
        self.BUILD_ID_STATUS_FAILED = 4       # Failed
        # Build ID type PKs
        self.BUILD_ID_TYPE_TEST = 1        # Test
        self.BUILD_ID_TYPE_BUILD_ONLY = 2  # Build Only

    def get_hostname(self):
        """ Useful when creating build_id in database
            Returns (hostname, uname) which can be used as (build_id_name, build_id_desc)
        """
        # Get hostname from socket
        import socket
        hostname = socket.gethostbyaddr(socket.gethostname())[0]
        # Get uname from platform resources
        import platform
        uname = json.dumps(platform.uname())
        return (hostname, uname)

    def get_db_type(self):
        """ Returns database type. E.g. 'mysql', 'sqlLite' etc.
        """
        return self.db_type

    def detect_database(self, verbose=False):
        """ Detects database and returns VERSION data structure or string (verbose=True)
        """
        return None

    def parse_db_connection_string(self, str):
        """ Parses a SQL DB connection string. The string should contain:
            - DB type, user name, password, URL (DB host) and DB name
            Returns a tuple with parsed (db_type, username, password, host, db_name) or None on error

            (db_type, username, password, host, db_name) = self.parse_db_connection_string(db_url)

            E.g. connection string: 'mysql://username:password@127.0.0.1/db_name'
        """
        result = None
        if type(str) == type(''):
            PATTERN = '^([\w]+)://([\w]+):([\w]*)@(.*)/([\w]+)'
            result = re.match(PATTERN, str)
            if result is not None:
                result = result.groups()  # Tuple (db_type, username, password, host, db_name)
        return result  # (db_type, username, password, host, db_name)

    def is_connected(self):
        """ Returns True if we are connected to database
        """
        pass

    def connect(self, host, user, passwd, db):
        """ Connects to DB and returns DB object
        """
        pass

    def connect_url(self, db_url):
        """ Connects to database using db_url (database url parsing),
            stores host, username, password, db_name
        """
        pass

    def reconnect(self):
        """ Reconnects to DB and returns DB object using stored host name,
            database name and credentials (user name and password)
        """
        pass

    def disconnect(self):
        """ Closes DB connection
        """
        pass

    def escape_string(self, str):
        """ Escapes string so it can be put in SQL query between quotes
        """
        pass

    def select_all(self, query):
        """ Executes SELECT query and gets all results
        """
        pass

    def insert(self, query, commit=True):
        """ Executes INSERT query, define if you want to commit
        """
        pass

    def get_next_build_id(self, name, desc='', location='', type=None, status=None):
        """ Inserts new build_id (DB unique build-like ID number used to group all test results)
        """
        pass

    def get_table_entry_pk(self, table, column, value, update_db=True):
        """ Checks for entries in tables with two columns (<TABLE_NAME>_pk, <column>)
            If update_db is True, adds a table entry if the value in the specified column doesn't exist
        """
        pass

    def update_table_entry(self, table, column, value):
        """ Adds a table entry if the value in the specified column doesn't exist
            Locks table to perform atomic read + update
        """
        pass

    def update_build_id_info(self, build_id, **kw):
        """ Updates additional data inside build_id table
            Example:
                db.update_build_id_info(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789)
        """
        pass

    def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_output, test_time, test_timeout, test_loop, test_extra=''):
        """ Inserts test result entry to database. All checks regarding existing
            toolchain names in DB are performed.
            If some data is missing the DB will be updated
        """
        pass
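To make the parsing contract above concrete, a short usage sketch (the credentials are made up):

    db = BaseDBAccess()
    result = db.parse_db_connection_string('mysql://tester:secret@127.0.0.1/mbed_tests')
    if result is not None:
        (db_type, username, password, host, db_name) = result
        # db_type == 'mysql', username == 'tester', password == 'secret',
        # host == '127.0.0.1', db_name == 'mbed_tests'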
tools/test_mysql.py
@@ -1,271 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2014 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.Wirkus@arm.com>
"""

import re
import MySQLdb as mdb

# Imports from TEST API
from tools.test_db import BaseDBAccess


class MySQLDBAccess(BaseDBAccess):
    """ Wrapper for MySQL DB access for common test suite interface
    """
    def __init__(self):
        BaseDBAccess.__init__(self)
        self.DB_TYPE = 'mysql'

    def detect_database(self, verbose=False):
        """ Detects database and returns VERSION data structure or string (verbose=True)
        """
        query = 'SHOW VARIABLES LIKE "%version%"'
        rows = self.select_all(query)
        if verbose:
            result = []
            for row in rows:
                result.append("\t%s: %s"% (row['Variable_name'], row['Value']))
            result = "\n".join(result)
        else:
            result = rows
        return result

    def parse_db_connection_string(self, str):
        """ Parses a SQL DB connection string. The string should contain:
            - DB type, user name, password, URL (DB host) and DB name
            Returns a tuple with parsed (db_type, username, password, host, db_name),
            or None if parsing fails or the scheme is not 'mysql'
            E.g. connection string: 'mysql://username:password@127.0.0.1/db_name'
        """
        result = BaseDBAccess().parse_db_connection_string(str)
        if result is not None:
            (db_type, username, password, host, db_name) = result
            if db_type != 'mysql':
                result = None
        return result

    def is_connected(self):
        """ Returns True if we are connected to database
        """
        return self.db_object is not None

    def connect(self, host, user, passwd, db):
        """ Connects to DB and returns DB object
        """
        try:
            self.db_object = mdb.connect(host=host, user=user, passwd=passwd, db=db)
            # Let's remember connection credentials
            self.db_type = self.DB_TYPE
            self.host = host
            self.user = user
            self.passwd = passwd
            self.db = db
        except mdb.Error, e:
            print "Error %d: %s"% (e.args[0], e.args[1])
            self.db_object = None
            self.db_type = None
            self.host = None
            self.user = None
            self.passwd = None
            self.db = None

    def connect_url(self, db_url):
        """ Connects to database using db_url (database url parsing),
            stores host, username, password, db_name
        """
        result = self.parse_db_connection_string(db_url)
        if result is not None:
            (db_type, username, password, host, db_name) = result
            if db_type == self.DB_TYPE:
                self.connect(host, username, password, db_name)

    def reconnect(self):
        """ Reconnects to DB and returns DB object using stored host name,
            database name and credentials (user name and password)
        """
        self.connect(self.host, self.user, self.passwd, self.db)

    def disconnect(self):
        """ Closes DB connection
        """
        if self.db_object:
            self.db_object.close()
        self.db_object = None
        self.db_type = None

    def escape_string(self, str):
        """ Escapes string so it can be put in SQL query between quotes
        """
        con = self.db_object
        result = con.escape_string(str)
        return result if result else ''

    def select_all(self, query):
        """ Executes SELECT query and gets all results
        """
        con = self.db_object
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query)
        rows = cur.fetchall()
        return rows

    def insert(self, query, commit=True):
        """ Executes INSERT query, define if you want to commit
        """
        con = self.db_object
        cur = con.cursor()
        cur.execute(query)
        if commit:
            con.commit()
        return cur.lastrowid

    def get_next_build_id(self, name, desc='', location='', type=None, status=None):
        """ Inserts new build_id (DB unique build-like ID number used to group all test results)
        """
        if status is None:
            status = self.BUILD_ID_STATUS_STARTED

        if type is None:
            type = self.BUILD_ID_TYPE_TEST

        query = """INSERT INTO `%s` (%s_name, %s_desc, %s_location, %s_type_fk, %s_status_fk)
                        VALUES ('%s', '%s', '%s', %d, %d)"""% (self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.escape_string(name),
                                                               self.escape_string(desc),
                                                               self.escape_string(location),
                                                               type,
                                                               status)
        index = self.insert(query)  # Provide inserted record PK
        return index

    def get_table_entry_pk(self, table, column, value, update_db=True):
        """ Checks for entries in tables with two columns (<TABLE_NAME>_pk, <column>)
            If update_db is True, adds a table entry if the value in the specified column doesn't exist
        """
        # TODO: table buffering
        result = None
        table_pk = '%s_pk'% table
        query = """SELECT `%s`
                     FROM `%s`
                    WHERE `%s`='%s'"""% (table_pk,
                                         table,
                                         column,
                                         self.escape_string(value))
        rows = self.select_all(query)
        if len(rows) == 1:
            result = rows[0][table_pk]
        elif len(rows) == 0 and update_db:
            # Update DB with new value
            result = self.update_table_entry(table, column, value)
        return result

    def update_table_entry(self, table, column, value):
        """ Adds a table entry if the value in the specified column doesn't exist
            Locks table to perform atomic read + update
        """
        result = None
        con = self.db_object
        cur = con.cursor()
        cur.execute("LOCK TABLES `%s` WRITE"% table)
        table_pk = '%s_pk'% table
        query = """SELECT `%s`
                     FROM `%s`
                    WHERE `%s`='%s'"""% (table_pk,
                                         table,
                                         column,
                                         self.escape_string(value))
        cur.execute(query)
        rows = cur.fetchall()
        if len(rows) == 0:
            query = """INSERT INTO `%s` (%s)
                            VALUES ('%s')"""% (table,
                                               column,
                                               self.escape_string(value))
            cur.execute(query)
            result = cur.lastrowid
        con.commit()
        cur.execute("UNLOCK TABLES")
        return result

    def update_build_id_info(self, build_id, **kw):
        """ Updates additional data inside build_id table
            Example:
                db.update_build_id_info(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789)
        """
        if len(kw):
            con = self.db_object
            cur = con.cursor()
            # Prepare UPDATE query
            # ["`mtest_build_id_pk`=[value-1]", "`mtest_build_id_name`=[value-2]", "`mtest_build_id_desc`=[value-3]"]
            set_list = []
            for col_suffix in kw:
                assign_str = "`%s%s`='%s'"% (self.TABLE_BUILD_ID, col_suffix, self.escape_string(str(kw[col_suffix])))
                set_list.append(assign_str)
            set_str = ', '.join(set_list)
            query = """UPDATE `%s`
                          SET %s
                        WHERE `mtest_build_id_pk`=%d"""% (self.TABLE_BUILD_ID,
                                                          set_str,
                                                          build_id)
            cur.execute(query)
            con.commit()

    def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_output, test_time, test_timeout, test_loop, test_extra=''):
        """ Inserts test result entry to database. All checks regarding existing
            toolchain names in DB are performed.
            If some data is missing the DB will be updated
        """
        # Get all table FKs and if an entry is new try to insert the new value
        target_fk = self.get_table_entry_pk(self.TABLE_TARGET, self.TABLE_TARGET + '_name', target)
        toolchain_fk = self.get_table_entry_pk(self.TABLE_TOOLCHAIN, self.TABLE_TOOLCHAIN + '_name', toolchain)
        test_type_fk = self.get_table_entry_pk(self.TABLE_TEST_TYPE, self.TABLE_TEST_TYPE + '_name', test_type)
        test_id_fk = self.get_table_entry_pk(self.TABLE_TEST_ID, self.TABLE_TEST_ID + '_name', test_id)
        test_result_fk = self.get_table_entry_pk(self.TABLE_TEST_RESULT, self.TABLE_TEST_RESULT + '_name', test_result)

        con = self.db_object
        cur = con.cursor()

        query = """ INSERT INTO `%s` (`mtest_build_id_fk`,
                                      `mtest_target_fk`,
                                      `mtest_toolchain_fk`,
                                      `mtest_test_type_fk`,
                                      `mtest_test_id_fk`,
                                      `mtest_test_result_fk`,
                                      `mtest_test_output`,
                                      `mtest_test_time`,
                                      `mtest_test_timeout`,
                                      `mtest_test_loop_no`,
                                      `mtest_test_result_extra`)
                         VALUES (%d, %d, %d, %d, %d, %d, '%s', %.2f, %.2f, %d, '%s')"""% (self.TABLE_TEST_ENTRY,
                                                                                          build_id,
                                                                                          target_fk,
                                                                                          toolchain_fk,
                                                                                          test_type_fk,
                                                                                          test_id_fk,
                                                                                          test_result_fk,
                                                                                          self.escape_string(test_output),
                                                                                          test_time,
                                                                                          test_timeout,
                                                                                          test_loop,
                                                                                          self.escape_string(test_extra))
        cur.execute(query)
        con.commit()
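A minimal end-to-end sketch of how the test suite drove this wrapper (the URL, names, and timings are illustrative only):

    db = MySQLDBAccess()
    db.connect_url('mysql://tester:secret@127.0.0.1/mbed_tests')
    if db.is_connected():
        build_id = db.get_next_build_id('ci-host', desc='nightly', location='/builds')
        db.insert_test_entry(build_id, 'K64F', 'GCC_ARM', 'SingleTest', 'MBED_A1',
                             'OK', 'test output...', 1.25, 10.0, 1)
        db.update_build_id_info(build_id, _status_fk=db.BUILD_ID_STATUS_COMPLETED)
        db.disconnect()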
@@ -1,243 +0,0 @@
"""
mbed SDK
Copyright (c) 2011-2014 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Przemyslaw Wirkus <Przemyslaw.wirkus@arm.com>
"""

import sys
import json
import thread  # needed for thread.allocate_lock() below
import optparse
from flask import Flask
from os.path import join, abspath, dirname

# Be sure that the tools directory is in the search path
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)

# Imports related to mbed build api
from tools.utils import construct_enum
from tools.build_api import mcu_toolchain_matrix

# Imports from TEST API
from test_api import SingleTestRunner
from test_api import SingleTestExecutor
from test_api import get_json_data_from_file
from test_api import print_muts_configuration_from_json
from test_api import print_test_configuration_from_json
from test_api import get_avail_tests_summary_table
from test_api import get_default_test_options_parser


class SingleTestRunnerWebService(SingleTestRunner):
    def __init__(self):
        super(SingleTestRunnerWebService, self).__init__()

        # With this lock we should control access to certain resources inside this class
        self.resource_lock = thread.allocate_lock()

        self.RestRequest = construct_enum(REST_MUTS='muts',
                                          REST_TEST_SPEC='test_spec',
                                          REST_TEST_RESULTS='test_results')

    def get_rest_result_template(self, result, command, success_code):
        """ Returns common part of every web service request
        """
        result = {"result" : result,
                  "command" : command,
                  "success_code": success_code}  # 0 - OK, >0 - Error number
        return result

    # REST API handlers for Flask framework
    def rest_api_status(self):
        """ Returns current test execution status. E.g. running / finished etc.
        """
        with self.resource_lock:
            pass

    def rest_api_config(self):
        """ Returns configuration passed to SingleTest executor
        """
        with self.resource_lock:
            pass

    def rest_api_log(self):
        """ Returns current test log """
        with self.resource_lock:
            pass

    def rest_api_request_handler(self, request_type):
        """ Returns various data structures. Both static and mutable during test
        """
        result = {}
        success_code = 0
        with self.resource_lock:
            if request_type == self.RestRequest.REST_MUTS:
                result = self.muts  # Returns MUTs
            elif request_type == self.RestRequest.REST_TEST_SPEC:
                result = self.test_spec  # Returns Test Specification
            elif request_type == self.RestRequest.REST_TEST_RESULTS:
                pass  # Returns test results
            else:
                success_code = -1
        return json.dumps(self.get_rest_result_template(result, 'request/' + request_type, success_code), indent=4)


def singletest_in_webservice_mode():
    # TODO Implement this web service functionality
    pass


def get_default_test_webservice_options_parser():
    """ Get test script web service options used by CLI, webservices etc.
    """
    parser = get_default_test_options_parser()

    # Things related to web services offered by test suite scripts
    parser.add_argument('--rest-api',
                        dest='rest_api_enabled',
                        default=False,
                        action="store_true",
                        help='Enables REST API.')

    parser.add_argument('--rest-api-port',
                        dest='rest_api_port_no',
                        type=int,
                        help='Sets port for REST API interface')

    return parser

'''
if __name__ == '__main__':
    # Command line options
    parser = get_default_test_options_parser()

    parser.description = """This script allows you to run mbed defined test cases for particular MCU(s) and corresponding toolchain(s)."""
    parser.epilog = """Example: singletest.py -i test_spec.json -M muts_all.json"""

    (opts, args) = parser.parse_args()

    # Print summary / information about automation test status
    if opts.test_automation_report:
        print get_avail_tests_summary_table()
        exit(0)

    # Print summary / information about automation test status
    if opts.test_case_report:
        test_case_report_cols = ['id', 'automated', 'description', 'peripherals', 'host_test', 'duration', 'source_dir']
        print get_avail_tests_summary_table(cols=test_case_report_cols, result_summary=False, join_delim='\n')
        exit(0)

    # Only prints matrix of supported toolchains
    if opts.supported_toolchains:
        print mcu_toolchain_matrix(platform_filter=opts.general_filter_regex)
        exit(0)

    # Open file with test specification
    # test_spec_filename tells script which targets and their toolchain(s)
    # should be covered by the test scenario
    test_spec = get_json_data_from_file(opts.test_spec_filename) if opts.test_spec_filename else None
    if test_spec is None:
        if not opts.test_spec_filename:
            parser.print_help()
        exit(-1)

    # Get extra MUTs if applicable
    MUTs = get_json_data_from_file(opts.muts_spec_filename) if opts.muts_spec_filename else None

    if MUTs is None:
        if not opts.muts_spec_filename:
            parser.print_help()
        exit(-1)

    # Only prints read MUTs configuration
    if MUTs and opts.verbose_test_configuration_only:
        print "MUTs configuration in %s:"% opts.muts_spec_filename
        print print_muts_configuration_from_json(MUTs)
        print
        print "Test specification in %s:"% opts.test_spec_filename
        print print_test_configuration_from_json(test_spec)
        exit(0)

    # Verbose test specification and MUTs configuration
    if MUTs and opts.verbose:
        print print_muts_configuration_from_json(MUTs)
    if test_spec and opts.verbose:
        print print_test_configuration_from_json(test_spec)

    if opts.only_build_tests:
        # We are skipping testing phase, and suppress summary
        opts.suppress_summary = True

    single_test = SingleTestRunner(_global_loops_count=opts.test_global_loops_value,
                                   _test_loops_list=opts.test_loops_list,
                                   _muts=MUTs,
                                   _test_spec=test_spec,
                                   _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
                                   _opts_goanna_for_tests=opts.goanna_for_tests,
                                   _opts_shuffle_test_order=opts.shuffle_test_order,
                                   _opts_shuffle_test_seed=opts.shuffle_test_seed,
                                   _opts_test_by_names=opts.test_by_names,
                                   _opts_test_only_peripheral=opts.test_only_peripheral,
                                   _opts_test_only_common=opts.test_only_common,
                                   _opts_verbose_skipped_tests=opts.verbose_skipped_tests,
                                   _opts_verbose_test_result_only=opts.verbose_test_result_only,
                                   _opts_verbose=opts.verbose,
                                   _opts_firmware_global_name=opts.firmware_global_name,
                                   _opts_only_build_tests=opts.only_build_tests,
                                   _opts_suppress_summary=opts.suppress_summary,
                                   _opts_test_x_toolchain_summary=opts.test_x_toolchain_summary,
                                   _opts_copy_method=opts.copy_method
                                   )

    try:
        st_exec_thread = SingleTestExecutor(single_test)
    except KeyboardInterrupt, e:
        print "\n[CTRL+c] exit"
    st_exec_thread.start()

    if opts.rest_api_enabled:
        # Enable REST API

        app = Flask(__name__)

        @app.route('/')
        def hello_world():
            return 'Hello World!'

        @app.route('/status')
        def rest_api_status():
            return single_test.rest_api_status()  # TODO

        @app.route('/config')
        def rest_api_config():
            return single_test.rest_api_config()  # TODO

        @app.route('/log')
        def rest_api_log():
            return single_test.rest_api_log()  # TODO

        @app.route('/request/<request_type>')  # 'muts', 'test_spec', 'test_results'
        def rest_api_request_handler(request_type):
            result = single_test.rest_api_request_handler(request_type)  # TODO
            return result

        rest_api_port = int(opts.rest_api_port_no) if opts.rest_api_port_no else 5555
        app.debug = False
        app.run(port=rest_api_port)  # Blocking Flask REST API web service
    else:
        st_exec_thread.join()
'''
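Had the web-service mode been finished, clients would have polled the Flask routes in the commented-out block above; an illustrative exchange (the port is the code's default, the payload shape follows get_rest_result_template):

    # With the service running on the default port 5555:
    #   curl http://127.0.0.1:5555/request/muts
    # would return a JSON envelope such as:
    #   {"result": {...connected MUTs...}, "command": "request/muts", "success_code": 0}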
@@ -15,9 +15,21 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 from tools.paths import *
-from tools.data.support import DEFAULT_SUPPORT, CORTEX_ARM_SUPPORT
 from argparse import ArgumentTypeError
 from tools.utils import columnate
+from tools.targets import TARGETS
+
+DEFAULT_SUPPORT = {}
+CORTEX_ARM_SUPPORT = {}
+
+for target in TARGETS:
+    DEFAULT_SUPPORT[target.name] = target.supported_toolchains
+
+    if target.core.startswith('Cortex'):
+        CORTEX_ARM_SUPPORT[target.name] = [
+            t for t in target.supported_toolchains
+            if (t == 'ARM' or t == 'uARM')
+        ]
+
 TEST_CMSIS_LIB = join(TEST_DIR, "cmsis", "lib")
 TEST_MBED_LIB = join(TEST_DIR, "mbed", "env")
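The effect of this hunk is that both support maps are now derived from the live target definitions at import time instead of static data; illustratively (the target name and toolchain lists are examples, not actual values):

    # DEFAULT_SUPPORT maps each target name to all of its supported toolchains, e.g.
    #   DEFAULT_SUPPORT['K64F'] == ['ARM', 'GCC_ARM', 'IAR']
    # CORTEX_ARM_SUPPORT keeps only the ARM/uARM entries for Cortex cores, e.g.
    #   CORTEX_ARM_SUPPORT['K64F'] == ['ARM']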
@@ -1,373 +0,0 @@
"""
|
|
||||||
mbed SDK
|
|
||||||
Copyright (c) 2011-2013 ARM Limited
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
import argparse
|
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
import requests
|
|
||||||
import urlparse
|
|
||||||
|
|
||||||
def create_headers(args):
|
|
||||||
return { 'X-Api-Key': args.api_key }
|
|
||||||
|
|
||||||
def finish_command(command, response):
|
|
||||||
print(command, response.status_code, response.reason)
|
|
||||||
print(response.text)
|
|
||||||
|
|
||||||
if response.status_code < 400:
|
|
||||||
sys.exit(0)
|
|
||||||
else:
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
def create_build(args):
|
|
||||||
build = {}
|
|
||||||
build['buildType'] = args.build_type
|
|
||||||
build['number'] = args.build_number
|
|
||||||
build['source'] = args.build_source
|
|
||||||
build['status'] = 'running'
|
|
||||||
|
|
||||||
r = requests.post(urlparse.urljoin(args.url, "api/builds"), headers=create_headers(args), json=build)
|
|
||||||
|
|
||||||
if r.status_code < 400:
|
|
||||||
if args.property_file_format:
|
|
||||||
print("MBED_BUILD_ID=" + r.text)
|
|
||||||
else:
|
|
||||||
print(r.text)
|
|
||||||
|
|
||||||
sys.exit(0)
|
|
||||||
else:
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
def finish_build(args):
|
|
||||||
data = {}
|
|
||||||
data['status'] = 'completed'
|
|
||||||
|
|
||||||
r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
|
|
||||||
finish_command('finish-build', r)
|
|
||||||
|
|
||||||
def promote_build(args):
|
|
||||||
data = {}
|
|
||||||
data['buildType'] = 'Release'
|
|
||||||
|
|
||||||
r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
|
|
||||||
finish_command('promote-build', r)
|
|
||||||
|
|
||||||
def abort_build(args):
|
|
||||||
data = {}
|
|
||||||
data['status'] = 'aborted'
|
|
||||||
|
|
||||||
r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
|
|
||||||
finish_command('abort-build', r)
|
|
||||||
|
|
||||||
def add_project_runs(args):
|
|
||||||
'''
|
|
||||||
-------------------------------------
|
|
||||||
Notes on 'project_run_data' structure:
|
|
||||||
--------------------------------------
|
|
||||||
'projectRuns' - Tree structure used to keep track of what projects have
|
|
||||||
been logged in different report files. The tree is organized as follows:
|
|
||||||
|
|
||||||
'projectRuns': { - Root element of tree
|
|
||||||
|
|
||||||
'hostOs': { - Host OS on which project was built/tested
|
|
||||||
- ex. windows, linux, or mac
|
|
||||||
|
|
||||||
'platform': { - Platform for which project was built/tested
|
|
||||||
(Corresponds to platform names in targets.py)
|
|
||||||
- ex. K64F, LPC1768, NRF51822, etc.
|
|
||||||
|
|
||||||
'toolchain': { - Toolchain with which project was built/tested
|
|
||||||
(Corresponds to TOOLCHAIN_CLASSES names in toolchains/__init__.py)
|
|
||||||
- ex. ARM, uARM, GCC_ARM, etc.
|
|
||||||
|
|
||||||
'project': { - Project that was build/tested
|
|
||||||
(Corresponds to test id in tests.py or library id in libraries.py)
|
|
||||||
- For tests, ex. MBED_A1, MBED_11, DTCT_1 etc.
|
|
||||||
- For libraries, ex. MBED, RTX, RTOS, etc.
|
|
||||||
|
|
||||||
},
|
|
||||||
...
|
|
||||||
},
|
|
||||||
...
|
|
||||||
},
|
|
||||||
...
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
'platforms_set' - Set of all the platform names mentioned in the given report files
|
|
||||||
|
|
||||||
'toolchains_set' - Set of all the toolchain names mentioned in the given report files
|
|
||||||
|
|
||||||
'names_set' - Set of all the project names mentioned in the given report files
|
|
||||||
|
|
||||||
'hostOses_set' - Set of all the host names given (only given by the command line arguments)
|
|
||||||
'''
|
|
||||||
|
|
||||||
project_run_data = {}
|
|
||||||
project_run_data['projectRuns'] = {}
|
|
||||||
project_run_data['platforms_set'] = set()
|
|
||||||
project_run_data['vendors_set'] = set()
|
|
||||||
project_run_data['toolchains_set'] = set()
|
|
||||||
project_run_data['names_set'] = set()
|
|
||||||
project_run_data['hostOses_set'] = set()
|
|
||||||
project_run_data['hostOses_set'].add(args.host_os)
|
|
||||||
|
|
||||||
if args.build_report:
|
|
||||||
add_report(project_run_data, args.build_report, True, args.build_id, args.host_os)
|
|
||||||
|
|
||||||
if args.test_report:
|
|
||||||
add_report(project_run_data, args.test_report, False, args.build_id, args.host_os)
|
|
||||||
|
|
||||||
ts_data = format_project_run_data(project_run_data, args.limit)
|
|
||||||
total_result = True
|
|
||||||
|
|
||||||
total_parts = len(ts_data)
|
|
||||||
print "Uploading project runs in %d parts" % total_parts
|
|
||||||
|
|
||||||
for index, data in enumerate(ts_data):
|
|
||||||
r = requests.post(urlparse.urljoin(args.url, "api/projectRuns"), headers=create_headers(args), json=data)
|
|
||||||
print("add-project-runs part %d/%d" % (index + 1, total_parts), r.status_code, r.reason)
|
|
||||||
print(r.text)
|
|
||||||
|
|
||||||
if r.status_code >= 400:
|
|
||||||
total_result = False
|
|
||||||
|
|
||||||
if total_result:
|
|
||||||
print "'add-project-runs' completed successfully"
|
|
||||||
sys.exit(0)
|
|
||||||
else:
|
|
||||||
print "'add-project-runs' failed"
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
def prep_ts_data():
|
|
||||||
ts_data = {}
|
|
||||||
ts_data['projectRuns'] = []
|
|
||||||
ts_data['platforms'] = set()
|
|
||||||
ts_data['vendors'] = set()
|
|
||||||
ts_data['toolchains'] = set()
|
|
||||||
ts_data['names'] = set()
|
|
||||||
ts_data['hostOses'] = set()
|
|
||||||
return ts_data
|
|
||||||
|
|
||||||
def finish_ts_data(ts_data, project_run_data):
|
|
||||||
ts_data['platforms'] = list(ts_data['platforms'])
|
|
||||||
ts_data['vendors'] = list(ts_data['vendors'])
|
|
||||||
ts_data['toolchains'] = list(ts_data['toolchains'])
|
|
||||||
ts_data['names'] = list(ts_data['names'])
|
|
||||||
ts_data['hostOses'] = list(ts_data['hostOses'])
|
|
||||||
|
|
||||||
# Add all vendors to every projectRun submission
|
|
||||||
# TODO Either add "vendor" to the "project_run_data"
|
|
||||||
# or remove "vendor" entirely from the viewer
|
|
||||||
ts_data['vendors'] = list(project_run_data['vendors_set'])
|
|
||||||
|
|
||||||
def format_project_run_data(project_run_data, limit):
|
|
||||||
all_ts_data = []
|
|
||||||
current_limit_count = 0
|
|
||||||
|
|
||||||
ts_data = prep_ts_data()
|
|
||||||
ts_data['projectRuns'] = []
|
|
||||||
|
|
||||||
for hostOs_name, hostOs in project_run_data['projectRuns'].items():
|
|
||||||
for platform_name, platform in hostOs.items():
|
|
||||||
for toolchain_name, toolchain in platform.items():
|
|
||||||
for project_name, project in toolchain.items():
|
|
||||||
if current_limit_count >= limit:
|
|
||||||
finish_ts_data(ts_data, project_run_data)
|
|
||||||
all_ts_data.append(ts_data)
|
|
||||||
ts_data = prep_ts_data()
|
|
||||||
current_limit_count = 0
|
|
||||||
|
|
||||||
ts_data['projectRuns'].append(project)
|
|
||||||
ts_data['platforms'].add(platform_name)
|
|
||||||
ts_data['toolchains'].add(toolchain_name)
|
|
||||||
ts_data['names'].add(project_name)
|
|
||||||
ts_data['hostOses'].add(hostOs_name)
|
|
||||||
current_limit_count += 1
|
|
||||||
|
|
||||||
if current_limit_count > 0:
|
|
||||||
finish_ts_data(ts_data, project_run_data)
|
|
||||||
all_ts_data.append(ts_data)
|
|
||||||
|
|
||||||
return all_ts_data
|
|
||||||
|
|
||||||
def find_project_run(projectRuns, project):
|
|
||||||
keys = ['hostOs', 'platform', 'toolchain', 'project']
|
|
||||||
|
|
||||||
elem = projectRuns
|
|
||||||
|
|
||||||
for key in keys:
|
|
||||||
if not project[key] in elem:
|
|
||||||
return None
|
|
||||||
|
|
||||||
elem = elem[project[key]]
|
|
||||||
|
|
||||||
return elem
|
|
||||||
|
|
||||||
def add_project_run(projectRuns, project):
|
|
||||||
keys = ['hostOs', 'platform', 'toolchain']
|
|
||||||
|
|
||||||
elem = projectRuns
|
|
||||||
|
|
||||||
for key in keys:
|
|
||||||
if not project[key] in elem:
|
|
||||||
elem[project[key]] = {}
|
|
||||||
|
|
||||||
elem = elem[project[key]]
|
|
||||||
|
|
||||||
elem[project['project']] = project
|
|
||||||
|
|
||||||
def update_project_run_results(project_to_update, project, is_build):
|
|
||||||
if is_build:
|
|
||||||
project_to_update['buildPass'] = project['buildPass']
|
|
||||||
project_to_update['buildResult'] = project['buildResult']
|
|
||||||
project_to_update['buildOutput'] = project['buildOutput']
|
|
||||||
else:
|
|
||||||
project_to_update['testPass'] = project['testPass']
|
|
||||||
project_to_update['testResult'] = project['testResult']
|
|
||||||
project_to_update['testOutput'] = project['testOutput']
|
|
||||||
|
|
||||||
def update_project_run(projectRuns, project, is_build):
|
|
||||||
found_project = find_project_run(projectRuns, project)
|
|
||||||
if found_project:
|
|
||||||
update_project_run_results(found_project, project, is_build)
|
|
||||||
else:
|
|
||||||
add_project_run(projectRuns, project)
|
|
||||||
|
|
||||||
def add_report(project_run_data, report_file, is_build, build_id, host_os):
|
|
||||||
tree = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
tree = ET.parse(report_file)
|
|
||||||
except:
|
|
||||||
print(sys.exc_info()[0])
|
|
||||||
print('Invalid path to report: %s', report_file)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
test_suites = tree.getroot()
|
|
||||||
|
|
||||||
for test_suite in test_suites:
|
|
||||||
platform = ""
|
|
||||||
toolchain = ""
|
|
||||||
vendor = ""
|
|
||||||
for properties in test_suite.findall('properties'):
|
|
||||||
for property in properties.findall('property'):
|
|
||||||
if property.attrib['name'] == 'target':
|
|
||||||
platform = property.attrib['value']
|
|
||||||
project_run_data['platforms_set'].add(platform)
|
|
||||||
elif property.attrib['name'] == 'toolchain':
|
|
||||||
toolchain = property.attrib['value']
|
|
||||||
project_run_data['toolchains_set'].add(toolchain)
|
|
||||||
elif property.attrib['name'] == 'vendor':
|
|
||||||
vendor = property.attrib['value']
|
|
||||||
project_run_data['vendors_set'].add(vendor)
|
|
||||||
|
|
||||||
for test_case in test_suite.findall('testcase'):
|
|
||||||
projectRun = {}
|
|
||||||
projectRun['build'] = build_id
|
|
||||||
projectRun['hostOs'] = host_os
|
|
||||||
projectRun['platform'] = platform
|
|
||||||
projectRun['toolchain'] = toolchain
|
|
||||||
projectRun['project'] = test_case.attrib['classname'].split('.')[-1]
|
|
||||||
projectRun['vendor'] = vendor
|
|
||||||
|
|
||||||
project_run_data['names_set'].add(projectRun['project'])
|
|
||||||
|
|
||||||
should_skip = False
|
|
||||||
skips = test_case.findall('skipped')
|
|
||||||
|
|
||||||
if skips:
|
|
||||||
should_skip = skips[0].attrib['message'] == 'SKIP'
|
|
||||||
|
|
||||||
if not should_skip:
|
|
||||||
system_outs = test_case.findall('system-out')
|
|
||||||
|
|
||||||
output = ""
|
|
||||||
if system_outs:
|
|
||||||
output = system_outs[0].text
|
|
||||||
|
|
||||||
if is_build:
|
|
||||||
projectRun['buildOutput'] = output
|
|
||||||
else:
|
|
||||||
projectRun['testOutput'] = output
|
|
||||||
|
|
||||||
errors = test_case.findall('error')
|
|
||||||
failures = test_case.findall('failure')
|
|
||||||
projectRunPass = None
|
|
||||||
result = None
|
|
||||||
|
|
||||||
if errors:
|
|
||||||
projectRunPass = False
|
|
||||||
result = errors[0].attrib['message']
|
|
||||||
elif failures:
|
|
||||||
projectRunPass = False
|
|
||||||
result = failures[0].attrib['message']
|
|
||||||
elif skips:
|
|
||||||
projectRunPass = True
|
|
||||||
result = skips[0].attrib['message']
|
|
||||||
else:
|
|
||||||
projectRunPass = True
|
|
||||||
result = 'OK'
|
|
||||||
|
|
||||||
if is_build:
|
|
||||||
projectRun['buildPass'] = projectRunPass
|
|
||||||
projectRun['buildResult'] = result
|
|
||||||
else:
|
|
||||||
projectRun['testPass'] = projectRunPass
|
|
||||||
projectRun['testResult'] = result
|
|
||||||
|
|
||||||
update_project_run(project_run_data['projectRuns'], projectRun, is_build)
|
|
||||||
|
|
||||||
def main(arguments):
|
|
||||||
# Register and parse command line arguments
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument('-u', '--url', required=True, help='url to ci site')
|
|
||||||
parser.add_argument('-k', '--api-key', required=True, help='api-key for posting data')
|
|
||||||
|
|
||||||
subparsers = parser.add_subparsers(help='subcommand help')
|
|
||||||
|
|
||||||
create_build_parser = subparsers.add_parser('create-build', help='create a new build')
|
|
||||||
create_build_parser.add_argument('-b', '--build-number', required=True, help='build number')
|
|
||||||
create_build_parser.add_argument('-T', '--build-type', choices=['Nightly', 'Limited', 'Pull_Request', 'Release_Candidate'], required=True, help='type of build')
|
|
||||||
create_build_parser.add_argument('-s', '--build-source', required=True, help='url to source of build')
|
|
||||||
create_build_parser.add_argument('-p', '--property-file-format', action='store_true', help='print result in the property file format')
|
|
||||||
create_build_parser.set_defaults(func=create_build)
|
|
||||||
|
|
||||||
finish_build_parser = subparsers.add_parser('finish-build', help='finish a running build')
|
|
||||||
finish_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
|
|
||||||
finish_build_parser.set_defaults(func=finish_build)
|
|
||||||
|
|
||||||
finish_build_parser = subparsers.add_parser('promote-build', help='promote a build to a release')
|
|
||||||
finish_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
|
|
||||||
finish_build_parser.set_defaults(func=promote_build)
|
|
||||||
|
|
||||||
abort_build_parser = subparsers.add_parser('abort-build', help='abort a running build')
|
|
||||||
abort_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
|
|
||||||
abort_build_parser.set_defaults(func=abort_build)
|
|
||||||
|
|
||||||
add_project_runs_parser = subparsers.add_parser('add-project-runs', help='add project runs to a build')
|
|
||||||
add_project_runs_parser.add_argument('-b', '--build-id', required=True, help='build id')
|
|
||||||
add_project_runs_parser.add_argument('-r', '--build-report', required=False, help='path to junit xml build report')
|
|
||||||
add_project_runs_parser.add_argument('-t', '--test-report', required=False, help='path to junit xml test report')
|
|
||||||
add_project_runs_parser.add_argument('-o', '--host-os', required=True, help='host os on which test was run')
|
|
||||||
add_project_runs_parser.add_argument('-l', '--limit', required=False, type=int, default=1000, help='Limit the number of project runs sent at a time to avoid HTTP errors (default is 1000)')
|
|
||||||
add_project_runs_parser.set_defaults(func=add_project_runs)
|
|
||||||
|
|
||||||
args = parser.parse_args(arguments)
|
|
||||||
args.func(args)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main(sys.argv[1:])
|
|
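Grounded in the argument parser above, a typical CI sequence chained the subcommands like this (the script name ci_upload.py, the URL, and the key are placeholders):

    # python2 ci_upload.py -u https://ci.example.com -k $API_KEY create-build \
    #     -b 1234 -T Nightly -s https://github.com/ARMmbed/mbed-os -p
    # ... build and test, writing junit XML reports ...
    # python2 ci_upload.py -u https://ci.example.com -k $API_KEY add-project-runs \
    #     -b $MBED_BUILD_ID -r build_report.xml -t test_report.xml -o linux
    # python2 ci_upload.py -u https://ci.example.com -k $API_KEY finish-build -b $MBED_BUILD_ID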