Add a SUPERTARGET and use scan_resources to find duplicates

pull/2854/head
Jimmy Brisson 2016-07-08 15:10:30 -05:00
parent 685e2c52e2
commit 9b187dcceb
3 changed files with 26 additions and 6 deletions

View File

@ -12,6 +12,16 @@
"public": false,
"default_lib": "std"
},
".Super-Target": {
"device_has": ["AACI", "ANALOGIN", "STDIO_MESSAGES", "STORAGE", "TSC", "PORTIN", "I2C_ASYNCH", "DEBUG_AWARENESS", "I2CSLAVE", "CAN", "RTC_LSI", "LOCALFILESYSTEM", "CLCD", "LOWPOWERTIMER", "RTC", "ERROR_PATTERN", "SPI", "SERIAL_ASYNCH", "SERIAL_FC", "SEMIHOST", "INTERRUPTIN", "SPI_ASYNCH", "PORTOUT", "SERIAL", "ANALOGOUT", "SPISLAVE", "PORTINOUT", "PWMOUT", "SLEEP", "ERROR_RED", "ETHERNET", "I2C", "SERIAL_ASYNCH_DMA"],
"features": ["BLE", "IPV4", "IPV6", "CLIENT", "UVISOR"],
"extra_labels": [],
"core": "Cortex-M4",
"fpu": "double",
"public": true,
"default_build": "standard",
"release": true
},
"CM4_UARM": {
"inherits": ["Target"],
"core": "Cortex-M4",

View File

View File

@ -1,5 +1,12 @@
from os import walk
from os.path import join
from os.path import join, abspath, dirname, basename
import sys
ROOT = abspath(join(dirname(__file__), "..", ".."))
sys.path.insert(0, ROOT)
from tools.toolchains.gcc import GCC_ARM
from tools.targets import TARGET_MAP
from argparse import ArgumentParser
if __name__ == "__main__":
@ -9,13 +16,16 @@ if __name__ == "__main__":
parser.add_argument("--silent", help="Supress printing of filenames, just return number of duplicates", action="store_true")
args = parser.parse_args()
toolchain = GCC_ARM(TARGET_MAP[".Super-Target"])
resources = sum([toolchain.scan_resources(d) for d in args.dirs], None)
scanned_files = {}
for dir in args.dirs:
for root, dirs, files in walk(dir):
for file in files:
scanned_files.setdefault(file, [])
scanned_files[file].append(join(root, file))
for r in [resources] + resources.features.values():
for file in r.c_sources + r.s_sources + r.cpp_sources + r.objects + r.libraries + r.hex_files + r.bin_files:
scanned_files.setdefault(basename(file), [])
scanned_files[basename(file)].append(file)
count_dupe = 0
for key, value in scanned_files.iteritems():