Merge pull request #8757 from theotherjimmy/use-cpm

Update Cmsis-pack-manager to 0.2.3
Martin Kojtal 2019-03-06 15:58:42 +01:00 committed by GitHub
commit c37ac83e8e
10 changed files with 464177 additions and 685 deletions


@@ -23,3 +23,4 @@ git+https://github.com/armmbed/manifest-tool.git@v1.4.6
icetea>=1.2.1,<1.3
pycryptodome>=3.7.2,<=3.7.3
pyusb>=1.0.0,<2.0.0
cmsis-pack-manager>=0.2.3,<0.3.0
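A quick check that the installed dependency satisfies the new pin (a minimal sketch; pkg_resources ships with setuptools):

    from pkg_resources import get_distribution

    # Expected to satisfy >=0.2.3,<0.3.0 per the requirement above
    print(get_distribution("cmsis-pack-manager").version)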


@@ -16,175 +16,78 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
try:
from urllib2 import urlopen, URLError
except ImportError:
from urllib.request import urlopen, URLError
from bs4 import BeautifulSoup
from os.path import join, dirname, basename
from os import makedirs
from errno import EEXIST
from threading import Thread
try:
from Queue import Queue
except ImportError:
from queue import Queue
from re import compile, sub
from sys import stderr, stdout
from itertools import takewhile
import argparse
from json import dump, load
from zipfile import ZipFile
from tempfile import gettempdir
from os.path import join, dirname
from json import load, dump
import warnings
from distutils.version import LooseVersion
from cmsis_pack_manager import Cache as _Cache
from tools.flash_algo import PackFlashAlgo
warnings.filterwarnings("ignore")
from fuzzywuzzy import process
RootPackURL = "http://www.keil.com/pack/index.idx"
LocalPackDir = dirname(__file__)
LocalPackIndex = join(LocalPackDir, "index.json")
LocalPackAliases = join(LocalPackDir, "aliases.json")
LocalPackLegacyNames = join(LocalPackDir, "legacy-names.json")
protocol_matcher = compile("\w*://")
def strip_protocol(url) :
return protocol_matcher.sub("", str(url))
class _CacheLookup(object):
def __init__(self, index, legacy_names):
self.index = index
self.legacy_names = legacy_names
def largest_version(content) :
return sorted([t['version'] for t in content.package.releases('release')],
reverse=True, key=lambda v: LooseVersion(v))[0]
def __getitem__(self, name):
try:
return self.index[name]
except KeyError:
return self.index[self.legacy_names[name]]
def do_queue(Class, function, iterable) :
q = Queue()
threads = [Class(q, function) for each in range(20)]
for each in threads :
each.setDaemon(True)
each.start()
for thing in iterable :
q.put(thing)
q.join()
class Reader (Thread) :
def __init__(self, queue, func) :
Thread.__init__(self)
self.queue = queue
self.func = func
def run(self) :
while True :
url = self.queue.get()
self.func(url)
self.queue.task_done()
def __contains__(self, name):
return name in self.index or name in self.legacy_names
class Cache () :
class Cache(object):
""" The Cache object is the only relevant API object at the moment
Constructing the Cache object does not imply any caching.
A user of the API must explicitly call caching functions.
:param silent: A boolean that, when True, significantly reduces the printing of this Object
:param silent: Not used
:type silent: bool
:param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
:param no_timeouts: Not used
:type no_timeouts: bool
"""
def __init__ (self, silent, no_timeouts) :
self.silent = silent
self.counter = 0
self.total = 1
self._index = {}
self._aliases = {}
self.urls = None
self.no_timeouts = no_timeouts
self.data_path = gettempdir()
def display_counter (self, message) :
stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
stdout.flush()
def cache_file (self, url) :
"""Low level interface to caching a single file.
:param url: The URL to cache.
:type url: str
:rtype: None
"""
if not self.silent : print("Caching {}...".format(url))
dest = join(self.data_path, strip_protocol(url))
try :
makedirs(dirname(dest))
except OSError as exc :
if exc.errno == EEXIST : pass
else : raise
def __init__(self, silent, no_timeouts):
self._cache = _Cache(
silent, no_timeouts,
json_path=LocalPackDir, data_path=LocalPackDir
)
try:
with open(dest, "wb+") as fd :
fd.write(urlopen(url).read())
except URLError as e:
stderr.write(e.reason)
self.counter += 1
self.display_counter("Caching Files")
def pdsc_to_pack (self, url) :
"""Find the URL of the specified pack file described by a PDSC.
The PDSC is assumed to be cached and is looked up in the cache by its URL.
:param url: The url used to look up the PDSC.
:type url: str
:return: The url of the PACK file.
:rtype: str
"""
content = self.pdsc_from_cache(url)
new_url = content.package.url.get_text()
if not new_url.endswith("/") :
new_url = new_url + "/"
return (new_url + content.package.vendor.get_text() + "." +
content.package.find('name').get_text() + "." +
largest_version(content) + ".pack")
def cache_pdsc_and_pack (self, url) :
self.cache_file(url)
try :
self.cache_file(self.pdsc_to_pack(url))
except AttributeError :
stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
self.counter += 1
def get_urls(self):
"""Extract the URLs of all know PDSC files.
Will pull the index from the internet if it is not cached.
:return: A list of all PDSC URLs
:rtype: [str]
"""
if not self.urls :
try : root_data = self.pdsc_from_cache(RootPackURL)
except IOError : root_data = self.cache_and_parse(RootPackURL)
self.urls = ["/".join([pdsc.get('url').strip("/"),
pdsc.get('name').strip("/")])
for pdsc in root_data.find_all("pdsc")]
return self.urls
self._legacy_names = load(open(LocalPackLegacyNames))
except IOError:
self._legacy_names = {}
def _get_sectors(self, device):
"""Extract sector sizes from device FLM algorithm
Will return None if there is no algorithm or the PDSC URL is
formatted incorrectly.
:return: A list of tuples of sector start and size
:rtype: [list]
"""
try:
pack = self.pack_from_cache(device)
pack = self._cache.pack_from_cache(device)
ret = []
for filename in device['algorithm'].keys():
for algo in device['algorithms']:
try:
flm = pack.open(filename)
flm = pack.open(
algo["file_name"]
.replace("\\\\", "/")
.replace("\\", "/")
)
flash_alg = PackFlashAlgo(flm.read())
sectors = [(flash_alg.flash_start + offset, size)
for offset, size in flash_alg.sector_sizes]
@@ -196,308 +99,37 @@ class Cache () :
except Exception:
return None
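_get_sectors feeds each flash algorithm (FLM) in the pack through PackFlashAlgo to recover the sector layout. A standalone sketch of the same computation, assuming a local FLM blob (the file path is hypothetical):

    from tools.flash_algo import PackFlashAlgo

    # Hypothetical path; in _get_sectors the bytes come from pack.open(...)
    with open("MK_P1M0.FLM", "rb") as flm:
        algo = PackFlashAlgo(flm.read())
    # Absolute (start, size) pairs, mirroring the list built above
    sectors = [(algo.flash_start + offset, size)
               for offset, size in algo.sector_sizes]
    print(sectors)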
def _extract_dict(self, device, filename, pack) :
to_ret = dict(pdsc_file=filename, pack_file=pack)
try : to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
size=m["size"]))
for m in device("memory")])
except (KeyError, TypeError, IndexError) as e:
try : to_ret["memory"] = dict([(m["name"], dict(start=m["start"],
size=m["size"]))
for m in device("memory")])
except (KeyError, TypeError, IndexError) as e : pass
try: algorithms = device("algorithm")
except:
try: algorithms = device.parent("algorithm")
except: pass
else:
if not algorithms:
try: algorithms = device.parent("algorithm")
except: pass
try : to_ret["algorithm"] = dict([(algo.get("name").replace('\\','/'),
dict(start=algo["start"],
size=algo["size"],
ramstart=algo.get("ramstart",None),
ramsize=algo.get("ramsize",None),
default=algo.get("default",1)))
for algo in algorithms])
except (KeyError, TypeError, IndexError) as e: pass
try: to_ret["debug"] = device.parent.parent.debug["svd"]
except (KeyError, TypeError, IndexError) as e : pass
try: to_ret["debug"] = device.parent.debug["svd"]
except (KeyError, TypeError, IndexError) as e : pass
try: to_ret["debug"] = device.debug["svd"]
except (KeyError, TypeError, IndexError) as e : pass
@property
def index(self):
return _CacheLookup(self._cache.index, self._legacy_names)
to_ret["compile"] = {}
try: compile_l1 = device.parent("compile")
except (KeyError, TypeError, IndexError) as e : compile_l1 = []
try: compile_l2 = device.parent.parent("compile")
except (KeyError, TypeError, IndexError) as e : compile_l2 = []
compile = compile_l2 + compile_l1
for c in compile:
try: to_ret["compile"]["header"] = c["header"]
except (KeyError, TypeError, IndexError) as e : pass
try: to_ret["compile"]["define"] = c["define"]
except (KeyError, TypeError, IndexError) as e : pass
def cache_descriptors(self):
self._cache.cache_descriptors()
try: to_ret["core"] = device.parent.processor['dcore']
except (KeyError, TypeError, IndexError) as e : pass
try: to_ret["core"] = device.parent.parent.processor['dcore']
except (KeyError, TypeError, IndexError) as e : pass
def cache_everything(self):
self._cache.cache_everything()
for name, device in self._cache.index.items():
if name != "version":
device["sectors"] = self._get_sectors(device)
self.generate_index()
to_ret["processor"] = {}
try: proc_l1 = device("processor")
except (KeyError, TypeError, IndexError) as e: proc_l1 = []
try: proc_l2 = device.parent("processor")
except (KeyError, TypeError, IndexError) as e: proc_l2 = []
try: proc_l3 = device.parent.parent("processor")
except (KeyError, TypeError, IndexError) as e: proc_l3 = []
proc = proc_l3 + proc_l2 + proc_l1
for p in proc:
try: to_ret["processor"]["fpu"] = p['dfpu']
except (KeyError, TypeError, IndexError) as e: pass
try: to_ret["processor"]["endianness"] = p['dendian']
except (KeyError, TypeError, IndexError) as e: pass
try: to_ret["processor"]["clock"] = p['dclock']
except (KeyError, TypeError, IndexError) as e: pass
try: to_ret["vendor"] = device.parent['dvendor']
except (KeyError, TypeError, IndexError) as e: pass
try: to_ret["vendor"] = device.parent.parent['dvendor']
except (KeyError, TypeError, IndexError) as e: pass
if not to_ret["processor"]:
del to_ret["processor"]
if not to_ret["compile"]:
del to_ret["compile"]
to_ret['debug-interface'] = []
to_ret['sectors'] = self._get_sectors(to_ret)
return to_ret
def _generate_index_helper(self, d) :
try :
pack = self.pdsc_to_pack(d)
self._index.update(dict([(dev['dname'], self._extract_dict(dev, d, pack)) for dev in
(self.pdsc_from_cache(d)("device"))]))
except AttributeError as e :
stderr.write("[ ERROR ] file {}\n".format(d))
print(e)
self.counter += 1
self.display_counter("Generating Index")
def _generate_aliases_helper(self, d) :
try :
mydict = []
for dev in self.pdsc_from_cache(d)("board"):
try :
mydict.append((dev['name'], dev.mounteddevice['dname']))
except (KeyError, TypeError, IndexError) as e:
pass
self._aliases.update(dict(mydict))
except (AttributeError, TypeError) as e :
pass
self.counter += 1
self.display_counter("Scanning for Aliases")
def get_flash_algorthim_binary(self, device_name, all=False) :
def get_svd_file(self, device_name):
"""Retrieve the flash algorithm file for a particular part.
Assumes that both the PDSC and the PACK file associated with that part are in the cache.
:param device_name: The exact name of a device
:param all: Return an iterator of all flash algos for this device
:type device_name: str
:return: A file-like object that, when read, is the ELF file that describes the flashing algorithm
:return: A file-like object that, when read, is the ELF file that describes the flashing algorithm.
When "all" is set to True then an iterator for file-like objects is returned
:rtype: ZipExtFile or ZipExtFile iterator if all is True
"""
device = self.index[device_name]
pack = self.pack_from_cache(device)
algo_itr = (pack.open(path) for path in device['algorithm'].keys())
return algo_itr if all else algo_itr.next()
def get_svd_file(self, device_name) :
"""Retrieve the flash algorithm file for a particular part.
Assumes that both the PDSC and the PACK file associated with that part are in the cache.
Assumes that both the PDSC and the PACK file associated with that part
are in the cache.
:param device_name: The exact name of a device
:type device_name: str
:return: A file-like object that, when read, is the SVD file for
the device
:rtype: ZipExtFile
"""
device = self.index[device_name]
pack = self.pack_from_cache(device)
return pack.open(device['debug'])
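A usage sketch for the SVD lookup, assuming the relevant pack is already cached (the part name is illustrative):

    from tools.arm_pack_manager import Cache

    cache = Cache(True, True)                     # silent, no_timeouts
    svd = cache.get_svd_file("STM32F429ZITx")     # illustrative part name
    with open("STM32F429ZITx.svd", "wb") as out:  # hypothetical destination
        out.write(svd.read())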
def generate_index(self) :
self._index = {}
self.counter = 0
do_queue(Reader, self._generate_index_helper, self.get_urls())
with open(LocalPackIndex, "wb+") as out:
self._index["version"] = "0.1.0"
dump(self._index, out)
stdout.write("\n")
def generate_aliases(self) :
self._aliases = {}
self.counter = 0
do_queue(Reader, self._generate_aliases_helper, self.get_urls())
with open(LocalPackAliases, "wb+") as out:
dump(self._aliases, out)
stdout.write("\n")
def find_device(self, match) :
choices = process.extract(match, self.index.keys(), limit=len(self.index))
choices = sorted([(v, k) for k, v in choices], reverse=True)
if choices : choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
return [(v, self.index[v]) for k,v in choices]
def dump_index_to_file(self, file) :
with open(file, "wb+") as out:
dump(self.index, out)
@property
def index(self) :
"""An index of most of the important data in all cached PDSC files.
:Example:
>>> from ArmPackManager import Cache
>>> a = Cache()
>>> a.index["LPC1768"]
{u'algorithm': {u'RAMsize': u'0x0FE0',
u'RAMstart': u'0x10000000',
u'name': u'Flash/LPC_IAP_512.FLM',
u'size': u'0x80000',
u'start': u'0x00000000'},
u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
u'debug': u'SVD/LPC176x5x.svd',
u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}
"""
if not self._index :
with open(LocalPackIndex) as i :
self._index = load(i)
return self._index
@property
def aliases(self) :
"""An index of most of the important data in all cached PDSC files.
:Example:
>>> from ArmPackManager import Cache
>>> a = Cache()
>>> a.index["LPC1768"]
{u'algorithm': {u'RAMsize': u'0x0FE0',
u'RAMstart': u'0x10000000',
u'name': u'Flash/LPC_IAP_512.FLM',
u'size': u'0x80000',
u'start': u'0x00000000'},
u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
u'debug': u'SVD/LPC176x5x.svd',
u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}
"""
if not self._aliases :
with open(LocalPackAliases) as i :
self._aliases = load(i)
return self._aliases
def cache_everything(self) :
"""Cache every PACK and PDSC file known.
Generates an index afterwards.
.. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
"""
self.cache_pack_list(self.get_urls())
self.generate_index()
self.generate_aliases()
def cache_descriptors(self) :
"""Cache every PDSC file known.
Generates an index afterwards.
.. note:: This process may use 11MB of drive space and take upwards of 1 minute.
"""
self.cache_descriptor_list(self.get_urls())
self.generate_index()
self.generate_aliases()
def cache_descriptor_list(self, list) :
"""Cache a list of PDSC files.
:param list: URLs of PDSC files to cache.
:type list: [str]
"""
self.total = len(list)
self.display_counter("Caching Files")
do_queue(Reader, self.cache_file, list)
stdout.write("\n")
def cache_pack_list(self, list) :
"""Cache a list of PACK files, referenced by their PDSC URL
:param list: URLs of PDSC files to cache.
:type list: [str]
"""
self.total = len(list) * 2
self.display_counter("Caching Files")
do_queue(Reader, self.cache_pdsc_and_pack, list)
stdout.write("\n")
def pdsc_from_cache(self, url) :
"""Low level inteface for extracting a PDSC file from the cache.
Assumes that the file specified is a PDSC file and is in the cache.
:param url: The URL of a PDSC file.
:type url: str
:return: A parsed representation of the PDSC file.
:rtype: BeautifulSoup
"""
dest = join(self.data_path, strip_protocol(url))
with open(dest, "r") as fd :
return BeautifulSoup(fd, "html.parser")
def pack_from_cache(self, device) :
"""Low level inteface for extracting a PACK file from the cache.
Assumes that the file specified is a PACK file and is in the cache.
:param device: The index entry of a device whose 'pack_file' names the PACK.
:type device: dict
:return: A parsed representation of the PACK file.
:rtype: ZipFile
"""
return ZipFile(join(self.data_path,
strip_protocol(device['pack_file'])))
def gen_dict_from_cache() :
pdsc_files = pdsc_from_cache(RootPackURL)
def cache_and_parse(self, url) :
"""A low level shortcut that Caches and Parses a PDSC file.
:param url: The URL of the PDSC file.
:type url: str
:return: A parsed representation of the PDSC file.
:rtype: BeautifulSoup
"""
self.cache_file(url)
return self.pdsc_from_cache(url)
def generate_index(self):
with open(LocalPackIndex, "w+") as out:
self._cache.index["version"] = "0.2.0"
dump(self._cache.index, out, indent=4, sort_keys=True)
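Taken together, the rewrite reduces Cache to a thin wrapper over cmsis-pack-manager: caching, index generation, and legacy-name fallback all route through _Cache. A minimal end-to-end sketch (part name illustrative, network access assumed):

    from tools.arm_pack_manager import Cache

    cache = Cache(True, True)            # both flags are now unused
    cache.cache_everything()             # downloads packs, then regenerates index.json
    device = cache.index["STM32F429ZI"]  # legacy name resolves via legacy-names.json
    print(sorted(device["memories"]))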

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,67 @@
{
"MK22DN512xxx5": "MK22DN512VLH5",
"MK24FN1M0xxx12": "MK24FN1M0VLL12",
"MKL26Z128xxx4": "MKL26Z128VLH4",
"MKL27Z64xxx4": "MKL27Z64VLH4",
"MKL43Z256xxx4": "MKL43Z256VLH4",
"MKL46Z256xxx4": "MKL46Z256VLL4",
"MKL82Z128xxx7": "MKL82Z128VLK7",
"R7S72103": "R7S72100",
"STM32F030R8": "STM32F030R8Tx",
"STM32F031K6": "STM32F031K6Tx",
"STM32F042K6": "STM32F042K6Tx",
"STM32F051R8": "STM32F051R8Tx",
"STM32F070RB": "STM32F070RBTx",
"STM32F072RB": "STM32F072RBTx",
"STM32F091RC": "STM32F091RCTx",
"STM32F207ZG": "STM32F207ZGTx",
"STM32F302R8": "STM32F302R8Tx",
"STM32F303K8": "STM32F303K8Tx",
"STM32F303RE": "STM32F303RETx",
"STM32F303VC": "STM32F303VCTx",
"STM32F303ZE": "STM32F303ZETx",
"STM32F334C8": "STM32F334C8Tx",
"STM32F334R8": "STM32F334R8Tx",
"STM32F401RE": "STM32F401RETx",
"STM32F401VC": "STM32F401VCTx",
"STM32F401VE": "STM32F401VETx",
"STM32F405RG": "STM32F405RGTx",
"STM32F407VG": "STM32F407VGTx",
"STM32F410RB": "STM32F410RBTx",
"STM32F411RE": "STM32F411RETx",
"STM32F412ZG": "STM32F412ZGTx",
"STM32F413ZH": "STM32F413ZHTx",
"STM32F429ZI": "STM32F429ZITx",
"STM32F437VG": "STM32F437VGTx",
"STM32F439VI": "STM32F439VITx",
"STM32F439ZI": "STM32F439ZITx",
"STM32F446RE": "STM32F446RETx",
"STM32F446VE": "STM32F446VETx",
"STM32F446ZE": "STM32F446ZETx",
"STM32F469NI": "STM32F469NIHx",
"STM32F746NG": "STM32F746NGHx",
"STM32F746ZG": "STM32F746ZGTx",
"STM32F756ZG": "STM32F756ZGTx",
"STM32F767ZI": "STM32F767ZITx",
"STM32F769NI": "STM32F769NIHx",
"STM32H743ZI": "STM32H743ZITx",
"STM32L011K4": "STM32L011K4Tx",
"STM32L031K6": "STM32L031K6Tx",
"STM32L053C8": "STM32L053C8Tx",
"STM32L053R8": "STM32L053R8Tx",
"STM32L072CZ": "STM32L072CZTx",
"STM32L073RZ": "STM32L073RZTx",
"STM32L082CZ": "STM32L082CZYx",
"STM32L432KC": "STM32L432KCUx",
"STM32L433RC": "STM32L433RCTx",
"STM32L443RC": "STM32L443RCTx",
"STM32L471QG": "STM32L471QGIx",
"STM32L475VG": "STM32L475VGTx",
"STM32L476JG": "STM32L476JGYx",
"STM32L476RG": "STM32L476RGTx",
"STM32L476VG": "STM32L476VGTx",
"STM32L486RG": "STM32L486RGTx",
"STM32L496AG": "STM32L496AGIx",
"STM32L496ZG": "STM32L496ZGTx",
"STM32L4R5ZI": "STM32L4R5ZITx"
}
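This mapping backs the _CacheLookup fallback above: a name that misses in the cmsis-pack-manager index is retried under its legacy spelling. A re-sketch of that lookup with illustrative data:

    legacy_names = {"STM32F429ZI": "STM32F429ZITx"}
    index = {"STM32F429ZITx": {"name": "STM32F429ZITx"}}

    def lookup(name):
        # Mirrors _CacheLookup.__getitem__: try the index, then the legacy map
        try:
            return index[name]
        except KeyError:
            return index[legacy_names[name]]

    assert lookup("STM32F429ZI")["name"] == "STM32F429ZITx"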


@@ -1,209 +0,0 @@
"""
Copyright (c) 2016-2019 ARM Limited. All rights reserved.
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function, division, absolute_import
import argparse
from os.path import basename
from tools.arm_pack_manager import Cache
from os.path import basename, join, dirname, exists
from os import makedirs
from itertools import takewhile
from fuzzywuzzy import process
from .arm_pack_manager import Cache
parser = argparse.ArgumentParser(description='A Handy little utility for keeping your cache of pack files up to date.')
subparsers = parser.add_subparsers(title="Commands")
def subcommand(name, *args, **kwargs):
def subcommand(command):
subparser = subparsers.add_parser(name, **kwargs)
for arg in args:
arg = dict(arg)
opt = arg['name']
del arg['name']
if isinstance(opt, basestring):
subparser.add_argument(opt, **arg)
else:
subparser.add_argument(*opt, **arg)
subparser.add_argument("-v", "--verbose", action="store_true", dest="verbose", help="Verbose diagnostic output")
subparser.add_argument("-vv", "--very_verbose", action="store_true", dest="very_verbose", help="Very verbose diagnostic output")
subparser.add_argument("--no-timeouts", action="store_true", help="Remove all timeouts and try to download unconditionally")
subparser.add_argument("--and", action="store_true", dest="intersection", help="combine search terms as if with an and")
subparser.add_argument("--or", action="store_false", dest="intersection", help="combine search terms as if with an or")
subparser.add_argument("--union", action="store_false", dest="intersection", help="combine search terms as if with a set union")
subparser.add_argument("--intersection", action="store_true", dest="intersection", help="combine search terms as if with a set intersection")
def thunk(parsed_args):
cache = Cache(not parsed_args.verbose, parsed_args.no_timeouts)
argv = [arg['dest'] if 'dest' in arg else arg['name'] for arg in args]
argv = [(arg if isinstance(arg, basestring) else arg[-1]).strip('-')
for arg in argv]
argv = {arg: vars(parsed_args)[arg] for arg in argv
if vars(parsed_args)[arg] is not None}
return command(cache, **argv)
subparser.set_defaults(command=thunk)
return command
return subcommand
def user_selection (message, options) :
print(message)
for choice, index in zip(options, range(len(options))) :
print("({}) {}".format(index, choice))
pick = None
while pick is None :
stdout.write("please select an integer from 0 to {} or \"all\"".format(len(options)-1))
input = raw_input()
try :
if input == "all" :
pick = options
else :
pick = [options[int(input)]]
except ValueError :
print("I did not understand your input")
return pick
def fuzzy_find(matches, urls) :
choices = {}
for match in matches :
for key, value in process.extract(match, urls, limit=None) :
choices.setdefault(key, 0)
choices[key] += value
choices = sorted([(v, k) for k, v in choices.items()], reverse=True)
if not choices : return []
elif len(choices) == 1 : return [choices[0][1]]
elif choices[0][0] > choices[1][0] : choices = choices[:1]
else : choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
return [v for k,v in choices]
@subcommand('cache',
dict(name='matches', nargs="*",
help="a bunch of things to search for in part names"),
dict(name=['-e','--everything'], action="store_true",
help="download everything possible"),
dict(name=['-d','--descriptors'], action="store_true",
help="download all descriptors"),
dict(name=["-b","--batch"], action="store_true",
help="don't ask for user input and assume download all"),
help="Cache a group of PACK or PDSC files")
def command_cache (cache, matches, everything=False, descriptors=False, batch=False, verbose=False, intersection=True) :
if everything :
cache.cache_everything()
return True
if descriptors :
cache.cache_descriptors()
return True
if not matches :
print("No action specified nothing to do")
else :
urls = cache.get_urls()
if intersection :
choices = fuzzy_find(matches, map(basename, urls))
else :
choices = sum([fuzzy_find([m], map(basename, urls)) for m in matches], [])
if not batch and len(choices) > 1 :
choices = user_selection("Please select a file to cache", choices)
to_download = []
for choice in choices :
for url in urls :
if choice in url :
to_download.append(url)
cache.cache_pack_list(to_download)
return True
@subcommand('find-part',
dict(name='matches', nargs="+", help="words to match to processors"),
dict(name=['-l',"--long"], action="store_true",
help="print out part details with part"),
dict(name=['-p', '--parts-only'], action="store_false", dest="print_aliases"),
dict(name=['-a', '--aliases-only'], action="store_false", dest="print_parts"),
help="Find a Part and it's description within the cache")
def command_find_part (cache, matches, long=False, intersection=True,
print_aliases=True, print_parts=True) :
if long :
import pprint
pp = pprint.PrettyPrinter()
parts = cache.index
if intersection :
choices = fuzzy_find(matches, parts.keys())
aliases = fuzzy_find(matches, cache.aliases.keys())
else :
choices = sum([fuzzy_find([m], parts.keys()) for m in matches], [])
aliases = sum([fuzzy_find([m], cache.aliases.keys()) for m in matches], [])
if print_parts:
for part in choices :
print(part)
if long :
pp.pprint(cache.index[part])
if print_aliases:
for alias in aliases :
print(alias)
if long :
pp.pprint(cache.index[cache.aliases[alias]])
@subcommand('dump-parts',
dict(name='out', help='directory to dump to'),
dict(name='parts', nargs='+', help='parts to dump'),
help='Create a directory with an index.json describing the parts and all of their associated flashing algorithms.'
)
def command_dump_parts (cache, out, parts, intersection=False) :
index = {}
if intersection :
for part in fuzzy_find(parts, cache.index):
index.update(cache.index[part])
else :
for part in parts :
index.update(dict(cache.find_device(part)))
for n, p in index.items() :
try :
if not exists(join(out, dirname(p['algorithm']['file']))) :
makedirs(join(out, dirname(p['algorithm']['file'])))
with open(join(out, p['algorithm']['file']), "wb+") as fd :
fd.write(cache.get_flash_algorthim_binary(n).read())
except KeyError:
print("[Warning] {} does not have an associated flashing algorithm".format(n))
with open(join(out, "index.json"), "wb+") as fd :
dump(index,fd)
@subcommand('cache-part',
dict(name='matches', nargs="+", help="words to match to devices"),
help='Cache PACK files associated with the parts matching the provided words')
def command_cache_part (cache, matches, intersection=True) :
index = cache.index
if intersection :
choices = fuzzy_find(matches, index.keys())
aliases = fuzzy_find(matches, cache.aliases.keys())
else :
choices = sum([fuzzy_find([m], index.keys()) for m in matches], [])
aliases = sum([fuzzy_find([m], cache.aliases.keys()) for m in matches], [])
urls = set([index[c]['pdsc_file'] for c in choices])
urls |= set([index[cache.aliases[a]]['pdsc_file'] for a in aliases])
cache.cache_pack_list(list(urls))
def get_argparse() :
return parser
def main() :
args = parser.parse_args()
args.command(args)
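Each subcommand is registered via set_defaults(command=thunk), so the parser can also be driven programmatically. A minimal sketch, assuming the module stays importable under its historical path (this diff moves the file, so the import path is an assumption):

    # Hypothetical import path for the CLI module shown above
    from tools.arm_pack_manager.pack_manager import get_argparse

    parser = get_argparse()
    args = parser.parse_args(["find-part", "LPC1768", "--long"])
    args.command(args)  # the thunk builds a Cache and dispatches to command_find_part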


@@ -645,15 +645,23 @@ class Config(object):
"arm_pack_manager index.")
return cache.index[self.target.device_name]
def _get_mem_specs(self, memories, cmsis_part, exception_text):
for memory in memories:
try:
size = cmsis_part['memory'][memory]['size']
start = cmsis_part['memory'][memory]['start']
return (start, size)
except KeyError:
continue
raise ConfigException(exception_text)
@staticmethod
def _memory_ordering(memory):
return (memory['default'], memory['size'], memory['start'])
def _get_mem_specs(self, permissions, cmsis_part):
all_matching_memories = {
name: memory for name, memory in cmsis_part['memories'].items()
if all(memory['access'].get(perm) for perm in permissions)
}
if all_matching_memories:
return all_matching_memories
else:
raise ConfigException(
"Missing a memory that is {} in CMSIS Pack data".format(
", ".join(permissions)
)
)
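The new _get_mem_specs selects memories by access permissions instead of hard-coded region names. A worked example with illustrative pack data:

    cmsis_part = {"memories": {
        "IROM1": {"access": {"read": True, "execute": True},
                  "start": 0x00000000, "size": 0x80000, "default": True},
        "IRAM1": {"access": {"read": True, "write": True},
                  "start": 0x10000000, "size": 0x8000, "default": True},
    }}
    # The ROM query used for non-RAM regions: readable and executable
    roms = {name: mem for name, mem in cmsis_part["memories"].items()
            if all(mem["access"].get(perm) for perm in ["read", "execute"])}
    assert list(roms) == ["IROM1"]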
def get_all_active_memories(self, memory_list):
"""Get information of all available rom/ram memories in the form of dictionary
@@ -664,7 +672,6 @@ class Config(object):
# This is usually done for a target which:
# 1. Doesn't support CMSIS pack, or
# 2. Supports TrustZone and user needs to change its flash partition
available_memories = {}
# Counter to keep track of ROM/RAM memories supported by target
active_memory_counter = 0
@@ -687,16 +694,16 @@
"ram/rom start/size not found in "
"targets.json.")
present_memories = set(cmsis_part['memory'].keys())
present_memories = set(cmsis_part['memories'].keys())
valid_memories = set(memory_list).intersection(present_memories)
memories = self._get_mem_specs(
["read", "write" if active_memory == "RAM" else "execute"],
cmsis_part
)
for memory in valid_memories:
mem_start, mem_size = self._get_mem_specs(
[memory],
cmsis_part,
"Not enough information in CMSIS packs to build a bootloader "
"project"
)
mem_start = memories[memory]["start"]
mem_size = memories[memory]["size"]
if memory == 'IROM1' or memory == 'PROGRAM_FLASH':
mem_start = getattr(self.target, "mbed_rom_start", False) or mem_start
mem_size = getattr(self.target, "mbed_rom_size", False) or mem_size
@@ -712,8 +719,10 @@
active_memory_counter += 1
memory = active_memory + str(active_memory_counter)
mem_start = int(mem_start, 0)
mem_size = int(mem_size, 0)
if not isinstance(mem_start, int):
mem_start = int(mem_start, 0)
if not isinstance(mem_size, int):
mem_size = int(mem_size, 0)
available_memories[memory] = [mem_start, mem_size]
return available_memories
@@ -722,19 +731,23 @@
def ram_regions(self):
"""Generate a list of ram regions from the config"""
cmsis_part = self._get_cmsis_part()
ram_start, ram_size = self._get_mem_specs(
["IRAM1", "SRAM0"],
cmsis_part,
"Not enough information in CMSIS packs to build a ram sharing project"
)
rams = self._get_mem_specs(("read", "write"), cmsis_part)
best_ram = sorted(
rams.values(),
key=self._memory_ordering,
reverse=True
)[0]
ram_start, ram_size = best_ram["start"], best_ram["size"]
# Override ram_start/ram_size
#
# This is usually done for a target which:
# 1. Doesn't support CMSIS pack, or
# 2. Supports TrustZone and user needs to change its flash partition
ram_start = getattr(self.target, "mbed_ram_start", False) or ram_start
ram_size = getattr(self.target, "mbed_ram_size", False) or ram_size
return [RamRegion("application_ram", int(ram_start, 0), int(ram_size, 0), True)]
if getattr(self.target, "mbed_ram_start", None):
ram_start = int(getattr(self.target, "mbed_ram_start"), 0)
if getattr(self.target, "mbed_ram_size", None):
ram_size = int(getattr(self.target, "mbed_ram_size"), 0)
return [RamRegion("application_ram", ram_start, ram_size, True)]
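_memory_ordering ranks candidate RAMs so that a default region wins first, then the largest, then the highest start address. A worked example with illustrative entries:

    rams = [
        {"default": False, "size": 0x20000, "start": 0x20000000},
        {"default": True, "size": 0x8000, "start": 0x10000000},
    ]
    best = sorted(rams, key=lambda m: (m["default"], m["size"], m["start"]),
                  reverse=True)[0]
    assert best["start"] == 0x10000000  # a default region beats a larger non-default one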
@property
def regions(self):


@@ -36,16 +36,25 @@ class DeviceCMSIS():
target_info = self.check_supported(target)
if not target_info:
raise TargetNotSupportedException("Target not supported in CMSIS pack")
self.url = target_info['pdsc_file']
self.pdsc_url, self.pdsc_id, _ = split_path(self.url)
self.pack_url, self.pack_id, _ = split_path(target_info['pack_file'])
self.dname = target_info["_cpu_name"]
self.pack_url = target_info['from_pack']['url']
self.pack_id = "{}.{}.{}".format(
target_info['from_pack']['vendor'],
target_info['from_pack']['pack'],
target_info['from_pack']['version']
)
self.dname = target_info["name"]
self.core = target_info["_core"]
self.dfpu = target_info['processor']['fpu']
self.debug, self.dvendor = self.vendor_debug(target_info['vendor'])
self.dendian = target_info['processor'].get('endianness','Little-endian')
try:
self.dfpu = target_info['processor']['Symmetric']['fpu']
except KeyError:
self.dfpu = target_info['processor']['Asymmetric']['fpu']
self.debug, self.dvendor = self.vendor_debug(
target_info.get('vendor') or target_info['from_pack']['vendor']
)
self.dendian = target_info['processor'].get(
'endianness', 'Little-endian'
)
self.debug_svd = target_info.get('debug', '')
self.compile_header = target_info['compile']['header']
self.target_info = target_info
@staticmethod

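The pack identifier is now assembled from the index's from_pack record rather than parsed out of a URL. A sketch with an illustrative record:

    # Illustrative from_pack record as stored by cmsis-pack-manager
    from_pack = {"vendor": "Keil", "pack": "STM32F4xx_DFP", "version": "2.13.0"}
    pack_id = "{}.{}.{}".format(
        from_pack["vendor"], from_pack["pack"], from_pack["version"])
    assert pack_id == "Keil.STM32F4xx_DFP.2.13.0"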

@@ -26,7 +26,6 @@ class DeviceUvision(DeviceCMSIS):
self.svd = ''
if self.debug_svd:
self.svd = dev_format.format(self.dname, self.debug_svd)
self.reg_file = dev_format.format(self.dname, self.compile_header)
self.debug_interface = self.uv_debug()
self.flash_dll = self.generate_flash_dll()
@@ -73,14 +72,10 @@
'''
fl_count = 0
def get_mem_no_x(mem_str):
mem_reg = "\dx(\w+)"
m = re.search(mem_reg, mem_str)
return m.group(1) if m else None
RAMS = [
(get_mem_no_x(info["start"]), get_mem_no_x(info["size"]))
for mem, info in self.target_info["memory"].items() if "RAM" in mem
(info["start"], info["size"])
for mem, info in self.target_info["memories"].items()
if "RAM" in mem
]
format_str = (
"UL2CM3(-S0 -C0 -P0 -FD{ramstart}"
@@ -92,24 +87,25 @@
if len(RAMS) >= 1:
ramstart = RAMS[0][0]
extra_flags = []
for name, info in self.target_info["algorithm"].items():
if not name or not info:
for info in self.target_info["algorithms"]:
if not info:
continue
if int(info["default"]) == 0:
if not info["default"]:
continue
name = info['file_name']
name_reg = "\w*/([\w_]+)\.flm"
m = re.search(name_reg, name.lower())
fl_name = m.group(1) if m else None
name_flag = "-FF" + str(fl_count) + fl_name
start = get_mem_no_x(info["start"])
size = get_mem_no_x(info["size"])
start = '{:x}'.format(info["start"])
size = '{:x}'.format(info["size"])
rom_start_flag = "-FS" + str(fl_count) + str(start)
rom_size_flag = "-FL" + str(fl_count) + str(size)
if info["ramstart"] is not None and info["ramsize"] is not None:
ramstart = get_mem_no_x(info["ramstart"])
ramsize = get_mem_no_x(info["ramsize"])
ramstart = '{:x}'.format(info["ramstart"])
ramsize = '{:x}'.format(info["ramsize"])
path_flag = "-FP{}($$Device:{}${})".format(
str(fl_count), self.dname, name

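Because start and size now arrive as integers from the index, the old get_mem_no_x regex gives way to plain hex formatting when composing the UL2CM3 flags. A worked example:

    fl_count = 0
    start, size = 0x08000000, 0x100000  # illustrative integers from the index
    rom_start_flag = "-FS" + str(fl_count) + "{:x}".format(start)
    rom_size_flag = "-FL" + str(fl_count) + "{:x}".format(size)
    assert rom_start_flag == "-FS08000000"
    assert rom_size_flag == "-FL0100000"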

@@ -25,7 +25,7 @@
<StartupFile></StartupFile>
<FlashDriverDll>{{device.flash_dll}}</FlashDriverDll>
<DeviceId>0</DeviceId>
<RegisterFile>{{device.reg_file}}</RegisterFile>
<RegisterFile></RegisterFile>
<MemoryEnv></MemoryEnv>
<Cmp></Cmp>
<Asm></Asm>