Use upgrade table for unavailable CMP data

pull/8757/head
Jimmy Brisson 2018-11-15 13:26:33 -06:00
parent 74f45fbbfc
commit aa07e126f1
3 changed files with 113 additions and 243 deletions

View File

@ -16,29 +16,10 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
try:
from urllib2 import urlopen, URLError
except ImportError:
from urllib.request import urlopen, URLError
from bs4 import BeautifulSoup
from os.path import join, dirname, basename
from os import makedirs
from errno import EEXIST
from threading import Thread
try:
from Queue import Queue
except ImportError:
from queue import Queue
from re import compile, sub
from sys import stderr, stdout
from itertools import takewhile
import argparse
from json import dump, load
from zipfile import ZipFile
from tempfile import gettempdir
from os.path import join, dirname
from json import load, dump
import warnings
from cmsis_pack_manager import Cache as _Cache
from distutils.version import LooseVersion
from tools.flash_algo import PackFlashAlgo
@ -49,40 +30,64 @@ RootPackURL = "http://www.keil.com/pack/index.idx"
LocalPackDir = dirname(__file__)  # directory holding the pack data bundled with this module
LocalPackIndex = join(LocalPackDir, "index.json")  # shipped device index (regenerated by generate_index)
LocalPackAliases = join(LocalPackDir, "aliases.json")  # part-name alias table
LocalPackLegacyNames = join(LocalPackDir, "legacy-names.json")  # maps legacy device names to current CMSIS names
class _CacheLookup(object):
def __init__(self, index, legacy_names):
self.index = index
self.legacy_names = legacy_names
class Cache (_Cache):
def __getitem__(self, name):
try:
return self.index[name]
except KeyError:
return self.index[self.legacy_names[name]]
def __contains__(self, name):
return name in self.index or name in self.legacy_names
class Cache(object):
""" The Cache object is the only relevant API object at the moment
Constructing the Cache object does not imply any caching.
A user of the API must explicitly call caching functions.
:param silent: A boolean that, when True, significantly reduces the printing of this Object
:param silent: Not used
:type silent: bool
:param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
:param no_timeouts: Not used
:type no_timeouts: bool
"""
def __init__(self, silent, no_timeouts):
    """Wrap a cmsis-pack-manager Cache rooted at the bundled pack directory.

    :param silent: forwarded to the underlying cmsis-pack-manager Cache
    :type silent: bool
    :param no_timeouts: forwarded to the underlying cmsis-pack-manager Cache
    :type no_timeouts: bool
    """
    self._cache = _Cache(
        silent, no_timeouts,
        json_path=LocalPackDir, data_path=LocalPackDir
    )
    try:
        # Use a context manager so the handle is closed promptly; the
        # original passed open() straight into load() and leaked it.
        with open(LocalPackLegacyNames) as legacy_file:
            self._legacy_names = load(legacy_file)
    except IOError:
        # No legacy-name table shipped alongside the module: direct
        # index lookups only.
        self._legacy_names = {}
def _get_sectors(self, device):
"""Extract sector sizes from device FLM algorithm
Will return None if there is no algorithm, pdsc URL formatted in correctly
Will return None if there is no algorithm, pdsc URL formatted in
correctly
:return: A list tuples of sector start and size
:rtype: [list]
"""
try:
pack = self.pack_from_cache(device)
pack = self._cache.pack_from_cache(device)
ret = []
for algo in device['algorithms']:
try:
flm = pack.open(algo["file_name"].replace("\\\\", "/").replace("\\", "/"))
flm = pack.open(
algo["file_name"]
.replace("\\\\", "/")
.replace("\\", "/")
)
flash_alg = PackFlashAlgo(flm.read())
sectors = [(flash_alg.flash_start + offset, size)
for offset, size in flash_alg.sector_sizes]
@ -97,14 +102,21 @@ class Cache (_Cache):
print(e)
return None
@property
def index(self):
    """The device index, wrapped so lookups fall back to legacy names."""
    wrapped = _CacheLookup(self._cache.index, self._legacy_names)
    return wrapped
def cache_descriptors(self):
    """Download every pack descriptor via the wrapped cache.

    The original body was a bare attribute access
    (``self._cache.cache_descriptors``) which evaluates the bound method
    and discards it without calling it, so nothing was ever downloaded;
    the method must be invoked.
    """
    self._cache.cache_descriptors()
def cache_everything(self):
    """Fill the underlying cache completely, then annotate every device
    record with its flash sector layout and regenerate the local index.
    """
    self._cache.cache_everything()
    for name, device in self._cache.index.items():
        # "version" is metadata in the index, not a device record.
        if name != "version":
            device["sectors"] = self._get_sectors(device)
    self.generate_index()
def get_svd_file(self, device_name) :
def get_svd_file(self, device_name):
"""Retrieve the flash algorithm file for a particular part.
Assumes that both the PDSC and the PACK file associated with that part are in the cache.
@ -120,5 +132,6 @@ class Cache (_Cache):
def generate_index(self):
    """Serialize the current device index to the bundled index.json."""
    # json.dump writes text, so the file must be opened in text mode;
    # the original "wb+" raises TypeError on Python 3.
    with open(LocalPackIndex, "w+") as out:
        self._cache.index["version"] = "0.2.0"
        dump(self._cache.index, out, indent=4, sort_keys=True)

View File

@ -0,0 +1,66 @@
{
"MK22DN512xxx5": "MK22FN512VLH5",
"MK24FN1M0xxx12": "MK24FN1M0VLL12",
"MKL26Z128xxx4": "MKL26Z128VLH4",
"MKL27Z64xxx4": "MKL27Z64VLH4",
"MKL43Z256xxx4": "MKL43Z256VLH4",
"MKL46Z256xxx4": "MKL46Z256VLL4",
"MKL82Z128xxx7": "MKL82Z128VLK7",
"R7S72103": "R7S72100",
"STM32F030R8": "STM32F030R8Tx",
"STM32F031K6": "STM32F031K6Tx",
"STM32F042K6": "STM32F042K6Tx",
"STM32F051R8": "STM32F051R8Tx",
"STM32F070RB": "STM32F070RBTx",
"STM32F072RB": "STM32F072RBTx",
"STM32F091RC": "STM32F091RCTx",
"STM32F207ZG": "STM32F207ZGTx",
"STM32F302R8": "STM32F302R8Tx",
"STM32F303K8": "STM32F303K8Tx",
"STM32F303RE": "STM32F303RETx",
"STM32F303VC": "STM32F303VCTx",
"STM32F303ZE": "STM32F303ZETx",
"STM32F334C8": "STM32F334C8Tx",
"STM32F334R8": "STM32F334R8Tx",
"STM32F401RE": "STM32F401RETx",
"STM32F401VC": "STM32F401VCTx",
"STM32F401VE": "STM32F401VETx",
"STM32F405RG": "STM32F405RGTx",
"STM32F407VG": "STM32F407VGTx",
"STM32F410RB": "STM32F410RBTx",
"STM32F411RE": "STM32F411RETx",
"STM32F412ZG": "STM32F412ZGTx",
"STM32F413ZH": "STM32F413ZHTx",
"STM32F429ZI": "STM32F429ZITx",
"STM32F437VG": "STM32F437VGTx",
"STM32F439VI": "STM32F439VITx",
"STM32F439ZI": "STM32F439ZITx",
"STM32F446RE": "STM32F446RETx",
"STM32F446VE": "STM32F446VETx",
"STM32F446ZE": "STM32F446ZETx",
"STM32F469NI": "STM32F469NIHx",
"STM32F746NG": "STM32F746NGHx",
"STM32F746ZG": "STM32F746ZGTx",
"STM32F756ZG": "STM32F756ZGTx",
"STM32F767ZI": "STM32F767ZITx",
"STM32F769NI": "STM32F769NIHx",
"STM32L011K4": "STM32L011K4Tx",
"STM32L031K6": "STM32L031K6Tx",
"STM32L053C8": "STM32L053C8Tx",
"STM32L053R8": "STM32L053R8Tx",
"STM32L072CZ": "STM32L072CZTx",
"STM32L073RZ": "STM32L073RZTx",
"STM32L082CZ": "STM32L082CZYx",
"STM32L432KC": "STM32L432KCUx",
"STM32L433RC": "STM32L433RCTx",
"STM32L443RC": "STM32L443RCTx",
"STM32L471QG": "STM32L471QGIx",
"STM32L475VG": "STM32L475VGTx",
"STM32L476JG": "STM32L476JGYx",
"STM32L476RG": "STM32L476RGTx",
"STM32L476VG": "STM32L476VGTx",
"STM32L486RG": "STM32L486RGTx",
"STM32L496AG": "STM32L496AGIx",
"STM32L496ZG": "STM32L496ZGTx",
"STM32L4R5ZI": "STM32L4R5ZITx"
}

View File

@ -1,209 +0,0 @@
"""
Copyright (c) 2016-2019 ARM Limited. All rights reserved.
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function, division, absolute_import
import argparse
from os.path import basename
from tools.arm_pack_manager import Cache
from os.path import basename, join, dirname, exists
from os import makedirs
from itertools import takewhile
from fuzzywuzzy import process
from .arm_pack_manager import Cache
# Top-level CLI parser; each command below registers itself on `subparsers`
# through the @subcommand decorator.
parser = argparse.ArgumentParser(description='A Handy little utility for keeping your cache of pack files up to date.')
subparsers = parser.add_subparsers(title="Commands")
def subcommand(name, *args, **kwargs):
    """Return a decorator that registers the wrapped function as the CLI
    subcommand *name*.

    Each dict in *args* describes one argparse argument (its 'name' key is
    either a positional name or a list of option strings); **kwargs are
    forwarded to add_parser. A set of flags shared by every subcommand is
    added as well.
    """
    def subcommand(command):
        subparser = subparsers.add_parser(name, **kwargs)
        for arg in args:
            arg = dict(arg)
            opt = arg['name']
            del arg['name']
            # A plain string is a positional argument; a list carries
            # option aliases like ['-e', '--everything'].
            # NOTE(review): basestring exists only on Python 2 (the module
            # also uses raw_input below) -- confirm before running on 3.
            if isinstance(opt, basestring):
                subparser.add_argument(opt, **arg)
            else:
                subparser.add_argument(*opt, **arg)
        subparser.add_argument("-v", "--verbose", action="store_true", dest="verbose", help="Verbose diagnostic output")
        subparser.add_argument("-vv", "--very_verbose", action="store_true", dest="very_verbose", help="Very verbose diagnostic output")
        subparser.add_argument("--no-timeouts", action="store_true", help="Remove all timeouts and try to download unconditionally")
        subparser.add_argument("--and", action="store_true", dest="intersection", help="combine search terms as if with an and")
        subparser.add_argument("--or", action="store_false", dest="intersection", help="combine search terms as if with an or")
        subparser.add_argument("--union", action="store_false", dest="intersection", help="combine search terms as if with a set union")
        subparser.add_argument("--intersection", action="store_true", dest="intersection", help="combine search terms as if with a set intersection")
        def thunk(parsed_args):
            # Construct the Cache once, then forward only the arguments the
            # wrapped command actually declared (dest name, dashes stripped).
            cache = Cache(not parsed_args.verbose, parsed_args.no_timeouts)
            argv = [arg['dest'] if 'dest' in arg else arg['name'] for arg in args]
            argv = [(arg if isinstance(arg, basestring) else arg[-1]).strip('-')
                    for arg in argv]
            argv = {arg: vars(parsed_args)[arg] for arg in argv
                    if vars(parsed_args)[arg] is not None}
            return command(cache, **argv)
        subparser.set_defaults(command=thunk)
        return command
    return subcommand
def user_selection(message, options):
    """Prompt the user to pick from *options* and return the chosen subset.

    Prints *message* and the numbered options, then loops until the user
    enters a valid index (returning a one-element list) or "all"
    (returning the full list).

    :param message: prompt printed before the option listing
    :param options: sequence of choices to present
    :return: list containing the selected option(s)
    :rtype: list
    """
    print(message)
    for index, choice in enumerate(options):
        print("({}) {}".format(index, choice))
    pick = None
    while pick is None:
        # print() instead of stdout.write(): sys.stdout was never imported
        # in this module, so the original raised NameError before reading
        # any input at all.
        print("please select an integer from 0 to {} or \"all\"".format(len(options) - 1))
        # raw_input (Python 2) preserved; renamed local so the builtin
        # `input` is no longer shadowed.
        response = raw_input()
        try:
            if response == "all":
                pick = options
            else:
                pick = [options[int(response)]]
        except (ValueError, IndexError):
            # IndexError: a well-formed integer can still be out of range;
            # re-prompt instead of crashing.
            print("I did not understand your input")
    return pick
def fuzzy_find(matches, urls):
    """Fuzzy-match every term in *matches* against *urls* and return the
    best candidate(s): the single top scorer, or all candidates tied for
    the top score.
    """
    scores = {}
    for match in matches:
        # Accumulate fuzzywuzzy scores per candidate across all terms.
        for key, value in process.extract(match, urls, limit=None):
            scores[key] = scores.get(key, 0) + value
    ranked = sorted([(v, k) for k, v in scores.items()], reverse=True)
    if not ranked:
        return []
    if len(ranked) == 1:
        return [ranked[0][1]]
    if ranked[0][0] > ranked[1][0]:
        ranked = ranked[:1]
    else:
        # Keep every candidate tied with the leader.
        ranked = list(takewhile(lambda t: t[0] == ranked[0][0], ranked))
    return [k for _, k in ranked]
@subcommand('cache',
            dict(name='matches', nargs="*",
                 help="a bunch of things to search for in part names"),
            dict(name=['-e','--everything'], action="store_true",
                 help="download everything possible"),
            dict(name=['-d','--descriptors'], action="store_true",
                 help="download all descriptors"),
            dict(name=["-b","--batch"], action="store_true",
                 help="don't ask for user input and assume download all"),
            help="Cache a group of PACK or PDSC files")
def command_cache (cache, matches, everything=False, descriptors=False, batch=False, verbose= False, intersection=True) :
    """Cache PACK/PDSC files.

    Precedence: --everything caches all, then --descriptors caches only
    descriptors, otherwise *matches* are fuzzy-matched against the base
    names of the known pack URLs and the hits are downloaded.
    """
    if everything :
        cache.cache_everything()
        return True
    if descriptors :
        cache.cache_descriptors()
        return True
    if not matches :
        print("No action specified nothing to do")
    else :
        urls = cache.get_urls()
        # --and/--intersection scores all terms together; --or/--union
        # matches each term independently and concatenates the results.
        if intersection :
            choices = fuzzy_find(matches, map(basename, urls))
        else :
            choices = sum([fuzzy_find([m], map(basename, urls)) for m in matches], [])
        # With multiple hits, ask the user unless --batch forced "all".
        if not batch and len(choices) > 1 :
            choices = user_selection("Please select a file to cache", choices)
        to_download = []
        for choice in choices :
            for url in urls :
                if choice in url :
                    to_download.append(url)
        cache.cache_pack_list(to_download)
    return True
@subcommand('find-part',
            dict(name='matches', nargs="+", help="words to match to processors"),
            dict(name=['-l',"--long"], action="store_true",
                 help="print out part details with part"),
            dict(name=['-p', '--parts-only'], action="store_false", dest="print_aliases"),
            dict(name=['-a', '--aliases-only'], action="store_false", dest="print_parts"),
            help="Find a Part and it's description within the cache")
def command_find_part (cache, matches, long=False, intersection=True,
                       print_aliases=True, print_parts=True) :
    """Fuzzy-search part names and aliases in the cache and print the hits.

    With --long, also pretty-print the full index record for each hit.
    """
    if long :
        import pprint
        pp = pprint.PrettyPrinter()
    parts = cache.index
    if intersection :
        choices = fuzzy_find(matches, parts.keys())
        aliases = fuzzy_find(matches, cache.aliases.keys())
    else :
        choices = sum([fuzzy_find([m], parts.keys()) for m in matches], [])
        aliases = sum([fuzzy_find([m], cache.aliases.keys()) for m in matches], [])
    if print_parts:
        for part in choices :
            print(part)
            if long :
                pp.pprint(cache.index[part])
    if print_aliases:
        for alias in aliases :
            print(alias)
            if long :
                # An alias resolves through cache.aliases to a real part name.
                pp.pprint(cache.index[cache.aliases[alias]])
@subcommand('dump-parts',
            dict(name='out', help='directory to dump to'),
            dict(name='parts', nargs='+', help='parts to dump'),
            help='Create a directory with an index.json describing the part and all of their associated flashing algorithms.'
)
def command_dump_parts (cache, out, parts, intersection=False) :
    """Dump the matched parts and their flash algorithm binaries to *out*,
    together with an index.json describing them.
    """
    # json.dump is never imported at module level in this file; import it
    # locally so writing index.json below does not raise NameError.
    from json import dump
    index = {}
    if intersection :
        for part in fuzzy_find(parts, cache.index):
            index.update(cache.index[part])
    else :
        for part in parts :
            index.update(dict(cache.find_device(part)))
    for n, p in index.items() :
        try :
            algo_path = join(out, p['algorithm']['file'])
            if not exists(dirname(algo_path)) :
                makedirs(dirname(algo_path))
            # Flash algorithms are raw binaries: binary mode is correct here.
            with open(algo_path, "wb+") as fd :
                fd.write(cache.get_flash_algorthim_binary(n).read())
        except KeyError:
            # Best effort: a part without an algorithm is warned about,
            # not fatal.
            print("[Warning] {} does not have an associated flashing algorithm".format(n))
    # index.json is text; the original "wb+" makes json.dump fail on
    # Python 3 (it writes str, not bytes).
    with open(join(out, "index.json"), "w+") as fd :
        dump(index, fd)
@subcommand('cache-part',
            dict(name='matches', nargs="+", help="words to match to devices"),
            help='Cache PACK files associated with the parts matching the provided words')
def command_cache_part (cache, matches, intersection=True) :
    """Cache the PACK files for every part (or alias) matching *matches*.

    Fixes in this revision:
      * ``cache.aliasse`` was a typo for ``cache.aliases`` (AttributeError).
      * ``urls += set(...)`` raises TypeError -- sets do not support ``+``;
        the union-assignment operator is ``|=``.
      * The alias branch forgot to select ``['pdsc_file']`` from the index
        record, mixing device dicts into a set of URL strings.
    """
    index = cache.index
    if intersection :
        choices = fuzzy_find(matches, index.keys())
        aliases = fuzzy_find(matches, cache.aliases.keys())
    else :
        choices = sum([fuzzy_find([m], index.keys()) for m in matches], [])
        aliases = sum([fuzzy_find([m], cache.aliases.keys()) for m in matches], [])
    urls = set([index[c]['pdsc_file'] for c in choices])
    urls |= set([index[cache.aliases[a]]['pdsc_file'] for a in aliases])
    cache.cache_pack_list(list(urls))
def get_argparse() :
    """Return the module-level argument parser (e.g. for doc generation)."""
    return parser

def main() :
    """CLI entry point: parse arguments and dispatch to the chosen subcommand."""
    args = parser.parse_args()
    # `command` is the thunk installed by @subcommand via set_defaults.
    args.command(args)