mirror of https://github.com/ARMmbed/mbed-os.git
arm_pack_manager - [API change] differentiate between pack and pdsc loading from cache

- pull_from_cache renamed to pdsc_from_cache
- pack_from_cache added
- index file member replaced with pdsc_file
- pack_file member added to cache

branch pull/2708/head
parent 6b2595af61
commit 39c1703c75
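
In short, callers now pick the loader that matches the artifact: parsed XML for .pdsc descriptors, a zip archive for .pack files. A minimal sketch of the new surface (the module path, constructor arguments, and device name are illustrative assumptions; it assumes both files are already cached):

    from tools.arm_pack_manager import Cache

    cache = Cache(True, True)  # assumed argument order: silent, no timeouts

    # PDSC descriptors are XML: pdsc_from_cache returns a parsed BeautifulSoup tree.
    pdsc = cache.pdsc_from_cache("http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc")

    # PACK archives are zips: pack_from_cache returns a ZipFile, resolved through
    # the pack_file member that each index entry now carries.
    pack = cache.pack_from_cache(cache.index["LPC1768"])
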
@@ -123,7 +123,7 @@ class Cache () :
         :return: The url of the PACK file.
         :rtype: str
         """
-        content = self.pull_from_cache(url)
+        content = self.pdsc_from_cache(url)
         new_url = content.package.url.get_text()
         if not new_url.endswith("/") :
             new_url = new_url + "/"
@@ -134,7 +134,7 @@ class Cache () :
     def cache_pdsc_and_pack (self, curl, url) :
         self.cache_file(curl, url)
         try :
-            self. cache_file(curl, self.pdsc_to_pack(url))
+            self.cache_file(curl, self.pdsc_to_pack(url))
         except AttributeError :
             stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
         self.counter += 1
@@ -148,13 +148,14 @@ class Cache () :
         :rtype: [str]
         """
         if not self.urls :
-            try : root_data = self.pull_from_cache(RootPackURL)
+            try : root_data = self.pdsc_from_cache(RootPackURL)
             except IOError : root_data = self.cache_and_parse(RootPackURL)
             self.urls = [join(pdsc.get('url'), pdsc.get('name')) for pdsc in root_data.find_all("pdsc")]
         return self.urls

-    def _extract_dict(self, device, filename) :
-        to_ret = dict(file=filename)
+    def _extract_dict(self, device, filename, pack) :
+        to_ret = dict(pdsc_file=filename, pack_file=pack)
         if device == u'301': stderr.write(filename+"\n")
         try :
             to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
                                                     size=m["size"]))
@@ -172,9 +173,11 @@ class Cache () :
     def _generate_index_helper(self, d) :
         try :
-            self._index.update(dict([(dev['dname'], self._extract_dict(dev, d)) for dev in
-                                     (self.pull_from_cache(d)("device"))]))
+            pack = self.pdsc_to_pack(d)
+            self._index.update(dict([(dev['dname'], self._extract_dict(dev, d, pack)) for dev in
+                                     (self.pdsc_from_cache(d)("device"))]))
         except AttributeError as e :
             stderr.write("[ ERROR ] file {}\n".format(d))
             print(e)
         self.counter += 1
         self.display_counter("Generating Index")
@@ -189,10 +192,21 @@ class Cache () :
         :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm
         :rtype: ZipExtFile
         """
-        device = self.index[device_name]
-        pack = ZipFile(join(save_data_path('arm-pack-manager'),
-                            strip_protocol(self.pdsc_to_pack(device['file']))))
-        return pack.open(device['algorithm'])
+        pack = self.pack_from_cache(self.index[device_name])
+        return pack.open(self.index[device_name]['algorithm']['file'])
+
+    def get_svd_file(self, device_name) :
+        """Retrieve the SVD file for a particular part.
+
+        Assumes that both the PDSC and the PACK file associated with that part are in the cache.
+
+        :param device_name: The exact name of a device
+        :type device_name: str
+        :return: A file-like object that, when read, is the SVD file that describes the part
+        :rtype: ZipExtFile
+        """
+        pack = self.pack_from_cache(self.index[device_name])
+        return pack.open(self.index[device_name]['debug'])

     def generate_index(self) :
         self._index = {}
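
Both getters now share one code path: look the part up in the index, open its PACK from the cache, and pull a single member out of the zip. A sketch of the intended call pattern (the part name is illustrative; it assumes the PACK has already been fetched with cache-part):

    cache = Cache(True, True)

    # get_svd_file returns a file-like object read straight out of the cached PACK.
    svd = cache.get_svd_file("LPC1768")
    with open("LPC1768.svd", "w") as out:
        out.write(svd.read())
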
@@ -228,7 +242,7 @@ class Cache () :
                               u'start': u'0x00000000'},
          u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
          u'debug': u'SVD/LPC176x5x.svd',
-         u'file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
+         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
          u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                      u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                      u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}
@@ -285,7 +299,7 @@ class Cache () :
         do_queue(Cacher, self.cache_pdsc_and_pack, list)
         stdout.write("\n")

-    def pull_from_cache(self, url) :
+    def pdsc_from_cache(self, url) :
         """Low level interface for extracting a PDSC file from the cache.

         Assumes that the file specified is a PDSC file and is in the cache.
@@ -299,8 +313,21 @@ class Cache () :
         with open(dest, "r") as fd :
             return BeautifulSoup(fd, "html.parser")

+    def pack_from_cache(self, device) :
+        """Low level interface for extracting a PACK file from the cache.
+
+        Assumes that the PACK file associated with the given device is in the cache.
+
+        :param device: An entry from the index, carrying a pack_file member.
+        :type device: dict
+        :return: A parsed representation of the PACK file.
+        :rtype: ZipFile
+        """
+        return ZipFile(join(save_data_path('arm-pack-manager'),
+                            strip_protocol(device['pack_file'])))
+
     def gen_dict_from_cache() :
-        pdsc_files = pull_from_cache(RootPackUrl)
+        pdsc_files = pdsc_from_cache(RootPackUrl)

     def cache_and_parse(self, url) :
         """A low level shortcut that Caches and Parses a PDSC file.
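
pack_from_cache mirrors pdsc_from_cache: the cache stores each file under a per-user data directory, at a relative path derived from its URL with the protocol stripped. A rough sketch of that mapping (strip_protocol_sketch and the data-directory location are stand-ins for the real helpers, not their actual implementations):

    from os.path import join

    def strip_protocol_sketch(url):
        # stand-in for strip_protocol: drop the scheme, keep host and path
        return url.split("://", 1)[-1]

    data_root = "/home/user/.local/share/arm-pack-manager"  # save_data_path stand-in
    url = "http://www.keil.com/pack/Keil.LPC1700_DFP.pack"
    print(join(data_root, strip_protocol_sketch(url)))
    # /home/user/.local/share/arm-pack-manager/www.keil.com/pack/Keil.LPC1700_DFP.pack
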
@@ -311,5 +338,5 @@ class Cache () :
         :rtype: BeautifulSoup
         """
         self.cache_file(Curl(), url)
-        return self.pull_from_cache(url)
+        return self.pdsc_from_cache(url)

@@ -27,10 +27,10 @@ def subcommand(name, *args, **kwargs):
     subparser.add_argument("-v", "--verbose", action="store_true", dest="verbose", help="Verbose diagnostic output")
     subparser.add_argument("-vv", "--very_verbose", action="store_true", dest="very_verbose", help="Very verbose diagnostic output")
     subparser.add_argument("--no-timeouts", action="store_true", help="Remove all timeouts and try to download unconditionally")
-    subparser.add_argument("--and", action=s"store_true", dest="intersection" help="combine search terms as if with an and")
-    subparser.add_argument("--or", action=s"store_false", dest="intersection" help="combine search terms as if with an or")
-    subparser.add_argument("--union", action=s"store_false", dest="intersection" help="combine search terms as if with a set union")
-    subparser.add_argument("--intersection", action=s"store_true", dest="intersection" help="combine search terms as if with a set intersection")
+    subparser.add_argument("--and", action="store_true", dest="intersection", help="combine search terms as if with an and")
+    subparser.add_argument("--or", action="store_false", dest="intersection", help="combine search terms as if with an or")
+    subparser.add_argument("--union", action="store_false", dest="intersection", help="combine search terms as if with a set union")
+    subparser.add_argument("--intersection", action="store_true", dest="intersection", help="combine search terms as if with a set intersection")

     def thunk(parsed_args):
         cache = Cache(not parsed_args.verbose, parsed_args.no_timeouts)
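
Since all four flags write to the same dest, whichever appears last on the command line wins: --and and --intersection store True, --or and --union store False. A self-contained sketch of the same pattern:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--and", action="store_true", dest="intersection",
                        help="combine search terms as if with an and")
    parser.add_argument("--or", action="store_false", dest="intersection",
                        help="combine search terms as if with an or")

    print(parser.parse_args(["--and"]).intersection)  # True
    print(parser.parse_args(["--or"]).intersection)   # False
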
@@ -66,12 +66,12 @@ def user_selection (message, options) :
 def fuzzy_find(matches, urls) :
     choices = {}
     for match in matches :
-        for key, value in process.extract(match, urls, limit=len(urls)) :
+        for key, value in process.extract(match, urls, limit=None) :
             choices.setdefault(key, 0)
             choices[key] += value
     choices = sorted([(v, k) for k, v in choices.iteritems()], reverse=True)
     if not choices : return []
-    elif len(choices) == 1 : return choices[0][1]
+    elif len(choices) == 1 : return [choices[0][1]]
     elif choices[0][0] > choices[1][0] : choices = choices[:1]
     else : choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
     return [v for k,v in choices]
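
The second fix matters because every caller iterates over the result: the old single-match branch returned a bare string, and iterating a string yields characters rather than part names. A tiny illustration:

    for part in "LPC1768":       # old behaviour: a bare string leaks through
        print(part)              # L, P, C, 1, 7, 6, 8

    for part in ["LPC1768"]:     # fixed: a one-element list
        print(part)              # LPC1768
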
@@ -100,7 +100,7 @@ def command_cache (cache, matches, everything=False, descriptors=False, batch=Fa
     if intersection :
         choices = fuzzy_find(matches, map(basename, urls))
     else :
-        choices = sum([fuzzy_find([m], map(basename, urls) for m in matches)])
+        choices = sum([fuzzy_find([m], map(basename, urls)) for m in matches], [])
     if not batch and len(choices) > 1 :
         choices = user_selection("Please select a file to cache", choices)
     to_download = []
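
The corrected union branch also relies on sum's start argument to flatten the per-term match lists into one flat list; the old version had a misplaced parenthesis and passed a generator into fuzzy_find instead. For example:

    per_term = [["LPC1768", "LPC1769"], ["STM32F407"]]

    # sum(iterable, start) folds with +, so a start of [] concatenates lists.
    print(sum(per_term, []))  # ['LPC1768', 'LPC1769', 'STM32F407']
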
@@ -123,9 +123,9 @@ def command_find_part (cache, matches, long=False, intersection=True) :
     pp = pprint.PrettyPrinter()
     parts = cache.index
     if intersection :
-        choices = fuzzy_find(matches, map(basename, index.keys()))
+        choices = fuzzy_find(matches, parts.keys())
     else :
-        choices = sum([fuzzy_find([m], map(basename, index.keys()) for m in matches)])
+        choices = sum([fuzzy_find([m], parts.keys()) for m in matches], [])
     for part in choices :
         print part
         if long :
@@ -136,10 +136,10 @@ def command_find_part (cache, matches, long=False, intersection=True) :
             dict(name='parts', nargs='+', help='parts to dump'),
             help='Create a directory with an index.json describing the part and all of their associated flashing algorithms.'
             )
-def command_dump_parts (cache, out, parts, intersection=True) :
+def command_dump_parts (cache, out, parts, intersection=False) :
     index = {}
     if intersection :
-        for part in fuzzy_find(matches, map(basename, urls)):
+        for part in fuzzy_find(parts, cache.index):
             index.update(cache.index[part])
     else :
         for part in parts :
@@ -159,13 +159,13 @@ def command_dump_parts (cache, out, parts, intersection=True) :
 @subcommand('cache-part',
             dict(name='matches', nargs="+", help="words to match to devices"),
             help='Cache PACK files associated with the parts matching the provided words')
-def command_cache_part (cache, matches) :
+def command_cache_part (cache, matches, intersection=True) :
     index = cache.index
     if intersection :
-        choices = fuzzy_find(matches, map(basename, index.keys()))
+        choices = fuzzy_find(matches, index.keys())
     else :
-        choices = sum([fuzzy_find([m], map(basename, index.keys()) for m in matches)])
-    urls = [index[c]['file'] for c in choices]
+        choices = sum([fuzzy_find([m], index.keys()) for m in matches], [])
+    urls = list(set([index[c]['pdsc_file'] for c in choices]))
     cache.cache_pack_list(urls)

 def get_argparse() :
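
Wrapping the URL list in list(set(...)) matters because many parts in the index point at the same PDSC, so without deduplication the same PACK would be queued for download once per matching part:

    urls = ["http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc"] * 3
    print(list(set(urls)))  # the pack is listed once, not three times
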