arm_pack_manager - caches packs now too.

pull/2708/head
Jimmy Brisson 2016-06-02 17:21:56 -05:00 committed by Sarah Marsh
parent 0832770159
commit 44980ab009
1 changed file with 46 additions and 13 deletions


@@ -6,55 +6,88 @@ from os import makedirs
 from errno import EEXIST
 from threading import Thread
 from Queue import Queue
+from re import compile, sub
 
 RootPackURL = "http://www.keil.com/pack/index.idx"
 
 class Cacher (Thread) :
-    def __init__(self, queue, silent) :
+    def __init__(self, queue, silent, func) :
         Thread.__init__(self)
         self.queue = queue
         self.curl = Curl()
         self.curl.setopt(self.curl.FOLLOWLOCATION, True)
         self.silent = silent
+        self.func = func
     def run(self) :
         while True :
             url = self.queue.get()
-            cache_file(self.curl, url, self.silent)
+            self.func(self.curl, url, self.silent)
             self.queue.task_done()
 
+protocol_matcher = compile("\w*://")
+def strip_protocol(url) :
+    return sub(protocol_matcher, "", url)
+
 def cache_file (curl, url, silent=False) :
     if not silent : print("Caching {}...".format(url))
-    dest = join(save_data_path('arm-pack-manager'), url)
+    dest = join(save_data_path('arm-pack-manager'), strip_protocol(url))
     try :
         makedirs(dirname(dest))
     except OSError as exc :
         if exc.errno == EEXIST : pass
         else : raise
     with open(dest, "wb+") as fd :
-        curl.setopt(curl.URL, RootPackURL)
+        curl.setopt(curl.URL, url)
         curl.setopt(curl.FOLLOWLOCATION, True)
         curl.setopt(curl.WRITEDATA, fd)
-        curl.perform()
+        curl.setopt(curl.TIMEOUT, 5)
+        try :
+            curl.perform()
+        except Exception:
+            pass
 
-def cache_everything(silent=False) :
-    url_queue = Queue()
-    urls = [pdsc.get('url') + pdsc.get('name') for pdsc in cache_and_parse(RootPackURL,silent).find_all("pdsc")]
-    threads = [Cacher(url_queue, silent) for each in range(10)]
+def largest_version(content) :
+    return sorted([t['version'] for t in content.package.releases('release')], reverse=True)[0]
+
+def cache_pdsc_and_pack (curl, url, silent=False) :
+    content = cache_and_parse(url)
+    try :
+        new_url = content.package.url.get_text()
+        if not new_url.endswith("/") :
+            new_url = new_url + "/"
+        cache_file(curl,
+                   new_url +
+                   content.package.vendor.get_text() + "." +
+                   content.package.find('name').get_text() + "." +
+                   largest_version(content) + ".pack",
+                   silent)
+    except AttributeError :
+        print("[WARNING] {} does not appear to be a conforming .pdsc file".format(url))
+
+def do_queue(function, interable, silent=False) :
+    q = Queue()
+    threads = [Cacher(q, silent, function) for each in range(10)]
     for each in threads :
         each.setDaemon(True)
         each.start()
-    for url in urls:
-        url_queue.put(url)
-    url_queue.join()
+    for thing in interable :
+        q.put(thing)
+    q.join()
+
+def cache_everything(silent=False) :
+    urls = [join(pdsc.get('url'), pdsc.get('name')) for pdsc in cache_and_parse(RootPackURL,silent).find_all("pdsc")]
+    do_queue(cache_pdsc_and_pack, urls, silent)
 
 def pull_from_cache(url) :
-    dest = join(save_data_path('arm-pack-manager'), url)
+    dest = join(save_data_path('arm-pack-manager'), strip_protocol(url))
     with open(dest, "r") as fd :
         return BeautifulSoup(fd, "html.parser")
 
+def gen_dict_from_cache() :
+    pdsc_files = pull_from_cache(RootPackUrl)
+
 def cache_and_parse(url, silent=False) :
     cache_file(Curl(), url, silent)
     return pull_from_cache(url)
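
Two notes on how the new pieces fit together. First, strip_protocol is what lets a download URL double as a cache path: the scheme prefix ("http://", "https://", ...) is removed so the remainder can be joined onto the cache root returned by save_data_path('arm-pack-manager'). A minimal standalone sketch of that mapping, with the regex copied from the diff and the index URL as a sample input:

from re import compile, sub

protocol_matcher = compile("\w*://")

def strip_protocol(url) :
    # Drop the leading scheme so the rest of the URL works as a
    # relative path under the cache directory.
    return sub(protocol_matcher, "", url)

print(strip_protocol("http://www.keil.com/pack/index.idx"))
# -> www.keil.com/pack/index.idx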
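
Second, Cacher now takes its work function as a constructor argument, which turns do_queue into a generic fan-out: ten daemon worker threads drain one shared Queue, and q.join() blocks until every queued item has been processed. A simplified sketch of the same producer/consumer pattern, using plain Thread workers and a stand-in work function (fake_cache is illustrative, not from the commit):

from threading import Thread
from Queue import Queue   # Python 2, as in the module above

def do_queue(function, iterable, silent=False) :
    q = Queue()
    def worker() :
        # Workers loop forever; marking them as daemons lets the
        # process exit once q.join() has returned.
        while True :
            item = q.get()
            function(item, silent)
            q.task_done()
    for _ in range(10) :
        t = Thread(target=worker)
        t.setDaemon(True)
        t.start()
    for thing in iterable :
        q.put(thing)
    q.join()   # block until every queued item is done

def fake_cache(url, silent) :
    # Stand-in for cache_pdsc_and_pack.
    if not silent :
        print("would cache {}".format(url))

do_queue(fake_cache, ["http://example.com/a.pdsc",
                      "http://example.com/b.pdsc"])

The committed version does the same thing via the Cacher subclass, with the extra wrinkle that each worker owns its own pycurl Curl handle, since a single handle is not safe to share across threads.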