from xdg.BaseDirectory import save_data_path
from pycurl import Curl
from bs4 import BeautifulSoup
from os.path import join, dirname
from os import makedirs
from errno import EEXIST
from threading import Thread
from Queue import Queue
from re import compile, sub

RootPackURL = "http://www.keil.com/pack/index.idx"


class Cacher(Thread):
    """Worker thread that repeatedly takes a URL from a shared queue and
    hands it, together with a reusable Curl handle, to the supplied
    callback ``func``."""

    def __init__(self, queue, silent, func):
        Thread.__init__(self)
        self.queue = queue
        self.curl = Curl()
        self.curl.setopt(self.curl.FOLLOWLOCATION, True)
        self.silent = silent
        self.func = func

    def run(self):
        while True:
            url = self.queue.get()
            self.func(self.curl, url, self.silent)
            self.queue.task_done()


protocol_matcher = compile(r"\w*://")


def strip_protocol(url):
    """Drop the URL scheme (e.g. ``http://``) so the remainder can be used
    as a relative path inside the cache directory."""
    return sub(protocol_matcher, "", url)
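
# For illustration, strip_protocol maps a URL onto the relative cache path
# used below (a sketch, not part of the module's test suite):
#
#   >>> strip_protocol("http://www.keil.com/pack/index.idx")
#   'www.keil.com/pack/index.idx'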


def cache_file(curl, url, silent=False):
    """Download ``url`` with the provided Curl handle into the local
    arm-pack-manager cache, mirroring the URL's path structure on disk."""
    if not silent:
        print("Caching {}...".format(url))
    dest = join(save_data_path('arm-pack-manager'), strip_protocol(url))
    try:
        makedirs(dirname(dest))
    except OSError as exc:
        # The destination directory already existing is fine; any other
        # OS error is not.
        if exc.errno == EEXIST:
            pass
        else:
            raise
    with open(dest, "wb+") as fd:
        curl.setopt(curl.URL, url)
        curl.setopt(curl.FOLLOWLOCATION, True)
        curl.setopt(curl.WRITEDATA, fd)
        curl.setopt(curl.TIMEOUT, 5)
        try:
            curl.perform()
        except Exception:
            # Transfer errors are swallowed so that one bad URL does not
            # abort a bulk caching run; a failed or timed-out transfer
            # leaves a partial (possibly empty) file behind.
            pass
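
# A minimal usage sketch (assumes network access and a writable XDG data
# directory; the standalone Curl handle and URL are illustrative):
#
#   from pycurl import Curl
#   cache_file(Curl(), "http://www.keil.com/pack/index.idx")
#   # -> saves to <xdg-data>/arm-pack-manager/www.keil.com/pack/index.idx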
|
2016-06-01 15:39:54 +00:00
|
|
|
|
2016-06-02 22:21:56 +00:00
|
|
|
def largest_version(content) :
|
|
|
|
return sorted([t['version'] for t in content.package.releases('release')], reverse=True)[0]
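
# For example, a lexicographic string sort ranks "9.0.0" above "10.0.0":
#
#   >>> sorted(["10.0.0", "9.0.0"], reverse=True)[0]
#   '9.0.0'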


def cache_pdsc_and_pack(curl, url, silent=False):
    """Cache a .pdsc file, then use its metadata (vendor, name, latest
    release version) to locate and cache the corresponding .pack file."""
    content = cache_and_parse(url)
    try:
        new_url = content.package.url.get_text()
        if not new_url.endswith("/"):
            new_url = new_url + "/"
        cache_file(curl,
                   new_url +
                   content.package.vendor.get_text() + "." +
                   content.package.find('name').get_text() + "." +
                   largest_version(content) + ".pack",
                   silent)
    except AttributeError:
        print("[WARNING] {} does not appear to be a conforming .pdsc file"
              .format(url))


def do_queue(function, iterable, silent=False):
    """Spread ``function`` over the items of ``iterable`` using a pool of
    ten Cacher threads fed from a single queue."""
    q = Queue()
    threads = [Cacher(q, silent, function) for each in range(10)]
    for each in threads:
        # Daemon threads let the process exit once the queue is drained,
        # even though each worker loops forever.
        each.setDaemon(True)
        each.start()
    for thing in iterable:
        q.put(thing)
    q.join()
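
# A usage sketch: cache a batch of files concurrently (URLs illustrative).
# The callback must accept (curl, url, silent), as cache_file does:
#
#   urls = ["http://www.keil.com/pack/index.idx"]
#   do_queue(cache_file, urls)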


def cache_everything(silent=False):
    """Cache the master index, then every .pdsc it lists along with each
    corresponding .pack file."""
    urls = [join(pdsc.get('url'), pdsc.get('name'))
            for pdsc in cache_and_parse(RootPackURL, silent).find_all("pdsc")]
    do_queue(cache_pdsc_and_pack, urls, silent)


def pull_from_cache(url):
    """Parse the locally cached copy of ``url`` with BeautifulSoup, without
    touching the network."""
    dest = join(save_data_path('arm-pack-manager'), strip_protocol(url))
    with open(dest, "r") as fd:
        return BeautifulSoup(fd, "html.parser")


def gen_dict_from_cache():
    # TODO: this stub only loads the cached index; it does not yet build or
    # return a dictionary.
    pdsc_files = pull_from_cache(RootPackURL)


def cache_and_parse(url, silent=False):
    """Cache ``url`` to disk, then parse the cached copy."""
    cache_file(Curl(), url, silent)
    return pull_from_cache(url)
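
# E.g. (illustrative): fetch and parse the master index in one step:
#
#   index = cache_and_parse(RootPackURL)
#   print(len(index.find_all("pdsc")))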


if __name__ == "__main__":
    cache_everything()