Merge pull request #3097 from theotherjimmy/pack-manager-urllib2

Arm-Pack-Manager: Remove pycurl dependency
Martin Kojtal 2016-10-26 14:48:05 +02:00 committed by GitHub
commit 53cd23e285
2 changed files with 9 additions and 30 deletions

requirements.txt

@@ -9,6 +9,5 @@ requests
 mbed-ls>=0.2.13
 mbed-host-tests>=1.1.2
 mbed-greentea>=0.2.24
-pycurl>=4
 beautifulsoup4>=4
 fuzzywuzzy>=0.11

tools/arm_pack_manager/__init__.py

@@ -1,4 +1,4 @@
-from pycurl import Curl
+from urllib2 import urlopen, URLError
 from bs4 import BeautifulSoup
 from os.path import join, dirname, basename
 from os import makedirs
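
For context, urllib2.urlopen follows HTTP redirects on its own, which is why pycurl's FOLLOWLOCATION setup below is deleted rather than translated. A minimal sketch of the new fetch path, assuming Python 2 (the URL is illustrative, not taken from this diff):

    from urllib2 import urlopen, URLError

    try :
        # urlopen follows redirects by default; no FOLLOWLOCATION analogue needed
        data = urlopen("http://www.keil.com/pack/index.idx").read()
    except URLError as e :
        print "download failed: {}".format(e.reason)
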
@@ -49,19 +49,6 @@ class Reader (Thread) :
             self.func(url)
             self.queue.task_done()
 
-class Cacher (Thread) :
-    def __init__(self, queue, func) :
-        Thread.__init__(self)
-        self.queue = queue
-        self.curl = Curl()
-        self.curl.setopt(self.curl.FOLLOWLOCATION, True)
-        self.func = func
-    def run(self) :
-        while True :
-            url = self.queue.get()
-            self.func(self.curl, url)
-            self.queue.task_done()
 
 class Cache () :
     """ The Cache object is the only relevant API object at the moment
@@ -88,7 +75,7 @@ class Cache () :
         stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
         stdout.flush()
 
-    def cache_file (self, curl, url) :
+    def cache_file (self, url) :
         """Low level interface to caching a single file.
 
-        :param curl: The user is responsible for providing a curl.Curl object as the curl parameter.
@@ -104,18 +91,11 @@ class Cache () :
         except OSError as exc :
             if exc.errno == EEXIST : pass
             else : raise
 
-        with open(dest, "wb+") as fd :
-            curl.setopt(curl.URL, url)
-            curl.setopt(curl.FOLLOWLOCATION, True)
-            curl.setopt(curl.WRITEDATA, fd)
-            if not self.no_timeouts :
-                curl.setopt(curl.CONNECTTIMEOUT, 2)
-                curl.setopt(curl.LOW_SPEED_LIMIT, 50 * 1024)
-                curl.setopt(curl.LOW_SPEED_TIME, 2)
-            try :
-                curl.perform()
-            except Exception as e :
-                stderr.write("[ ERROR ] file {} did not download {}\n".format(url, str(e)))
+        try:
+            with open(dest, "wb+") as fd :
+                fd.write(urlopen(url).read())
+        except URLError as e:
+            stderr.write(e.reason)
         self.counter += 1
         self.display_counter("Caching Files")
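
Two behavioural notes on this replacement: the urllib2 path drops the connect and low-speed timeouts that pycurl applied when self.no_timeouts was false, and URLError.reason is not always a string (it can be a socket.error), so stderr.write(e.reason) can itself raise. A hedged standalone variant that restores a timeout and formats the error; the fetch helper and its parameters are illustrative, not part of this change:

    from sys import stderr
    from urllib2 import urlopen, URLError

    def fetch(url, dest, no_timeouts=False) :
        # Illustrative rework of the new cache_file body: keep a 2 second
        # connect timeout, mirroring the old CONNECTTIMEOUT setting
        try :
            with open(dest, "wb+") as fd :
                if no_timeouts :
                    fd.write(urlopen(url).read())
                else :
                    fd.write(urlopen(url, timeout=2).read())
        except URLError as e :
            # e.reason may be a socket.error object rather than a string
            stderr.write("[ ERROR ] file {} did not download {}\n".format(
                url, str(e.reason)))
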
@@ -401,7 +381,7 @@ class Cache () :
         """
         self.total = len(list)
         self.display_counter("Caching Files")
-        do_queue(Cacher, self.cache_file, list)
+        do_queue(Reader, self.cache_file, list)
         stdout.write("\n")
 
     def cache_pack_list(self, list) :
@@ -412,7 +392,7 @@ class Cache () :
         """
         self.total = len(list) * 2
         self.display_counter("Caching Files")
-        do_queue(Cacher, self.cache_pdsc_and_pack, list)
+        do_queue(Reader, self.cache_pdsc_and_pack, list)
         stdout.write("\n")
 
     def pdsc_from_cache(self, url) :