From 4fac10ccb79b5e451d1e3aca1826635f2e8894db Mon Sep 17 00:00:00 2001
From: blitzmann
Date: Sun, 4 Mar 2018 18:08:34 -0500
Subject: [PATCH] Fix update checker, migrate to requests package for all
 network stuff. The eveapi package is probably not going to work with caching
 and whatnot, but pyfa doesn't use it and XML API is EOL anyway

---
 service/eveapi.py                      | 10 ++--
 service/marketSources/evecentral.py    | 11 ++--
 service/marketSources/evemarketdata.py | 10 ++--
 service/network.py                     | 81 +++++++++++++-------------
 service/update.py                      |  6 +-
 5 files changed, 58 insertions(+), 60 deletions(-)

diff --git a/service/eveapi.py b/service/eveapi.py
index a06cf2efe..be48f8833 100644
--- a/service/eveapi.py
+++ b/service/eveapi.py
@@ -402,11 +402,11 @@ class _RootContext(_Context):

             req = self._scheme + '://' + self._host + path

-            response = network.request(req, network.EVE, kw)
+            response = network.request(req, network.EVE, params=kw)

             if cache:
                 store = True
-                response = response.read()
+                response = response.text
             else:
                 store = False
         else:
@@ -416,8 +416,8 @@ class _RootContext(_Context):
         if retrieve_fallback:
             # implementor is handling fallbacks...
             try:
-                return _ParseXML(response, True,
-                                 store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
+                return _ParseXML(response.text, True,
+                                 store and (lambda obj: cache.store(self._host, path, kw, response.text, obj)))
             except Error as e:
                 response = retrieve_fallback(self._host, path, kw, reason=e)
                 if response is not None:
@@ -425,7 +425,7 @@ class _RootContext(_Context):
                     raise
         else:
             # implementor is not handling fallbacks...
-            return _ParseXML(response, True, store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
+            return _ParseXML(response.text, True, store and (lambda obj: cache.store(self._host, path, kw, response, obj)))


 # -----------------------------------------------------------------------------
diff --git a/service/marketSources/evecentral.py b/service/marketSources/evecentral.py
index e323015c5..052941828 100644
--- a/service/marketSources/evecentral.py
+++ b/service/marketSources/evecentral.py
@@ -33,16 +33,17 @@ class EveCentral(object):
     name = "eve-central.com"

     def __init__(self, types, system, priceMap):
-        data = []
+        data = {}
         baseurl = "https://eve-central.com/api/marketstat"
-        data.append(("usesystem", system))  # Use Jita for market
+        data["usesystem"] = system  # Use Jita for market
+        data["typeid"] = set()

         for typeID in types:  # Add all typeID arguments
-            data.append(("typeid", typeID))
+            data["typeid"].add(typeID)

         network = Network.getInstance()
-        data = network.request(baseurl, network.PRICES, data)
-        xml = minidom.parse(data)
+        data = network.request(baseurl, network.PRICES, params=data)
+        xml = minidom.parseString(data.text)
         types = xml.getElementsByTagName("marketstat").item(0).getElementsByTagName("type")
         # Cycle through all types we've got from request
         for type_ in types:
diff --git a/service/marketSources/evemarketdata.py b/service/marketSources/evemarketdata.py
index 0a5861388..0c7b53bff 100644
--- a/service/marketSources/evemarketdata.py
+++ b/service/marketSources/evemarketdata.py
@@ -33,14 +33,14 @@ class EveMarketData(object):
     name = "eve-marketdata.com"

     def __init__(self, types, system, priceMap):
-        data = []
+        data = {}
         baseurl = "https://eve-marketdata.com/api/item_prices.xml"
-        data.append(("system_id", system))  # Use Jita for market
-        data.append(("type_ids", ','.join(str(x) for x in types)))
+        data["system_id"] = system  # Use Jita for market
+        data["type_ids"] = ','.join(str(x) for x in types)

         network = Network.getInstance()
-        data = network.request(baseurl, network.PRICES, data)
-        xml = minidom.parse(data)
+        data = network.request(baseurl, network.PRICES, params=data)
+        xml = minidom.parseString(data.text)
         types = xml.getElementsByTagName("eve").item(0).getElementsByTagName("price")

         # Cycle through all types we've got from request
diff --git a/service/network.py b/service/network.py
index f7667a5b1..c19ec90b5 100644
--- a/service/network.py
+++ b/service/network.py
@@ -18,9 +18,7 @@
 # =============================================================================


-import urllib.request
-import urllib.error
-import urllib.parse
+import requests
 import socket

 from logbook import Logger
@@ -72,7 +70,8 @@ class Network(object):

         return cls._instance

-    def request(self, url, type, data=None):
+    def request(self, url, type, *args, **kwargs):
+        # URL is required to be https as of right now

         # print "Starting request: %s\n\tType: %s\n\tPost Data: %s"%(url,type,data)

@@ -88,49 +87,47 @@ class Network(object):
                                         config.expansionVersion)
         headers = {"User-Agent": "pyfa {0} (Python-urllib2)".format(versionString)}

-        proxy = NetworkSettings.getInstance().getProxySettings()
-        if proxy is not None:
-            # proxy is a tuple of (host, port): (u'192.168.20.1', 3128)
-            proxy_auth = NetworkSettings.getInstance().getProxyAuthDetails()
-            # proxy_auth is a tuple of (login, password) or None
-            if proxy_auth is not None:
-                # add login:password@ in front of proxy address
-                proxy_handler = urllib.request.ProxyHandler({
-                    'https': '{0}:{1}@{2}:{3}'.format(
-                        proxy_auth[0], proxy_auth[1], proxy[0], proxy[1])
-                })
-            else:
-                # build proxy handler with no login/pass info
-                proxy_handler = urllib.request.ProxyHandler({'https': "{0}:{1}".format(proxy[0], proxy[1])})
-            opener = urllib.request.build_opener(proxy_handler)
-            urllib.request.install_opener(opener)
-        else:
-            # This is a bug fix, explicitly disable possibly previously installed
-            # opener with proxy, by urllib2.install_opener() a few lines above in code.
-            # Now this explicitly disables proxy handler, "uninstalling" opener.
-            # This is used in case when user had proxy enabled, so proxy_handler was already
-            # installed globally, and then user had disabled the proxy, so we should clear that opener
-            urllib.request.install_opener(None)
-            # another option could be installing a default opener:
-            # urllib2.install_opener(urllib2.build_opener())
+        # proxy = NetworkSettings.getInstance().getProxySettings()
+        # if proxy is not None:
+        #     # proxy is a tuple of (host, port): (u'192.168.20.1', 3128)
+        #     proxy_auth = NetworkSettings.getInstance().getProxyAuthDetails()
+        #     # proxy_auth is a tuple of (login, password) or None
+        #     if proxy_auth is not None:
+        #         # add login:password@ in front of proxy address
+        #         proxy_handler = urllib.request.ProxyHandler({
+        #             'https': '{0}:{1}@{2}:{3}'.format(
+        #                 proxy_auth[0], proxy_auth[1], proxy[0], proxy[1])
+        #         })
+        #     else:
+        #         # build proxy handler with no login/pass info
+        #         proxy_handler = urllib.request.ProxyHandler({'https': "{0}:{1}".format(proxy[0], proxy[1])})
+        #     opener = urllib.request.build_opener(proxy_handler)
+        #     urllib.request.install_opener(opener)
+        # else:
+        #     # This is a bug fix, explicitly disable possibly previously installed
+        #     # opener with proxy, by urllib2.install_opener() a few lines above in code.
+        #     # Now this explicitly disables proxy handler, "uninstalling" opener.
+        #     # This is used in case when user had proxy enabled, so proxy_handler was already
+        #     # installed globally, and then user had disabled the proxy, so we should clear that opener
+        #     urllib.request.install_opener(None)
+        #     # another option could be installing a default opener:
+        #     # urllib2.install_opener(urllib2.build_opener())

-        request = urllib.request.Request(url, headers=headers, data=urllib.parse.urlencode(data).encode("utf-8") if data else None)
         try:
-            return urllib.request.urlopen(request)
-        except urllib.error.HTTPError as error:
+            resp = requests.get(url, headers=headers, **kwargs)
+            resp.raise_for_status()
+            return resp
+        except requests.exceptions.HTTPError as error:
             pyfalog.warning("HTTPError:")
             pyfalog.warning(error)
-            if error.code == 404:
+            if error.response.status_code == 404:
                 raise RequestError()
-            elif error.code == 403:
+            elif error.response.status_code == 403:
                 raise AuthenticationError()
-            elif error.code >= 500:
+            elif error.response.status_code >= 500:
                 raise ServerError()
             raise Error(error)
-        except urllib.error.URLError as error:
-            pyfalog.warning("Timed out or other URL error:")
-            pyfalog.warning(error)
-            if "timed out" in error.reason:
-                raise TimeoutError()
-            else:
-                raise Error(error)
+        except requests.exceptions.Timeout:
+            raise TimeoutError()
+        except Exception as error:
+            raise Error(error)
diff --git a/service/update.py b/service/update.py
index a1f6989c4..0d8d38f53 100644
--- a/service/update.py
+++ b/service/update.py
@@ -49,13 +49,13 @@ class CheckUpdateThread(threading.Thread):
         try:
             response = network.request('https://api.github.com/repos/pyfa-org/Pyfa/releases', network.UPDATE)
-            jsonResponse = json.loads(response.read())
+            jsonResponse = response.json()

             jsonResponse.sort(
                 key=lambda x: calendar.timegm(dateutil.parser.parse(x['published_at']).utctimetuple()),
                 reverse=True
             )

-            for release in jsonResponse:
+            for release in jsonResponse[:5]:
                 rVersion = Version(release['tag_name'])
                 cVersion = Version(config.version)

@@ -73,8 +73,8 @@ class CheckUpdateThread(threading.Thread):

                 if rVersion > cVersion:
                     wx.CallAfter(self.callback, release, rVersion)
+                    break

-                break
         except Exception as e:
             pyfalog.error("Caught exception in run")
             pyfalog.error(e)
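
Usage sketch (illustrative, not part of the commit): the snippet below shows how a caller interacts with the reworked, requests-based Network.request() after this patch. The endpoint URL and the network.PRICES constant come from the diff above; the import path, the system ID (30000142, Jita) and the type IDs are assumed example values.

    # Illustrative caller of the requests-based Network wrapper (example values only).
    from service.network import Network  # assumed import path

    network = Network.getInstance()
    response = network.request(
        "https://eve-marketdata.com/api/item_prices.xml",         # endpoint used in the diff
        network.PRICES,
        params={"system_id": 30000142, "type_ids": "34,35,36"},   # query params forwarded to requests.get() via **kwargs
    )
    # request() calls raise_for_status() before returning, so only 2xx responses reach here;
    # the price sources then feed response.text into minidom.parseString().
    print(response.status_code)
    print(response.text[:200])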