Fix the update checker and migrate all networking to the requests package. The eveapi package probably won't work with caching and whatnot, but pyfa doesn't use it, and the XML API is EOL anyway

blitzmann
2018-03-04 18:08:34 -05:00
parent 7956ca0409
commit 4fac10ccb7
5 changed files with 58 additions and 60 deletions
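
Most of the diffs below make the same kind of change, so here is a rough before/after sketch of the API difference driving them: urllib's urlopen() hands back a file-like object whose read() yields undecoded bytes, while requests.get() returns a Response object with .text, .json() and explicit status handling. The URL and User-Agent string below are placeholders, not the values pyfa uses.

    import urllib.request
    import requests

    url = "https://example.com/"                  # placeholder endpoint
    headers = {"User-Agent": "pyfa (example)"}    # placeholder UA string

    # before: file-like object, read() returns bytes
    old_body = urllib.request.urlopen(urllib.request.Request(url, headers=headers)).read()

    # after: requests.Response
    resp = requests.get(url, headers=headers, timeout=10)
    resp.raise_for_status()   # requests does not raise on HTTP error codes on its own
    new_body = resp.text      # decoded str; resp.json() parses JSON bodies directly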

View File

@@ -402,11 +402,11 @@ class _RootContext(_Context):
req = self._scheme + '://' + self._host + path
response = network.request(req, network.EVE, kw)
response = network.request(req, network.EVE, params=kw)
if cache:
store = True
response = response.read()
response = response.text
else:
store = False
else:
@@ -416,8 +416,8 @@ class _RootContext(_Context):
if retrieve_fallback:
# implementor is handling fallbacks...
try:
return _ParseXML(response, True,
store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
return _ParseXML(response.text, True,
store and (lambda obj: cache.store(self._host, path, kw, response.text, obj)))
except Error as e:
response = retrieve_fallback(self._host, path, kw, reason=e)
if response is not None:
@@ -425,7 +425,7 @@ class _RootContext(_Context):
raise
else:
# implementor is not handling fallbacks...
return _ParseXML(response, True, store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
return _ParseXML(response.text, True, store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
# -----------------------------------------------------------------------------
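
For context on the read()-to-text swaps above: a requests Response is not file-like, so the decoded body has to be pulled out explicitly before it goes to _ParseXML or cache.store. A minimal sketch, with a purely illustrative URL standing in for the retired XML API:

    import requests

    resp = requests.get("https://example.com/ServerStatus.xml", timeout=10)  # illustrative URL
    body = resp.text       # decoded str - what _ParseXML and cache.store receive now
    raw = resp.content     # raw bytes, the closest analogue to the old read() result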

View File

@@ -33,16 +33,17 @@ class EveCentral(object):
name = "eve-central.com"
def __init__(self, types, system, priceMap):
data = []
data = {}
baseurl = "https://eve-central.com/api/marketstat"
data.append(("usesystem", system)) # Use Jita for market
data["usesystem"] = system # Use Jita for market
data["typeid"] = set()
for typeID in types: # Add all typeID arguments
data.append(("typeid", typeID))
data["typeid"].add(typeID)
network = Network.getInstance()
data = network.request(baseurl, network.PRICES, data)
xml = minidom.parse(data)
data = network.request(baseurl, network.PRICES, params=data)
xml = minidom.parseString(data.text)
types = xml.getElementsByTagName("marketstat").item(0).getElementsByTagName("type")
# Cycle through all types we've got from request
for type_ in types:
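
The old list-of-tuples request data becomes a params dict here. requests repeats the key for every element of an iterable value, so the set of typeIDs built above still encodes as typeid=...&typeid=..., just without a guaranteed order. A small sketch of the encoding, using Jita's system ID and a few stand-in typeIDs; PreparedRequest builds the URL without touching the network:

    from requests import Request

    # stand-in values: 30000142 is Jita's solar system ID, 34-36 are mineral typeIDs
    params = {"usesystem": 30000142, "typeid": [34, 35, 36]}
    prepared = Request("GET", "https://eve-central.com/api/marketstat", params=params).prepare()
    print(prepared.url)
    # -> https://eve-central.com/api/marketstat?usesystem=30000142&typeid=34&typeid=35&typeid=36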

View File

@@ -33,14 +33,14 @@ class EveMarketData(object):
name = "eve-marketdata.com"
def __init__(self, types, system, priceMap):
data = []
data = {}
baseurl = "https://eve-marketdata.com/api/item_prices.xml"
data.append(("system_id", system)) # Use Jita for market
data.append(("type_ids", ','.join(str(x) for x in types)))
data["system_id"] = system # Use Jita for market
data["type_ids"] = ','.join(str(x) for x in types)
network = Network.getInstance()
data = network.request(baseurl, network.PRICES, data)
xml = minidom.parse(data)
data = network.request(baseurl, network.PRICES, params=data)
xml = minidom.parseString(data.text)
types = xml.getElementsByTagName("eve").item(0).getElementsByTagName("price")
# Cycle through all types we've got from request
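
Same move as the eve-central source, plus the switch from minidom.parse() to minidom.parseString(): parse() wants a filename or file-like object (which the old urllib response happened to be), while parseString() takes the XML text that response.text now provides. A tiny self-contained illustration with a made-up payload loosely shaped like the item_prices.xml response:

    from xml.dom import minidom

    doc = minidom.parseString('<eve><price typeID="34">5.02</price></eve>')
    price = doc.getElementsByTagName("eve").item(0).getElementsByTagName("price")[0]
    print(price.getAttribute("typeID"), price.firstChild.nodeValue)   # 34 5.02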

View File

@@ -18,9 +18,7 @@
# =============================================================================
import urllib.request
import urllib.error
import urllib.parse
import requests
import socket
from logbook import Logger
@@ -72,7 +70,8 @@ class Network(object):
return cls._instance
def request(self, url, type, data=None):
def request(self, url, type, *args, **kwargs):
# URL is required to be https as of right now
# print "Starting request: %s\n\tType: %s\n\tPost Data: %s"%(url,type,data)
@@ -88,49 +87,47 @@ class Network(object):
config.expansionVersion)
headers = {"User-Agent": "pyfa {0} (Python-urllib2)".format(versionString)}
proxy = NetworkSettings.getInstance().getProxySettings()
if proxy is not None:
# proxy is a tuple of (host, port): (u'192.168.20.1', 3128)
proxy_auth = NetworkSettings.getInstance().getProxyAuthDetails()
# proxy_auth is a tuple of (login, password) or None
if proxy_auth is not None:
# add login:password@ in front of proxy address
proxy_handler = urllib.request.ProxyHandler({
'https': '{0}:{1}@{2}:{3}'.format(
proxy_auth[0], proxy_auth[1], proxy[0], proxy[1])
})
else:
# build proxy handler with no login/pass info
proxy_handler = urllib.request.ProxyHandler({'https': "{0}:{1}".format(proxy[0], proxy[1])})
opener = urllib.request.build_opener(proxy_handler)
urllib.request.install_opener(opener)
else:
# This is a bug fix, explicitly disable possibly previously installed
# opener with proxy, by urllib2.install_opener() a few lines above in code.
# Now this explicitly disables proxy handler, "uninstalling" opener.
# This is used in case when user had proxy enabled, so proxy_handler was already
# installed globally, and then user had disabled the proxy, so we should clear that opener
urllib.request.install_opener(None)
# another option could be installing a default opener:
# urllib2.install_opener(urllib2.build_opener())
# proxy = NetworkSettings.getInstance().getProxySettings()
# if proxy is not None:
# # proxy is a tuple of (host, port): (u'192.168.20.1', 3128)
# proxy_auth = NetworkSettings.getInstance().getProxyAuthDetails()
# # proxy_auth is a tuple of (login, password) or None
# if proxy_auth is not None:
# # add login:password@ in front of proxy address
# proxy_handler = urllib.request.ProxyHandler({
# 'https': '{0}:{1}@{2}:{3}'.format(
# proxy_auth[0], proxy_auth[1], proxy[0], proxy[1])
# })
# else:
# # build proxy handler with no login/pass info
# proxy_handler = urllib.request.ProxyHandler({'https': "{0}:{1}".format(proxy[0], proxy[1])})
# opener = urllib.request.build_opener(proxy_handler)
# urllib.request.install_opener(opener)
# else:
# # This is a bug fix, explicitly disable possibly previously installed
# # opener with proxy, by urllib2.install_opener() a few lines above in code.
# # Now this explicitly disables proxy handler, "uninstalling" opener.
# # This is used in case when user had proxy enabled, so proxy_handler was already
# # installed globally, and then user had disabled the proxy, so we should clear that opener
# urllib.request.install_opener(None)
# # another option could be installing a default opener:
# # urllib2.install_opener(urllib2.build_opener())
request = urllib.request.Request(url, headers=headers, data=urllib.parse.urlencode(data).encode("utf-8") if data else None)
try:
return urllib.request.urlopen(request)
except urllib.error.HTTPError as error:
resp = requests.get(url, headers=headers, **kwargs)
resp.raise_for_status()
return resp
except requests.exceptions.HTTPError as error:
pyfalog.warning("HTTPError:")
pyfalog.warning(error)
if error.code == 404:
if error.response.status_code == 404:
raise RequestError()
elif error.code == 403:
elif error.response.status_code == 403:
raise AuthenticationError()
elif error.code >= 500:
elif error.response.status_code >= 500:
raise ServerError()
raise Error(error)
except urllib.error.URLError as error:
pyfalog.warning("Timed out or other URL error:")
pyfalog.warning(error)
if "timed out" in error.reason:
raise TimeoutError()
else:
raise Error(error)
except requests.exceptions.Timeout:
raise TimeoutError()
except Exception as error:
raise Error(error)
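
The old urllib proxy plumbing is only commented out above, not ported. For reference, requests takes a per-call proxies mapping rather than a globally installed opener, so wiring NetworkSettings back in could look roughly like the sketch below; the host, port and credentials are placeholders and none of this is part of the commit. Worth noting as well: requests applies no default timeout of its own, so callers that want the new Timeout branch to fire generally need to pass timeout= through kwargs.

    import requests

    # placeholder proxy settings standing in for NetworkSettings.getProxySettings()
    # and getProxyAuthDetails(); not part of this commit
    host, port = "192.168.20.1", 3128
    auth = ("login", "password")          # or None when the proxy is unauthenticated

    if auth is not None:
        proxy_url = "http://{0}:{1}@{2}:{3}".format(auth[0], auth[1], host, port)
    else:
        proxy_url = "http://{0}:{1}".format(host, port)

    resp = requests.get("https://example.com/", proxies={"https": proxy_url}, timeout=10)
    resp.raise_for_status()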

View File

@@ -49,13 +49,13 @@ class CheckUpdateThread(threading.Thread):
try:
response = network.request('https://api.github.com/repos/pyfa-org/Pyfa/releases', network.UPDATE)
jsonResponse = json.loads(response.read())
jsonResponse = response.json()
jsonResponse.sort(
key=lambda x: calendar.timegm(dateutil.parser.parse(x['published_at']).utctimetuple()),
reverse=True
)
for release in jsonResponse:
for release in jsonResponse[:5]:
rVersion = Version(release['tag_name'])
cVersion = Version(config.version)
@@ -73,8 +73,8 @@ class CheckUpdateThread(threading.Thread):
if rVersion > cVersion:
wx.CallAfter(self.callback, release, rVersion)
break
break
except Exception as e:
pyfalog.error("Caught exception in run")
pyfalog.error(e)
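
The update-checker fix boils down to response.json() replacing json.loads(response.read()), plus only the five most recent releases being considered. The sort key above is easy to sanity-check offline against a fabricated, GitHub-release-shaped payload:

    import calendar
    import dateutil.parser

    # fabricated releases, shaped like the GitHub API payload the thread consumes
    releases = [
        {"tag_name": "v2.1.0", "published_at": "2018-01-15T12:00:00Z"},
        {"tag_name": "v2.2.0", "published_at": "2018-03-01T09:30:00Z"},
        {"tag_name": "v2.0.0", "published_at": "2017-11-02T18:45:00Z"},
    ]
    releases.sort(
        key=lambda x: calendar.timegm(dateutil.parser.parse(x["published_at"]).utctimetuple()),
        reverse=True,
    )
    print([r["tag_name"] for r in releases[:5]])   # newest first: ['v2.2.0', 'v2.1.0', 'v2.0.0']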