Merge tag 'v2.0.1' into singularity (bring sisi code up to date with pyfa 2 / py3 / wx4)

# Conflicts:
#   eos/effects/techtwocommandburstbonus.py
@@ -32,7 +32,7 @@ class Attribute(object):

    @staticmethod
    def getAttributeInfo(identity):
        if isinstance(identity, (int, basestring)):
        if isinstance(identity, (int, str)):
            info = eos.db.getAttributeInfo(identity, eager=("icon", "unit"))
        elif isinstance(identity, (int, float)):
            id_ = int(identity)
@@ -16,7 +16,6 @@
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================

import sys
import copy
import itertools
@@ -34,10 +33,10 @@ import wx

import config
import eos.db
from service.eveapi import EVEAPIConnection, ParseXML
from service.esi import Esi

from eos.saveddata.implant import Implant as es_Implant
from eos.saveddata.character import Character as es_Character
from eos.saveddata.character import Character as es_Character, Skill
from eos.saveddata.module import Slot as es_Slot, Module as es_Module
from eos.saveddata.fighter import Fighter as es_Fighter

@@ -52,6 +51,9 @@ class CharacterImportThread(threading.Thread):
        self.callback = callback

    def run(self):
        wx.CallAfter(self.callback)
        # todo: Fix character import (don't need CCP SML anymore, only support evemon?)
        return
        paths = self.paths
        sCharacter = Character.getInstance()
        all5_character = es_Character("All 5", 5)
@@ -62,43 +64,34 @@ class CharacterImportThread(threading.Thread):

        for path in paths:
            try:
                # we try to parse api XML data first
                with open(path, mode='r') as charFile:
                    sheet = ParseXML(charFile)
                    char = sCharacter.new(sheet.name + " (imported)")
                    sCharacter.apiUpdateCharSheet(char.ID, sheet.skills, 0)
            except:
                # if it's not api XML data, try this
                # this is a horrible logic flow, but whatever
                try:
                    charFile = open(path, mode='r').read()
                    doc = minidom.parseString(charFile)
                    if doc.documentElement.tagName not in ("SerializableCCPCharacter", "SerializableUriCharacter"):
                        pyfalog.error("Incorrect EVEMon XML sheet")
                        raise RuntimeError("Incorrect EVEMon XML sheet")
                    name = doc.getElementsByTagName("name")[0].firstChild.nodeValue
                    securitystatus = float(doc.getElementsByTagName("securityStatus")[0].firstChild.nodeValue) or 0.0
                    skill_els = doc.getElementsByTagName("skill")
                    skills = []
                    for skill in skill_els:
                        if int(skill.getAttribute("typeID")) in all_skill_ids and (0 <= int(skill.getAttribute("level")) <= 5):
                            skills.append({
                                "typeID": int(skill.getAttribute("typeID")),
                                "level": int(skill.getAttribute("level")),
                            })
                        else:
                            pyfalog.error(
                                "Attempted to import unknown skill {0} (ID: {1}) (Level: {2})",
                                skill.getAttribute("name"),
                                skill.getAttribute("typeID"),
                                skill.getAttribute("level"),
                            )
                    char = sCharacter.new(name + " (EVEMon)")
                    sCharacter.apiUpdateCharSheet(char.ID, skills, securitystatus)
                except Exception, e:
                    pyfalog.error("Exception on character import:")
                    pyfalog.error(e)
                    continue
                charFile = open(path, mode='r').read()
                doc = minidom.parseString(charFile)
                if doc.documentElement.tagName not in ("SerializableCCPCharacter", "SerializableUriCharacter"):
                    pyfalog.error("Incorrect EVEMon XML sheet")
                    raise RuntimeError("Incorrect EVEMon XML sheet")
                name = doc.getElementsByTagName("name")[0].firstChild.nodeValue
                securitystatus = doc.getElementsByTagName("securityStatus")[0].firstChild.nodeValue or 0
                skill_els = doc.getElementsByTagName("skill")
                skills = []
                for skill in skill_els:
                    if int(skill.getAttribute("typeID")) in all_skill_ids and (0 <= int(skill.getAttribute("level")) <= 5):
                        skills.append({
                            "typeID": int(skill.getAttribute("typeID")),
                            "level": int(skill.getAttribute("level")),
                        })
                    else:
                        pyfalog.error(
                            "Attempted to import unknown skill {0} (ID: {1}) (Level: {2})",
                            skill.getAttribute("name"),
                            skill.getAttribute("typeID"),
                            skill.getAttribute("level"),
                        )
                char = sCharacter.new(name + " (EVEMon)")
                sCharacter.apiUpdateCharSheet(char.ID, skills, securitystatus)
            except Exception as e:
                pyfalog.error("Exception on character import:")
                pyfalog.error(e)
                continue

        wx.CallAfter(self.callback)
@@ -115,6 +108,7 @@ class SkillBackupThread(threading.Thread):
    def run(self):
        path = self.path
        sCharacter = Character.getInstance()

        if self.saveFmt == "xml" or self.saveFmt == "emp":
            backupData = sCharacter.exportXml()
        else:
@@ -122,14 +116,13 @@ class SkillBackupThread(threading.Thread):

        if self.saveFmt == "emp":
            with gzip.open(path, mode='wb') as backupFile:
                backupFile.write(backupData)
                backupFile.write(backupData.encode())
        else:
            with open(path, mode='w', encoding='utf-8') as backupFile:
                backupFile.write(backupData)

        wx.CallAfter(self.callback)


class Character(object):
    instance = None
    skillReqsDict = {}
@@ -147,18 +140,18 @@ class Character(object):
        self.all5()

    def exportText(self):
        data = u"Pyfa exported plan for \"" + self.skillReqsDict['charname'] + "\"\n"
        data += u"=" * 79 + u"\n"
        data += u"\n"
        item = u""
        data = "Pyfa exported plan for \"" + self.skillReqsDict['charname'] + "\"\n"
        data += "=" * 79 + "\n"
        data += "\n"
        item = ""
        try:
            for s in self.skillReqsDict['skills']:
                if item == "" or not item == s["item"]:
                    item = s["item"]
                    data += u"-" * 79 + "\n"
                    data += u"Skills required for {}:\n".format(item)
                data += u"{}{}: {}\n".format(" " * s["indent"], s["skill"], int(s["level"]))
            data += u"-" * 79 + "\n"
                    data += "-" * 79 + "\n"
                    data += "Skills required for {}:\n".format(item)
                data += "{}{}: {}\n".format(" " * s["indent"], s["skill"], int(s["level"]))
            data += "-" * 79 + "\n"
        except Exception:
            pass
@@ -268,7 +261,7 @@ class Character(object):
        for grp in cat.groups:
            if grp.published:
                groups.append((grp.ID, grp.name))
        return groups
        return sorted(groups, key=lambda x: x[1])

    @staticmethod
    def getSkills(groupID):
@@ -277,7 +270,7 @@ class Character(object):
        for skill in group.items:
            if skill.published is True:
                skills.append((skill.ID, skill.name))
        return skills
        return sorted(skills, key=lambda x: x[1])

    @staticmethod
    def getSkillsByName(text):
@@ -286,7 +279,7 @@ class Character(object):
        for skill in items:
            if skill.published is True:
                skills.append((skill.ID, skill.name))
        return skills
        return sorted(skills, key=lambda x: x[1])

    @staticmethod
    def setAlphaClone(char, cloneID):
@@ -345,6 +338,8 @@ class Character(object):

    @staticmethod
    def getApiDetails(charID):
        # todo: fix this (or get rid of?)
        return ("", "", "", [])
        char = eos.db.getCharacter(charID)
        if char.chars is not None:
            chars = json.loads(char.chars)
@@ -352,27 +347,24 @@ class Character(object):
            chars = None
        return char.apiID or "", char.apiKey or "", char.defaultChar or "", chars or []

    def apiEnabled(self, charID):
        id_, key, default, _ = self.getApiDetails(charID)
        return id_ is not "" and key is not "" and default is not ""
    @staticmethod
    def getSsoCharacter(charID):
        char = eos.db.getCharacter(charID)
        sso = char.getSsoCharacter(config.getClientSecret())
        return sso

    @staticmethod
    def apiCharList(charID, userID, apiKey):
    def setSsoCharacter(charID, ssoCharID):
        char = eos.db.getCharacter(charID)
        if ssoCharID is not None:
            sso = eos.db.getSsoCharacter(ssoCharID, config.getClientSecret())
            char.setSsoCharacter(sso, config.getClientSecret())
        else:
            char.setSsoCharacter(None, config.getClientSecret())
        eos.db.commit()

        char.apiID = userID
        char.apiKey = apiKey

        api = EVEAPIConnection()
        auth = api.auth(keyID=userID, vCode=apiKey)
        apiResult = auth.account.Characters()
        charList = map(lambda c: unicode(c.name), apiResult.characters)

        char.chars = json.dumps(charList)
        return charList

    def apiFetch(self, charID, charName, callback):
        thread = UpdateAPIThread(charID, charName, (self.apiFetchCallback, callback))
    def apiFetch(self, charID, callback):
        thread = UpdateAPIThread(charID, (self.apiFetchCallback, callback))
        thread.start()

    def apiFetchCallback(self, guiCallback, e=None):
@@ -393,7 +385,7 @@ class Character(object):
        if ifHigher and level < skill.level:
            return

        if isinstance(level, basestring) or level > 5 or level < 0:
        if isinstance(level, str) or level > 5 or level < 0:
            skill.setLevel(None, persist)
        else:
            skill.setLevel(level, persist)
@@ -457,7 +449,7 @@ class Character(object):
        return reqs

    def _checkRequirements(self, fit, char, subThing, reqs):
        for req, level in subThing.requiredSkills.iteritems():
        for req, level in subThing.requiredSkills.items():
            name = req.name
            ID = req.ID
            info = reqs.get(name)
@@ -470,35 +462,32 @@ class Character(object):


class UpdateAPIThread(threading.Thread):
    def __init__(self, charID, charName, callback):
    def __init__(self, charID, callback):
        threading.Thread.__init__(self)

        self.name = "CheckUpdate"
        self.callback = callback
        self.charID = charID
        self.charName = charName

    def run(self):
        try:
            dbChar = eos.db.getCharacter(self.charID)
            dbChar.defaultChar = self.charName
            char = eos.db.getCharacter(self.charID)

            api = EVEAPIConnection()
            auth = api.auth(keyID=dbChar.apiID, vCode=dbChar.apiKey)
            apiResult = auth.account.Characters()
            charID = None
            for char in apiResult.characters:
                if char.name == self.charName:
                    charID = char.characterID
                    break
            sEsi = Esi.getInstance()
            sChar = Character.getInstance()
            ssoChar = sChar.getSsoCharacter(char.ID)
            resp = sEsi.getSkills(ssoChar.ID)

            if charID is None:
                return
            # todo: check if alpha. if so, pop up a question if they want to apply it as alpha. Use threading events to set the answer?
            char.clearSkills()
            for skillRow in resp["skills"]:
                char.addSkill(Skill(char, skillRow["skill_id"], skillRow["trained_skill_level"]))

            sheet = auth.character(charID).CharacterSheet()
            charInfo = api.eve.CharacterInfo(characterID=charID)
            resp = sEsi.getSecStatus(ssoChar.ID)

            char.secStatus = resp['security_status']

            dbChar.apiUpdateCharSheet(sheet.skills, charInfo.securityStatus)
            self.callback[0](self.callback[1])
        except Exception:
        except Exception as ex:
            pyfalog.warn(ex)
            self.callback[0](self.callback[1], sys.exc_info())
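The character-service hunks above are largely mechanical Python 3 migration work: except Exception, e: becomes except Exception as e:, basestring checks become str, iteritems() becomes items(), and data written through a binary-mode gzip handle now has to be encoded to bytes first. A minimal sketch of that last point, assuming the exported plan is a str as in the SkillBackupThread hunk (the file names here are illustrative, not from the commit):

import gzip

backupData = "<plan>...</plan>"  # exportXml() returns a str under Python 3

# Python 2 accepted a str here; Python 3's binary-mode gzip file needs bytes,
# hence the backupData.encode() added in the hunk above.
with gzip.open("plan.emp", mode="wb") as backupFile:
    backupFile.write(backupData.encode())

# Plain-text formats are written through a text-mode handle instead.
with open("plan.xml", mode="w", encoding="utf-8") as backupFile:
    backupFile.write(backupData)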
@@ -15,9 +15,28 @@ all = {}
# init container to store the separate conversion packs in case we need them
packs = {}


prefix = __name__ + "."
for importer, modname, ispkg in pkgutil.iter_modules(__path__, prefix):

# load modules so they work both with and without pyinstaller
# from: https://github.com/webcomics/dosage/blob/master/dosagelib/loader.py
# see: https://github.com/pyinstaller/pyinstaller/issues/1905

# load modules using iter_modules()
# (should find all filters in normal build, but not pyinstaller)
module_names = [m[1] for m in pkgutil.iter_modules(__path__, prefix)]

# special handling for PyInstaller
importers = map(pkgutil.get_importer, __path__)
toc = set()
for i in importers:
    if hasattr(i, 'toc'):
        toc |= i.toc

for elm in toc:
    if elm.startswith(prefix):
        module_names.append(elm)

for modname in module_names:
    conversionPack = __import__(modname, fromlist="dummy")
    all.update(conversionPack.CONVERSIONS)
    modname_tail = modname.rsplit('.', 1)[-1]
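For context, each conversion pack module imported by the loop above is expected to expose a module-level CONVERSIONS dict, which is merged into the package-wide all mapping. A hypothetical pack might look like this (module and item names are made up for illustration):

# service/conversions/examplepack.py (hypothetical)

# Maps item names as they appear in imported fits to the names pyfa knows.
CONVERSIONS = {
    "Old Item Name": "Renamed Item",
}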
service/crest.py (deleted, 227 lines)
@@ -1,227 +0,0 @@
# noinspection PyPackageRequirements
import wx
from logbook import Logger
import threading
import copy
import uuid
import time

import eos.db
from eos.enum import Enum
from eos.saveddata.crestchar import CrestChar
import gui.globalEvents as GE
from service.settings import CRESTSettings
from service.server import StoppableHTTPServer, AuthHandler
from service.pycrest.eve import EVE

pyfalog = Logger(__name__)


class Servers(Enum):
    TQ = 0
    SISI = 1


class CrestModes(Enum):
    IMPLICIT = 0
    USER = 1


class Crest(object):
    clientIDs = {
        Servers.TQ : 'f9be379951c046339dc13a00e6be7704',
        Servers.SISI: 'af87365240d644f7950af563b8418bad'
    }

    # @todo: move this to settings
    clientCallback = 'http://localhost:6461'
    clientTest = True

    _instance = None

    @classmethod
    def getInstance(cls):
        if cls._instance is None:
            cls._instance = Crest()

        return cls._instance

    @classmethod
    def restartService(cls):
        # This is here to reseed pycrest values when changing preferences
        # We first stop the server in case one is running, as creating a new
        # instance doesn't do this.
        if cls._instance.httpd:
            cls._instance.stopServer()
        cls._instance = Crest()
        cls._instance.mainFrame.updateCrestMenus(type=cls._instance.settings.get('mode'))
        return cls._instance

    def __init__(self):
        """
        A note on login/logout events: the character login events happen
        whenever a character is logged in via the SSO, regardless of mode.
        However, the mode should be sent as an argument. Similarly,
        the Logout event happens whenever the character is deleted for either
        mode. The mode is sent as an argument, as well as the number of
        characters still in the cache (if USER mode)
        """

        self.settings = CRESTSettings.getInstance()
        self.scopes = ['characterFittingsRead', 'characterFittingsWrite']

        # these will be set when needed
        self.httpd = None
        self.state = None
        self.ssoTimer = None

        # Base EVE connection that is copied to all characters
        self.eve = EVE(
            client_id=self.settings.get('clientID') if self.settings.get(
                'mode') == CrestModes.USER else self.clientIDs.get(self.settings.get('server')),
            api_key=self.settings.get('clientSecret') if self.settings.get('mode') == CrestModes.USER else None,
            redirect_uri=self.clientCallback,
            testing=self.isTestServer
        )

        self.implicitCharacter = None

        # The database cache does not seem to be working for some reason. Use
        # this as a temporary measure
        self.charCache = {}

        # need these here to post events
        import gui.mainFrame  # put this here to avoid loop
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()

    @property
    def isTestServer(self):
        return self.settings.get('server') == Servers.SISI

    def delCrestCharacter(self, charID):
        char = eos.db.getCrestCharacter(charID)
        del self.charCache[char.ID]
        eos.db.remove(char)
        wx.PostEvent(self.mainFrame, GE.SsoLogout(type=CrestModes.USER, numChars=len(self.charCache)))

    def delAllCharacters(self):
        chars = eos.db.getCrestCharacters()
        for char in chars:
            eos.db.remove(char)
        self.charCache = {}
        wx.PostEvent(self.mainFrame, GE.SsoLogout(type=CrestModes.USER, numChars=0))

    def getCrestCharacters(self):
        chars = eos.db.getCrestCharacters()
        # I really need to figure out that DB cache problem, this is ridiculous
        chars2 = [self.getCrestCharacter(char.ID) for char in chars]
        return chars2

    def getCrestCharacter(self, charID):
        """
        Get character, and modify to include the eve connection
        """
        if self.settings.get('mode') == CrestModes.IMPLICIT:
            if self.implicitCharacter.ID != charID:
                raise ValueError("CharacterID does not match currently logged in character.")
            return self.implicitCharacter

        if charID in self.charCache:
            return self.charCache.get(charID)

        char = eos.db.getCrestCharacter(charID)
        if char and not hasattr(char, "eve"):
            char.eve = copy.deepcopy(self.eve)
            char.eve.temptoken_authorize(refresh_token=char.refresh_token)
        self.charCache[charID] = char
        return char

    def getFittings(self, charID):
        char = self.getCrestCharacter(charID)
        return char.eve.get('%scharacters/%d/fittings/' % (char.eve._authed_endpoint, char.ID))

    def postFitting(self, charID, json):
        # @todo: new fitting ID can be recovered from Location header,
        # ie: Location -> https://api-sisi.testeveonline.com/characters/1611853631/fittings/37486494/
        char = self.getCrestCharacter(charID)
        return char.eve.post('%scharacters/%d/fittings/' % (char.eve._authed_endpoint, char.ID), data=json)

    def delFitting(self, charID, fittingID):
        char = self.getCrestCharacter(charID)
        return char.eve.delete('%scharacters/%d/fittings/%d/' % (char.eve._authed_endpoint, char.ID, fittingID))

    def logout(self):
        """Logout of implicit character"""
        pyfalog.debug("Character logout")
        self.implicitCharacter = None
        wx.PostEvent(self.mainFrame, GE.SsoLogout(type=self.settings.get('mode')))

    def stopServer(self):
        pyfalog.debug("Stopping Server")
        self.httpd.stop()
        self.httpd = None

    def startServer(self):
        pyfalog.debug("Starting server")
        if self.httpd:
            self.stopServer()
            time.sleep(1)
            # we need this to ensure that the previous get_request finishes, and then the socket will close
        self.httpd = StoppableHTTPServer(('localhost', 6461), AuthHandler)

        self.serverThread = threading.Thread(target=self.httpd.serve, args=(self.handleLogin,))
        self.serverThread.name = "CRESTServer"
        self.serverThread.daemon = True
        self.serverThread.start()

        self.state = str(uuid.uuid4())
        return self.eve.auth_uri(scopes=self.scopes, state=self.state)

    def handleLogin(self, message):
        if not message:
            raise Exception("Could not parse out querystring parameters.")

        if message['state'][0] != self.state:
            pyfalog.warn("OAUTH state mismatch")
            raise Exception("OAUTH State Mismatch.")

        pyfalog.debug("Handling CREST login with: {0}", message)

        if 'access_token' in message:  # implicit
            eve = copy.deepcopy(self.eve)
            eve.temptoken_authorize(
                access_token=message['access_token'][0],
                expires_in=int(message['expires_in'][0])
            )
            self.ssoTimer = threading.Timer(int(message['expires_in'][0]), self.logout)
            self.ssoTimer.start()

            eve()
            info = eve.whoami()

            pyfalog.debug("Got character info: {0}", info)

            self.implicitCharacter = CrestChar(info['CharacterID'], info['CharacterName'])
            self.implicitCharacter.eve = eve
            # self.implicitCharacter.fetchImage()

            wx.PostEvent(self.mainFrame, GE.SsoLogin(type=CrestModes.IMPLICIT))
        elif 'code' in message:
            eve = copy.deepcopy(self.eve)
            eve.authorize(message['code'][0])
            eve()
            info = eve.whoami()

            pyfalog.debug("Got character info: {0}", info)

            # check if we have character already. If so, simply replace refresh_token
            char = self.getCrestCharacter(int(info['CharacterID']))
            if char:
                char.refresh_token = eve.refresh_token
            else:
                char = CrestChar(info['CharacterID'], info['CharacterName'], eve.refresh_token)
                char.eve = eve
            self.charCache[int(info['CharacterID'])] = char
            eos.db.save(char)

            wx.PostEvent(self.mainFrame, GE.SsoLogin(type=CrestModes.USER))
@@ -82,7 +82,7 @@ class DamagePattern(object):

    def exportPatterns(self):
        patterns = self.getDamagePatternList()
        for i in xrange(len(patterns) - 1, -1, -1):
        for i in range(len(patterns) - 1, -1, -1):
            if patterns[i].name in ("Uniform", "Selected Ammo"):
                del patterns[i]
service/esi.py (new file, 184 lines)
@@ -0,0 +1,184 @@
# noinspection PyPackageRequirements
import wx
from logbook import Logger
import threading
import time
import base64
import json
import config
import webbrowser

import eos.db
from eos.enum import Enum
from eos.saveddata.ssocharacter import SsoCharacter
from service.esiAccess import APIException, SsoMode
import gui.globalEvents as GE
from service.server import StoppableHTTPServer, AuthHandler
from service.settings import EsiSettings
from service.esiAccess import EsiAccess

import wx
from requests import Session

pyfalog = Logger(__name__)


class LoginMethod(Enum):
    SERVER = 0
    MANUAL = 1


class Esi(EsiAccess):
    _instance = None

    @classmethod
    def getInstance(cls):
        if cls._instance is None:
            cls._instance = Esi()

        return cls._instance

    def __init__(self):
        self.settings = EsiSettings.getInstance()

        super().__init__()

        # these will be set when needed
        self.httpd = None
        self.state = None
        self.ssoTimer = None

        self.implicitCharacter = None

        # until I can get around to making proper caching and modifications to said cache, store deleted fittings here
        # so that we can easily hide them in the fitting browser
        self.fittings_deleted = set()

        # need these here to post events
        import gui.mainFrame  # put this here to avoid loop
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()

    def delSsoCharacter(self, id):
        char = eos.db.getSsoCharacter(id, config.getClientSecret())

        # There is an issue in which the SSO character is not removed from any linked characters - a reference to the
        # sso character remains even though the SSO character is deleted which should have deleted the link. This is a
        # work around until we can figure out why. Manually delete SSOCharacter from all of its characters
        for x in char.characters:
            x._Character__ssoCharacters.remove(char)
        eos.db.remove(char)
        wx.PostEvent(self.mainFrame, GE.SsoLogout(charID=id))

    def getSsoCharacters(self):
        chars = eos.db.getSsoCharacters(config.getClientSecret())
        return chars

    def getSsoCharacter(self, id):
        char = eos.db.getSsoCharacter(id, config.getClientSecret())
        eos.db.commit()
        return char

    def getSkills(self, id):
        char = self.getSsoCharacter(id)
        resp = super().getSkills(char)
        return resp.json()

    def getSecStatus(self, id):
        char = self.getSsoCharacter(id)
        resp = super().getSecStatus(char)
        return resp.json()

    def getFittings(self, id):
        char = self.getSsoCharacter(id)
        resp = super().getFittings(char)
        return resp.json()

    def postFitting(self, id, json_str):
        # @todo: new fitting ID can be recovered from resp.data,
        char = self.getSsoCharacter(id)
        resp = super().postFitting(char, json_str)
        return resp.json()

    def delFitting(self, id, fittingID):
        char = self.getSsoCharacter(id)
        super().delFitting(char, fittingID)
        self.fittings_deleted.add(fittingID)

    def login(self):
        serverAddr = None
        # always start the local server if user is using client details. Otherwise, start only if they choose to do so.
        if self.settings.get('ssoMode') == SsoMode.CUSTOM or self.settings.get('loginMode') == LoginMethod.SERVER:
            serverAddr = self.startServer(6461 if self.settings.get('ssoMode') == SsoMode.CUSTOM else 0)  # random port, or if it's custom application, use a defined port
        uri = self.getLoginURI(serverAddr)
        webbrowser.open(uri)
        wx.PostEvent(self.mainFrame, GE.SsoLoggingIn(sso_mode=self.settings.get('ssoMode'), login_mode=self.settings.get('loginMode')))

    def stopServer(self):
        pyfalog.debug("Stopping Server")
        self.httpd.stop()
        self.httpd = None

    def startServer(self, port):  # todo: break this out into two functions: starting the server, and getting the URI
        pyfalog.debug("Starting server")

        # we need this to ensure that the previous get_request finishes, and then the socket will close
        if self.httpd:
            self.stopServer()
            time.sleep(1)

        self.httpd = StoppableHTTPServer(('localhost', port), AuthHandler)
        port = self.httpd.socket.getsockname()[1]
        self.serverThread = threading.Thread(target=self.httpd.serve, args=(self.handleServerLogin,))
        self.serverThread.name = "SsoCallbackServer"
        self.serverThread.daemon = True
        self.serverThread.start()

        return 'http://localhost:{}'.format(port)

    def handleLogin(self, message):

        # we already have authenticated stuff for the auto mode
        if (self.settings.get('ssoMode') == SsoMode.AUTO):
            ssoInfo = message['SSOInfo'][0]
            auth_response = json.loads(base64.b64decode(ssoInfo))
        else:
            # otherwise, we need to fetch the information
            auth_response = self.auth(message['code'][0])

        res = self._session.get(
            self.oauth_verify,
            headers=self.get_oauth_header(auth_response['access_token'])
        )
        if res.status_code != 200:
            raise APIException(
                self.oauth_verify,
                res.status_code,
                res.json()
            )
        cdata = res.json()
        print(cdata)

        currentCharacter = self.getSsoCharacter(cdata['CharacterName'])

        if currentCharacter is None:
            currentCharacter = SsoCharacter(cdata['CharacterID'], cdata['CharacterName'], config.getClientSecret())

        Esi.update_token(currentCharacter, auth_response)

        eos.db.save(currentCharacter)
        wx.PostEvent(self.mainFrame, GE.SsoLogin(character=currentCharacter))

    # get (endpoint, char, data?)

    def handleServerLogin(self, message):
        if not message:
            raise Exception("Could not parse out querystring parameters.")

        if message['state'][0] != self.state:
            pyfalog.warn("OAUTH state mismatch")
            raise Exception("OAUTH State Mismatch.")

        pyfalog.debug("Handling SSO login with: {0}", message)

        self.handleLogin(message)
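Elsewhere in this merge (see the UpdateAPIThread hunk earlier), the new Esi service is consumed through its singleton. A condensed sketch of that call pattern, assuming charID refers to an existing pyfa character that already has a linked SSO character (the Character import path is assumed, not shown in this diff):

from service.esi import Esi
from service.character import Character   # import path assumed

sEsi = Esi.getInstance()
sChar = Character.getInstance()

ssoChar = sChar.getSsoCharacter(charID)              # charID assumed to be known
skills = sEsi.getSkills(ssoChar.ID)                  # parsed JSON, e.g. {"skills": [...]}
secStatus = sEsi.getSecStatus(ssoChar.ID)["security_status"]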
service/esiAccess.py (new file, 284 lines)
@@ -0,0 +1,284 @@
'''

A lot of the inspiration (and straight up code copying!) for this class comes from EsiPy <https://github.com/Kyria/EsiPy>
Much of the credit goes to the maintainer of that package, Kyria <tweetfleet slack: @althalus>. The reasoning for no
longer using EsiPy was due to its reliance on pyswagger, which has caused a bit of a headache in how it operates on a
low level.

Eventually I'll rewrite this to be a bit cleaner and a bit more generic, but for now, it works!

'''

# noinspection PyPackageRequirements
from logbook import Logger
import uuid
import time
import config
import base64

import datetime
from eos.enum import Enum
from service.settings import EsiSettings

from requests import Session
from urllib.parse import urlencode, quote

pyfalog = Logger(__name__)

# todo: reimplement Caching for calls
# from esipy.cache import FileCache
# file_cache = FileCache(cache_path)
# cache_path = os.path.join(config.savePath, config.ESI_CACHE)
#
# if not os.path.exists(cache_path):
#     os.mkdir(cache_path)
#


scopes = [
    'esi-skills.read_skills.v1',
    'esi-fittings.read_fittings.v1',
    'esi-fittings.write_fittings.v1'
]


class SsoMode(Enum):
    AUTO = 0
    CUSTOM = 1


class APIException(Exception):
    """ Exception for SSO related errors """

    def __init__(self, url, code, json_response):
        self.url = url
        self.status_code = code
        self.response = json_response
        super(APIException, self).__init__(str(self))

    def __str__(self):
        if 'error' in self.response:
            return 'HTTP Error %s: %s' % (self.status_code,
                                          self.response['error'])
        elif 'message' in self.response:
            return 'HTTP Error %s: %s' % (self.status_code,
                                          self.response['message'])
        return 'HTTP Error %s' % (self.status_code)


class ESIEndpoints(Enum):
    CHAR = "/v4/characters/{character_id}/"
    CHAR_SKILLS = "/v4/characters/{character_id}/skills/"
    CHAR_FITTINGS = "/v1/characters/{character_id}/fittings/"
    CHAR_DEL_FIT = "/v1/characters/{character_id}/fittings/{fitting_id}/"


class EsiAccess(object):
    def __init__(self):
        self.settings = EsiSettings.getInstance()

        # session request stuff
        self._session = Session()
        self._session.headers.update({
            'Accept': 'application/json',
            'User-Agent': (
                'pyfa v{}'.format(config.version)
            )
        })

    @property
    def sso_url(self):
        if (self.settings.get("ssoMode") == SsoMode.CUSTOM):
            return "https://login.eveonline.com"
        return "https://www.pyfa.io"

    @property
    def esi_url(self):
        return "https://esi.tech.ccp.is"

    @property
    def oauth_verify(self):
        return '%s/verify/' % self.esi_url

    @property
    def oauth_authorize(self):
        return '%s/oauth/authorize' % self.sso_url

    @property
    def oauth_token(self):
        return '%s/oauth/token' % self.sso_url

    def getSkills(self, char):
        return self.get(char, ESIEndpoints.CHAR_SKILLS, character_id=char.characterID)

    def getSecStatus(self, char):
        return self.get(char, ESIEndpoints.CHAR, character_id=char.characterID)

    def getFittings(self, char):
        return self.get(char, ESIEndpoints.CHAR_FITTINGS, character_id=char.characterID)

    def postFitting(self, char, json_str):
        # @todo: new fitting ID can be recovered from resp.data,
        return self.post(char, ESIEndpoints.CHAR_FITTINGS, json_str, character_id=char.characterID)

    def delFitting(self, char, fittingID):
        return self.delete(char, ESIEndpoints.CHAR_DEL_FIT, character_id=char.characterID, fitting_id=fittingID)

    @staticmethod
    def update_token(char, tokenResponse):
        """ helper function to update token data from SSO response """
        char.accessToken = tokenResponse['access_token']
        char.accessTokenExpires = datetime.datetime.fromtimestamp(time.time() + tokenResponse['expires_in'])
        if 'refresh_token' in tokenResponse:
            char.refreshToken = config.cipher.encrypt(tokenResponse['refresh_token'].encode())

    def getLoginURI(self, redirect=None):
        self.state = str(uuid.uuid4())

        if (self.settings.get("ssoMode") == SsoMode.AUTO):
            args = {
                'state': self.state,
                'pyfa_version': config.version,
                'login_method': self.settings.get('loginMode'),
                'client_hash': config.getClientSecret()
            }

            if redirect is not None:
                args['redirect'] = redirect

            return '%s?%s' % (
                self.oauth_authorize,
                urlencode(args)
            )
        else:
            return '%s?response_type=%s&redirect_uri=%s&client_id=%s%s%s' % (
                self.oauth_authorize,
                'code',
                quote('http://localhost:6461', safe=''),
                self.settings.get('clientID'),
                '&scope=%s' % '+'.join(scopes) if scopes else '',
                '&state=%s' % self.state
            )

    def get_oauth_header(self, token):
        """ Return the Bearer Authorization header required in oauth calls

        :return: a dict with the authorization header
        """
        return {'Authorization': 'Bearer %s' % token}

    def get_refresh_token_params(self, refreshToken):
        """ Return the param object for the post() call to get the access_token
        from the refresh_token

        :param refreshToken: the refresh token
        :return: a dict with the url, params and header
        """
        if refreshToken is None:
            raise AttributeError('No refresh token is defined.')

        data = {
            'grant_type': 'refresh_token',
            'refresh_token': refreshToken,
        }

        if self.settings.get('ssoMode') == SsoMode.AUTO:
            # data is all we really need, the rest is handled automatically by pyfa.io
            return {
                'data': data,
                'url': self.oauth_token,
            }

        # otherwise, we need to make the token with the client keys
        return self.__make_token_request_parameters(data)

    def __get_token_auth_header(self):
        """ Return the Basic Authorization header required to get the tokens

        :return: a dict with the headers
        """
        # encode/decode for py2/py3 compatibility
        auth_b64 = "%s:%s" % (self.settings.get('clientID'), self.settings.get('clientSecret'))
        auth_b64 = base64.b64encode(auth_b64.encode('latin-1'))
        auth_b64 = auth_b64.decode('latin-1')

        return {'Authorization': 'Basic %s' % auth_b64}

    def __make_token_request_parameters(self, params):
        request_params = {
            'headers': self.__get_token_auth_header(),
            'data': params,
            'url': self.oauth_token,
        }

        return request_params

    def get_access_token_request_params(self, code):
        return self.__make_token_request_parameters(
            {
                'grant_type': 'authorization_code',
                'code': code,
            }
        )

    def auth(self, code):
        request_data = self.get_access_token_request_params(code)
        res = self._session.post(**request_data)
        if res.status_code != 200:
            raise Exception(
                request_data['url'],
                res.status_code,
                res.json()
            )
        json_res = res.json()
        return json_res

    def refresh(self, ssoChar):
        request_data = self.get_refresh_token_params(config.cipher.decrypt(ssoChar.refreshToken).decode())
        res = self._session.post(**request_data)
        if res.status_code != 200:
            raise APIException(
                request_data['url'],
                res.status_code,
                res.json()
            )
        json_res = res.json()
        self.update_token(ssoChar, json_res)
        return json_res

    def _before_request(self, ssoChar):
        if ssoChar.is_token_expired():
            pyfalog.info("Refreshing token for {}".format(ssoChar.characterName))
            self.refresh(ssoChar)

        if ssoChar.accessToken is not None:
            self._session.headers.update(self.get_oauth_header(ssoChar.accessToken))

    def _after_request(self, resp):
        if ("warning" in resp.headers):
            pyfalog.warn("{} - {}".format(resp.headers["warning"], resp.url))

        if resp.status_code >= 400:
            raise APIException(
                resp.url,
                resp.status_code,
                resp.json()
            )

        return resp

    def get(self, ssoChar, endpoint, *args, **kwargs):
        self._before_request(ssoChar)
        endpoint = endpoint.format(**kwargs)
        return self._after_request(self._session.get("{}{}".format(self.esi_url, endpoint)))

    def post(self, ssoChar, endpoint, json, *args, **kwargs):
        self._before_request(ssoChar)
        endpoint = endpoint.format(**kwargs)
        return self._after_request(self._session.post("{}{}".format(self.esi_url, endpoint), data=json))

    def delete(self, ssoChar, endpoint, *args, **kwargs):
        self._before_request(ssoChar)
        endpoint = endpoint.format(**kwargs)
        return self._after_request(self._session.delete("{}{}".format(self.esi_url, endpoint)))
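The request helpers at the bottom of EsiAccess are thin wrappers: the chosen ESIEndpoints template is filled in with keyword arguments, prefixed with esi_url, and the response goes through _after_request for error handling. Roughly, a skills fetch reduces to something like the following sketch (character id and token are placeholders, and RuntimeError stands in for the APIException raised by the real class):

from requests import Session

session = Session()

endpoint = "/v4/characters/{character_id}/skills/".format(character_id=93117647)
resp = session.get(
    "https://esi.tech.ccp.is" + endpoint,
    headers={"Authorization": "Bearer <access token>"},
)
if resp.status_code >= 400:
    # EsiAccess raises APIException(resp.url, resp.status_code, resp.json()) here
    raise RuntimeError(resp.url, resp.status_code)
print(resp.json())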
service/eveapi.py (1012 lines; diff suppressed because it is too large)
@@ -74,7 +74,7 @@ class Fit(object):
        "exportCharges": True,
        "openFitInNew": False,
        "priceSystem": "Jita",
        "priceSource": "eve-central.com",
        "priceSource": "eve-marketdata.com",
        "showShipBrowserTooltip": True,
        "marketSearchDelay": 250
    }
@@ -142,7 +142,7 @@ class Fit(object):
        except ValueError:
            ship = es_Citadel(eos.db.getItem(shipID))
        fit = FitType(ship)
        fit.name = name if name is not None else u"New %s" % fit.ship.item.name
        fit.name = name if name is not None else "New %s" % fit.ship.item.name
        fit.damagePattern = self.pattern
        fit.targetResists = self.targetResists
        fit.character = self.character
@@ -178,11 +178,11 @@ class Fit(object):
        # it will be refreshed first during the projected loop and throw an
        # error during the command loop
        refreshFits = set()
        for projection in fit.projectedOnto.values():
        for projection in list(fit.projectedOnto.values()):
            if projection.victim_fit != fit and projection.victim_fit in eos.db.saveddata_session:  # GH issue #359
                refreshFits.add(projection.victim_fit)

        for booster in fit.boostedOnto.values():
        for booster in list(fit.boostedOnto.values()):
            if booster.boosted_fit != fit and booster.boosted_fit in eos.db.saveddata_session:  # GH issue #359
                refreshFits.add(booster.boosted_fit)

@@ -785,7 +785,7 @@ class Fit(object):
        total = fit.getNumSlots(fighter.slot)
        standardAttackActive = False
        for ability in fighter.abilities:
            if ability.effect.isImplemented and ability.effect.handlerName == u'fighterabilityattackm':
            if ability.effect.isImplemented and ability.effect.handlerName == 'fighterabilityattackm':
                # Activate "standard attack" if available
                ability.active = True
                standardAttackActive = True
@@ -793,8 +793,8 @@ class Fit(object):
            # Activate all other abilities (Neut, Web, etc) except propmods if no standard attack is active
            if ability.effect.isImplemented and \
                    standardAttackActive is False and \
                    ability.effect.handlerName != u'fighterabilitymicrowarpdrive' and \
                    ability.effect.handlerName != u'fighterabilityevasivemaneuvers':
                    ability.effect.handlerName != 'fighterabilitymicrowarpdrive' and \
                    ability.effect.handlerName != 'fighterabilityevasivemaneuvers':
                ability.active = True

        if used >= total:
@@ -1207,11 +1207,13 @@ class Fit(object):

    def recalc(self, fit):
        start_time = time()
        pyfalog.info(u"=" * 10 + u"recalc: {0}" + u"=" * 10, fit.name)
        if fit.factorReload is not self.serviceFittingOptions["useGlobalForceReload"]:
            fit.factorReload = self.serviceFittingOptions["useGlobalForceReload"]
        pyfalog.info("=" * 10 + "recalc: {0}" + "=" * 10, fit.name)


        fit.factorReload = self.serviceFittingOptions["useGlobalForceReload"]
        fit.clear()

        fit.calculateModifiedAttributes()

        pyfalog.info("=" * 10 + "recalc time: " + str(time() - start_time) + "=" * 10)
service/jargon/__init__.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# =============================================================================
# Copyright (C) 2018 Filip Sufitchi
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================

from .jargon import Jargon
from .loader import JargonLoader
service/jargon/defaults.yaml (new file, 91 lines)
@@ -0,0 +1,91 @@
1: I
2: II
aar: Ancillary Armor Repairer
ab: Afterburner
ac: Autocannon
am: Antimatter
anp: Adaptive Nano Plating
acr: Ancillary Current Router
arty: Artillery
asb: Ancillary Shield Booster
bcs: Ballistic Control System
bcu: Ballistic Control System
boosh: Micro Jump Field Generator
ccc: Capacitor Control Circuit
cn: Caldari Navy
cnam: Caldari Navy Antimatter
cpr: Capacitor Power Relay
cpu: Co-Processor
coproc: Co-Processor
dc: Damage Control
dcu: Damage Control
dda: Drone Damage Amplifier
disco: Smartbomb
eanm: Energized Adaptive Nano Membrane
enam: Energized Adaptive Nano Membrane
eccm: Sensor Booster
fn: Federation Navy
fnam: Federation Navy Antimatter
gd: Guidance Disruptor
ham: Heavy Assault Missile
haml: Heavy Assault Missile Launcher
hm: Heavy Missile
hml: Heavy Missile Launcher
istab: Inertial Stabilizer
in: Imperial Navy
inmf: Imperial Navy Multifrequency
jam: ECM
lar: Large Armor Repairer
laar: Large Ancillary Armor Repairer
lasb: Large Ancillary Shield Booster
lm: Light Missile
lmjd: Large Micro Jump Drive
lml: Light Missile Launcher
lo: Liquid Ozone
lse: Large Shield Extender
maar: Medium Ancillary Armor Repairer
masb: Medium Ancillary Shield Booster
mf: Multifrequency
md: Guidance Disruptor
mjfg: Micro Jump Field Generator
mar: Medium Armor Repairer
mfs: Magnetic Field Stabilizer
mmjd: Medium Micro Jump Drive
mjd: Micro Jump Drive
mlu: Mining Laser Upgrade
msb: Medium Shield Booster
mse: Medium Shield Extender
mwd: Microwarpdrive
odi: Overdrive Injector
point: Warp Disruptor
pdu: Power Diagnostic Unit
pp: Phased Plasma
rcu: Reactor Control Unit
rf: Republic Fleet
rhml: Rapid Heavy Missile Launcher
rl: Rocket Launcher
rlml: Rapid Light Missile Launcher
rr: Remote # Hacky, for shield, armor, and cap
rtc: Remote Tracking Computer
rtl: Rapid Torpedo Launcher
sar: Small Armor Repairer
saar: Small Ancillary Armor Repairer
sasb: Small Ancillary Shield Booster
sb: Sensor Booster # Or smartbomb? :/
sebo: Sensor Booster
sd: Sensor Dampener
sg: Stasis Grappler
ssb: Small Shield Booster
sse: Small Shield Extender
spr: Shield Power Relay
sw: Stasis Webifier
tc: Tracking Computer
td: Tracking Disruptor
te: Tracking enhancer
tl: Remote Tracking Computer
tp: Target Painter
wcs: Warp Core Stabilizer
web: stasis
xl: X-Large
xlasb: X-Large Ancillary Shield Booster
xlsb: X-Large Shield Booster
service/jargon/header.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
# This is the default Pyfa jargon file.
#
# It is essentially a giant set of find/replace statements in order to translate
# abbreviated Eve community terms into more useful full terms. It is intended
# for translation of strings such as "haml 2" into "Heavy Assault Missile Launcher II".
#
# These abbreviations are not case-sensitive. If abbreviations collide, the
# later one is used.
#
# Abbreviations with spaces are not supported.
#
# Syntax:
#
# abbreviation: full name
#
# The default jargon definitions are stored in Pyfa itself as well, and are
# listed here for convenience when overriding them. To disable a jargon definition,
# set it as an empty string. For example, if you do not want "web" to return
# anything containing "stasis":
#
# web: ""
service/jargon/jargon.py (new file, 47 lines)
@@ -0,0 +1,47 @@
# =============================================================================
# Copyright (C) 2018 Filip Sufitchi
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================

import config
import pkg_resources


class Jargon(object):
    def __init__(self, rawdata: dict):
        self._rawdata = rawdata

        # copy the data to lowercase keys, ignore blank keys
        self._data = {str(k).lower(): v for k, v in rawdata.items() if k}

    def get(self, term: str) -> str:
        return self._data.get(term.lower())

    def get_rawdata(self) -> dict:
        return self._rawdata

    def apply(self, query):
        query_words = query.split()
        parts = []

        for word in query_words:
            replacement = self.get(word)
            if replacement:
                parts.append(replacement)
            else:
                parts.append(word)

        return ' '.join(parts)
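A quick illustration of how Jargon.apply behaves with a couple of the default mappings from defaults.yaml (word-by-word, case-insensitive replacement; unknown words pass through unchanged):

from service.jargon import Jargon

jargon = Jargon({"haml": "Heavy Assault Missile Launcher", 2: "II"})

print(jargon.apply("haml 2"))    # -> Heavy Assault Missile Launcher II
print(jargon.apply("HAML 2"))    # keys are lowercased, so this matches too
print(jargon.apply("vexor"))     # no mapping -> vexor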
service/jargon/loader.py (new file, 82 lines)
@@ -0,0 +1,82 @@
# =============================================================================
# Copyright (C) 2018 Filip Sufitchi
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================

import os
import config
import yaml

from .jargon import Jargon
from .resources import DEFAULT_DATA, DEFAULT_HEADER

JARGON_PATH = os.path.join(config.savePath, 'jargon.yaml')


class JargonLoader(object):
    def __init__(self, jargon_path: str):
        self.jargon_path = jargon_path
        self._jargon_mtime = 0  # type: int
        self._jargon = None  # type: Jargon

    def get_jargon(self) -> Jargon:
        if self._is_stale():
            self._load_jargon()
        return self._jargon

    def _is_stale(self):
        return (not self._jargon or not self._jargon_mtime or
                self._jargon_mtime != self._get_jargon_file_mtime())

    def _load_jargon(self):
        jargondata = yaml.load(DEFAULT_DATA)
        with open(JARGON_PATH) as f:
            userdata = yaml.load(f)
            jargondata.update(userdata)
        self._jargon_mtime = self._get_jargon_file_mtime()
        self._jargon = Jargon(jargondata)

    def _get_jargon_file_mtime(self) -> int:
        if not os.path.exists(self.jargon_path):
            return 0
        return os.stat(self.jargon_path).st_mtime

    @staticmethod
    def init_user_jargon(jargon_path):
        values = yaml.load(DEFAULT_DATA)

        ## Disabled for issue/1533; do not overwrite existing user config
        # if os.path.exists(jargon_path):
        #     with open(jargon_path) as f:
        #         custom_values = yaml.load(f)
        #         if custom_values:
        #             values.update(custom_values)

        if not os.path.exists(jargon_path):
            with open(jargon_path, 'w') as f:
                f.write(DEFAULT_HEADER)
                f.write('\n\n')
                yaml.dump(values, stream=f, default_flow_style=False)

    _instance = None

    @staticmethod
    def instance(jargon_path=None):
        if not JargonLoader._instance:
            jargon_path = jargon_path or JARGON_PATH
            JargonLoader._instance = JargonLoader(jargon_path)
        return JargonLoader._instance


JargonLoader.init_user_jargon(JARGON_PATH)
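The loader is meant to be used as a process-wide singleton (this is how SearchWorkerThread in the market-service hunks below consumes it); get_jargon() re-reads the user's jargon.yaml whenever its mtime changes, so edits are picked up without restarting pyfa. A minimal usage sketch:

from service.jargon import JargonLoader

loader = JargonLoader.instance()                  # lazily created; jargon.yaml is written on first import
expanded = loader.get_jargon().apply("rlml 2")    # -> "Rapid Light Missile Launcher II"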
service/jargon/resources.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# =============================================================================
# Copyright (C) 2018 Filip Sufitchi
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================

import pkg_resources

DEFAULT_DATA = pkg_resources.resource_string(__name__, 'defaults.yaml').decode()
DEFAULT_HEADER = pkg_resources.resource_string(__name__, 'header.yaml').decode()
@@ -20,7 +20,7 @@
import re
import threading
from logbook import Logger
import Queue
import queue

# noinspection PyPackageRequirements
import wx
@@ -30,22 +30,17 @@ import config
import eos.db
from service import conversions
from service.settings import SettingsProvider
from service.jargon import JargonLoader

from eos.gamedata import Category as types_Category, Group as types_Group, Item as types_Item, MarketGroup as types_MarketGroup, \
    MetaGroup as types_MetaGroup, MetaType as types_MetaType


try:
    from collections import OrderedDict
except ImportError:
    from utils.compat import OrderedDict
from collections import OrderedDict

pyfalog = Logger(__name__)

# Event which tells threads dependent on Market that it's initialized
mktRdy = threading.Event()


class ShipBrowserWorkerThread(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
@@ -53,7 +48,7 @@ class ShipBrowserWorkerThread(threading.Thread):
        self.name = "ShipBrowser"

    def run(self):
        self.queue = Queue.Queue()
        self.queue = queue.Queue()
        self.cache = {}
        # Wait for full market initialization (otherwise there's a high risk that
        # this thread will attempt to init Market which is already being inited)
@@ -88,7 +83,10 @@ class SearchWorkerThread(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.name = "SearchWorker"
        pyfalog.debug("Initialize SearchWorkerThread.")
        self.jargonLoader = JargonLoader.instance()
        # load the jargon while in an out-of-thread context, to spot any problems while in the main thread
        self.jargonLoader.get_jargon()
        self.jargonLoader.get_jargon().apply('test string')

    def run(self):
        self.cv = threading.Condition()
@@ -115,13 +113,25 @@ class SearchWorkerThread(threading.Thread):
            else:
                filter_ = None

            results = eos.db.searchItems(request, where=filter_,
                                         join=(types_Item.group, types_Group.category),
                                         eager=("icon", "group.category", "metaGroup", "metaGroup.parent"))

            jargon_request = self.jargonLoader.get_jargon().apply(request)


            results = []
            if len(request) >= config.minItemSearchLength:
                results = eos.db.searchItems(request, where=filter_,
                                             join=(types_Item.group, types_Group.category),
                                             eager=("icon", "group.category", "metaGroup", "metaGroup.parent"))

            jargon_results = []
            if len(jargon_request) >= config.minItemSearchLength:
                jargon_results = eos.db.searchItems(jargon_request, where=filter_,
                                                    join=(types_Item.group, types_Group.category),
                                                    eager=("icon", "group.category", "metaGroup", "metaGroup.parent"))

            items = set()
            # Return only published items, consult with Market service this time
            for item in results:
            for item in [*results, *jargon_results]:
                if sMkt.getPublicityByItem(item):
                    items.add(item)
            wx.CallAfter(callback, items)
@@ -259,7 +269,7 @@ class Market(object):
        }
        # Parent type name: set(item names)
        self.ITEMS_FORCEDMETAGROUP_R = {}
        for item, value in self.ITEMS_FORCEDMETAGROUP.items():
        for item, value in list(self.ITEMS_FORCEDMETAGROUP.items()):
            parent = value[1]
            if parent not in self.ITEMS_FORCEDMETAGROUP_R:
                self.ITEMS_FORCEDMETAGROUP_R[parent] = set()
@@ -352,7 +362,7 @@ class Market(object):
    def __makeRevDict(orig):
        """Creates reverse dictionary"""
        rev = {}
        for item, value in orig.items():
        for item, value in list(orig.items()):
            if value not in rev:
                rev[value] = set()
            rev[value].add(item)
@@ -366,7 +376,7 @@ class Market(object):
            item = identity
        elif isinstance(identity, int):
            item = eos.db.getItem(identity, *args, **kwargs)
        elif isinstance(identity, basestring):
        elif isinstance(identity, str):
            # We normally lookup with string when we are using import/export
            # features. Check against overrides
            identity = conversions.all.get(identity, identity)
@@ -387,7 +397,7 @@ class Market(object):
        """Get group by its ID or name"""
        if isinstance(identity, types_Group):
            return identity
        elif isinstance(identity, (int, float, basestring)):
        elif isinstance(identity, (int, float, str)):
            if isinstance(identity, float):
                identity = int(identity)
            # Check custom groups
@@ -406,7 +416,7 @@ class Market(object):
        """Get category by its ID or name"""
        if isinstance(identity, types_Category):
            category = identity
        elif isinstance(identity, (int, basestring)):
        elif isinstance(identity, (int, str)):
            category = eos.db.getCategory(identity, *args, **kwargs)
        elif isinstance(identity, float):
            id_ = int(identity)
@@ -420,7 +430,7 @@ class Market(object):
        """Get meta group by its ID or name"""
        if isinstance(identity, types_MetaGroup):
            metaGroup = identity
        elif isinstance(identity, (int, basestring)):
        elif isinstance(identity, (int, str)):
            metaGroup = eos.db.getMetaGroup(identity, *args, **kwargs)
        elif isinstance(identity, float):
            id_ = int(identity)
@@ -590,7 +600,7 @@ class Market(object):

    def getGroupsByCategory(self, cat):
        """Get groups from given category"""
        groups = set(filter(lambda grp: self.getPublicityByGroup(grp), cat.groups))
        groups = set([grp for grp in cat.groups if self.getPublicityByGroup(grp)])

        return groups

@@ -610,7 +620,7 @@ class Market(object):
        if hasattr(group, 'addItems'):
            groupItems.update(group.addItems)
        items = set(
            filter(lambda item: self.getPublicityByItem(item) and self.getGroupByItem(item) == group, groupItems))
            [item for item in groupItems if self.getPublicityByItem(item) and self.getGroupByItem(item) == group])
        return items

    def getItemsByMarketGroup(self, mg, vars_=True):
@@ -640,7 +650,7 @@ class Market(object):
        else:
            result = baseitms
        # Get rid of unpublished items
        result = set(filter(lambda item_: self.getPublicityByItem(item_), result))
        result = set([item_ for item_ in result if self.getPublicityByItem(item_)])
        return result

    def marketGroupHasTypesCheck(self, mg):
@@ -767,11 +777,11 @@ class Market(object):
    @staticmethod
    def directAttrRequest(items, attribs):
        try:
            itemIDs = tuple(map(lambda i: i.ID, items))
            itemIDs = tuple([i.ID for i in items])
        except TypeError:
            itemIDs = (items.ID,)
        try:
            attrIDs = tuple(map(lambda i: i.ID, attribs))
            attrIDs = tuple([i.ID for i in attribs])
        except TypeError:
            attrIDs = (attribs.ID,)
        info = {}
@@ -787,7 +797,7 @@ class Market(object):

    def filterItemsByMeta(self, items, metas):
        """Filter items by meta lvl"""
        filtered = set(filter(lambda item: self.getMetaGroupIdByItem(item) in metas, items))
        filtered = set([item for item in items if self.getMetaGroupIdByItem(item) in metas])
|
||||
return filtered
|
||||
|
||||
def getSystemWideEffects(self):
|
||||
|
||||
@@ -1 +1 @@
|
||||
__all__ = ['evecentral', 'evemarketdata']
|
||||
__all__ = ['evemarketer', 'evemarketdata']
|
||||
@@ -33,14 +33,14 @@ class EveMarketData(object):
|
||||
name = "eve-marketdata.com"
|
||||
|
||||
def __init__(self, types, system, priceMap):
|
||||
data = []
|
||||
data = {}
|
||||
baseurl = "https://eve-marketdata.com/api/item_prices.xml"
|
||||
data.append(("system_id", system)) # Use Jita for market
|
||||
data.append(("type_ids", ','.join(str(x) for x in types)))
|
||||
data["system_id"] = system # Use Jita for market
|
||||
data["type_ids"] = ','.join(str(x) for x in types)
|
||||
|
||||
network = Network.getInstance()
|
||||
data = network.request(baseurl, network.PRICES, data)
|
||||
xml = minidom.parse(data)
|
||||
data = network.request(baseurl, network.PRICES, params=data)
|
||||
xml = minidom.parseString(data.text)
|
||||
types = xml.getElementsByTagName("eve").item(0).getElementsByTagName("price")
|
||||
|
||||
# Cycle through all types we've got from request
|
||||
|
||||
@@ -30,19 +30,20 @@ pyfalog = Logger(__name__)
|
||||
|
||||
class EveCentral(object):
|
||||
|
||||
name = "eve-central.com"
|
||||
name = "evemarketer"
|
||||
|
||||
def __init__(self, types, system, priceMap):
|
||||
data = []
|
||||
baseurl = "https://eve-central.com/api/marketstat"
|
||||
data.append(("usesystem", system)) # Use Jita for market
|
||||
data = {}
|
||||
baseurl = "https://api.evemarketer.com/ec/marketstat"
|
||||
|
||||
data["usesystem"] = system # Use Jita for market
|
||||
data["typeid"] = set()
|
||||
for typeID in types: # Add all typeID arguments
|
||||
data.append(("typeid", typeID))
|
||||
data["typeid"].add(typeID)
|
||||
|
||||
network = Network.getInstance()
|
||||
data = network.request(baseurl, network.PRICES, data)
|
||||
xml = minidom.parse(data)
|
||||
data = network.request(baseurl, network.PRICES, params=data)
|
||||
xml = minidom.parseString(data.text)
|
||||
types = xml.getElementsByTagName("marketstat").item(0).getElementsByTagName("type")
|
||||
# Cycle through all types we've got from request
|
||||
for type_ in types:
|
||||
@@ -18,8 +18,7 @@
|
||||
# =============================================================================
|
||||
|
||||
|
||||
import urllib2
|
||||
import urllib
|
||||
import requests
|
||||
import socket
|
||||
from logbook import Logger
|
||||
|
||||
@@ -33,24 +32,24 @@ timeout = 3
|
||||
socket.setdefaulttimeout(timeout)
|
||||
|
||||
|
||||
class Error(StandardError):
|
||||
class Error(Exception):
|
||||
def __init__(self, msg=None):
|
||||
self.message = msg
|
||||
|
||||
|
||||
class RequestError(StandardError):
|
||||
class RequestError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class AuthenticationError(StandardError):
|
||||
class AuthenticationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ServerError(StandardError):
|
||||
class ServerError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TimeoutError(StandardError):
|
||||
class TimeoutError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@@ -58,7 +57,7 @@ class Network(object):
|
||||
# Request constants - every request must supply this, as it is checked if
|
||||
# enabled or not via settings
|
||||
ENABLED = 1
|
||||
EVE = 2 # Mostly API, but also covers CREST requests
|
||||
EVE = 2 # Mostly API, but also covers CREST requests. update: might be useless these days, this Network class needs to be reviewed
|
||||
PRICES = 4
|
||||
UPDATE = 8
|
||||
|
||||
@@ -71,7 +70,8 @@ class Network(object):
|
||||
|
||||
return cls._instance
|
||||
|
||||
def request(self, url, type, data=None):
|
||||
def request(self, url, type, *args, **kwargs):
|
||||
|
||||
# URL is required to be https as of right now
|
||||
# print "Starting request: %s\n\tType: %s\n\tPost Data: %s"%(url,type,data)
|
||||
|
||||
@@ -85,51 +85,49 @@ class Network(object):
|
||||
# Set up some things for the request
|
||||
versionString = "{0} {1} - {2} {3}".format(config.version, config.tag, config.expansionName,
|
||||
config.expansionVersion)
|
||||
headers = {"User-Agent": "pyfa {0} (Python-urllib2)".format(versionString)}
|
||||
headers = {"User-Agent": "pyfa {0} (python-requests {1})".format(versionString, requests.__version__)}
|
||||
# user-agent: pyfa 2.0.0b4 git -YC120.2 1.2 (python-requests 2.18.4)
|
||||
|
||||
proxy = NetworkSettings.getInstance().getProxySettings()
|
||||
if proxy is not None:
|
||||
# proxy is a tuple of (host, port): (u'192.168.20.1', 3128)
|
||||
proxy_auth = NetworkSettings.getInstance().getProxyAuthDetails()
|
||||
# proxy_auth is a tuple of (login, password) or None
|
||||
if proxy_auth is not None:
|
||||
# add login:password@ in front of proxy address
|
||||
proxy_handler = urllib2.ProxyHandler({
|
||||
'https': '{0}:{1}@{2}:{3}'.format(
|
||||
proxy_auth[0], proxy_auth[1], proxy[0], proxy[1])
|
||||
})
|
||||
else:
|
||||
# build proxy handler with no login/pass info
|
||||
proxy_handler = urllib2.ProxyHandler({'https': "{0}:{1}".format(proxy[0], proxy[1])})
|
||||
opener = urllib2.build_opener(proxy_handler)
|
||||
urllib2.install_opener(opener)
|
||||
else:
|
||||
# This is a bug fix, explicitly disable possibly previously installed
|
||||
# opener with proxy, by urllib2.install_opener() a few lines above in code.
|
||||
# Now this explicitly disables proxy handler, "uninstalling" opener.
|
||||
# This is used in case when user had proxy enabled, so proxy_handler was already
|
||||
# installed globally, and then user had disabled the proxy, so we should clear that opener
|
||||
urllib2.install_opener(None)
|
||||
# another option could be installing a default opener:
|
||||
# urllib2.install_opener(urllib2.build_opener())
|
||||
# python-requests supports setting proxy for request as parameter to get() / post()
|
||||
# in a form like: proxies = { 'http': 'http://10.10.1.10:3128', 'https': 'http://10.10.1.10:1080' }
|
||||
# or with HTTP Basic auth support: proxies = {'http': 'http://user:pass@10.10.1.10:3128/'}
|
||||
# then you do: requests.get('http://example.org', proxies=proxies)
|
||||
|
||||
proxies = None
|
||||
proxy_settings = NetworkSettings.getInstance().getProxySettings()
|
||||
# proxy_settings is a tuple of (host, port), like ('192.168.20.1', 3128), or None
|
||||
|
||||
if proxy_settings is not None:
|
||||
# form proxy address in format "http://host:port
|
||||
proxy_host_port = '{}:{}'.format(proxy_settings[0], proxy_settings[1])
|
||||
proxy_auth_details = NetworkSettings.getInstance().getProxyAuthDetails()
|
||||
# proxy_auth_details is a tuple of (login, password), or None
|
||||
user_pass = ''
|
||||
if proxy_auth_details is not None:
|
||||
# construct prefix in form "user:password@"
|
||||
user_pass = '{}:{}@'.format(proxy_auth_details[0], proxy_auth_details[1])
|
||||
proxies = {
|
||||
'http': 'http://' + user_pass + proxy_host_port,
|
||||
'https': 'http://' + user_pass + proxy_host_port
|
||||
}
|
||||
# final form: { 'http': 'http://user:password@host:port', ... }, or
|
||||
# { 'http': 'http://host:port', ... } if no auth info.
|
||||
|
||||
request = urllib2.Request(url, headers=headers, data=urllib.urlencode(data) if data else None)
|
||||
try:
|
||||
return urllib2.urlopen(request)
|
||||
except urllib2.HTTPError as error:
|
||||
resp = requests.get(url, headers=headers, proxies=proxies, **kwargs)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
except requests.exceptions.HTTPError as error:
|
||||
pyfalog.warning("HTTPError:")
|
||||
pyfalog.warning(error)
|
||||
if error.code == 404:
|
||||
if error.response.status_code == 404:
|
||||
raise RequestError()
|
||||
elif error.code == 403:
|
||||
elif error.response.status_code == 403:
|
||||
raise AuthenticationError()
|
||||
elif error.code >= 500:
|
||||
elif error.response.status_code >= 500:
|
||||
raise ServerError()
|
||||
raise Error(error)
|
||||
except urllib2.URLError as error:
|
||||
pyfalog.warning("Timed out or other URL error:")
|
||||
pyfalog.warning(error)
|
||||
if "timed out" in error.reason:
|
||||
raise TimeoutError()
|
||||
else:
|
||||
raise Error(error)
|
||||
except requests.exceptions.Timeout:
|
||||
raise TimeoutError()
|
||||
except Exception as error:
|
||||
raise Error(error)
|
||||
|
||||
148
service/port.py
148
service/port.py
@@ -25,6 +25,8 @@ import collections
|
||||
import json
|
||||
import threading
|
||||
import locale
|
||||
from bs4 import UnicodeDammit
|
||||
|
||||
|
||||
from codecs import open
|
||||
|
||||
@@ -49,16 +51,15 @@ from service.market import Market
|
||||
from utils.strfunctions import sequential_rep, replace_ltgt
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
if 'wxMac' not in wx.PlatformInfo or ('wxMac' in wx.PlatformInfo and wx.VERSION >= (3, 0)):
|
||||
from service.crest import Crest
|
||||
from service.esi import Esi
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class ESIExportException(Exception):
|
||||
pass
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from utils.compat import OrderedDict
|
||||
|
||||
EFT_SLOT_ORDER = [Slot.LOW, Slot.MED, Slot.HIGH, Slot.RIG, Slot.SUBSYSTEM, Slot.SERVICE]
|
||||
INV_FLAGS = {
|
||||
Slot.LOW: 11,
|
||||
@@ -170,9 +171,7 @@ class UserCancelException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class IPortUser:
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
class IPortUser(metaclass=ABCMeta):
|
||||
|
||||
ID_PULSE = 1
|
||||
# Pulse the progress bar
|
||||
@@ -271,7 +270,7 @@ class Port(object):
|
||||
fits are processed as well as when fits are being saved.
|
||||
returns
|
||||
"""
|
||||
defcodepage = locale.getpreferredencoding()
|
||||
|
||||
sFit = svcFit.getInstance()
|
||||
|
||||
fit_list = []
|
||||
@@ -283,63 +282,17 @@ class Port(object):
|
||||
PortProcessing.notify(iportuser, IPortUser.PROCESS_IMPORT | IPortUser.ID_UPDATE, msg)
|
||||
# wx.CallAfter(callback, 1, msg)
|
||||
|
||||
with open(path, "r") as file_:
|
||||
with open(path, "rb") as file_:
|
||||
srcString = file_.read()
|
||||
dammit = UnicodeDammit(srcString)
|
||||
srcString = dammit.unicode_markup
|
||||
|
||||
if len(srcString) == 0: # ignore blank files
|
||||
pyfalog.debug("File is blank.")
|
||||
continue
|
||||
|
||||
codec_found = None
|
||||
# If file had ANSI encoding, decode it to unicode using detection
|
||||
# of BOM header or if there is no header try default
|
||||
# codepage then fallback to utf-16, cp1252
|
||||
|
||||
if isinstance(srcString, str):
|
||||
savebom = None
|
||||
|
||||
encoding_map = (
|
||||
('\xef\xbb\xbf', 'utf-8'),
|
||||
('\xff\xfe\0\0', 'utf-32'),
|
||||
('\0\0\xfe\xff', 'UTF-32BE'),
|
||||
('\xff\xfe', 'utf-16'),
|
||||
('\xfe\xff', 'UTF-16BE'))
|
||||
|
||||
for bom, encoding in encoding_map:
|
||||
if srcString.startswith(bom):
|
||||
codec_found = encoding
|
||||
savebom = bom
|
||||
|
||||
if codec_found is None:
|
||||
pyfalog.info("Unicode BOM not found in file {0}.", path)
|
||||
attempt_codecs = (defcodepage, "utf-8", "utf-16", "cp1252")
|
||||
|
||||
for page in attempt_codecs:
|
||||
try:
|
||||
pyfalog.info("Attempting to decode file {0} using {1} page.", path, page)
|
||||
srcString = unicode(srcString, page)
|
||||
codec_found = page
|
||||
pyfalog.info("File {0} decoded using {1} page.", path, page)
|
||||
except UnicodeDecodeError:
|
||||
pyfalog.info("Error unicode decoding {0} from page {1}, trying next codec", path, page)
|
||||
else:
|
||||
break
|
||||
else:
|
||||
pyfalog.info("Unicode BOM detected in {0}, using {1} page.", path, codec_found)
|
||||
srcString = unicode(srcString[len(savebom):], codec_found)
|
||||
|
||||
else:
|
||||
# nasty hack to detect other transparent utf-16 loading
|
||||
if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
|
||||
codec_found = "utf-16"
|
||||
else:
|
||||
codec_found = "utf-8"
|
||||
|
||||
if codec_found is None:
|
||||
return False, "Proper codec could not be established for %s" % path
|
||||
|
||||
try:
|
||||
_, fitsImport = Port.importAuto(srcString, path, iportuser=iportuser, encoding=codec_found)
|
||||
_, fitsImport = Port.importAuto(srcString, path, iportuser=iportuser)
|
||||
fit_list += fitsImport
|
||||
except xml.parsers.expat.ExpatError:
|
||||
pyfalog.warning("Malformed XML in:\n{0}", path)
|
||||
@@ -389,24 +342,20 @@ class Port(object):
|
||||
"""Service which houses all import/export format functions"""
|
||||
|
||||
@classmethod
|
||||
def exportCrest(cls, ofit, callback=None):
|
||||
def exportESI(cls, ofit, callback=None):
|
||||
# A few notes:
|
||||
# max fit name length is 50 characters
|
||||
# Most keys are created simply because they are required, but bogus data is okay
|
||||
|
||||
nested_dict = lambda: collections.defaultdict(nested_dict)
|
||||
fit = nested_dict()
|
||||
sCrest = Crest.getInstance()
|
||||
sEsi = Esi.getInstance()
|
||||
sFit = svcFit.getInstance()
|
||||
|
||||
eve = sCrest.eve
|
||||
|
||||
# max length is 50 characters
|
||||
name = ofit.name[:47] + '...' if len(ofit.name) > 50 else ofit.name
|
||||
fit['name'] = name
|
||||
fit['ship']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, ofit.ship.item.ID)
|
||||
fit['ship']['id'] = ofit.ship.item.ID
|
||||
fit['ship']['name'] = ''
|
||||
fit['ship_type_id'] = ofit.ship.item.ID
|
||||
|
||||
# 2017/03/29 NOTE: "<" or "<" is Ignored
|
||||
# fit['description'] = "<pyfa:%d />" % ofit.ID
|
||||
@@ -434,9 +383,7 @@ class Port(object):
|
||||
slotNum[slot] += 1
|
||||
|
||||
item['quantity'] = 1
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, module.item.ID)
|
||||
item['type']['id'] = module.item.ID
|
||||
item['type']['name'] = ''
|
||||
item['type_id'] = module.item.ID
|
||||
fit['items'].append(item)
|
||||
|
||||
if module.charge and sFit.serviceFittingOptions["exportCharges"]:
|
||||
@@ -449,42 +396,37 @@ class Port(object):
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_CARGOBAY
|
||||
item['quantity'] = cargo.amount
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, cargo.item.ID)
|
||||
item['type']['id'] = cargo.item.ID
|
||||
item['type']['name'] = ''
|
||||
item['type_id'] = cargo.item.ID
|
||||
fit['items'].append(item)
|
||||
|
||||
for chargeID, amount in charges.items():
|
||||
for chargeID, amount in list(charges.items()):
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_CARGOBAY
|
||||
item['quantity'] = amount
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, chargeID)
|
||||
item['type']['id'] = chargeID
|
||||
item['type']['name'] = ''
|
||||
item['type_id'] = chargeID
|
||||
fit['items'].append(item)
|
||||
|
||||
for drone in ofit.drones:
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_DRONEBAY
|
||||
item['quantity'] = drone.amount
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, drone.item.ID)
|
||||
item['type']['id'] = drone.item.ID
|
||||
item['type']['name'] = ''
|
||||
item['type_id'] = drone.item.ID
|
||||
fit['items'].append(item)
|
||||
|
||||
for fighter in ofit.fighters:
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_FIGHTER
|
||||
item['quantity'] = fighter.amountActive
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, fighter.item.ID)
|
||||
item['type']['id'] = fighter.item.ID
|
||||
item['type']['name'] = fighter.item.name
|
||||
item['type_id'] = fighter.item.ID
|
||||
fit['items'].append(item)
|
||||
|
||||
if len(fit['items']) == 0:
|
||||
raise ESIExportException("Cannot export fitting: module list cannot be empty.")
|
||||
|
||||
return json.dumps(fit)
|
||||
|
||||
@classmethod
|
||||
def importAuto(cls, string, path=None, activeFit=None, iportuser=None, encoding=None):
|
||||
def importAuto(cls, string, path=None, activeFit=None, iportuser=None):
|
||||
# type: (basestring, basestring, object, IPortUser, basestring) -> object
|
||||
# Get first line and strip space symbols of it to avoid possible detection errors
|
||||
firstLine = re.split("[\n\r]+", string.strip(), maxsplit=1)[0]
|
||||
@@ -492,14 +434,11 @@ class Port(object):
|
||||
|
||||
# If XML-style start of tag encountered, detect as XML
|
||||
if re.search(RE_XML_START, firstLine):
|
||||
if encoding:
|
||||
return "XML", cls.importXml(string, iportuser, encoding)
|
||||
else:
|
||||
return "XML", cls.importXml(string, iportuser)
|
||||
return "XML", cls.importXml(string, iportuser)
|
||||
|
||||
# If JSON-style start, parse as CREST/JSON
|
||||
if firstLine[0] == '{':
|
||||
return "JSON", (cls.importCrest(string),)
|
||||
return "JSON", (cls.importESI(string),)
|
||||
|
||||
# If we've got source file name which is used to describe ship name
|
||||
# and first line contains something like [setup name], detect as eft config file
|
||||
@@ -517,7 +456,7 @@ class Port(object):
|
||||
return "DNA", (cls.importDna(string),)
|
||||
|
||||
@staticmethod
|
||||
def importCrest(str_):
|
||||
def importESI(str_):
|
||||
|
||||
sMkt = Market.getInstance()
|
||||
fitobj = Fit()
|
||||
@@ -529,13 +468,13 @@ class Port(object):
|
||||
fitobj.notes = refobj['description']
|
||||
|
||||
try:
|
||||
refobj = refobj['ship']['id']
|
||||
ship = refobj['ship_type_id']
|
||||
try:
|
||||
fitobj.ship = Ship(sMkt.getItem(refobj))
|
||||
fitobj.ship = Ship(sMkt.getItem(ship))
|
||||
except ValueError:
|
||||
fitobj.ship = Citadel(sMkt.getItem(refobj))
|
||||
fitobj.ship = Citadel(sMkt.getItem(ship))
|
||||
except:
|
||||
pyfalog.warning("Caught exception in importCrest")
|
||||
pyfalog.warning("Caught exception in importESI")
|
||||
return None
|
||||
|
||||
items.sort(key=lambda k: k['flag'])
|
||||
@@ -543,7 +482,7 @@ class Port(object):
|
||||
moduleList = []
|
||||
for module in items:
|
||||
try:
|
||||
item = sMkt.getItem(module['type']['id'], eager="group.category")
|
||||
item = sMkt.getItem(module['type_id'], eager="group.category")
|
||||
if module['flag'] == INV_FLAG_DRONEBAY:
|
||||
d = Drone(item)
|
||||
d.amount = module['quantity']
|
||||
@@ -589,7 +528,7 @@ class Port(object):
|
||||
def importDna(string):
|
||||
sMkt = Market.getInstance()
|
||||
|
||||
ids = map(int, re.findall(r'\d+', string))
|
||||
ids = list(map(int, re.findall(r'\d+', string)))
|
||||
for id_ in ids:
|
||||
try:
|
||||
try:
|
||||
@@ -643,7 +582,7 @@ class Port(object):
|
||||
c.amount = int(amount)
|
||||
f.cargo.append(c)
|
||||
else:
|
||||
for i in xrange(int(amount)):
|
||||
for i in range(int(amount)):
|
||||
try:
|
||||
m = Module(item)
|
||||
except:
|
||||
@@ -832,11 +771,6 @@ class Port(object):
|
||||
except:
|
||||
return [] # empty list is expected
|
||||
|
||||
# If client didn't take care of encoding file contents into Unicode,
|
||||
# do it using fallback encoding ourselves
|
||||
if isinstance(contents, str):
|
||||
contents = unicode(contents, locale.getpreferredencoding())
|
||||
|
||||
fits = [] # List for fits
|
||||
fitIndices = [] # List for starting line numbers for each fit
|
||||
lines = re.split('[\n\r]+', contents) # Separate string into lines
|
||||
@@ -1020,10 +954,10 @@ class Port(object):
|
||||
return fits
|
||||
|
||||
@staticmethod
|
||||
def importXml(text, iportuser=None, encoding="utf-8"):
|
||||
def importXml(text, iportuser=None):
|
||||
# type: (basestring, IPortUser, basestring) -> list[eos.saveddata.fit.Fit]
|
||||
sMkt = Market.getInstance()
|
||||
doc = xml.dom.minidom.parseString(text.encode(encoding))
|
||||
doc = xml.dom.minidom.parseString(text)
|
||||
# NOTE:
|
||||
# When L_MARK is included at this point,
|
||||
# Decided to be localized data
|
||||
@@ -1121,7 +1055,7 @@ class Port(object):
|
||||
also, it's OK to arrange modules randomly?
|
||||
"""
|
||||
offineSuffix = " /OFFLINE"
|
||||
export = u"[%s, %s]\n" % (fit.ship.item.name, fit.name)
|
||||
export = "[%s, %s]\n" % (fit.ship.item.name, fit.name)
|
||||
stuff = {}
|
||||
sFit = svcFit.getInstance()
|
||||
for module in fit.modules:
|
||||
@@ -1332,7 +1266,7 @@ class Port(object):
|
||||
charges[cargo.item.name] = 0
|
||||
charges[cargo.item.name] += cargo.amount
|
||||
|
||||
for name, qty in charges.items():
|
||||
for name, qty in list(charges.items()):
|
||||
hardware = doc.createElement("hardware")
|
||||
hardware.setAttribute("qty", "%d" % qty)
|
||||
hardware.setAttribute("slot", "cargo")
|
||||
|
||||
86
service/prereqsCheck.py
Normal file
86
service/prereqsCheck.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import sys
|
||||
import inspect
|
||||
import re
|
||||
import platform
|
||||
|
||||
version_block = ''
|
||||
|
||||
|
||||
class PreCheckException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class PreCheckMessage():
|
||||
def __init__(self, msg):
|
||||
# wx may not be installed, in which case print to console. For all other prechecks, should pop up a MessageDialog
|
||||
try:
|
||||
import wx
|
||||
app = wx.App(False)
|
||||
wx.MessageBox(msg, 'Error', wx.ICON_ERROR | wx.STAY_ON_TOP)
|
||||
app.MainLoop()
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
print(msg)
|
||||
|
||||
|
||||
def version_precheck():
|
||||
global version_block
|
||||
|
||||
version_block += "\nOS version: {}".format(platform.platform())
|
||||
version_block += "\nPython version: {}".format(sys.version)
|
||||
|
||||
if sys.version_info < (3, 6):
|
||||
msg = "pyfa requires python 3.6"
|
||||
raise PreCheckException(msg)
|
||||
|
||||
try:
|
||||
# the way that the version string is imported in wx is odd, causing us to have to split out the imports like this. :(
|
||||
from wx.__version__ import VERSION, VERSION_STRING
|
||||
|
||||
if VERSION[0] < 4:
|
||||
raise Exception()
|
||||
if VERSION[3] != '':
|
||||
if VERSION[3][0] == 'b' and int(VERSION[3][-1]) < 2:
|
||||
raise Exception()
|
||||
|
||||
import wx
|
||||
version_block += "\nwxPython version: {} ({})".format(VERSION_STRING, wx.wxWidgets_version)
|
||||
except:
|
||||
msg = "pyfa requires wxPython v4.0.0b2+. You can download wxPython from https://wxpython.org/pages/downloads/"
|
||||
raise PreCheckException(msg)
|
||||
|
||||
try:
|
||||
import sqlalchemy
|
||||
saMatch = re.match("([0-9]+).([0-9]+).([0-9]+)(([b\.])([0-9]+))?", sqlalchemy.__version__)
|
||||
version_block += "\nSQLAlchemy version: {}".format(sqlalchemy.__version__)
|
||||
|
||||
if (int(saMatch.group(1)), int(saMatch.group(2)), int(saMatch.group(3))) < (1, 0, 5):
|
||||
raise Exception()
|
||||
except:
|
||||
msg = "pyfa requires SQLAlchemy v1.0.5+. You can download SQLAlchemy from https://www.sqlalchemy.org/download.html"
|
||||
raise PreCheckException(msg)
|
||||
|
||||
try:
|
||||
import logbook
|
||||
logVersion = logbook.__version__.split('.')
|
||||
version_block += "\nLogbook version: {}".format(logbook.__version__)
|
||||
|
||||
if int(logVersion[0]) < 1:
|
||||
raise Exception()
|
||||
except:
|
||||
raise PreCheckException("pyfa requires Logbook version 1.0.0+. You can download Logbook from https://pypi.python.org/pypi/Logbook")
|
||||
|
||||
try:
|
||||
import requests
|
||||
version_block += "\nRequests version: {}".format(requests.__version__)
|
||||
except:
|
||||
msg = "pyfa requires the requests module. You can download requests from https://pypi.python.org/pypi/requests"
|
||||
raise PreCheckException(msg)
|
||||
|
||||
try:
|
||||
import dateutil
|
||||
version_block += "\nDateutil version: {}".format(dateutil.__version__)
|
||||
except:
|
||||
msg = "pyfa requires the python-dateutil module. You can download python-dateutil form https://pypi.python.org/pypi/python-dateutil"
|
||||
raise PreCheckException(msg)
|
||||
@@ -20,7 +20,7 @@
|
||||
|
||||
import time
|
||||
import threading
|
||||
import Queue
|
||||
import queue
|
||||
from xml.dom import minidom
|
||||
|
||||
from logbook import Logger
|
||||
@@ -201,7 +201,7 @@ class PriceWorkerThread(threading.Thread):
|
||||
def __init__(self):
|
||||
threading.Thread.__init__(self)
|
||||
self.name = "PriceWorker"
|
||||
self.queue = Queue.Queue()
|
||||
self.queue = queue.Queue()
|
||||
self.wait = {}
|
||||
pyfalog.debug("Initialize PriceWorkerThread.")
|
||||
|
||||
@@ -234,4 +234,4 @@ class PriceWorkerThread(threading.Thread):
|
||||
self.wait[itemID].append(callback)
|
||||
|
||||
|
||||
from service.marketSources import evecentral, evemarketdata # noqa: E402
|
||||
from service.marketSources import evemarketer, evemarketdata # noqa: E402
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
version = "0.0.1"
|
||||
@@ -1,24 +0,0 @@
|
||||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3: # pragma: no cover
|
||||
string_types = str,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
else: # pragma: no cover
|
||||
string_types = basestring,
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
|
||||
def text_(s, encoding='latin-1', errors='strict'): # pragma: no cover
|
||||
if isinstance(s, binary_type):
|
||||
return s.decode(encoding, errors)
|
||||
return s
|
||||
|
||||
|
||||
def bytes_(s, encoding='latin-1', errors='strict'): # pragma: no cover
|
||||
if isinstance(s, text_type):
|
||||
return s.encode(encoding, errors)
|
||||
return s
|
||||
@@ -1,2 +0,0 @@
|
||||
class APIException(Exception):
|
||||
pass
|
||||
@@ -1,318 +0,0 @@
|
||||
import base64
|
||||
from logbook import Logger
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import zlib
|
||||
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
import config
|
||||
from service.pycrest.compat import bytes_, text_
|
||||
from service.pycrest.errors import APIException
|
||||
|
||||
from urlparse import urlparse, urlunparse, parse_qsl
|
||||
|
||||
try:
|
||||
import pickle
|
||||
except ImportError: # pragma: no cover
|
||||
# noinspection PyPep8Naming
|
||||
import cPickle as pickle
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
cache_re = re.compile(r'max-age=([0-9]+)')
|
||||
|
||||
|
||||
class APICache(object):
|
||||
def put(self, key, value):
|
||||
raise NotImplementedError
|
||||
|
||||
def get(self, key):
|
||||
raise NotImplementedError
|
||||
|
||||
def invalidate(self, key):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class FileCache(APICache):
|
||||
def __init__(self, path):
|
||||
self._cache = {}
|
||||
self.path = path
|
||||
if not os.path.isdir(self.path):
|
||||
os.mkdir(self.path, 0o700)
|
||||
|
||||
def _getpath(self, key):
|
||||
return os.path.join(self.path, str(hash(key)) + '.cache')
|
||||
|
||||
def put(self, key, value):
|
||||
with open(self._getpath(key), 'wb') as f:
|
||||
f.write(zlib.compress(pickle.dumps(value, -1)))
|
||||
self._cache[key] = value
|
||||
|
||||
def get(self, key):
|
||||
if key in self._cache:
|
||||
return self._cache[key]
|
||||
|
||||
try:
|
||||
with open(self._getpath(key), 'rb') as f:
|
||||
return pickle.loads(zlib.decompress(f.read()))
|
||||
except IOError as ex:
|
||||
pyfalog.debug("IO error opening zip file. (May not exist yet)")
|
||||
if ex.errno == 2: # file does not exist (yet)
|
||||
return None
|
||||
else:
|
||||
raise
|
||||
|
||||
def invalidate(self, key):
|
||||
self._cache.pop(key, None)
|
||||
|
||||
try:
|
||||
os.unlink(self._getpath(key))
|
||||
except OSError as ex:
|
||||
pyfalog.debug("Caught exception in invalidate")
|
||||
pyfalog.debug(ex)
|
||||
if ex.errno == 2: # does not exist
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
class DictCache(APICache):
|
||||
def __init__(self):
|
||||
self._dict = {}
|
||||
|
||||
def get(self, key):
|
||||
return self._dict.get(key, None)
|
||||
|
||||
def put(self, key, value):
|
||||
self._dict[key] = value
|
||||
|
||||
def invalidate(self, key):
|
||||
self._dict.pop(key, None)
|
||||
|
||||
|
||||
class APIConnection(object):
|
||||
def __init__(self, additional_headers=None, user_agent=None, cache_dir=None, cache=None):
|
||||
# Set up a Requests Session
|
||||
session = requests.Session()
|
||||
if additional_headers is None:
|
||||
additional_headers = {}
|
||||
if user_agent is None:
|
||||
user_agent = "pyfa/{0} ({1})".format(config.version, config.tag)
|
||||
session.headers.update({
|
||||
"User-Agent": user_agent,
|
||||
"Accept": "application/json",
|
||||
})
|
||||
session.headers.update(additional_headers)
|
||||
session.mount('https://public-crest.eveonline.com', HTTPAdapter())
|
||||
self._session = session
|
||||
if cache:
|
||||
if isinstance(cache, APICache):
|
||||
self.cache = cache # Inherit from parents
|
||||
elif isinstance(cache, type):
|
||||
self.cache = cache() # Instantiate a new cache
|
||||
elif cache_dir:
|
||||
self.cache_dir = cache_dir
|
||||
self.cache = FileCache(self.cache_dir)
|
||||
else:
|
||||
self.cache = DictCache()
|
||||
|
||||
def get(self, resource, params=None):
|
||||
pyfalog.debug('Getting resource {0}', resource)
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
# remove params from resource URI (needed for paginated stuff)
|
||||
parsed_uri = urlparse(resource)
|
||||
qs = parsed_uri.query
|
||||
resource = urlunparse(parsed_uri._replace(query=''))
|
||||
prms = {}
|
||||
for tup in parse_qsl(qs):
|
||||
prms[tup[0]] = tup[1]
|
||||
|
||||
# params supplied to self.get() override parsed params
|
||||
for key in params:
|
||||
prms[key] = params[key]
|
||||
|
||||
# check cache
|
||||
key = (resource, frozenset(self._session.headers.items()), frozenset(prms.items()))
|
||||
cached = self.cache.get(key)
|
||||
if cached and cached['cached_until'] > time.time():
|
||||
pyfalog.debug('Cache hit for resource {0} (params={1})', resource, prms)
|
||||
return cached
|
||||
elif cached:
|
||||
pyfalog.debug('Cache stale for resource {0} (params={1})', resource, prms)
|
||||
self.cache.invalidate(key)
|
||||
else:
|
||||
pyfalog.debug('Cache miss for resource {0} (params={1})', resource, prms)
|
||||
|
||||
pyfalog.debug('Getting resource {0} (params={1})', resource, prms)
|
||||
res = self._session.get(resource, params=prms)
|
||||
if res.status_code != 200:
|
||||
raise APIException("Got unexpected status code from server: {0}" % res.status_code)
|
||||
|
||||
ret = res.json()
|
||||
|
||||
# cache result
|
||||
expires = self._get_expires(res)
|
||||
if expires > 0:
|
||||
ret.update({'cached_until': time.time() + expires})
|
||||
self.cache.put(key, ret)
|
||||
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def _get_expires(response):
|
||||
if 'Cache-Control' not in response.headers:
|
||||
return 0
|
||||
if any([s in response.headers['Cache-Control'] for s in ['no-cache', 'no-store']]):
|
||||
return 0
|
||||
match = cache_re.search(response.headers['Cache-Control'])
|
||||
if match:
|
||||
return int(match.group(1))
|
||||
return 0
|
||||
|
||||
|
||||
class EVE(APIConnection):
|
||||
def __init__(self, **kwargs):
|
||||
self.api_key = kwargs.pop('api_key', None)
|
||||
self.client_id = kwargs.pop('client_id', None)
|
||||
self.redirect_uri = kwargs.pop('redirect_uri', None)
|
||||
if kwargs.pop('testing', False):
|
||||
self._public_endpoint = "http://public-crest-sisi.testeveonline.com/"
|
||||
self._authed_endpoint = "https://api-sisi.testeveonline.com/"
|
||||
self._image_server = "https://image.testeveonline.com/"
|
||||
self._oauth_endpoint = "https://sisilogin.testeveonline.com/oauth"
|
||||
else:
|
||||
self._public_endpoint = "https://public-crest.eveonline.com/"
|
||||
self._authed_endpoint = "https://crest-tq.eveonline.com/"
|
||||
self._image_server = "https://image.eveonline.com/"
|
||||
self._oauth_endpoint = "https://login.eveonline.com/oauth"
|
||||
self._endpoint = self._public_endpoint
|
||||
self._cache = {}
|
||||
self._data = None
|
||||
self.token = None
|
||||
self.refresh_token = None
|
||||
self.expires = None
|
||||
APIConnection.__init__(self, **kwargs)
|
||||
|
||||
def __call__(self):
|
||||
if not self._data:
|
||||
self._data = APIObject(self.get(self._endpoint), self)
|
||||
return self._data
|
||||
|
||||
def __getattr__(self, item):
|
||||
return self._data.__getattr__(item)
|
||||
|
||||
def auth_uri(self, scopes=None, state=None):
|
||||
s = [] if not scopes else scopes
|
||||
grant_type = "token" if self.api_key is None else "code"
|
||||
|
||||
return "%s/authorize?response_type=%s&redirect_uri=%s&client_id=%s%s%s" % (
|
||||
self._oauth_endpoint,
|
||||
grant_type,
|
||||
self.redirect_uri,
|
||||
self.client_id,
|
||||
"&scope=%s" % '+'.join(s) if scopes else '',
|
||||
"&state=%s" % state if state else ''
|
||||
)
|
||||
|
||||
def _authorize(self, params):
|
||||
auth = text_(base64.b64encode(bytes_("%s:%s" % (self.client_id, self.api_key))))
|
||||
headers = {"Authorization": "Basic %s" % auth}
|
||||
res = self._session.post("%s/token" % self._oauth_endpoint, params=params, headers=headers)
|
||||
if res.status_code != 200:
|
||||
raise APIException("Got unexpected status code from API: %i" % res.status_code)
|
||||
return res.json()
|
||||
|
||||
def set_auth_values(self, res):
|
||||
self.__class__ = AuthedConnection
|
||||
self.token = res['access_token']
|
||||
self.refresh_token = res['refresh_token']
|
||||
self.expires = int(time.time()) + res['expires_in']
|
||||
self._endpoint = self._authed_endpoint
|
||||
self._session.headers.update({"Authorization": "Bearer %s" % self.token})
|
||||
|
||||
def authorize(self, code):
|
||||
res = self._authorize(params={"grant_type": "authorization_code", "code": code})
|
||||
self.set_auth_values(res)
|
||||
|
||||
def refr_authorize(self, refresh_token):
|
||||
res = self._authorize(params={"grant_type": "refresh_token", "refresh_token": refresh_token})
|
||||
self.set_auth_values(res)
|
||||
|
||||
def temptoken_authorize(self, access_token=None, expires_in=0, refresh_token=None):
|
||||
self.set_auth_values({'access_token': access_token,
|
||||
'refresh_token': refresh_token,
|
||||
'expires_in': expires_in})
|
||||
|
||||
|
||||
class AuthedConnection(EVE):
|
||||
def __call__(self):
|
||||
if not self._data:
|
||||
self._data = APIObject(self.get(self._endpoint), self)
|
||||
return self._data
|
||||
|
||||
def whoami(self):
|
||||
# if 'whoami' not in self._cache:
|
||||
# print "Setting this whoami cache"
|
||||
# self._cache['whoami'] = self.get("%s/verify" % self._oauth_endpoint)
|
||||
return self.get("%s/verify" % self._oauth_endpoint)
|
||||
|
||||
def get(self, resource, params=None):
|
||||
if self.refresh_token and int(time.time()) >= self.expires:
|
||||
self.refr_authorize(self.refresh_token)
|
||||
return super(self.__class__, self).get(resource, params)
|
||||
|
||||
def post(self, resource, data, params=None):
|
||||
if self.refresh_token and int(time.time()) >= self.expires:
|
||||
self.refr_authorize(self.refresh_token)
|
||||
return self._session.post(resource, data=data, params=params)
|
||||
|
||||
def delete(self, resource, params=None):
|
||||
if self.refresh_token and int(time.time()) >= self.expires:
|
||||
self.refr_authorize(self.refresh_token)
|
||||
return self._session.delete(resource, params=params)
|
||||
|
||||
|
||||
class APIObject(object):
|
||||
def __init__(self, parent, connection):
|
||||
self._dict = {}
|
||||
self.connection = connection
|
||||
for k, v in parent.items():
|
||||
if type(v) is dict:
|
||||
self._dict[k] = APIObject(v, connection)
|
||||
elif type(v) is list:
|
||||
self._dict[k] = self._wrap_list(v)
|
||||
else:
|
||||
self._dict[k] = v
|
||||
|
||||
def _wrap_list(self, list_):
|
||||
new = []
|
||||
for item in list_:
|
||||
if type(item) is dict:
|
||||
new.append(APIObject(item, self.connection))
|
||||
elif type(item) is list:
|
||||
new.append(self._wrap_list(item))
|
||||
else:
|
||||
new.append(item)
|
||||
return new
|
||||
|
||||
def __getattr__(self, item):
|
||||
if item in self._dict:
|
||||
return self._dict[item]
|
||||
raise AttributeError(item)
|
||||
|
||||
def __call__(self, **kwargs):
|
||||
# Caching is now handled by APIConnection
|
||||
if 'href' in self._dict:
|
||||
return APIObject(self.connection.get(self._dict['href'], params=kwargs), self.connection)
|
||||
else:
|
||||
return self
|
||||
|
||||
def __str__(self): # pragma: no cover
|
||||
return self._dict.__str__()
|
||||
|
||||
def __repr__(self): # pragma: no cover
|
||||
return self._dict.__repr__()
|
||||
|
||||
@@ -1,132 +0,0 @@
|
||||
import datetime
|
||||
import ssl
|
||||
import warnings
|
||||
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
try:
|
||||
from requests.packages import urllib3
|
||||
from requests.packages.urllib3.util import ssl_
|
||||
from requests.packages.urllib3.exceptions import (
|
||||
SystemTimeWarning,
|
||||
SecurityWarning,
|
||||
)
|
||||
from requests.packages.urllib3.packages.ssl_match_hostname import \
|
||||
match_hostname
|
||||
except:
|
||||
import urllib3
|
||||
from urllib3.util import ssl_
|
||||
from urllib3.exceptions import SystemTimeWarning, SecurityWarning
|
||||
from urllib3.packages.ssl_match_hostname import match_hostname
|
||||
|
||||
|
||||
class WeakCiphersHTTPSConnection(urllib3.connection.VerifiedHTTPSConnection): # pragma: no cover
|
||||
|
||||
# Python versions >=2.7.9 and >=3.4.1 do not (by default) allow ciphers
|
||||
# with MD5. Unfortunately, the CREST public server _only_ supports
|
||||
# TLS_RSA_WITH_RC4_128_MD5 (as of 5 Jan 2015). The cipher list below is
|
||||
# nearly identical except for allowing that cipher as a last resort (and
|
||||
# excluding export versions of ciphers).
|
||||
DEFAULT_CIPHERS = (
|
||||
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:'
|
||||
'ECDH+HIGH:DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:'
|
||||
'RSA+3DES:ECDH+RC4:DH+RC4:RSA+RC4:!aNULL:!eNULL:!EXP:-MD5:RSA+RC4+MD5'
|
||||
)
|
||||
|
||||
def __init__(self, host, port, ciphers=None, **kwargs):
|
||||
self.ciphers = ciphers if ciphers is not None else self.DEFAULT_CIPHERS
|
||||
super(WeakCiphersHTTPSConnection, self).__init__(host, port, **kwargs)
|
||||
|
||||
def connect(self):
|
||||
# Yup, copied in VerifiedHTTPSConnection.connect just to change the
|
||||
# default cipher list.
|
||||
|
||||
# Add certificate verification
|
||||
conn = self._new_conn()
|
||||
|
||||
resolved_cert_reqs = ssl_.resolve_cert_reqs(self.cert_reqs)
|
||||
resolved_ssl_version = ssl_.resolve_ssl_version(self.ssl_version)
|
||||
|
||||
hostname = self.host
|
||||
if getattr(self, '_tunnel_host', None):
|
||||
# _tunnel_host was added in Python 2.6.3
|
||||
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
|
||||
|
||||
self.sock = conn
|
||||
# Calls self._set_hostport(), so self.host is
|
||||
# self._tunnel_host below.
|
||||
self._tunnel()
|
||||
# Mark this connection as not reusable
|
||||
self.auto_open = 0
|
||||
|
||||
# Override the host with the one we're requesting data from.
|
||||
hostname = self._tunnel_host
|
||||
|
||||
is_time_off = datetime.date.today() < urllib3.connection.RECENT_DATE
|
||||
if is_time_off:
|
||||
warnings.warn((
|
||||
'System time is way off (before {0}). This will probably '
|
||||
'lead to SSL verification errors').format(
|
||||
urllib3.connection.RECENT_DATE),
|
||||
SystemTimeWarning
|
||||
)
|
||||
|
||||
# Wrap socket using verification with the root certs in
|
||||
# trusted_root_certs
|
||||
self.sock = ssl_.ssl_wrap_socket(
|
||||
conn,
|
||||
self.key_file,
|
||||
self.cert_file,
|
||||
cert_reqs=resolved_cert_reqs,
|
||||
ca_certs=self.ca_certs,
|
||||
server_hostname=hostname,
|
||||
ssl_version=resolved_ssl_version,
|
||||
ciphers=self.ciphers,
|
||||
)
|
||||
|
||||
if self.assert_fingerprint:
|
||||
ssl_.assert_fingerprint(self.sock.getpeercert(binary_form=True),
|
||||
self.assert_fingerprint)
|
||||
elif resolved_cert_reqs != ssl.CERT_NONE \
|
||||
and self.assert_hostname is not False:
|
||||
cert = self.sock.getpeercert()
|
||||
if not cert.get('subjectAltName', ()):
|
||||
warnings.warn((
|
||||
'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. '
|
||||
'This feature is being removed by major browsers and deprecated by RFC 2818. '
|
||||
'(See https://github.com/shazow/urllib3/issues/497 for details.)'),
|
||||
SecurityWarning
|
||||
)
|
||||
match_hostname(cert, self.assert_hostname or hostname)
|
||||
|
||||
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or self.assert_fingerprint is not None)
|
||||
|
||||
|
||||
class WeakCiphersHTTPSConnectionPool(urllib3.connectionpool.HTTPSConnectionPool):
|
||||
ConnectionCls = WeakCiphersHTTPSConnection
|
||||
|
||||
|
||||
class WeakCiphersPoolManager(urllib3.poolmanager.PoolManager):
|
||||
def _new_pool(self, scheme, host, port):
|
||||
if scheme == 'https':
|
||||
return WeakCiphersHTTPSConnectionPool(host, port, **self.connection_pool_kw)
|
||||
return super(WeakCiphersPoolManager, self)._new_pool(scheme, host, port)
|
||||
|
||||
|
||||
class WeakCiphersAdapter(HTTPAdapter):
|
||||
""""Transport adapter" that allows us to use TLS_RSA_WITH_RC4_128_MD5."""
|
||||
|
||||
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
|
||||
# Rewrite of the requests.adapters.HTTPAdapter.init_poolmanager method
|
||||
# to use WeakCiphersPoolManager instead of urllib3's PoolManager
|
||||
self._pool_connections = connections
|
||||
self._pool_maxsize = maxsize
|
||||
self._pool_block = block
|
||||
|
||||
self.poolmanager = WeakCiphersPoolManager(
|
||||
num_pools=connections,
|
||||
maxsize=maxsize,
|
||||
block=block,
|
||||
strict=True,
|
||||
**pool_kwargs
|
||||
)
|
||||
@@ -1,10 +1,9 @@
|
||||
import BaseHTTPServer
|
||||
import urlparse
|
||||
import http.server
|
||||
import urllib.parse
|
||||
import socket
|
||||
import threading
|
||||
from logbook import Logger
|
||||
|
||||
from service.settings import CRESTSettings
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
# noinspection PyPep8
|
||||
@@ -68,13 +67,13 @@ if (window.location.href.indexOf('step=2') == -1) {{
|
||||
|
||||
|
||||
# https://github.com/fuzzysteve/CREST-Market-Downloader/
|
||||
class AuthHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
class AuthHandler(http.server.BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
if self.path == "/favicon.ico":
|
||||
return
|
||||
|
||||
parsed_path = urlparse.urlparse(self.path)
|
||||
parts = urlparse.parse_qs(parsed_path.query)
|
||||
parsed_path = urllib.parse.urlparse(self.path)
|
||||
parts = urllib.parse.parse_qs(parsed_path.query)
|
||||
msg = ""
|
||||
|
||||
step2 = 'step' in parts
|
||||
@@ -82,20 +81,20 @@ class AuthHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
try:
|
||||
if step2:
|
||||
self.server.callback(parts)
|
||||
pyfalog.info("Successfully logged into CREST.")
|
||||
msg = "If you see this message then it means you should be logged into CREST. You may close this window and return to the application."
|
||||
pyfalog.info("Successfully logged into EVE.")
|
||||
msg = "If you see this message then it means you should be logged into EVE SSO. You may close this window and return to the application."
|
||||
else:
|
||||
# For implicit mode, we have to serve up the page which will take the hash and redirect useing a querystring
|
||||
# For implicit mode, we have to serve up the page which will take the hash and redirect using a querystring
|
||||
pyfalog.info("Processing response from EVE Online.")
|
||||
msg = "Processing response from EVE Online"
|
||||
except Exception, ex:
|
||||
pyfalog.error("Error in CREST AuthHandler")
|
||||
except Exception as ex:
|
||||
pyfalog.error("Error logging into EVE")
|
||||
pyfalog.error(ex)
|
||||
msg = "<h2>Error</h2>\n<p>{}</p>".format(ex.message)
|
||||
finally:
|
||||
self.send_response(200)
|
||||
self.end_headers()
|
||||
self.wfile.write(HTML.format(msg))
|
||||
self.wfile.write(str.encode(HTML.format(msg)))
|
||||
|
||||
if step2:
|
||||
# Only stop once if we've received something in the querystring
|
||||
@@ -106,13 +105,13 @@ class AuthHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
|
||||
|
||||
# http://code.activestate.com/recipes/425210-simple-stoppable-server-using-socket-timeout/
|
||||
class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
|
||||
class StoppableHTTPServer(http.server.HTTPServer):
|
||||
def server_bind(self):
|
||||
BaseHTTPServer.HTTPServer.server_bind(self)
|
||||
self.settings = CRESTSettings.getInstance()
|
||||
http.server.HTTPServer.server_bind(self)
|
||||
# self.settings = CRESTSettings.getInstance()
|
||||
|
||||
# Allow listening for x seconds
|
||||
sec = self.settings.get('timeout')
|
||||
sec = 120
|
||||
pyfalog.debug("Running server for {0} seconds", sec)
|
||||
|
||||
self.socket.settimeout(1)
|
||||
@@ -131,7 +130,7 @@ class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
|
||||
pass
|
||||
|
||||
def stop(self):
|
||||
pyfalog.warning("Setting CREST server to stop.")
|
||||
pyfalog.warning("Setting pyfa server to stop.")
|
||||
self.run = False
|
||||
|
||||
def handle_timeout(self):
|
||||
@@ -156,5 +155,5 @@ class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
|
||||
if __name__ == "__main__":
|
||||
httpd = StoppableHTTPServer(('', 6461), AuthHandler)
|
||||
t = threading.Thread(target=httpd.serve)
|
||||
raw_input("Press <RETURN> to stop server\n")
|
||||
input("Press <RETURN> to stop server\n")
|
||||
httpd.stop()
|
||||
|
||||
@@ -17,9 +17,11 @@
|
||||
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
|
||||
# =============================================================================
|
||||
|
||||
import cPickle
|
||||
import pickle
|
||||
import os.path
|
||||
import urllib2
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
|
||||
import config
|
||||
import eos.config
|
||||
@@ -91,7 +93,7 @@ class SettingsProvider(object):
|
||||
else:
|
||||
try:
|
||||
with open(canonical_path, "rb") as f:
|
||||
info = cPickle.load(f)
|
||||
info = pickle.load(f)
|
||||
for item in defaults:
|
||||
if item not in info:
|
||||
info[item] = defaults[item]
|
||||
@@ -103,7 +105,7 @@ class SettingsProvider(object):
|
||||
return settings_obj
|
||||
|
||||
def saveAll(self):
|
||||
for settings in self.settings.itervalues():
|
||||
for settings in self.settings.values():
|
||||
settings.save()
|
||||
|
||||
|
||||
@@ -124,7 +126,7 @@ class Settings(object):
|
||||
return
|
||||
# NOTE: with + open -> file handle auto close
|
||||
with open(self.location, "wb") as f:
|
||||
cPickle.dump(self.info, f, cPickle.HIGHEST_PROTOCOL)
|
||||
pickle.dump(self.info, f, pickle.HIGHEST_PROTOCOL)
|
||||
|
||||
def __getitem__(self, k):
|
||||
try:
|
||||
@@ -140,22 +142,22 @@ class Settings(object):
|
||||
return self.info.__iter__()
|
||||
|
||||
def iterkeys(self):
|
||||
return self.info.iterkeys()
|
||||
return iter(self.info.keys())
|
||||
|
||||
def itervalues(self):
|
||||
return self.info.itervalues()
|
||||
return iter(self.info.values())
|
||||
|
||||
def iteritems(self):
|
||||
return self.info.iteritems()
|
||||
return iter(self.info.items())
|
||||
|
||||
def keys(self):
|
||||
return self.info.keys()
|
||||
return list(self.info.keys())
|
||||
|
||||
def values(self):
|
||||
return self.info.values()
|
||||
return list(self.info.values())
|
||||
|
||||
def items(self):
|
||||
return self.info.items()
|
||||
return list(self.info.items())
|
||||
|
||||
|
||||
class NetworkSettings(object):
|
||||
@@ -235,7 +237,7 @@ class NetworkSettings(object):
|
||||
def autodetect():
|
||||
|
||||
proxy = None
|
||||
proxydict = urllib2.ProxyHandler().proxies
|
||||
proxydict = urllib.request.ProxyHandler().proxies
|
||||
|
||||
validPrefixes = ("http", "https")
|
||||
|
||||
@@ -350,32 +352,40 @@ class UpdateSettings(object):
|
||||
self.serviceUpdateSettings[type] = value
|
||||
|
||||
|
||||
class CRESTSettings(object):
|
||||
class EsiSettings(object):
|
||||
_instance = None
|
||||
|
||||
@classmethod
|
||||
def getInstance(cls):
|
||||
if cls._instance is None:
|
||||
cls._instance = CRESTSettings()
|
||||
cls._instance = EsiSettings()
|
||||
|
||||
return cls._instance
|
||||
|
||||
def __init__(self):
|
||||
# mode
|
||||
# 0 - Implicit authentication
|
||||
# 1 - User-supplied client details
|
||||
serviceCRESTDefaultSettings = {"mode": 0, "server": 0, "clientID": "", "clientSecret": "", "timeout": 60}
|
||||
# SSO Mode:
|
||||
# 0 - pyfa.io
|
||||
# 1 - custom application
|
||||
# LoginMode:
|
||||
# 0 - Server Start Up
|
||||
# 1 - User copy and paste data from website to pyfa
|
||||
defaults = {
|
||||
"ssoMode": 0,
|
||||
"loginMode": 0,
|
||||
"clientID": "",
|
||||
"clientSecret": "",
|
||||
"timeout": 60}
|
||||
|
||||
self.serviceCRESTSettings = SettingsProvider.getInstance().getSettings(
|
||||
"pyfaServiceCRESTSettings",
|
||||
serviceCRESTDefaultSettings
|
||||
self.settings = SettingsProvider.getInstance().getSettings(
|
||||
"pyfaServiceEsiSettings",
|
||||
defaults
|
||||
)
|
||||
|
||||
def get(self, type):
|
||||
return self.serviceCRESTSettings[type]
|
||||
return self.settings[type]
|
||||
|
||||
def set(self, type, value):
|
||||
self.serviceCRESTSettings[type] = value
|
||||
self.settings[type] = value
|
||||
|
||||
|
||||
class StatViewSettings(object):
|
||||
|
||||
@@ -30,6 +30,8 @@ import config
|
||||
from service.network import Network
|
||||
from service.settings import UpdateSettings
|
||||
from logbook import Logger
|
||||
from packaging.version import Version
|
||||
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
@@ -46,53 +48,37 @@ class CheckUpdateThread(threading.Thread):
|
||||
network = Network.getInstance()
|
||||
|
||||
try:
|
||||
response = network.request('https://api.github.com/repos/pyfa-org/Pyfa/releases', network.UPDATE)
|
||||
jsonResponse = json.loads(response.read())
|
||||
try:
|
||||
response = network.request('https://www.pyfa.io/update_check?pyfa_version={}&client_hash={}'.format(config.version, config.getClientSecret()), network.UPDATE)
|
||||
except Exception as e:
|
||||
response = network.request('https://api.github.com/repos/pyfa-org/Pyfa/releases', network.UPDATE)
|
||||
|
||||
jsonResponse = response.json()
|
||||
jsonResponse.sort(
|
||||
key=lambda x: calendar.timegm(dateutil.parser.parse(x['published_at']).utctimetuple()),
|
||||
reverse=True
|
||||
)
|
||||
|
||||
for release in jsonResponse[:5]:
|
||||
try:
|
||||
# Suppress pre releases
|
||||
if release['prerelease'] and self.settings.get('prerelease'):
|
||||
continue
|
||||
rVersion = Version(release['tag_name'])
|
||||
cVersion = Version(config.version)
|
||||
|
||||
# Handle use-case of updating to suppressed version
|
||||
if self.settings.get('version') == 'v' + config.version:
|
||||
self.settings.set('version', None)
|
||||
|
||||
# Suppress version
|
||||
if release['tag_name'] == self.settings.get('version'):
|
||||
break
|
||||
|
||||
# Set the release version that we will be comparing with.
|
||||
if release['prerelease']:
|
||||
rVersion = release['tag_name'].replace('singularity-', '', 1)
|
||||
else:
|
||||
rVersion = release['tag_name'].replace('v', '', 1)
|
||||
|
||||
if config.tag is 'git' and \
|
||||
not release['prerelease'] and \
|
||||
self.versiontuple(rVersion) >= self.versiontuple(config.version):
|
||||
wx.CallAfter(self.callback, release) # git (dev/Singularity) -> Stable
|
||||
break
|
||||
elif config.expansionName is not "Singularity":
|
||||
if release['prerelease']:
|
||||
wx.CallAfter(self.callback, release) # Stable -> Singularity
|
||||
break
|
||||
elif self.versiontuple(rVersion) > self.versiontuple(config.version):
|
||||
wx.CallAfter(self.callback, release) # Stable -> Stable
|
||||
break
|
||||
else:
|
||||
if release['prerelease'] and rVersion > config.expansionVersion:
|
||||
wx.CallAfter(self.callback, release) # Singularity -> Singularity
|
||||
break
|
||||
except Exception as e:
|
||||
# if we break at version checking, try the next version
|
||||
pyfalog.error(e)
|
||||
# Suppress pre releases if we're not already on a pre-release (if we are, we want to know about new ones)
|
||||
if not cVersion.is_prerelease and rVersion.is_prerelease and self.settings.get('prerelease'):
|
||||
continue
|
||||
|
||||
# Handle use-case of updating to suppressed version
|
||||
if self.settings.get('version') == 'v' + config.version:
|
||||
self.settings.set('version', None)
|
||||
|
||||
# Suppress version
|
||||
if release['tag_name'] == self.settings.get('version'):
|
||||
break
|
||||
|
||||
if rVersion > cVersion:
|
||||
wx.CallAfter(self.callback, release, rVersion)
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
pyfalog.error("Caught exception in run")
|
||||
pyfalog.error(e)
|
||||
|
||||
Reference in New Issue
Block a user