oh god this isn't gonna work
Merge branch 'ebag_importchanges' into test_import

Conflicts:
	config.py
	eos/db/saveddata/queries.py
	eos/effects/chargebonuswarfarecharge.py
	eos/effects/elitebonuscommandshipinformationhiddencs3.py
	eos/effects/elitebonuslogisticremotearmorrepairoptimalfalloff1.py
	eos/effects/energydestabilizationnew.py
	eos/effects/iceharvestingdroneoperationdurationbonus.py
	eos/effects/miningforemanstrengthbonus.py
	eos/effects/modulebonuswarfarelinkarmor.py
	eos/effects/modulebonuswarfarelinkinfo.py
	eos/effects/modulebonuswarfarelinkmining.py
	eos/effects/modulebonuswarfarelinkshield.py
	eos/effects/modulebonuswarfarelinkskirmish.py
	eos/effects/moduletitaneffectgenerator.py
	eos/effects/remotehullrepair.py
	eos/effects/rolebonusremotearmorrepairoptimalfalloff.py
	eos/effects/shipbonusforceauxiliarya4warfarelinksbonus.py
	eos/effects/shipmodesmallmissiledamagepostdiv.py
	eos/effects/structureenergyneutralizerfalloff.py
	eos/effects/structuremoduleeffectstasiswebifier.py
	eos/effects/structurerigmaxtargets.py
	eos/effects/subsystembonusamarrdefensiveinformationwarfarehidden.py
	eos/effects/subsystembonuscaldaridefensiveinformationwarfarehidden.py
	eos/effects/subsystembonusgallentedefensiveinformationwarfarehidden.py
	eos/effects/techtwocommandburstbonus.py
	eos/saveddata/fighter.py
	eos/saveddata/fit.py
	eos/saveddata/module.py
	eve.db
	gui/bitmapLoader.py
	gui/builtinContextMenus/itemStats.py
	gui/builtinStatsViews/miningyieldViewFull.py
	gui/builtinViewColumns/misc.py
	gui/builtinViews/__init__.py
	gui/builtinViews/fittingView.py
	gui/contextMenu.py
	gui/graphFrame.py
	gui/itemStats.py
	gui/mainFrame.py
	gui/marketBrowser.py
	service/__init__.py
	service/character.py
	service/fit.py
	service/port.py
	service/prefetch.py
	service/pycrest/eve.py
	service/settings.py
@@ -1,18 +0,0 @@
from service.market import Market
from service.fit import Fit
from service.attribute import Attribute
from service.character import Character
from service.damagePattern import DamagePattern
from service.targetResists import TargetResists
from service.settings import SettingsProvider
from service.update import Update
from service.price import Price
from service.network import Network
from service.eveapi import EVEAPIConnection, ParseXML
from service.implantSet import ImplantSets

import wx

if not 'wxMac' in wx.PlatformInfo or ('wxMac' in wx.PlatformInfo and wx.VERSION >= (3,0)):
from service.pycrest import EVE
from service.server import StoppableHTTPServer, AuthHandler
from service.crest import Crest

@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
@@ -15,12 +15,14 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import eos.db


class Attribute():
instance = None

@classmethod
def getInstance(cls):
if cls.instance is None:
@@ -32,6 +34,8 @@ class Attribute():
if isinstance(identity, (int, basestring)):
info = eos.db.getAttributeInfo(identity, eager=("icon", "unit"))
elif isinstance(identity, (int, float)):
id = int(identity)
info = eos.db.getAttributeInfo(id, eager=("icon", "unit"))
id_ = int(identity)
info = eos.db.getAttributeInfo(id_, eager=("icon", "unit"))
else:
info = None
return info

@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
@@ -15,11 +15,12 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import copy
import itertools
import json
import logging
import threading
from codecs import open
from xml.etree import ElementTree
@@ -28,16 +29,18 @@ import gzip

import wx

import eos.db
import eos.types
import service
import config
import logging
import eos.db
from service.eveapi import EVEAPIConnection, ParseXML

from eos.saveddata.implant import Implant as es_Implant
from eos.saveddata.character import Character as es_Character
from eos.saveddata.module import Slot as es_Slot, Module as es_Module
from eos.saveddata.fighter import Fighter as es_Fighter

logger = logging.getLogger(__name__)


class CharacterImportThread(threading.Thread):
def __init__(self, paths, callback):
threading.Thread.__init__(self)
@@ -57,8 +60,8 @@ class CharacterImportThread(threading.Thread):
try:
# we try to parse api XML data first
with open(path, mode='r') as charFile:
sheet = service.ParseXML(charFile)
char = sCharacter.new(sheet.name+" (imported)")
sheet = ParseXML(charFile)
char = sCharacter.new(sheet.name + " (imported)")
sCharacter.apiUpdateCharSheet(char.ID, sheet.skills)
except:
# if it's not api XML data, try this
@@ -93,6 +96,7 @@ class CharacterImportThread(threading.Thread):

wx.CallAfter(self.callback)


class SkillBackupThread(threading.Thread):
def __init__(self, path, saveFmt, activeFit, callback):
threading.Thread.__init__(self)
@@ -104,9 +108,6 @@ class SkillBackupThread(threading.Thread):
def run(self):
path = self.path
sCharacter = Character.getInstance()
sFit = service.Fit.getInstance()
fit = sFit.getFit(self.activeFit)
backupData = ""
if self.saveFmt == "xml" or self.saveFmt == "emp":
backupData = sCharacter.exportXml()
else:
@@ -116,11 +117,12 @@ class SkillBackupThread(threading.Thread):
with gzip.open(path, mode='wb') as backupFile:
backupFile.write(backupData)
else:
with open(path, mode='w',encoding='utf-8') as backupFile:
with open(path, mode='w', encoding='utf-8') as backupFile:
backupFile.write(backupData)

wx.CallAfter(self.callback)


class Character(object):
instance = None
skillReqsDict = {}
@@ -138,7 +140,7 @@ class Character(object):
self.all5()

def exportText(self):
data = "Pyfa exported plan for \""+self.skillReqsDict['charname']+"\"\n"
data = "Pyfa exported plan for \"" + self.skillReqsDict['charname'] + "\"\n"
data += "=" * 79 + "\n"
data += "\n"
item = ""
@@ -154,7 +156,7 @@ class Character(object):

def exportXml(self):
root = ElementTree.Element("plan")
root.attrib["name"] = "Pyfa exported plan for "+self.skillReqsDict['charname']
root.attrib["name"] = "Pyfa exported plan for " + self.skillReqsDict['charname']
root.attrib["revision"] = config.evemonMinVersion

sorts = ElementTree.SubElement(root, "sorting")
@@ -165,9 +167,9 @@ class Character(object):
skillsSeen = set()

for s in self.skillReqsDict['skills']:
skillKey = str(s["skillID"])+"::"+s["skill"]+"::"+str(int(s["level"]))
skillKey = str(s["skillID"]) + "::" + s["skill"] + "::" + str(int(s["level"]))
if skillKey in skillsSeen:
pass # Duplicate skills confuse EVEMon
pass  # Duplicate skills confuse EVEMon
else:
skillsSeen.add(skillKey)
entry = ElementTree.SubElement(root, "entry")
@@ -179,7 +181,7 @@ class Character(object):
notes = ElementTree.SubElement(entry, "notes")
notes.text = entry.attrib["skill"]

tree = ElementTree.ElementTree(root)
# tree = ElementTree.ElementTree(root)
data = ElementTree.tostring(root, 'utf-8')
prettydata = minidom.parseString(data).toprettyxml(indent=" ")

@@ -194,13 +196,13 @@ class Character(object):
thread.start()

def all0(self):
return eos.types.Character.getAll0()
return es_Character.getAll0()

def all0ID(self):
return self.all0().ID

def all5(self):
return eos.types.Character.getAll5()
return es_Character.getAll5()

def all5ID(self):
return self.all5().ID
@@ -246,7 +248,7 @@ class Character(object):
group = eos.db.getGroup(groupID)
skills = []
for skill in group.items:
if skill.published == True:
if skill.published is True:
skills.append((skill.ID, skill.name))
return skills

@@ -267,7 +269,7 @@ class Character(object):
return eos.db.getCharacter(charID).name

def new(self, name="New Character"):
char = eos.types.Character(name)
char = es_Character(name)
eos.db.save(char)
return char

@@ -295,8 +297,8 @@ class Character(object):
return (char.apiID or "", char.apiKey or "", char.defaultChar or "", chars or [])

def apiEnabled(self, charID):
id, key, default, _ = self.getApiDetails(charID)
return id is not "" and key is not "" and default is not ""
id_, key, default, _ = self.getApiDetails(charID)
return id_ is not "" and key is not "" and default is not ""

def apiCharList(self, charID, userID, apiKey):
char = eos.db.getCharacter(charID)
@@ -304,7 +306,7 @@ class Character(object):
char.apiID = userID
char.apiKey = apiKey

api = service.EVEAPIConnection()
api = EVEAPIConnection()
auth = api.auth(keyID=userID, vCode=apiKey)
apiResult = auth.account.Characters()
charList = map(lambda c: unicode(c.name), apiResult.characters)
@@ -316,7 +318,7 @@ class Character(object):
dbChar = eos.db.getCharacter(charID)
dbChar.defaultChar = charName

api = service.EVEAPIConnection()
api = EVEAPIConnection()
auth = api.auth(keyID=dbChar.apiID, vCode=dbChar.apiKey)
apiResult = auth.account.Characters()
charID = None
@@ -324,7 +326,7 @@ class Character(object):
if char.name == charName:
charID = char.characterID

if charID == None:
if charID is None:
return

sheet = auth.character(charID).CharacterSheet()
@@ -366,7 +368,7 @@ class Character(object):
logger.error("Trying to add implant to read-only character")
return

implant = eos.types.Implant(eos.db.getItem(itemID))
implant = es_Implant(eos.db.getItem(itemID))
char.implants.append(implant)
eos.db.commit()

@@ -380,20 +382,20 @@ class Character(object):
return char.implants

def checkRequirements(self, fit):
toCheck = []
# toCheck = []
reqs = {}
for thing in itertools.chain(fit.modules, fit.drones, fit.fighters, (fit.ship,)):
if isinstance(thing, eos.types.Module) and thing.slot == eos.types.Slot.RIG:
if isinstance(thing, es_Module) and thing.slot == es_Slot.RIG:
continue
for attr in ("item", "charge"):
if attr == "charge" and isinstance(thing, eos.types.Fighter):
if attr == "charge" and isinstance(thing, es_Fighter):
# Fighter Bombers are automatically charged with micro bombs.
# These have skill requirements attached, but aren't used in EVE.
continue
subThing = getattr(thing, attr, None)
subReqs = {}
if subThing is not None:
if isinstance(thing, eos.types.Fighter) and attr == "charge":
if isinstance(thing, es_Fighter) and attr == "charge":
continue
self._checkRequirements(fit, fit.character, subThing, subReqs)
if subReqs:

@@ -8,6 +8,7 @@ elsewhere (in which case can be accessed with packs[name])
"""

import pkgutil
import importlib

# init parent dict
all = {}
@@ -15,10 +16,9 @@ all = {}
# init container to store the separate conversion packs in case we need them
packs = {}


prefix = __name__ + "."
for importer, modname, ispkg in pkgutil.iter_modules(__path__, prefix):
conversionPack = __import__(modname, fromlist="dummy")
conversionPack = importlib.import_module(modname)
all.update(conversionPack.CONVERSIONS)
modname_tail = modname.rsplit('.', 1)[-1]
packs[modname_tail] = conversionPack.CONVERSIONS

@@ -42,4 +42,4 @@ CONVERSIONS = {
"Unit W-634's Modified Drone Control Unit": "Unit W-634's Modified Fighter Support Unit",
"Heavy Shadow Serpentis Stasis Grappler": "Shadow Serpentis Heavy Stasis Grappler",
"Heavy Domination Stasis Grappler": "Domination Heavy Stasis Grappler",
}
}

@@ -96,4 +96,4 @@ CONVERSIONS = {
"Micro S95a Remote Shield Booster": "'Micro' Remote Shield Booster",
"Large 'Atonement' Remote Shield Booster": "Large Murky Compact Remote Shield Booster",
"E50 Prototype Energy Vampire": "Medium Knave Scoped Energy Nosferatu",
}
}

@@ -7,4 +7,3 @@ CONVERSIONS = {
"Capital Coaxial Remote Armor Repairer Blueprint": "CONCORD Capital Remote Armor Repairer Blueprint",
"Capital Murky Remote Shield Booster Blueprint": "CONCORD Capital Remote Shield Booster Blueprint",
}


@@ -10,4 +10,4 @@ CONVERSIONS = {
"'Distributor' Guidance Disruptor I Blueprint": "'Distributor' Guidance Disruptor Blueprint",
"Highstroke Scoped Guidance Disruptor I": "Highstroke Scoped Guidance Disruptor",
"A-211 Enduring Guidance Disruptor I": "A-211 Enduring Guidance Disruptor",
}
}

@@ -118,7 +118,6 @@ CONVERSIONS = {
"'Full Duplex' Ballistic Targeting System": "'Full Duplex' Ballistic Control System",
"'Kindred' Stabilization Actuator I": "'Kindred' Gyrostabilizer",
"Process-Interruptive Warp Disruptor": "'Interruptive' Warp Disruptor",
"Multi Sensor Firewall": "'Firewall' Signal Amplifier",
"'Inception' Target Painter I": "'Inception' Target Painter",
"Citadel Torpedoes": "XL Torpedoes",
"'Shady' ECCM - Gravimetric I": "'Shady' Sensor Booster",
@@ -358,4 +357,4 @@ CONVERSIONS = {
"Wavelength Signal Enhancer I": "F-89 Compact Signal Amplifier",
"Type-D Attenuation Signal Augmentation": "F-89 Compact Signal Amplifier",
"Indirect Scanning Dampening Unit I": "Phased Muon Scoped Sensor Dampener",
}
}

@@ -9,23 +9,25 @@ import time
import eos.db
from eos.enum import Enum
from eos.types import CrestChar

import service

import gui.globalEvents as GE
from service.settings import CRESTSettings
from service.server import StoppableHTTPServer, AuthHandler
from service.pycrest.eve import EVE

logger = logging.getLogger(__name__)


class Servers(Enum):
TQ = 0
SISI = 1


class CrestModes(Enum):
IMPLICIT = 0
USER = 1

class Crest():

class Crest():
clientIDs = {
Servers.TQ: 'f9be379951c046339dc13a00e6be7704',
Servers.SISI: 'af87365240d644f7950af563b8418bad'
@@ -36,9 +38,10 @@ class Crest():
clientTest = True

_instance = None

@classmethod
def getInstance(cls):
if cls._instance == None:
if cls._instance is None:
cls._instance = Crest()

return cls._instance
@@ -64,7 +67,7 @@ class Crest():
characters still in the cache (if USER mode)
"""

self.settings = service.settings.CRESTSettings.getInstance()
self.settings = CRESTSettings.getInstance()
self.scopes = ['characterFittingsRead', 'characterFittingsWrite']

# these will be set when needed
@@ -73,8 +76,9 @@ class Crest():
self.ssoTimer = None

# Base EVE connection that is copied to all characters
self.eve = service.pycrest.EVE(
client_id=self.settings.get('clientID') if self.settings.get('mode') == CrestModes.USER else self.clientIDs.get(self.settings.get('server')),
self.eve = EVE(
client_id=self.settings.get('clientID') if self.settings.get(
'mode') == CrestModes.USER else self.clientIDs.get(self.settings.get('server')),
api_key=self.settings.get('clientSecret') if self.settings.get('mode') == CrestModes.USER else None,
redirect_uri=self.clientCallback,
testing=self.isTestServer
@@ -114,9 +118,9 @@ class Crest():
return chars2

def getCrestCharacter(self, charID):
'''
"""
Get character, and modify to include the eve connection
'''
"""
if self.settings.get('mode') == CrestModes.IMPLICIT:
if self.implicitCharacter.ID != charID:
raise ValueError("CharacterID does not match currently logged in character.")
@@ -134,16 +138,17 @@ class Crest():

def getFittings(self, charID):
char = self.getCrestCharacter(charID)
return char.eve.get('%scharacters/%d/fittings/'%(char.eve._authed_endpoint,char.ID))
return char.eve.get('%scharacters/%d/fittings/' % (char.eve._authed_endpoint, char.ID))

def postFitting(self, charID, json):
#@todo: new fitting ID can be recovered from Location header, ie: Location -> https://api-sisi.testeveonline.com/characters/1611853631/fittings/37486494/
# @todo: new fitting ID can be recovered from Location header,
# ie: Location -> https://api-sisi.testeveonline.com/characters/1611853631/fittings/37486494/
char = self.getCrestCharacter(charID)
return char.eve.post('%scharacters/%d/fittings/'%(char.eve._authed_endpoint,char.ID), data=json)
return char.eve.post('%scharacters/%d/fittings/' % (char.eve._authed_endpoint, char.ID), data=json)

def delFitting(self, charID, fittingID):
char = self.getCrestCharacter(charID)
return char.eve.delete('%scharacters/%d/fittings/%d/'%(char.eve._authed_endpoint, char.ID, fittingID))
return char.eve.delete('%scharacters/%d/fittings/%d/' % (char.eve._authed_endpoint, char.ID, fittingID))

def logout(self):
"""Logout of implicit character"""
@@ -160,8 +165,9 @@ class Crest():
logging.debug("Starting server")
if self.httpd:
self.stopServer()
time.sleep(1) # we need this to ensure that the previous get_request finishes, and then the socket will close
self.httpd = service.StoppableHTTPServer(('', 6461), service.AuthHandler)
time.sleep(1)
# we need this to ensure that the previous get_request finishes, and then the socket will close
self.httpd = StoppableHTTPServer(('', 6461), AuthHandler)
thread.start_new_thread(self.httpd.serve, (self.handleLogin,))

self.state = str(uuid.uuid4())
@@ -175,7 +181,7 @@ class Crest():
logger.warn("OAUTH state mismatch")
return

logger.debug("Handling CREST login with: %s"%message)
logger.debug("Handling CREST login with: %s" % message)

if 'access_token' in message: # implicit
eve = copy.deepcopy(self.eve)
@@ -193,7 +199,7 @@ class Crest():

self.implicitCharacter = CrestChar(info['CharacterID'], info['CharacterName'])
self.implicitCharacter.eve = eve
#self.implicitCharacter.fetchImage()
# self.implicitCharacter.fetchImage()

wx.PostEvent(self.mainFrame, GE.SsoLogin(type=CrestModes.IMPLICIT))
elif 'code' in message:

@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
@@ -15,19 +15,21 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import eos.db
import eos.types
import copy

from eos.db.saveddata.loadDefaultDatabaseValues import DefaultDatabaseValues
import eos.db
from eos.saveddata.damagePattern import DamagePattern as es_DamagePattern


class ImportError(Exception):
pass


class DamagePattern():
instance = None

@classmethod
def getInstance(cls):
if cls.instance is None:
@@ -42,7 +44,7 @@ class DamagePattern():
return eos.db.getDamagePattern(name)

def newPattern(self, name):
p = eos.types.DamagePattern(0, 0, 0, 0)
p = es_DamagePattern(0, 0, 0, 0)
p.name = name
eos.db.save(p)
return p
@@ -68,7 +70,7 @@ class DamagePattern():
for pattern in current:
lookup[pattern.name] = pattern

imports, num = eos.types.DamagePattern.importPatterns(text)
imports, num = es_DamagePattern.importPatterns(text)
for pattern in imports:
if pattern.name in lookup:
match = lookup[pattern.name]
@@ -81,7 +83,7 @@ class DamagePattern():
if lenImports == 0:
raise ImportError("No patterns found for import")
if lenImports != num:
raise ImportError("%d patterns imported from clipboard; %d had errors"%(num, num-lenImports))
raise ImportError("%d patterns imported from clipboard; %d had errors" % (num, num - lenImports))

def exportPatterns(self):
patterns = self.getDamagePatternList()
@@ -90,4 +92,4 @@ class DamagePattern():
del patterns[i]

patterns.sort(key=lambda p: p.name)
return eos.types.DamagePattern.exportPatterns(*patterns)
return es_DamagePattern.exportPatterns(*patterns)

@@ -1,4 +1,4 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
# eveapi - EVE Online API access
|
||||
#
|
||||
# Copyright (c)2007-2014 Jamie "Entity" van den Berge <jamie@hlekkir.com>
|
||||
@@ -24,7 +24,7 @@
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
# OTHER DEALINGS IN THE SOFTWARE
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
#
|
||||
# Version: 1.3.0 - 27 May 2014
|
||||
# - Added set_user_agent() module-level function to set the User-Agent header
|
||||
@@ -145,10 +145,10 @@
|
||||
# Requirements:
|
||||
# Python 2.4+
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
# This eveapi has been modified for pyfa.
|
||||
#
|
||||
# Specifically, the entire network request/response has been substituted for
|
||||
@@ -156,7 +156,7 @@
|
||||
#
|
||||
# Additionally, various other parts have been changed to support urllib2
|
||||
# responses instead of httplib
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
import urlparse
|
||||
@@ -166,7 +166,7 @@ from xml.parsers import expat
|
||||
from time import strptime
|
||||
from calendar import timegm
|
||||
|
||||
import service
|
||||
from service.network import Network
|
||||
|
||||
proxy = None
|
||||
proxySSL = False
|
||||
@@ -174,7 +174,9 @@ proxySSL = False
|
||||
_default_useragent = "eveapi.py/1.3"
|
||||
_useragent = None # use set_user_agent() to set this.
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def set_cast_func(func):
|
||||
"""Sets an alternative value casting function for the XML parser.
|
||||
@@ -185,25 +187,30 @@ def set_cast_func(func):
|
||||
global _castfunc
|
||||
_castfunc = _autocast if func is None else func
|
||||
|
||||
|
||||
def set_user_agent(user_agent_string):
|
||||
"""Sets a User-Agent for any requests sent by the library."""
|
||||
global _useragent
|
||||
_useragent = user_agent_string
|
||||
|
||||
|
||||
class Error(StandardError):
|
||||
class Error(Exception):
|
||||
def __init__(self, code, message):
|
||||
self.code = code
|
||||
self.args = (message.rstrip("."),)
|
||||
|
||||
def __unicode__(self):
|
||||
return u'%s [code=%s]' % (self.args[0], self.code)
|
||||
|
||||
|
||||
class RequestError(Error):
|
||||
pass
|
||||
|
||||
|
||||
class AuthenticationError(Error):
|
||||
pass
|
||||
|
||||
|
||||
class ServerError(Error):
|
||||
pass
|
||||
|
||||
@@ -304,18 +311,16 @@ def _ParseXML(response, fromContext, storeFunc):
|
||||
return result
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
# API Classes
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
_listtypes = (list, tuple, dict)
|
||||
_unspecified = []
|
||||
|
||||
class _Context(object):
|
||||
|
||||
class _Context(object):
|
||||
def __init__(self, root, path, parentDict, newKeywords=None):
|
||||
self._root = root or self
|
||||
self._path = path
|
||||
@@ -356,20 +361,18 @@ class _Context(object):
|
||||
|
||||
|
||||
class _AuthContext(_Context):
|
||||
|
||||
def character(self, characterID):
|
||||
# returns a copy of this connection object but for every call made
|
||||
# through it, it will add the folder "/char" to the url, and the
|
||||
# characterID to the parameters passed.
|
||||
return _Context(self._root, self._path + "/char", self.parameters, {"characterID":characterID})
|
||||
return _Context(self._root, self._path + "/char", self.parameters, {"characterID": characterID})
|
||||
|
||||
def corporation(self, characterID):
|
||||
# same as character except for the folder "/corp"
|
||||
return _Context(self._root, self._path + "/corp", self.parameters, {"characterID":characterID})
|
||||
return _Context(self._root, self._path + "/corp", self.parameters, {"characterID": characterID})
|
||||
|
||||
|
||||
class _RootContext(_Context):
|
||||
|
||||
def auth(self, **kw):
|
||||
if len(kw) == 2 and (("keyID" in kw and "vCode" in kw) or ("userID" in kw and "apiKey" in kw)):
|
||||
return _AuthContext(self._root, self._path, self.parameters, kw)
|
||||
@@ -395,9 +398,9 @@ class _RootContext(_Context):
|
||||
response = None
|
||||
|
||||
if response is None:
|
||||
network = service.Network.getInstance()
|
||||
network = Network.getInstance()
|
||||
|
||||
req = self._scheme+'://'+self._host+path
|
||||
req = self._scheme + '://' + self._host + path
|
||||
|
||||
response = network.request(req, network.EVE, kw)
|
||||
|
||||
@@ -413,8 +416,9 @@ class _RootContext(_Context):
|
||||
if retrieve_fallback:
|
||||
# implementor is handling fallbacks...
|
||||
try:
|
||||
return _ParseXML(response, True, store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
|
||||
except Error, e:
|
||||
return _ParseXML(response, True,
|
||||
store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
|
||||
except Error as e:
|
||||
response = retrieve_fallback(self._host, path, kw, reason=e)
|
||||
if response is not None:
|
||||
return response
|
||||
@@ -423,9 +427,11 @@ class _RootContext(_Context):
|
||||
# implementor is not handling fallbacks...
|
||||
return _ParseXML(response, True, store and (lambda obj: cache.store(self._host, path, kw, response, obj)))
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# XML Parser
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _autocast(key, value):
|
||||
# attempts to cast an XML string to the most probable type.
|
||||
@@ -452,11 +458,11 @@ def _autocast(key, value):
|
||||
# couldn't cast. return string unchanged.
|
||||
return value
|
||||
|
||||
|
||||
_castfunc = _autocast
|
||||
|
||||
|
||||
class _Parser(object):
|
||||
|
||||
def Parse(self, data, isStream=False):
|
||||
self.container = self.root = None
|
||||
self._cdata = False
|
||||
@@ -475,7 +481,6 @@ class _Parser(object):
|
||||
p.Parse(data, True)
|
||||
return self.root
|
||||
|
||||
|
||||
def tag_cdatasection_enter(self):
|
||||
# encountered an explicit CDATA tag.
|
||||
self._cdata = True
|
||||
@@ -501,21 +506,20 @@ class _Parser(object):
|
||||
if name == "rowset":
|
||||
# for rowsets, use the given name
|
||||
try:
|
||||
columns = attributes[attributes.index('columns')+1].replace(" ", "").split(",")
|
||||
columns = attributes[attributes.index('columns') + 1].replace(" ", "").split(",")
|
||||
except ValueError:
|
||||
# rowset did not have columns tag set (this is a bug in API)
|
||||
# columns will be extracted from first row instead.
|
||||
columns = []
|
||||
|
||||
try:
|
||||
priKey = attributes[attributes.index('key')+1]
|
||||
priKey = attributes[attributes.index('key') + 1]
|
||||
this = IndexRowset(cols=columns, key=priKey)
|
||||
except ValueError:
|
||||
this = Rowset(cols=columns)
|
||||
|
||||
|
||||
this._name = attributes[attributes.index('name')+1]
|
||||
this.__catch = "row" # tag to auto-add to rowset.
|
||||
this._name = attributes[attributes.index('name') + 1]
|
||||
this.__catch = "row" # tag to auto-add to rowset.
|
||||
else:
|
||||
this = Element()
|
||||
this._name = name
|
||||
@@ -528,7 +532,7 @@ class _Parser(object):
|
||||
if name != "eveapi":
|
||||
raise RuntimeError("Invalid API response")
|
||||
try:
|
||||
this.version = attributes[attributes.index("version")+1]
|
||||
this.version = attributes[attributes.index("version") + 1]
|
||||
except KeyError:
|
||||
raise RuntimeError("Invalid API response")
|
||||
self.root = this
|
||||
@@ -541,16 +545,18 @@ class _Parser(object):
|
||||
# such as rawQuantity in the assets lists.
|
||||
# In either case the tag is assumed to be correct and the rowset's
|
||||
# columns are overwritten with the tag's version, if required.
|
||||
numAttr = len(attributes)/2
|
||||
numAttr = len(attributes) / 2
|
||||
numCols = len(self.container._cols)
|
||||
if numAttr < numCols and (attributes[-2] == self.container._cols[-1]):
|
||||
# the row data is missing attributes that were defined in the rowset.
|
||||
# missing attributes' values will be set to None.
|
||||
fixed = []
|
||||
row_idx = 0; hdr_idx = 0; numAttr*=2
|
||||
row_idx = 0
|
||||
hdr_idx = 0
|
||||
numAttr *= 2
|
||||
for col in self.container._cols:
|
||||
if col == attributes[row_idx]:
|
||||
fixed.append(_castfunc(col, attributes[row_idx+1]))
|
||||
fixed.append(_castfunc(col, attributes[row_idx + 1]))
|
||||
row_idx += 2
|
||||
else:
|
||||
fixed.append(None)
|
||||
@@ -560,7 +566,9 @@ class _Parser(object):
|
||||
if not self.container._cols or (numAttr > numCols):
|
||||
# the row data contains more attributes than were defined.
|
||||
self.container._cols = attributes[0::2]
|
||||
self.container.append([_castfunc(attributes[i], attributes[i+1]) for i in xrange(0, len(attributes), 2)])
|
||||
self.container.append(
|
||||
[_castfunc(attributes[i], attributes[i + 1]) for i in xrange(0, len(attributes), 2)]
|
||||
)
|
||||
# </hack>
|
||||
|
||||
this._isrow = True
|
||||
@@ -611,7 +619,7 @@ class _Parser(object):
|
||||
|
||||
if this is self.root:
|
||||
del this._attributes
|
||||
#this.__dict__.pop("_attributes", None)
|
||||
# this.__dict__.pop("_attributes", None)
|
||||
return
|
||||
|
||||
# we're done with current tag, so we can pop it off. This means that
|
||||
@@ -651,7 +659,7 @@ class _Parser(object):
|
||||
e._name = this._name
|
||||
setattr(self.container, this._name, e)
|
||||
for i in xrange(0, len(attributes), 2):
|
||||
setattr(e, attributes[i], attributes[i+1])
|
||||
setattr(e, attributes[i], attributes[i + 1])
|
||||
else:
|
||||
# tag of the form: <tag />, treat as empty string.
|
||||
setattr(self.container, this._name, "")
|
||||
@@ -663,7 +671,7 @@ class _Parser(object):
|
||||
# multiples of some tag or attribute. Code below handles this case.
|
||||
elif isinstance(sibling, Rowset):
|
||||
# its doppelganger is a rowset, append this as a row to that.
|
||||
row = [_castfunc(attributes[i], attributes[i+1]) for i in xrange(0, len(attributes), 2)]
|
||||
row = [_castfunc(attributes[i], attributes[i + 1]) for i in xrange(0, len(attributes), 2)]
|
||||
row.extend([getattr(this, col) for col in attributes2])
|
||||
sibling.append(row)
|
||||
elif isinstance(sibling, Element):
|
||||
@@ -672,11 +680,13 @@ class _Parser(object):
|
||||
# into a Rowset, adding the sibling element and this one.
|
||||
rs = Rowset()
|
||||
rs.__catch = rs._name = this._name
|
||||
row = [_castfunc(attributes[i], attributes[i+1]) for i in xrange(0, len(attributes), 2)]+[getattr(this, col) for col in attributes2]
|
||||
row = [_castfunc(attributes[i], attributes[i + 1]) for i in xrange(0, len(attributes), 2)] + \
|
||||
[getattr(this, col) for col in attributes2]
|
||||
rs.append(row)
|
||||
row = [getattr(sibling, attributes[i]) for i in xrange(0, len(attributes), 2)]+[getattr(sibling, col) for col in attributes2]
|
||||
row = [getattr(sibling, attributes[i]) for i in xrange(0, len(attributes), 2)] + \
|
||||
[getattr(sibling, col) for col in attributes2]
|
||||
rs.append(row)
|
||||
rs._cols = [attributes[i] for i in xrange(0, len(attributes), 2)]+[col for col in attributes2]
|
||||
rs._cols = [attributes[i] for i in xrange(0, len(attributes), 2)] + [col for col in attributes2]
|
||||
setattr(self.container, this._name, rs)
|
||||
else:
|
||||
# something else must have set this attribute already.
|
||||
@@ -685,29 +695,31 @@ class _Parser(object):
|
||||
|
||||
# Now fix up the attributes and be done with it.
|
||||
for i in xrange(0, len(attributes), 2):
|
||||
this.__dict__[attributes[i]] = _castfunc(attributes[i], attributes[i+1])
|
||||
this.__dict__[attributes[i]] = _castfunc(attributes[i], attributes[i + 1])
|
||||
|
||||
return
|
||||
|
||||
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
# XML Data Containers
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
# The following classes are the various container types the XML data is
|
||||
# unpacked into.
|
||||
#
|
||||
# Note that objects returned by API calls are to be treated as read-only. This
|
||||
# is not enforced, but you have been warned.
|
||||
#-----------------------------------------------------------------------------
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
class Element(object):
|
||||
# Element is a namespace for attributes and nested tags
|
||||
def __str__(self):
|
||||
return "<Element '%s'>" % self._name
|
||||
|
||||
|
||||
_fmt = u"%s:%s".__mod__
|
||||
|
||||
|
||||
class Row(object):
|
||||
# A Row is a single database record associated with a Rowset.
|
||||
# The fields in the record are accessed as attributes by their respective
|
||||
@@ -750,7 +762,7 @@ class Row(object):
|
||||
try:
|
||||
return self._row[self._cols.index(this)]
|
||||
except:
|
||||
raise AttributeError, this
|
||||
raise AttributeError(this)
|
||||
|
||||
def __getitem__(self, this):
|
||||
return self._row[self._cols.index(this)]
|
||||
@@ -823,7 +835,6 @@ class Rowset(object):
|
||||
for line in self._rows:
|
||||
yield [line[x] for x in i]
|
||||
|
||||
|
||||
# -------------
|
||||
|
||||
def __init__(self, cols=None, rows=None):
|
||||
@@ -871,7 +882,6 @@ class Rowset(object):
|
||||
self._cols, self._rows = state
|
||||
|
||||
|
||||
|
||||
class IndexRowset(Rowset):
|
||||
# An IndexRowset is a Rowset that keeps an index on a column.
|
||||
#
|
||||
@@ -888,7 +898,7 @@ class IndexRowset(Rowset):
|
||||
if row is None:
|
||||
if default:
|
||||
return default[0]
|
||||
raise KeyError, key
|
||||
raise KeyError(key)
|
||||
return Row(self._cols, row)
|
||||
|
||||
# -------------
|
||||
@@ -939,28 +949,28 @@ class FilterRowset(object):
|
||||
# - Each key maps to a Rowset, containing only the rows where the value
|
||||
# of the column this FilterRowset was made on matches the key.
|
||||
|
||||
def __init__(self, cols=None, rows=None, key=None, key2=None, dict=None):
|
||||
if dict is not None:
|
||||
self._items = items = dict
|
||||
def __init__(self, cols=None, rows=None, key=None, key2=None, dict_=None):
|
||||
if dict_ is not None:
|
||||
self._items = items = dict_
|
||||
elif cols is not None:
|
||||
self._items = items = {}
|
||||
|
||||
idfield = cols.index(key)
|
||||
if not key2:
|
||||
for row in rows:
|
||||
id = row[idfield]
|
||||
if id in items:
|
||||
items[id].append(row)
|
||||
id_ = row[idfield]
|
||||
if id_ in items:
|
||||
items[id_].append(row)
|
||||
else:
|
||||
items[id] = [row]
|
||||
items[id_] = [row]
|
||||
else:
|
||||
idfield2 = cols.index(key2)
|
||||
for row in rows:
|
||||
id = row[idfield]
|
||||
if id in items:
|
||||
items[id][row[idfield2]] = row
|
||||
id_ = row[idfield]
|
||||
if id_ in items:
|
||||
items[id_][row[idfield2]] = row
|
||||
else:
|
||||
items[id] = {row[idfield2]:row}
|
||||
items[id_] = {row[idfield2]: row}
|
||||
|
||||
self._cols = cols
|
||||
self.key = key
|
||||
@@ -977,7 +987,7 @@ class FilterRowset(object):
|
||||
self.__iter__ = items.__iter__
|
||||
|
||||
def copy(self):
|
||||
return FilterRowset(self._cols[:], None, self.key, self.key2, dict=copy.deepcopy(self._items))
|
||||
return FilterRowset(self._cols[:], None, self.key, self.key2, dict_=copy.deepcopy(self._items))
|
||||
|
||||
def get(self, key, default=_unspecified):
|
||||
try:
|
||||
|
||||
service/fit.py
@@ -17,65 +17,29 @@
|
||||
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
import locale
|
||||
import copy
|
||||
import threading
|
||||
import logging
|
||||
import wx
|
||||
from codecs import open
|
||||
|
||||
import xml.parsers.expat
|
||||
|
||||
import eos.db
|
||||
import eos.types
|
||||
|
||||
from eos.types import State, Slot
|
||||
|
||||
from service.market import Market
|
||||
from service.damagePattern import DamagePattern
|
||||
from eos.saveddata.booster import Booster as es_Booster
|
||||
from eos.saveddata.cargo import Cargo as es_Cargo
|
||||
from eos.saveddata.character import Character as saveddata_Character
|
||||
from eos.saveddata.citadel import Citadel as es_Citadel
|
||||
from eos.saveddata.damagePattern import DamagePattern as es_DamagePattern
|
||||
from eos.saveddata.drone import Drone as es_Drone
|
||||
from eos.saveddata.fighter import Fighter as es_Fighter
|
||||
from eos.saveddata.implant import Implant as es_Implant
|
||||
from eos.saveddata.module import Module as es_Module
|
||||
from eos.saveddata.ship import Ship as es_Ship
|
||||
from eos.types import State, Slot, Fit as FitType
|
||||
from service.character import Character
|
||||
from service.damagePattern import DamagePattern
|
||||
from service.market import Market
|
||||
from service.settings import SettingsProvider
|
||||
from service.port import Port
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FitBackupThread(threading.Thread):
|
||||
def __init__(self, path, callback):
|
||||
threading.Thread.__init__(self)
|
||||
self.path = path
|
||||
self.callback = callback
|
||||
|
||||
def run(self):
|
||||
path = self.path
|
||||
sFit = Fit.getInstance()
|
||||
allFits = map(lambda x: x[0], sFit.getAllFits())
|
||||
backedUpFits = sFit.exportXml(self.callback, *allFits)
|
||||
backupFile = open(path, "w", encoding="utf-8")
|
||||
backupFile.write(backedUpFits)
|
||||
backupFile.close()
|
||||
|
||||
# Send done signal to GUI
|
||||
wx.CallAfter(self.callback, -1)
|
||||
|
||||
|
||||
class FitImportThread(threading.Thread):
|
||||
def __init__(self, paths, callback):
|
||||
threading.Thread.__init__(self)
|
||||
self.paths = paths
|
||||
self.callback = callback
|
||||
|
||||
def run(self):
|
||||
sFit = Fit.getInstance()
|
||||
success, result = sFit.importFitFromFiles(self.paths, self.callback)
|
||||
|
||||
if not success: # there was an error during processing
|
||||
logger.error("Error while processing file import: %s", result)
|
||||
wx.CallAfter(self.callback, -2, result)
|
||||
else: # Send done signal to GUI
|
||||
wx.CallAfter(self.callback, -1, result)
|
||||
|
||||
|
||||
class Fit(object):
|
||||
instance = None
|
||||
|
||||
@@ -89,7 +53,7 @@ class Fit(object):
|
||||
def __init__(self):
|
||||
self.pattern = DamagePattern.getInstance().getDamagePattern("Uniform")
|
||||
self.targetResists = None
|
||||
self.character = Character.getInstance().all5()
|
||||
self.character = saveddata_Character.getAll5()
|
||||
self.booster = False
|
||||
self.dirtyFitIDs = set()
|
||||
|
||||
@@ -106,8 +70,8 @@ class Fit(object):
|
||||
"showMarketShortcuts": False,
|
||||
"enableGaugeAnimation": True,
|
||||
"exportCharges": True,
|
||||
"openFitInNew":False
|
||||
}
|
||||
"openFitInNew": False,
|
||||
}
|
||||
|
||||
self.serviceFittingOptions = SettingsProvider.getInstance().getSettings(
|
||||
"pyfaServiceFittingOptions", serviceFittingDefaultOptions)
|
||||
@@ -151,10 +115,10 @@ class Fit(object):
|
||||
|
||||
def newFit(self, shipID, name=None):
|
||||
try:
|
||||
ship = eos.types.Ship(eos.db.getItem(shipID))
|
||||
ship = es_Ship(eos.db.getItem(shipID))
|
||||
except ValueError:
|
||||
ship = eos.types.Citadel(eos.db.getItem(shipID))
|
||||
fit = eos.types.Fit(ship)
|
||||
ship = es_Citadel(eos.db.getItem(shipID))
|
||||
fit = FitType(ship)
|
||||
fit.name = name if name is not None else "New %s" % fit.ship.item.name
|
||||
fit.damagePattern = self.pattern
|
||||
fit.targetResists = self.targetResists
|
||||
@@ -226,11 +190,12 @@ class Fit(object):
|
||||
self.recalc(fit, withBoosters=True)
|
||||
|
||||
def getFit(self, fitID, projected=False, basic=False):
|
||||
''' Gets fit from database
|
||||
"""
|
||||
Gets fit from database
|
||||
|
||||
Projected is a recursion flag that is set to reduce recursions into projected fits
|
||||
Basic is a flag to simply return the fit without any other processing
|
||||
'''
|
||||
"""
|
||||
if fitID is None:
|
||||
return None
|
||||
fit = eos.db.getFit(fitID)
|
||||
@@ -270,7 +235,7 @@ class Fit(object):
|
||||
fit = eos.db.getFit(fitID)
|
||||
item = eos.db.getItem(itemID, eager="attributes")
|
||||
try:
|
||||
implant = eos.types.Implant(item)
|
||||
implant = es_Implant(item)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
@@ -296,7 +261,7 @@ class Fit(object):
|
||||
fit = eos.db.getFit(fitID)
|
||||
item = eos.db.getItem(itemID, eager="attributes")
|
||||
try:
|
||||
booster = eos.types.Booster(item)
|
||||
booster = es_Booster(item)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
@@ -324,7 +289,7 @@ class Fit(object):
|
||||
thing = eos.db.getItem(thing,
|
||||
eager=("attributes", "group.category"))
|
||||
|
||||
if isinstance(thing, eos.types.Fit):
|
||||
if isinstance(thing, FitType):
|
||||
if thing in fit.projectedFits:
|
||||
return
|
||||
|
||||
@@ -341,19 +306,19 @@ class Fit(object):
|
||||
break
|
||||
|
||||
if drone is None:
|
||||
drone = eos.types.Drone(thing)
|
||||
drone = es_Drone(thing)
|
||||
fit.projectedDrones.append(drone)
|
||||
|
||||
drone.amount += 1
|
||||
elif thing.category.name == "Fighter":
|
||||
fighter = eos.types.Fighter(thing)
|
||||
fighter = es_Fighter(thing)
|
||||
fit.projectedFighters.append(fighter)
|
||||
elif thing.group.name == "Effect Beacon":
|
||||
module = eos.types.Module(thing)
|
||||
module = es_Module(thing)
|
||||
module.state = State.ONLINE
|
||||
fit.projectedModules.append(module)
|
||||
else:
|
||||
module = eos.types.Module(thing)
|
||||
module = es_Module(thing)
|
||||
module.state = State.ACTIVE
|
||||
if not module.canHaveState(module.state, fit):
|
||||
module.state = State.OFFLINE
|
||||
@@ -384,18 +349,18 @@ class Fit(object):
|
||||
|
||||
def toggleProjected(self, fitID, thing, click):
|
||||
fit = eos.db.getFit(fitID)
|
||||
if isinstance(thing, eos.types.Drone):
|
||||
if isinstance(thing, es_Drone):
|
||||
if thing.amountActive == 0 and thing.canBeApplied(fit):
|
||||
thing.amountActive = thing.amount
|
||||
else:
|
||||
thing.amountActive = 0
|
||||
elif isinstance(thing, eos.types.Fighter):
|
||||
elif isinstance(thing, es_Fighter):
|
||||
thing.active = not thing.active
|
||||
elif isinstance(thing, eos.types.Module):
|
||||
elif isinstance(thing, es_Module):
|
||||
thing.state = self.__getProposedState(thing, click)
|
||||
if not thing.canHaveState(thing.state, fit):
|
||||
thing.state = State.OFFLINE
|
||||
elif isinstance(thing, eos.types.Fit):
|
||||
elif isinstance(thing, FitType):
|
||||
projectionInfo = thing.getProjectionInfo(fitID)
|
||||
if projectionInfo:
|
||||
projectionInfo.active = not projectionInfo.active
|
||||
@@ -432,11 +397,11 @@ class Fit(object):
|
||||
|
||||
def removeProjected(self, fitID, thing):
|
||||
fit = eos.db.getFit(fitID)
|
||||
if isinstance(thing, eos.types.Drone):
|
||||
if isinstance(thing, es_Drone):
|
||||
fit.projectedDrones.remove(thing)
|
||||
elif isinstance(thing, eos.types.Module):
|
||||
elif isinstance(thing, es_Module):
|
||||
fit.projectedModules.remove(thing)
|
||||
elif isinstance(thing, eos.types.Fighter):
|
||||
elif isinstance(thing, es_Fighter):
|
||||
fit.projectedFighters.remove(thing)
|
||||
else:
|
||||
del fit.__projectedFits[thing.ID]
|
||||
@@ -456,7 +421,7 @@ class Fit(object):
|
||||
fit = eos.db.getFit(fitID)
|
||||
item = eos.db.getItem(itemID, eager=("attributes", "group.category"))
|
||||
try:
|
||||
m = eos.types.Module(item)
|
||||
m = es_Module(item)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
@@ -503,7 +468,7 @@ class Fit(object):
|
||||
|
||||
item = eos.db.getItem(newItemID, eager=("attributes", "group.category"))
|
||||
try:
|
||||
m = eos.types.Module(item)
|
||||
m = es_Module(item)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
@@ -540,7 +505,7 @@ class Fit(object):
|
||||
|
||||
# Gather modules and convert Cargo item to Module, silently return if not a module
|
||||
try:
|
||||
cargoP = eos.types.Module(cargo.item)
|
||||
cargoP = es_Module(cargo.item)
|
||||
cargoP.owner = fit
|
||||
if cargoP.isValidState(State.ACTIVE):
|
||||
cargoP.state = State.ACTIVE
|
||||
@@ -570,7 +535,7 @@ class Fit(object):
|
||||
x.amount += 1
|
||||
break
|
||||
else:
|
||||
moduleP = eos.types.Cargo(module.item)
|
||||
moduleP = es_Cargo(module.item)
|
||||
moduleP.amount = 1
|
||||
fit.cargo.insert(cargoIdx, moduleP)
|
||||
|
||||
@@ -635,7 +600,7 @@ class Fit(object):
|
||||
|
||||
if cargo is None:
|
||||
# if we don't have the item already in cargo, use default values
|
||||
cargo = eos.types.Cargo(item)
|
||||
cargo = es_Cargo(item)
|
||||
|
||||
fit.cargo.append(cargo)
|
||||
if replace:
|
||||
@@ -673,10 +638,10 @@ class Fit(object):
|
||||
break
|
||||
'''
|
||||
if fighter is None:
|
||||
fighter = eos.types.Fighter(item)
|
||||
fighter = es_Fighter(item)
|
||||
used = fit.getSlotsUsed(fighter.slot)
|
||||
total = fit.getNumSlots(fighter.slot)
|
||||
standardAttackActive = False;
|
||||
standardAttackActive = False
|
||||
for ability in fighter.abilities:
|
||||
if (ability.effect.isImplemented and ability.effect.handlerName == u'fighterabilityattackm'):
|
||||
# Activate "standard attack" if available
|
||||
@@ -684,10 +649,10 @@ class Fit(object):
|
||||
standardAttackActive = True
|
||||
else:
|
||||
# Activate all other abilities (Neut, Web, etc) except propmods if no standard attack is active
|
||||
if (ability.effect.isImplemented
|
||||
and standardAttackActive == False
|
||||
and ability.effect.handlerName != u'fighterabilitymicrowarpdrive'
|
||||
and ability.effect.handlerName != u'fighterabilityevasivemaneuvers'):
|
||||
if ability.effect.isImplemented and \
|
||||
standardAttackActive is False and \
|
||||
ability.effect.handlerName != u'fighterabilitymicrowarpdrive' and \
|
||||
ability.effect.handlerName != u'fighterabilityevasivemaneuvers':
|
||||
ability.active = True
|
||||
|
||||
if used >= total:
|
||||
@@ -727,7 +692,7 @@ class Fit(object):
|
||||
break
|
||||
|
||||
if drone is None:
|
||||
drone = eos.types.Drone(item)
|
||||
drone = es_Drone(item)
|
||||
if drone.fits(fit) is True:
|
||||
fit.drones.append(drone)
|
||||
else:
|
||||
@@ -764,7 +729,7 @@ class Fit(object):
|
||||
d.amount = amount
|
||||
d.amountActive = amount if active else 0
|
||||
|
||||
newD = eos.types.Drone(d.item)
|
||||
newD = es_Drone(d.item)
|
||||
newD.amount = total - amount
|
||||
newD.amountActive = newD.amount if active else 0
|
||||
l.append(newD)
|
||||
@@ -929,7 +894,7 @@ class Fit(object):
|
||||
sDP = DamagePattern.getInstance()
|
||||
dp = sDP.getDamagePattern("Selected Ammo")
|
||||
if dp is None:
|
||||
dp = eos.types.DamagePattern()
|
||||
dp = es_DamagePattern()
|
||||
dp.name = "Selected Ammo"
|
||||
|
||||
fit = eos.db.getFit(fitID)
|
||||
@@ -939,140 +904,6 @@ class Fit(object):
|
||||
fit.damagePattern = dp
|
||||
self.recalc(fit)
|
||||
|
||||
def exportFit(self, fitID):
|
||||
fit = eos.db.getFit(fitID)
|
||||
return Port.exportEft(fit)
|
||||
|
||||
def exportEftImps(self, fitID):
|
||||
fit = eos.db.getFit(fitID)
|
||||
return Port.exportEftImps(fit)
|
||||
|
||||
def exportDna(self, fitID):
|
||||
fit = eos.db.getFit(fitID)
|
||||
return Port.exportDna(fit)
|
||||
|
||||
def exportCrest(self, fitID, callback=None):
|
||||
fit = eos.db.getFit(fitID)
|
||||
return Port.exportCrest(fit, callback)
|
||||
|
||||
def exportXml(self, callback=None, *fitIDs):
|
||||
fits = map(lambda fitID: eos.db.getFit(fitID), fitIDs)
|
||||
return Port.exportXml(callback, *fits)
|
||||
|
||||
def exportMultiBuy(self, fitID):
|
||||
fit = eos.db.getFit(fitID)
|
||||
return Port.exportMultiBuy(fit)
|
||||
|
||||
def backupFits(self, path, callback):
|
||||
thread = FitBackupThread(path, callback)
|
||||
thread.start()
|
||||
|
||||
def importFitsThreaded(self, paths, callback):
|
||||
thread = FitImportThread(paths, callback)
|
||||
thread.start()
|
||||
|
||||
def importFitFromFiles(self, paths, callback=None):
|
||||
"""
|
||||
Imports fits from file(s). First processes all provided paths and stores
|
||||
assembled fits into a list. This allows us to call back to the GUI as
|
||||
fits are processed as well as when fits are being saved.
|
||||
returns
|
||||
"""
|
||||
defcodepage = locale.getpreferredencoding()
|
||||
|
||||
fits = []
|
||||
for path in paths:
|
||||
if callback: # Pulse
|
||||
wx.CallAfter(callback, 1, "Processing file:\n%s" % path)
|
||||
|
||||
file = open(path, "r")
|
||||
srcString = file.read()
|
||||
|
||||
if len(srcString) == 0: # ignore blank files
|
||||
continue
|
||||
|
||||
codec_found = None
|
||||
# If file had ANSI encoding, decode it to unicode using detection
|
||||
# of BOM header or if there is no header try default
|
||||
# codepage then fallback to utf-16, cp1252
|
||||
|
||||
if isinstance(srcString, str):
|
||||
encoding_map = (
|
||||
('\xef\xbb\xbf', 'utf-8'),
|
||||
('\xff\xfe\0\0', 'utf-32'),
|
||||
('\0\0\xfe\xff', 'UTF-32BE'),
|
||||
('\xff\xfe', 'utf-16'),
|
||||
('\xfe\xff', 'UTF-16BE'))
|
||||
|
||||
for bom, encoding in encoding_map:
|
||||
if srcString.startswith(bom):
|
||||
codec_found = encoding
|
||||
savebom = bom
|
||||
|
||||
if codec_found is None:
|
||||
logger.info("Unicode BOM not found in file %s.", path)
|
||||
attempt_codecs = (defcodepage, "utf-8", "utf-16", "cp1252")
|
||||
|
||||
for page in attempt_codecs:
|
||||
try:
|
||||
logger.info("Attempting to decode file %s using %s page.", path, page)
|
||||
srcString = unicode(srcString, page)
|
||||
codec_found = page
|
||||
logger.info("File %s decoded using %s page.", path, page)
|
||||
except UnicodeDecodeError:
|
||||
logger.info("Error unicode decoding %s from page %s, trying next codec", path, page)
|
||||
else:
|
||||
break
|
||||
else:
|
||||
logger.info("Unicode BOM detected in %s, using %s page.", path, codec_found)
|
||||
srcString = unicode(srcString[len(savebom):], codec_found)
|
||||
|
||||
else:
|
||||
# nasty hack to detect other transparent utf-16 loading
|
||||
if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
|
||||
codec_found = "utf-16"
|
||||
else:
|
||||
codec_found = "utf-8"
|
||||
|
||||
if codec_found is None:
|
||||
return False, "Proper codec could not be established for %s" % path
|
||||
|
||||
try:
|
||||
_, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
|
||||
fits += fitsImport
|
||||
except xml.parsers.expat.ExpatError, e:
|
||||
return False, "Malformed XML in %s" % path
|
||||
except Exception, e:
|
||||
logger.exception("Unknown exception processing: %s", path)
|
||||
return False, "Unknown Error while processing %s" % path
|
||||
|
||||
IDs = []
|
||||
numFits = len(fits)
|
||||
for i, fit in enumerate(fits):
|
||||
# Set some more fit attributes and save
|
||||
fit.character = self.character
|
||||
fit.damagePattern = self.pattern
|
||||
fit.targetResists = self.targetResists
|
||||
eos.db.save(fit)
|
||||
IDs.append(fit.ID)
|
||||
if callback: # Pulse
|
||||
wx.CallAfter(
|
||||
callback, 1,
|
||||
"Processing complete, saving fits to database\n(%d/%d)" %
|
||||
(i + 1, numFits)
|
||||
)
|
||||
|
||||
return True, fits
|
||||
|
||||
def importFitFromBuffer(self, bufferStr, activeFit=None):
|
||||
_, fits = Port.importAuto(bufferStr, activeFit=activeFit)
|
||||
for fit in fits:
|
||||
fit.character = self.character
|
||||
fit.damagePattern = self.pattern
|
||||
fit.targetResists = self.targetResists
|
||||
eos.db.save(fit)
|
||||
return fits
|
||||
|
||||
def checkStates(self, fit, base):
|
||||
changed = False
|
||||
for mod in fit.modules:
|
||||
|
||||
@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2016 Ryan Holmes
#
# This file is part of pyfa.
@@ -15,18 +15,23 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import copy

import eos.db
import eos.types
import copy
import service.market
from service.market import Market
from eos.saveddata.implant import Implant as es_Implant
from eos.saveddata.implantSet import ImplantSet as es_ImplantSet


class ImportError(Exception):
pass

class ImplantSets():

class ImplantSets(object):
instance = None

@classmethod
def getInstance(cls):
if cls.instance is None:
@@ -41,43 +46,41 @@ class ImplantSets():
return eos.db.getImplantSet(name)

def getImplants(self, setID):
set = eos.db.getImplantSet(setID)
return set.implants
return eos.db.getImplantSet(setID).implants

def addImplant(self, setID, itemID):
set = eos.db.getImplantSet(setID)
implant = eos.types.Implant(eos.db.getItem(itemID))
set.implants.append(implant)
implant_set = eos.db.getImplantSet(setID)
implant = es_Implant(eos.db.getItem(itemID))
implant_set.implants.append(implant)
eos.db.commit()

def removeImplant(self, setID, implant):
set = eos.db.getImplantSet(setID)
set.implants.remove(implant)
eos.db.getImplantSet(setID).implants.remove(implant)
eos.db.commit()

def newSet(self, name):
s = eos.types.ImplantSet()
s.name = name
eos.db.save(s)
return s
implant_set = es_ImplantSet()
implant_set.name = name
eos.db.save(implant_set)
return implant_set

def renameSet(self, s, newName):
s.name = newName
eos.db.save(s)
def renameSet(self, implant_set, newName):
implant_set.name = newName
eos.db.save(implant_set)

def deleteSet(self, s):
eos.db.remove(s)
def deleteSet(self, implant_set):
eos.db.remove(implant_set)

def copySet(self, s):
newS = copy.deepcopy(s)
def copySet(self, implant_set):
newS = copy.deepcopy(implant_set)
eos.db.save(newS)
return newS

def saveChanges(self, s):
eos.db.save(s)

def saveChanges(self, implant_set):
eos.db.save(implant_set)

def importSets(self, text):
sMkt = service.Market.getInstance()
sMkt = Market.getInstance()
lines = text.splitlines()
newSets = []
errors = 0
@@ -90,25 +93,25 @@ class ImplantSets():
if line == '' or line[0] == "#": # comments / empty string
continue
if line[:1] == "[" and line[-1:] == "]":
current = eos.types.ImplantSet(line[1:-1])
current = es_ImplantSet(line[1:-1])
newSets.append(current)
else:
item = sMkt.getItem(line)
current.implants.append(eos.types.Implant(item))
current.implants.append(es_Implant(item))
except:
errors += 1
continue

for set in self.getImplantSetList():
lookup[set.name] = set
for implant_set in self.getImplantSetList():
lookup[implant_set.name] = implant_set

for set in newSets:
if set.name in lookup:
match = lookup[set.name]
for implant in set.implants:
match.implants.append(eos.types.Implant(implant.item))
for implant_set in newSets:
if implant_set.name in lookup:
match = lookup[implant_set.name]
for implant in implant_set.implants:
match.implants.append(es_Implant(implant.item))
else:
eos.db.save(set)
eos.db.save(implant_set)

eos.db.commit()

@@ -116,10 +119,10 @@ class ImplantSets():
if lenImports == 0:
raise ImportError("No patterns found for import")
if errors > 0:
raise ImportError("%d sets imported from clipboard; %d errors"%(lenImports, errors))

raise ImportError("%d sets imported from clipboard; %d errors" %
(lenImports, errors))

def exportSets(self):
patterns = self.getImplantSetList()
patterns.sort(key=lambda p: p.name)
return eos.types.ImplantSet.exportSets(*patterns)

return es_ImplantSet.exportSets(*patterns)

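importSets above is line-oriented: a line wrapped in square brackets starts a new set, '#' lines and blank lines are skipped, and every other line is looked up as an item name via the Market service. An illustrative input (the set name and implant names here are examples, not data from the commit):

    # exported from pyfa
    [Example Snake Set]
    High-grade Snake Alpha
    High-grade Snake Beta
    High-grade Snake Gamma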
@@ -1,4 +1,4 @@
#===============================================================================
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
@@ -15,22 +15,28 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# ===============================================================================

import re
import threading
import wx

import logging
import Queue

import wx
from sqlalchemy.sql import or_

import config
import eos.db
import eos.types
from sqlalchemy.sql import and_, or_
from service.settings import SettingsProvider, NetworkSettings
import service
import service.conversions as conversions
import logging
# TODO: Find out what this is. There is no conversions
from service import conversions
from service.settings import SettingsProvider
from service.price import Price

# TODO: Convert eos.types over to eos.gamedata
# from eos.gamedata import Category as e_Category, Group as e_Group, Item as e_Item

from eos.types import MarketGroup as types_MarketGroup, MetaGroup as types_MetaGroup, MetaType as types_MetaType, \
Category as types_Category, Item as types_Item, Group as types_Group, Price as types_Price

try:
from collections import OrderedDict
@@ -42,6 +48,7 @@ logger = logging.getLogger(__name__)
# Event which tells threads dependent on Market that it's initialized
mktRdy = threading.Event()


class ShipBrowserWorkerThread(threading.Thread):
def run(self):
self.queue = Queue.Queue()
@@ -57,13 +64,13 @@ class ShipBrowserWorkerThread(threading.Thread):
sMkt = Market.getInstance()
while True:
try:
id, callback = queue.get()
set = cache.get(id)
if set is None:
set = sMkt.getShipList(id)
cache[id] = set
id_, callback = queue.get()
set_ = cache.get(id_)
if set_ is None:
set_ = sMkt.getShipList(id_)
cache[id_] = set_

wx.CallAfter(callback, (id, set))
wx.CallAfter(callback, (id_, set_))
except:
pass
finally:
@@ -72,6 +79,7 @@ class ShipBrowserWorkerThread(threading.Thread):
except:
pass


class PriceWorkerThread(threading.Thread):
|
||||
def run(self):
|
||||
self.queue = Queue.Queue()
|
||||
@@ -86,7 +94,7 @@ class PriceWorkerThread(threading.Thread):
|
||||
|
||||
# Grab prices, this is the time-consuming part
|
||||
if len(requests) > 0:
|
||||
service.Price.fetchPrices(requests)
|
||||
Price.fetchPrices(requests)
|
||||
|
||||
wx.CallAfter(callback)
|
||||
queue.task_done()
|
||||
@@ -106,6 +114,7 @@ class PriceWorkerThread(threading.Thread):
|
||||
self.wait[itemID] = []
|
||||
self.wait[itemID].append(callback)
|
||||
|
||||
|
||||
class SearchWorkerThread(threading.Thread):
|
||||
def run(self):
|
||||
self.cv = threading.Condition()
|
||||
@@ -126,14 +135,14 @@ class SearchWorkerThread(threading.Thread):
|
||||
sMkt = Market.getInstance()
|
||||
if filterOn is True:
|
||||
# Rely on category data provided by eos as we don't hardcode them much in service
|
||||
filter = or_(eos.types.Category.name.in_(sMkt.SEARCH_CATEGORIES), eos.types.Group.name.in_(sMkt.SEARCH_GROUPS))
|
||||
filter_ = or_(types_Category.name.in_(sMkt.SEARCH_CATEGORIES), types_Group.name.in_(sMkt.SEARCH_GROUPS))
|
||||
elif filterOn: # filter by selected categories
|
||||
filter = eos.types.Category.name.in_(filterOn)
|
||||
filter_ = types_Category.name.in_(filterOn)
|
||||
else:
|
||||
filter=None
|
||||
filter_ = None
|
||||
|
||||
results = eos.db.searchItems(request, where=filter,
|
||||
join=(eos.types.Item.group, eos.types.Group.category),
|
||||
results = eos.db.searchItems(request, where=filter_,
|
||||
join=(types_Item.group, types_Group.category),
|
||||
eager=("icon", "group.category", "metaGroup", "metaGroup.parent"))
|
||||
|
||||
items = set()
|
||||
@@ -149,15 +158,18 @@ class SearchWorkerThread(threading.Thread):
|
||||
self.cv.notify()
|
||||
self.cv.release()
|
||||
|
||||
|
||||
class Market():
|
||||
instance = None
|
||||
|
||||
def __init__(self):
|
||||
self.priceCache = {}
|
||||
|
||||
#Init recently used module storage
|
||||
# Init recently used module storage
|
||||
serviceMarketRecentlyUsedModules = {"pyfaMarketRecentlyUsedModules": []}
|
||||
|
||||
self.serviceMarketRecentlyUsedModules = SettingsProvider.getInstance().getSettings("pyfaMarketRecentlyUsedModules", serviceMarketRecentlyUsedModules)
|
||||
self.serviceMarketRecentlyUsedModules = SettingsProvider.getInstance().getSettings(
|
||||
"pyfaMarketRecentlyUsedModules", serviceMarketRecentlyUsedModules)
|
||||
|
||||
# Start price fetcher
|
||||
self.priceWorkerThread = PriceWorkerThread()
|
||||
@@ -177,7 +189,7 @@ class Market():
|
||||
# Items' group overrides
|
||||
self.customGroups = set()
|
||||
# Limited edition ships
|
||||
self.les_grp = eos.types.Group()
|
||||
self.les_grp = types_Group()
|
||||
self.les_grp.ID = -1
|
||||
self.les_grp.name = "Limited Issue Ships"
|
||||
self.les_grp.published = True
|
||||
@@ -187,36 +199,37 @@ class Market():
|
||||
self.les_grp.description = ""
|
||||
self.les_grp.icon = None
|
||||
self.ITEMS_FORCEGROUP = {
|
||||
"Opux Luxury Yacht": self.les_grp, # One of those is wedding present at CCP fanfest, another was hijacked from ISD guy during an event
|
||||
"Opux Luxury Yacht": self.les_grp,
|
||||
# One of those is wedding present at CCP fanfest, another was hijacked from ISD guy during an event
|
||||
"Silver Magnate": self.les_grp, # Amarr Championship prize
|
||||
"Gold Magnate": self.les_grp, # Amarr Championship prize
|
||||
"Armageddon Imperial Issue": self.les_grp, # Amarr Championship prize
|
||||
"Apocalypse Imperial Issue": self.les_grp, # Amarr Championship prize
|
||||
"Guardian-Vexor": self.les_grp, # Illegal rewards for the Gallente Frontier Tour Lines event arc
|
||||
"Megathron Federate Issue": self.les_grp, # Reward during Crielere event
|
||||
"Apocalypse Imperial Issue": self.les_grp, # Amarr Championship prize
|
||||
"Guardian-Vexor": self.les_grp, # Illegal rewards for the Gallente Frontier Tour Lines event arc
|
||||
"Megathron Federate Issue": self.les_grp, # Reward during Crielere event
|
||||
"Raven State Issue": self.les_grp, # AT4 prize
|
||||
"Tempest Tribal Issue": self.les_grp, # AT4 prize
|
||||
"Apotheosis": self.les_grp, # 5th EVE anniversary present
|
||||
"Zephyr": self.les_grp, # 2010 new year gift
|
||||
"Primae": self.les_grp, # Promotion of planetary interaction
|
||||
"Freki": self.les_grp, # AT7 prize
|
||||
"Mimir": self.les_grp, # AT7 prize
|
||||
"Utu": self.les_grp, # AT8 prize
|
||||
"Adrestia": self.les_grp, # AT8 prize
|
||||
"Echelon": self.les_grp, # 2011 new year gift
|
||||
"Malice": self.les_grp, # AT9 prize
|
||||
"Vangel": self.les_grp, # AT9 prize
|
||||
"Cambion": self.les_grp, # AT10 prize
|
||||
"Etana": self.les_grp, # AT10 prize
|
||||
"Chremoas": self.les_grp, # AT11 prize :(
|
||||
"Moracha": self.les_grp, # AT11 prize
|
||||
"Stratios Emergency Responder": self.les_grp, # Issued for Somer Blink lottery
|
||||
"Miasmos Quafe Ultra Edition": self.les_grp, # Gift to people who purchased FF HD stream
|
||||
"Tempest Tribal Issue": self.les_grp, # AT4 prize
|
||||
"Apotheosis": self.les_grp, # 5th EVE anniversary present
|
||||
"Zephyr": self.les_grp, # 2010 new year gift
|
||||
"Primae": self.les_grp, # Promotion of planetary interaction
|
||||
"Freki": self.les_grp, # AT7 prize
|
||||
"Mimir": self.les_grp, # AT7 prize
|
||||
"Utu": self.les_grp, # AT8 prize
|
||||
"Adrestia": self.les_grp, # AT8 prize
|
||||
"Echelon": self.les_grp, # 2011 new year gift
|
||||
"Malice": self.les_grp, # AT9 prize
|
||||
"Vangel": self.les_grp, # AT9 prize
|
||||
"Cambion": self.les_grp, # AT10 prize
|
||||
"Etana": self.les_grp, # AT10 prize
|
||||
"Chremoas": self.les_grp, # AT11 prize :(
|
||||
"Moracha": self.les_grp, # AT11 prize
|
||||
"Stratios Emergency Responder": self.les_grp, # Issued for Somer Blink lottery
|
||||
"Miasmos Quafe Ultra Edition": self.les_grp, # Gift to people who purchased FF HD stream
|
||||
"InterBus Shuttle": self.les_grp,
|
||||
"Leopard": self.les_grp, # 2013 new year gift
|
||||
"Whiptail": self.les_grp, # AT12 prize
|
||||
"Chameleon": self.les_grp, # AT12 prize
|
||||
"Victorieux Luxury Yacht": self.les_grp, # Worlds Collide prize \o/ chinese getting owned
|
||||
"Leopard": self.les_grp, # 2013 new year gift
|
||||
"Whiptail": self.les_grp, # AT12 prize
|
||||
"Chameleon": self.les_grp, # AT12 prize
|
||||
"Victorieux Luxury Yacht": self.les_grp, # Worlds Collide prize \o/ chinese getting owned
|
||||
"Imp": self.les_grp, # AT13 prize
|
||||
"Fiend": self.les_grp, # AT13 prize
|
||||
"Caedes": self.les_grp, # AT14 prize
|
||||
@@ -229,8 +242,8 @@ class Market():
|
||||
|
||||
# List of items which are forcibly published or hidden
|
||||
self.ITEMS_FORCEPUBLISHED = {
|
||||
"Data Subverter I": False, # Not used in EVE, probably will appear with Dust link
|
||||
"QA Cross Protocol Analyzer": False, # QA modules used by CCP internally
|
||||
"Data Subverter I": False, # Not used in EVE, probably will appear with Dust link
|
||||
"QA Cross Protocol Analyzer": False, # QA modules used by CCP internally
|
||||
"QA Damage Module": False,
|
||||
"QA ECCM": False,
|
||||
"QA Immunity Module": False,
|
||||
@@ -251,9 +264,12 @@ class Market():
|
||||
"Civilian Light Electron Blaster": True,
|
||||
}
|
||||
|
||||
# TODO: Find out what this is. There is no conversions
|
||||
# do not publish ships that we convert
|
||||
'''
|
||||
for name in conversions.packs['skinnedShips']:
|
||||
self.ITEMS_FORCEPUBLISHED[name] = False
|
||||
'''
|
||||
|
||||
if config.debug:
|
||||
# Publish Tactical Dessy Modes if in debug
|
||||
@@ -266,7 +282,7 @@ class Market():
|
||||
|
||||
# List of groups which are forcibly published
|
||||
self.GROUPS_FORCEPUBLISHED = {
|
||||
"Prototype Exploration Ship": False } # We moved the only ship from this group to other group anyway
|
||||
"Prototype Exploration Ship": False} # We moved the only ship from this group to other group anyway
|
||||
|
||||
# Dictionary of items with forced meta groups, uses following format:
|
||||
# Item name: (metagroup name, parent type name)
|
||||
@@ -275,82 +291,112 @@ class Market():
|
||||
"'Wild' Miner I": ("Storyline", "Miner I"),
|
||||
"Medium Nano Armor Repair Unit I": ("Tech I", "Medium Armor Repairer I"),
|
||||
"Large 'Reprieve' Vestment Reconstructer I": ("Storyline", "Large Armor Repairer I"),
|
||||
"Khanid Navy Torpedo Launcher": ("Faction", "Torpedo Launcher I"),}
|
||||
"Khanid Navy Torpedo Launcher": ("Faction", "Torpedo Launcher I"), }
|
||||
# Parent type name: set(item names)
|
||||
self.ITEMS_FORCEDMETAGROUP_R = {}
|
||||
for item, value in self.ITEMS_FORCEDMETAGROUP.items():
|
||||
parent = value[1]
|
||||
if not parent in self.ITEMS_FORCEDMETAGROUP_R:
|
||||
if parent not in self.ITEMS_FORCEDMETAGROUP_R:
|
||||
self.ITEMS_FORCEDMETAGROUP_R[parent] = set()
|
||||
self.ITEMS_FORCEDMETAGROUP_R[parent].add(item)
|
||||
# Dictionary of items with forced market group (service assumes they have no
|
||||
# market group assigned in db, otherwise they'll appear in both original and forced groups)
|
||||
self.ITEMS_FORCEDMARKETGROUP = {
|
||||
"'Alpha' Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Codex' Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Daemon' Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Libram' Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Advanced Cerebral Accelerator": 977, # Implants & Boosters > Booster
|
||||
"Civilian Damage Control": 615, # Ship Equipment > Hull & Armor > Damage Controls
|
||||
"Civilian EM Ward Field": 1695, # Ship Equipment > Shield > Shield Hardeners > EM Shield Hardeners
|
||||
"Civilian Explosive Deflection Field": 1694, # Ship Equipment > Shield > Shield Hardeners > Explosive Shield Hardeners
|
||||
"Civilian Hobgoblin": 837, # Drones > Combat Drones > Light Scout Drones
|
||||
"Civilian Kinetic Deflection Field": 1693, # Ship Equipment > Shield > Shield Hardeners > Kinetic Shield Hardeners
|
||||
"Civilian Light Missile Launcher": 640, # Ship Equipment > Turrets & Bays > Missile Launchers > Light Missile Launchers
|
||||
"Civilian Scourge Light Missile": 920, # Ammunition & Charges > Missiles > Light Missiles > Standard Light Missiles
|
||||
"Civilian Small Remote Armor Repairer": 1059, # Ship Equipment > Hull & Armor > Remote Armor Repairers > Small
|
||||
"Civilian Small Remote Shield Booster": 603, # Ship Equipment > Shield > Remote Shield Boosters > Small
|
||||
"Civilian Stasis Webifier": 683, # Ship Equipment > Electronic Warfare > Stasis Webifiers
|
||||
"Civilian Thermic Dissipation Field": 1692, # Ship Equipment > Shield > Shield Hardeners > Thermal Shield Hardeners
|
||||
"Civilian Warp Disruptor": 1935, # Ship Equipment > Electronic Warfare > Warp Disruptors
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX10": 1493, # Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX100": 1493, # Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX1000": 1493, # Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX11": 1493, # Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX110": 1493, # Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX1100": 1493, # Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Nugoehuvi Synth Blue Pill Booster": 977, # Implants & Boosters > Booster
|
||||
"Prototype Cerebral Accelerator": 977, # Implants & Boosters > Booster
|
||||
"Prototype Iris Probe Launcher": 712, # Ship Equipment > Turrets & Bays > Scan Probe Launchers
|
||||
"Shadow": 1310, # Drones > Combat Drones > Fighter Bombers
|
||||
"Sleeper Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Standard Cerebral Accelerator": 977, # Implants & Boosters > Booster
|
||||
"Talocan Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Terran Data Analyzer I": 714, # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Tetrimon Data Analyzer I": 714 # Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Alpha' Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Codex' Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Daemon' Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"'Libram' Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Advanced Cerebral Accelerator": 977, # Implants & Boosters > Booster
|
||||
"Civilian Damage Control": 615, # Ship Equipment > Hull & Armor > Damage Controls
|
||||
"Civilian EM Ward Field": 1695, # Ship Equipment > Shield > Shield Hardeners > EM Shield Hardeners
|
||||
"Civilian Explosive Deflection Field": 1694,
|
||||
# Ship Equipment > Shield > Shield Hardeners > Explosive Shield Hardeners
|
||||
"Civilian Hobgoblin": 837, # Drones > Combat Drones > Light Scout Drones
|
||||
"Civilian Kinetic Deflection Field": 1693,
|
||||
# Ship Equipment > Shield > Shield Hardeners > Kinetic Shield Hardeners
|
||||
"Civilian Light Missile Launcher": 640,
|
||||
# Ship Equipment > Turrets & Bays > Missile Launchers > Light Missile Launchers
|
||||
"Civilian Scourge Light Missile": 920,
|
||||
# Ammunition & Charges > Missiles > Light Missiles > Standard Light Missiles
|
||||
"Civilian Small Remote Armor Repairer": 1059,
|
||||
# Ship Equipment > Hull & Armor > Remote Armor Repairers > Small
|
||||
"Civilian Small Remote Shield Booster": 603, # Ship Equipment > Shield > Remote Shield Boosters > Small
|
||||
"Civilian Stasis Webifier": 683, # Ship Equipment > Electronic Warfare > Stasis Webifiers
|
||||
"Civilian Thermic Dissipation Field": 1692,
|
||||
# Ship Equipment > Shield > Shield Hardeners > Thermal Shield Hardeners
|
||||
"Civilian Warp Disruptor": 1935, # Ship Equipment > Electronic Warfare > Warp Disruptors
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX10": 1493,
|
||||
# Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX100": 1493,
|
||||
# Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX1000": 1493,
|
||||
# Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX11": 1493,
|
||||
# Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX110": 1493,
|
||||
# Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Hardwiring - Zainou 'Sharpshooter' ZMX1100": 1493,
|
||||
# Implants & Boosters > Implants > Skill Hardwiring > Missile Implants > Implant Slot 06
|
||||
"Nugoehuvi Synth Blue Pill Booster": 977, # Implants & Boosters > Booster
|
||||
"Prototype Cerebral Accelerator": 977, # Implants & Boosters > Booster
|
||||
"Prototype Iris Probe Launcher": 712, # Ship Equipment > Turrets & Bays > Scan Probe Launchers
|
||||
"Shadow": 1310, # Drones > Combat Drones > Fighter Bombers
|
||||
"Sleeper Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Standard Cerebral Accelerator": 977, # Implants & Boosters > Booster
|
||||
"Talocan Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Terran Data Analyzer I": 714,
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
"Tetrimon Data Analyzer I": 714
|
||||
# Ship Equipment > Electronics and Sensor Upgrades > Scanners > Data and Composition Scanners
|
||||
}
|
||||
|
||||
self.ITEMS_FORCEDMARKETGROUP_R = self.__makeRevDict(self.ITEMS_FORCEDMARKETGROUP)
|
||||
|
||||
self.FORCEDMARKETGROUP = {
|
||||
685: False, # Ship Equipment > Electronic Warfare > ECCM
|
||||
681: False, # Ship Equipment > Electronic Warfare > Sensor Backup Arrays
|
||||
685: False, # Ship Equipment > Electronic Warfare > ECCM
|
||||
681: False, # Ship Equipment > Electronic Warfare > Sensor Backup Arrays
|
||||
}
|
||||
|
||||
# Misc definitions
|
||||
# 0 is for items w/o meta group
|
||||
self.META_MAP = OrderedDict([("normal", frozenset((0, 1, 2, 14))),
|
||||
self.META_MAP = OrderedDict([("normal", frozenset((0, 1, 2, 14))),
|
||||
("faction", frozenset((4, 3))),
|
||||
("complex", frozenset((6,))),
|
||||
("officer", frozenset((5,)))])
|
||||
self.SEARCH_CATEGORIES = ("Drone", "Module", "Subsystem", "Charge", "Implant", "Deployable", "Fighter", "Structure", "Structure Module")
|
||||
self.SEARCH_CATEGORIES = (
|
||||
"Drone",
|
||||
"Module",
|
||||
"Subsystem",
|
||||
"Charge",
|
||||
"Implant",
|
||||
"Deployable",
|
||||
"Fighter",
|
||||
"Structure",
|
||||
"Structure Module",
|
||||
)
|
||||
self.SEARCH_GROUPS = ("Ice Product",)
|
||||
self.ROOT_MARKET_GROUPS = (9, # Modules
|
||||
self.ROOT_MARKET_GROUPS = (9, # Modules
|
||||
1111, # Rigs
|
||||
157, # Drones
|
||||
11, # Ammo
|
||||
157, # Drones
|
||||
11, # Ammo
|
||||
1112, # Subsystems
|
||||
24, # Implants & Boosters
|
||||
404, # Deployables
|
||||
24, # Implants & Boosters
|
||||
404, # Deployables
|
||||
2202, # Structure Equipment
|
||||
2203 # Structure Modifications
|
||||
2203 # Structure Modifications
|
||||
)
|
||||
# Tell other threads that Market is at their service
|
||||
mktRdy.set()
|
||||
|
||||
@classmethod
|
||||
def getInstance(cls):
|
||||
if cls.instance == None:
|
||||
if cls.instance is None:
|
||||
cls.instance = Market()
|
||||
return cls.instance
|
||||
|
||||
@@ -358,7 +404,7 @@ class Market():
|
||||
"""Creates reverse dictionary"""
|
||||
rev = {}
|
||||
for item, value in orig.items():
|
||||
if not value in rev:
|
||||
if value not in rev:
|
||||
rev[value] = set()
|
||||
rev[value].add(item)
|
||||
return rev
|
||||
@@ -366,18 +412,20 @@ class Market():
|
||||
def getItem(self, identity, *args, **kwargs):
|
||||
"""Get item by its ID or name"""
|
||||
try:
|
||||
if isinstance(identity, eos.types.Item):
|
||||
if isinstance(identity, types_Item):
|
||||
item = identity
|
||||
elif isinstance(identity, int):
|
||||
item = eos.db.getItem(identity, *args, **kwargs)
|
||||
# TODO: Import refactor - Find out what this is. There is no conversions
|
||||
elif isinstance(identity, basestring):
|
||||
# We normally lookup with string when we are using import/export
|
||||
# features. Check against overrides
|
||||
identity = conversions.all.get(identity, identity)
|
||||
item = eos.db.getItem(identity, *args, **kwargs)
|
||||
|
||||
elif isinstance(identity, float):
|
||||
id = int(identity)
|
||||
item = eos.db.getItem(id, *args, **kwargs)
|
||||
id_ = int(identity)
|
||||
item = eos.db.getItem(id_, *args, **kwargs)
|
||||
else:
|
||||
raise TypeError("Need Item object, integer, float or string as argument")
|
||||
except:
|
||||
@@ -388,7 +436,7 @@ class Market():
|
||||
|
||||
def getGroup(self, identity, *args, **kwargs):
|
||||
"""Get group by its ID or name"""
|
||||
if isinstance(identity, eos.types.Group):
|
||||
if isinstance(identity, types_Group):
|
||||
return identity
|
||||
elif isinstance(identity, (int, float, basestring)):
|
||||
if isinstance(identity, float):
|
||||
@@ -406,37 +454,37 @@ class Market():
|
||||
|
||||
def getCategory(self, identity, *args, **kwargs):
|
||||
"""Get category by its ID or name"""
|
||||
if isinstance(identity, eos.types.Category):
|
||||
if isinstance(identity, types_Category):
|
||||
category = identity
|
||||
elif isinstance(identity, (int, basestring)):
|
||||
category = eos.db.getCategory(identity, *args, **kwargs)
|
||||
elif isinstance(identity, float):
|
||||
id = int(identity)
|
||||
category = eos.db.getCategory(id, *args, **kwargs)
|
||||
id_ = int(identity)
|
||||
category = eos.db.getCategory(id_, *args, **kwargs)
|
||||
else:
|
||||
raise TypeError("Need Category object, integer, float or string as argument")
|
||||
return category
|
||||
|
||||
def getMetaGroup(self, identity, *args, **kwargs):
|
||||
"""Get meta group by its ID or name"""
|
||||
if isinstance(identity, eos.types.MetaGroup):
|
||||
if isinstance(identity, types_MetaGroup):
|
||||
metaGroup = identity
|
||||
elif isinstance(identity, (int, basestring)):
|
||||
metaGroup = eos.db.getMetaGroup(identity, *args, **kwargs)
|
||||
elif isinstance(identity, float):
|
||||
id = int(identity)
|
||||
metaGroup = eos.db.getMetaGroup(id, *args, **kwargs)
|
||||
id_ = int(identity)
|
||||
metaGroup = eos.db.getMetaGroup(id_, *args, **kwargs)
|
||||
else:
|
||||
raise TypeError("Need MetaGroup object, integer, float or string as argument")
|
||||
return metaGroup
|
||||
|
||||
def getMarketGroup(self, identity, *args, **kwargs):
|
||||
"""Get market group by its ID"""
|
||||
if isinstance(identity, eos.types.MarketGroup):
|
||||
if isinstance(identity, types_MarketGroup):
|
||||
marketGroup = identity
|
||||
elif isinstance(identity, (int, float)):
|
||||
id = int(identity)
|
||||
marketGroup = eos.db.getMarketGroup(id, *args, **kwargs)
|
||||
id_ = int(identity)
|
||||
marketGroup = eos.db.getMarketGroup(id_, *args, **kwargs)
|
||||
else:
|
||||
raise TypeError("Need MarketGroup object, integer or float as argument")
|
||||
return marketGroup
|
||||
@@ -460,7 +508,7 @@ class Market():
|
||||
# Check if item is in forced metagroup map
|
||||
if item.name in self.ITEMS_FORCEDMETAGROUP:
|
||||
# Create meta group from scratch
|
||||
metaGroup = eos.types.MetaType()
|
||||
metaGroup = types_MetaType()
|
||||
# Get meta group info object based on meta group name
|
||||
metaGroupInfo = self.getMetaGroup(self.ITEMS_FORCEDMETAGROUP[item.name][0])
|
||||
# Get parent item based on its name
|
||||
@@ -480,8 +528,8 @@ class Market():
|
||||
|
||||
def getMetaGroupIdByItem(self, item, fallback=0):
|
||||
"""Get meta group ID by item"""
|
||||
id = getattr(self.getMetaGroupByItem(item), "ID", fallback)
|
||||
return id
|
||||
id_ = getattr(self.getMetaGroupByItem(item), "ID", fallback)
|
||||
return id_
|
||||
|
||||
def getMarketGroupByItem(self, item, parentcheck=True):
|
||||
"""Get market group by item, its ID or name"""
|
||||
@@ -562,10 +610,11 @@ class Market():
|
||||
groupItems = set(group.items)
|
||||
if hasattr(group, 'addItems'):
|
||||
groupItems.update(group.addItems)
|
||||
items = set(filter(lambda item: self.getPublicityByItem(item) and self.getGroupByItem(item) == group, groupItems))
|
||||
items = set(
|
||||
filter(lambda item: self.getPublicityByItem(item) and self.getGroupByItem(item) == group, groupItems))
|
||||
return items
|
||||
|
||||
def getItemsByMarketGroup(self, mg, vars=True):
|
||||
def getItemsByMarketGroup(self, mg, vars_=True):
|
||||
"""Get items in the given market group"""
|
||||
result = set()
|
||||
# Get items from eos market group
|
||||
@@ -574,7 +623,7 @@ class Market():
|
||||
if mg.ID in self.ITEMS_FORCEDMARKETGROUP_R:
|
||||
forceditms = set(self.getItem(itmn) for itmn in self.ITEMS_FORCEDMARKETGROUP_R[mg.ID])
|
||||
baseitms.update(forceditms)
|
||||
if vars:
|
||||
if vars_:
|
||||
parents = set()
|
||||
for item in baseitms:
|
||||
# Add one of the base market group items to result
|
||||
@@ -592,7 +641,7 @@ class Market():
|
||||
else:
|
||||
result = baseitms
|
||||
# Get rid of unpublished items
|
||||
result = set(filter(lambda item: self.getPublicityByItem(item), result))
|
||||
result = set(filter(lambda item_: self.getPublicityByItem(item_), result))
|
||||
return result
|
||||
|
||||
def marketGroupHasTypesCheck(self, mg):
|
||||
@@ -627,7 +676,7 @@ class Market():
|
||||
elif self.marketGroupHasTypesCheck(mg):
|
||||
# Do not request variations to make process faster
|
||||
# Pick random item and use its icon
|
||||
items = self.getItemsByMarketGroup(mg, vars=False)
|
||||
items = self.getItemsByMarketGroup(mg, vars_=False)
|
||||
try:
|
||||
item = items.pop()
|
||||
except KeyError:
|
||||
@@ -665,8 +714,8 @@ class Market():
|
||||
the ID, the name and the icon of the group
|
||||
"""
|
||||
root = set()
|
||||
for id in self.ROOT_MARKET_GROUPS:
|
||||
mg = self.getMarketGroup(id, eager="icon")
|
||||
for id_ in self.ROOT_MARKET_GROUPS:
|
||||
mg = self.getMarketGroup(id_, eager="icon")
|
||||
root.add(mg)
|
||||
|
||||
return root
|
||||
@@ -686,15 +735,15 @@ class Market():
|
||||
ship.race
|
||||
return ships
|
||||
|
||||
def getShipListDelayed(self, id, callback):
|
||||
def getShipListDelayed(self, id_, callback):
|
||||
"""Background version of getShipList"""
|
||||
self.shipBrowserWorkerThread.queue.put((id, callback))
|
||||
self.shipBrowserWorkerThread.queue.put((id_, callback))
|
||||
|
||||
def searchShips(self, name):
|
||||
"""Find ships according to given text pattern"""
|
||||
filter = eos.types.Category.name.in_(["Ship", "Structure"])
|
||||
results = eos.db.searchItems(name, where=filter,
|
||||
join=(eos.types.Item.group, eos.types.Group.category),
|
||||
filter_ = types_Category.name.in_(["Ship", "Structure"])
|
||||
results = eos.db.searchItems(name, where=filter_,
|
||||
join=(types_Item.group, types_Group.category),
|
||||
eager=("icon", "group.category", "metaGroup", "metaGroup.parent"))
|
||||
ships = set()
|
||||
for item in results:
|
||||
@@ -748,7 +797,7 @@ class Market():
|
||||
if price is None:
|
||||
price = eos.db.getPrice(typeID)
|
||||
if price is None:
|
||||
price = eos.types.Price(typeID)
|
||||
price = types_Price(typeID)
|
||||
eos.db.add(price)
|
||||
|
||||
self.priceCache[typeID] = price
|
||||
@@ -769,7 +818,7 @@ class Market():
|
||||
def cb():
|
||||
try:
|
||||
callback(requests)
|
||||
except Exception, e:
|
||||
except Exception:
|
||||
pass
|
||||
eos.db.commit()
|
||||
|
||||
@@ -792,7 +841,7 @@ class Market():
|
||||
|
||||
def clearPriceCache(self):
|
||||
self.priceCache.clear()
|
||||
deleted_rows = eos.db.clearPrices()
|
||||
eos.db.clearPrices()
|
||||
|
||||
def getSystemWideEffects(self):
|
||||
"""
|
||||
@@ -835,7 +884,7 @@ class Market():
|
||||
groupname = re.sub(garbage, "", groupname)
|
||||
groupname = re.sub(" {2,}", " ", groupname).strip()
|
||||
# Add stuff to dictionary
|
||||
if not groupname in effects:
|
||||
if groupname not in effects:
|
||||
effects[groupname] = set()
|
||||
effects[groupname].add((beacon, beaconname, shortname))
|
||||
# Break loop on 1st result
|
||||
|
||||
@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of pyfa.
@@ -15,31 +15,38 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================


from service.settings import NetworkSettings
import urllib2
import urllib
import config
import socket

import config
from service.settings import NetworkSettings

# network timeout, otherwise pyfa hangs for a long while if no internet connection
timeout = 3
socket.setdefaulttimeout(timeout)


class Error(StandardError):
def __init__(self, msg=None):
self.message = msg


class RequestError(StandardError):
pass


class AuthenticationError(StandardError):
pass


class ServerError(StandardError):
pass


class TimeoutError(StandardError):
pass

@@ -53,16 +60,17 @@ class Network():
UPDATE = 8

_instance = None

@classmethod
def getInstance(cls):
if cls._instance == None:
if cls._instance is None:
cls._instance = Network()

return cls._instance

def request(self, url, type, data=None):
# URL is required to be https as of right now
#print "Starting request: %s\n\tType: %s\n\tPost Data: %s"%(url,type,data)
# print "Starting request: %s\n\tType: %s\n\tPost Data: %s"%(url,type,data)

# Make sure request is enabled
access = NetworkSettings.getInstance().getAccess()
@@ -71,8 +79,9 @@ class Network():
raise Error("Access not enabled - please enable in Preferences > Network")

# Set up some things for the request
versionString = "{0} {1} - {2} {3}".format(config.version, config.tag, config.expansionName, config.expansionVersion)
headers = {"User-Agent" : "pyfa {0} (Python-urllib2)".format(versionString)}
versionString = "{0} {1} - {2} {3}".format(config.version, config.tag, config.expansionName,
config.expansionVersion)
headers = {"User-Agent": "pyfa {0} (Python-urllib2)".format(versionString)}

proxy = NetworkSettings.getInstance().getProxySettings()
if proxy is not None:
@@ -101,14 +110,14 @@ class Network():
request = urllib2.Request(url, headers=headers, data=urllib.urlencode(data) if data else None)
try:
return urllib2.urlopen(request)
except urllib2.HTTPError, error:
except urllib2.HTTPError as error:
if error.code == 404:
raise RequestError()
elif error.code == 403:
raise AuthenticationError()
elif error.code >= 500:
raise ServerError()
except urllib2.URLError, error:
except urllib2.URLError as error:
if "timed out" in error.reason:
raise TimeoutError()
else:

service/port.py
@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of pyfa.
@@ -15,19 +15,29 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import re
import os
import xml.dom

from eos.types import State, Slot, Module, Cargo, Fit, Ship, Drone, Implant, Booster, Citadel, Fighter
import service
import wx
import logging
import config
import collections
import json
import threading
import locale

from codecs import open

import xml.parsers.expat

from eos import db
from service.fit import Fit as svcFit

import wx

from eos.types import State, Slot, Module, Cargo, Ship, Drone, Implant, Booster, Citadel, Fighter, Fit
from service.crest import Crest
from service.market import Market

logger = logging.getLogger("pyfa.service.port")

@@ -38,18 +48,133 @@ except ImportError:

EFT_SLOT_ORDER = [Slot.LOW, Slot.MED, Slot.HIGH, Slot.RIG, Slot.SUBSYSTEM]
INV_FLAGS = {
Slot.LOW: 11,
Slot.MED: 19,
Slot.HIGH: 27,
Slot.RIG: 92,
Slot.SUBSYSTEM: 125}
Slot.LOW: 11,
Slot.MED: 19,
Slot.HIGH: 27,
Slot.RIG: 92,
Slot.SUBSYSTEM: 125
}

INV_FLAG_CARGOBAY = 5
INV_FLAG_DRONEBAY = 87
INV_FLAG_FIGHTER = 158


class Port(object):
def backupFits(self, path, callback):
thread = FitBackupThread(path, callback)
thread.start()

def importFitsThreaded(self, paths, callback):
thread = FitImportThread(paths, callback)
thread.start()

def importFitFromFiles(self, paths, callback=None):
"""
Imports fits from file(s). First processes all provided paths and stores
assembled fits into a list. This allows us to call back to the GUI as
fits are processed as well as when fits are being saved.
returns
"""
defcodepage = locale.getpreferredencoding()
sFit = svcFit.getInstance()

fits = []
for path in paths:
if callback: # Pulse
wx.CallAfter(callback, 1, "Processing file:\n%s" % path)

file_ = open(path, "r")
srcString = file_.read()

if len(srcString) == 0: # ignore blank files
continue

codec_found = None
# If file had ANSI encoding, decode it to unicode using detection
# of BOM header or if there is no header try default
# codepage then fallback to utf-16, cp1252

if isinstance(srcString, str):
encoding_map = (
('\xef\xbb\xbf', 'utf-8'),
('\xff\xfe\0\0', 'utf-32'),
('\0\0\xfe\xff', 'UTF-32BE'),
('\xff\xfe', 'utf-16'),
('\xfe\xff', 'UTF-16BE'))

for bom, encoding in encoding_map:
if srcString.startswith(bom):
codec_found = encoding
savebom = bom

if codec_found is None:
logger.info("Unicode BOM not found in file %s.", path)
attempt_codecs = (defcodepage, "utf-8", "utf-16", "cp1252")

for page in attempt_codecs:
try:
logger.info("Attempting to decode file %s using %s page.", path, page)
srcString = unicode(srcString, page)
codec_found = page
logger.info("File %s decoded using %s page.", path, page)
except UnicodeDecodeError:
logger.info("Error unicode decoding %s from page %s, trying next codec", path, page)
else:
break
else:
logger.info("Unicode BOM detected in %s, using %s page.", path, codec_found)
srcString = unicode(srcString[len(savebom):], codec_found)

else:
# nasty hack to detect other transparent utf-16 loading
if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
codec_found = "utf-16"
else:
codec_found = "utf-8"

if codec_found is None:
return False, "Proper codec could not be established for %s" % path

try:
_, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
fits += fitsImport
except xml.parsers.expat.ExpatError:
return False, "Malformed XML in %s" % path
except Exception:
logger.exception("Unknown exception processing: %s", path)
return False, "Unknown Error while processing %s" % path

IDs = []
numFits = len(fits)
for i, fit in enumerate(fits):
# Set some more fit attributes and save
fit.character = sFit.character
fit.damagePattern = sFit.pattern
fit.targetResists = sFit.targetResists
db.save(fit)
IDs.append(fit.ID)
if callback: # Pulse
wx.CallAfter(
callback, 1,
"Processing complete, saving fits to database\n(%d/%d)" %
(i + 1, numFits)
)

return True, fits

def importFitFromBuffer(self, bufferStr, activeFit=None):
sFit = svcFit.getInstance()
_, fits = Port.importAuto(bufferStr, activeFit=activeFit)
for fit in fits:
fit.character = sFit.character
fit.damagePattern = sFit.pattern
fit.targetResists = sFit.targetResists
db.save(fit)
return fits

"""Service which houses all import/export format functions"""
|
||||
|
||||
@classmethod
|
||||
def exportCrest(cls, ofit, callback=None):
|
||||
# A few notes:
|
||||
@@ -58,19 +183,19 @@ class Port(object):
|
||||
|
||||
nested_dict = lambda: collections.defaultdict(nested_dict)
|
||||
fit = nested_dict()
|
||||
sCrest = service.Crest.getInstance()
|
||||
sFit = service.Fit.getInstance()
|
||||
sCrest = Crest.getInstance()
|
||||
sFit = svcFit.getInstance()
|
||||
|
||||
eve = sCrest.eve
|
||||
|
||||
# max length is 50 characters
|
||||
name = ofit.name[:47] + '...' if len(ofit.name) > 50 else ofit.name
|
||||
fit['name'] = name
|
||||
fit['ship']['href'] = "%sinventory/types/%d/"%(eve._authed_endpoint, ofit.ship.item.ID)
|
||||
fit['ship']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, ofit.ship.item.ID)
|
||||
fit['ship']['id'] = ofit.ship.item.ID
|
||||
fit['ship']['name'] = ''
|
||||
|
||||
fit['description'] = "<pyfa:%d />"%ofit.ID
|
||||
fit['description'] = "<pyfa:%d />" % ofit.ID
|
||||
fit['items'] = []
|
||||
|
||||
slotNum = {}
|
||||
@@ -87,20 +212,20 @@ class Port(object):
|
||||
slot = int(module.getModifiedItemAttr("subSystemSlot"))
|
||||
item['flag'] = slot
|
||||
else:
|
||||
if not slot in slotNum:
|
||||
if slot not in slotNum:
|
||||
slotNum[slot] = INV_FLAGS[slot]
|
||||
|
||||
item['flag'] = slotNum[slot]
|
||||
slotNum[slot] += 1
|
||||
|
||||
item['quantity'] = 1
|
||||
item['type']['href'] = "%sinventory/types/%d/"%(eve._authed_endpoint, module.item.ID)
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, module.item.ID)
|
||||
item['type']['id'] = module.item.ID
|
||||
item['type']['name'] = ''
|
||||
fit['items'].append(item)
|
||||
|
||||
if module.charge and sFit.serviceFittingOptions["exportCharges"]:
|
||||
if not module.chargeID in charges:
|
||||
if module.chargeID not in charges:
|
||||
charges[module.chargeID] = 0
|
||||
# `or 1` because some charges (ie scripts) are without qty
|
||||
charges[module.chargeID] += module.numCharges or 1
|
||||
@@ -109,7 +234,7 @@ class Port(object):
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_CARGOBAY
|
||||
item['quantity'] = cargo.amount
|
||||
item['type']['href'] = "%sinventory/types/%d/"%(eve._authed_endpoint, cargo.item.ID)
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, cargo.item.ID)
|
||||
item['type']['id'] = cargo.item.ID
|
||||
item['type']['name'] = ''
|
||||
fit['items'].append(item)
|
||||
@@ -118,7 +243,7 @@ class Port(object):
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_CARGOBAY
|
||||
item['quantity'] = amount
|
||||
item['type']['href'] = "%sinventory/types/%d/"%(eve._authed_endpoint, chargeID)
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, chargeID)
|
||||
item['type']['id'] = chargeID
|
||||
item['type']['name'] = ''
|
||||
fit['items'].append(item)
|
||||
@@ -127,7 +252,7 @@ class Port(object):
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_DRONEBAY
|
||||
item['quantity'] = drone.amount
|
||||
item['type']['href'] = "%sinventory/types/%d/"%(eve._authed_endpoint, drone.item.ID)
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, drone.item.ID)
|
||||
item['type']['id'] = drone.item.ID
|
||||
item['type']['name'] = ''
|
||||
fit['items'].append(item)
|
||||
@@ -136,7 +261,7 @@ class Port(object):
|
||||
item = nested_dict()
|
||||
item['flag'] = INV_FLAG_FIGHTER
|
||||
item['quantity'] = fighter.amountActive
|
||||
item['type']['href'] = "%sinventory/types/%d/"%(eve._authed_endpoint, fighter.item.ID)
|
||||
item['type']['href'] = "%sinventory/types/%d/" % (eve._authed_endpoint, fighter.item.ID)
|
||||
item['type']['id'] = fighter.item.ID
|
||||
item['type']['name'] = fighter.item.name
|
||||
fit['items'].append(item)
|
||||
@@ -176,9 +301,9 @@ class Port(object):
|
||||
return "DNA", (cls.importDna(string),)
|
||||
|
||||
@staticmethod
|
||||
def importCrest(str):
|
||||
fit = json.loads(str)
|
||||
sMkt = service.Market.getInstance()
|
||||
def importCrest(str_):
|
||||
fit = json.loads(str_)
|
||||
sMkt = Market.getInstance()
|
||||
|
||||
f = Fit()
|
||||
f.name = fit['name']
|
||||
@@ -229,7 +354,7 @@ class Port(object):
|
||||
continue
|
||||
|
||||
# Recalc to get slot numbers correct for T3 cruisers
|
||||
service.Fit.getInstance().recalc(f)
|
||||
svcFit.getInstance().recalc(f)
|
||||
|
||||
for module in moduleList:
|
||||
if module.fits(f):
|
||||
@@ -239,19 +364,19 @@ class Port(object):
|
||||
|
||||
@staticmethod
|
||||
def importDna(string):
|
||||
sMkt = service.Market.getInstance()
|
||||
sMkt = Market.getInstance()
|
||||
|
||||
ids = map(int, re.findall(r'\d+', string))
|
||||
for id in ids:
|
||||
for id_ in ids:
|
||||
try:
|
||||
try:
|
||||
try:
|
||||
Ship(sMkt.getItem(sMkt.getItem(id)))
|
||||
Ship(sMkt.getItem(sMkt.getItem(id_)))
|
||||
except ValueError:
|
||||
Citadel(sMkt.getItem(sMkt.getItem(id)))
|
||||
Citadel(sMkt.getItem(sMkt.getItem(id_)))
|
||||
except ValueError:
|
||||
Citadel(sMkt.getItem(id))
|
||||
string = string[string.index(str(id)):]
|
||||
Citadel(sMkt.getItem(id_))
|
||||
string = string[string.index(str(id_)):]
|
||||
break
|
||||
except:
|
||||
pass
|
||||
@@ -265,12 +390,13 @@ class Port(object):
|
||||
except ValueError:
|
||||
f.ship = Citadel(sMkt.getItem(int(info[0])))
|
||||
f.name = "{0} - DNA Imported".format(f.ship.item.name)
|
||||
except UnicodeEncodeError as e:
|
||||
def logtransform(s):
|
||||
if len(s) > 10:
|
||||
return s[:10] + "..."
|
||||
return s
|
||||
logger.exception("Couldn't import ship data %r", [ logtransform(s) for s in info ])
|
||||
except UnicodeEncodeError:
|
||||
def logtransform(s_):
|
||||
if len(s_) > 10:
|
||||
return s_[:10] + "..."
|
||||
return s_
|
||||
|
||||
logger.exception("Couldn't import ship data %r", [logtransform(s) for s in info])
|
||||
return None
|
||||
|
||||
moduleList = []
|
||||
@@ -309,7 +435,7 @@ class Port(object):
|
||||
moduleList.append(m)
|
||||
|
||||
# Recalc to get slot numbers correct for T3 cruisers
|
||||
service.Fit.getInstance().recalc(f)
|
||||
svcFit.getInstance().recalc(f)
|
||||
|
||||
for module in moduleList:
|
||||
if module.fits(f):
|
||||
@@ -322,7 +448,7 @@ class Port(object):
|
||||
|
||||
@staticmethod
|
||||
def importEft(eftString):
|
||||
sMkt = service.Market.getInstance()
|
||||
sMkt = Market.getInstance()
|
||||
offineSuffix = " /OFFLINE"
|
||||
|
||||
fit = Fit()
|
||||
@@ -389,20 +515,20 @@ class Port(object):
|
||||
|
||||
if item.category.name == "Drone":
|
||||
extraAmount = int(extraAmount) if extraAmount is not None else 1
|
||||
if not modName in droneMap:
|
||||
if modName not in droneMap:
|
||||
droneMap[modName] = 0
|
||||
droneMap[modName] += extraAmount
|
||||
elif item.category.name == "Fighter":
|
||||
extraAmount = int(extraAmount) if extraAmount is not None else 1
|
||||
fighterItem = Fighter(item)
|
||||
if (extraAmount > fighterItem.fighterSquadronMaxSize): #Amount bigger then max fightergroup size
|
||||
if (extraAmount > fighterItem.fighterSquadronMaxSize): # Amount bigger then max fightergroup size
|
||||
extraAmount = fighterItem.fighterSquadronMaxSize
|
||||
if fighterItem.fits(fit):
|
||||
fit.fighters.append(fighterItem)
|
||||
|
||||
if len(modExtra) == 2 and item.category.name != "Drone" and item.category.name != "Fighter":
|
||||
extraAmount = int(extraAmount) if extraAmount is not None else 1
|
||||
if not modName in cargoMap:
|
||||
if modName not in cargoMap:
|
||||
cargoMap[modName] = 0
|
||||
cargoMap[modName] += extraAmount
|
||||
elif item.category.name == "Implant":
|
||||
@@ -446,13 +572,13 @@ class Port(object):
|
||||
moduleList.append(m)
|
||||
|
||||
# Recalc to get slot numbers correct for T3 cruisers
|
||||
service.Fit.getInstance().recalc(fit)
|
||||
svcFit.getInstance().recalc(fit)
|
||||
|
||||
for m in moduleList:
|
||||
if m.fits(fit):
|
||||
m.owner = fit
|
||||
if not m.isValidState(m.state):
|
||||
print "Error: Module", m, "cannot have state", m.state
|
||||
print("Error: Module", m, "cannot have state", m.state)
|
||||
|
||||
fit.modules.append(m)
|
||||
|
||||
@@ -473,7 +599,7 @@ class Port(object):
|
||||
"""Handle import from EFT config store file"""
|
||||
|
||||
# Check if we have such ship in database, bail if we don't
|
||||
sMkt = service.Market.getInstance()
|
||||
sMkt = Market.getInstance()
|
||||
try:
|
||||
sMkt.getItem(shipname)
|
||||
except:
|
||||
@@ -632,7 +758,7 @@ class Port(object):
|
||||
moduleList.append(m)
|
||||
|
||||
# Recalc to get slot numbers correct for T3 cruisers
|
||||
service.Fit.getInstance().recalc(f)
|
||||
svcFit.getInstance().recalc(f)
|
||||
|
||||
for module in moduleList:
|
||||
if module.fits(f):
|
||||
@@ -651,7 +777,7 @@ class Port(object):
|
||||
|
||||
@staticmethod
|
||||
def importXml(text, callback=None, encoding="utf-8"):
|
||||
sMkt = service.Market.getInstance()
|
||||
sMkt = Market.getInstance()
|
||||
|
||||
doc = xml.dom.minidom.parseString(text.encode(encoding))
|
||||
fittings = doc.getElementsByTagName("fittings").item(0)
|
||||
@@ -716,7 +842,7 @@ class Port(object):
|
||||
continue
|
||||
|
||||
# Recalc to get slot numbers correct for T3 cruisers
|
||||
service.Fit.getInstance().recalc(f)
|
||||
svcFit.getInstance().recalc(f)
|
||||
|
||||
for module in moduleList:
|
||||
if module.fits(f):
|
||||
@@ -734,12 +860,13 @@ class Port(object):
|
||||
offineSuffix = " /OFFLINE"
|
||||
export = "[%s, %s]\n" % (fit.ship.item.name, fit.name)
|
||||
stuff = {}
|
||||
sFit = service.Fit.getInstance()
|
||||
sFit = svcFit.getInstance()
|
||||
for module in fit.modules:
|
||||
slot = module.slot
|
||||
if not slot in stuff:
|
||||
if slot not in stuff:
|
||||
stuff[slot] = []
|
||||
curr = module.item.name if module.item else ("[Empty %s slot]" % Slot.getName(slot).capitalize() if slot is not None else "")
|
||||
curr = module.item.name if module.item \
|
||||
else ("[Empty %s slot]" % Slot.getName(slot).capitalize() if slot is not None else "")
|
||||
if module.charge and sFit.serviceFittingOptions["exportCharges"]:
|
||||
curr += ", %s" % module.charge.name
|
||||
if module.state == State.OFFLINE:
|
||||
@@ -816,29 +943,27 @@ class Port(object):
|
||||
if mod.slot == Slot.SUBSYSTEM:
|
||||
subsystems.append(mod)
|
||||
continue
|
||||
if not mod.itemID in mods:
|
||||
if mod.itemID not in mods:
|
||||
mods[mod.itemID] = 0
|
||||
mods[mod.itemID] += 1
|
||||
|
||||
if mod.charge:
|
||||
if not mod.chargeID in charges:
|
||||
if mod.chargeID not in charges:
|
||||
charges[mod.chargeID] = 0
|
||||
# `or 1` because some charges (ie scripts) are without qty
|
||||
charges[mod.chargeID] += mod.numCharges or 1
|
||||
|
||||
for subsystem in sorted(subsystems, key=lambda mod: mod.getModifiedItemAttr("subSystemSlot")):
|
||||
for subsystem in sorted(subsystems, key=lambda mod_: mod_.getModifiedItemAttr("subSystemSlot")):
|
||||
dna += ":{0};1".format(subsystem.itemID)
|
||||
|
||||
for mod in mods:
|
||||
dna += ":{0};{1}".format(mod, mods[mod])
|
||||
|
||||
# drones are known to be in split stacks
|
||||
groupedDrones = OrderedDict()
|
||||
for drone in fit.drones:
|
||||
groupedDrones[drone.itemID] = groupedDrones.get(drone.itemID, 0) + drone.amount
|
||||
dna += ":{0};{1}".format(drone.itemID, drone.amount)
|
||||
|
||||
for droneItemID in groupedDrones:
|
||||
dna += ":{0};{1}".format(droneItemID, groupedDrones[droneItemID])
|
||||
for fighter in fit.fighters:
|
||||
dna += ":{0};{1}".format(fighter.itemID, fighter.amountActive)
|
||||
|
||||
for fighter in fit.fighters:
|
||||
dna += ":{0};{1}".format(fighter.itemID, fighter.amountActive)
@@ -850,7 +975,7 @@ class Port(object):
# as being "Fitted" to whatever slot they are for, and it causes a corruption error in the
# client when trying to save the fit
if cargo.item.category.name == "Charge":
if not cargo.item.ID in charges:
if cargo.item.ID not in charges:
charges[cargo.item.ID] = 0
charges[cargo.item.ID] += cargo.amount

@@ -864,7 +989,7 @@ class Port(object):
doc = xml.dom.minidom.Document()
fittings = doc.createElement("fittings")
doc.appendChild(fittings)
sFit = service.Fit.getInstance()
sFit = svcFit.getInstance()

for i, fit in enumerate(fits):
try:
@@ -890,7 +1015,7 @@ class Port(object):
# Order of subsystem matters based on this attr. See GH issue #130
slotId = module.getModifiedItemAttr("subSystemSlot") - 125
else:
if not slot in slotNum:
if slot not in slotNum:
slotNum[slot] = 0

slotId = slotNum[slot]
@@ -904,7 +1029,7 @@ class Port(object):
fitting.appendChild(hardware)

if module.charge and sFit.serviceFittingOptions["exportCharges"]:
if not module.charge.name in charges:
if module.charge.name not in charges:
charges[module.charge.name] = 0
# `or 1` because some charges (ie scripts) are without qty
charges[module.charge.name] += module.numCharges or 1
@@ -924,7 +1049,7 @@ class Port(object):
fitting.appendChild(hardware)

for cargo in fit.cargo:
if not cargo.item.name in charges:
if cargo.item.name not in charges:
charges[cargo.item.name] = 0
charges[cargo.item.name] += cargo.amount

@@ -935,7 +1060,7 @@ class Port(object):
hardware.setAttribute("type", name)
fitting.appendChild(hardware)
except:
print "Failed on fitID: %d"%fit.ID
print("Failed on fitID: %d" % fit.ID)
continue
finally:
if callback:
@@ -947,13 +1072,12 @@ class Port(object):
def exportMultiBuy(fit):
export = "%s\n" % (fit.ship.item.name)
stuff = {}
sFit = service.Fit.getInstance()
sFit = svcFit.getInstance()
for module in fit.modules:
slot = module.slot
if not slot in stuff:
if slot not in stuff:
stuff[slot] = []
curr = "%s\n" % module.item.name if module.item else (
"")
curr = "%s\n" % module.item.name if module.item else ""
if module.charge and sFit.serviceFittingOptions["exportCharges"]:
curr += "%s x%s\n" % (module.charge.name, module.numCharges)
stuff[slot].append(curr)
@@ -989,3 +1113,39 @@ class Port(object):
export = export[:-1]

return export


class FitBackupThread(threading.Thread):
def __init__(self, path, callback):
threading.Thread.__init__(self)
self.path = path
self.callback = callback

def run(self):
path = self.path
sFit = svcFit.getInstance()
allFits = map(lambda x: x[0], sFit.getAllFits())
backedUpFits = sFit.exportXml(self.callback, *allFits)
backupFile = open(path, "w", encoding="utf-8")
backupFile.write(backedUpFits)
backupFile.close()

# Send done signal to GUI
wx.CallAfter(self.callback, -1)


class FitImportThread(threading.Thread):
def __init__(self, paths, callback):
threading.Thread.__init__(self)
self.paths = paths
self.callback = callback

def run(self):
sFit = svcFit.getInstance()
success, result = sFit.importFitFromFiles(self.paths, self.callback)

if not success:  # there was an error during processing
logger.error("Error while processing file import: %s", result)
wx.CallAfter(self.callback, -2, result)
else:  # Send done signal to GUI
wx.CallAfter(self.callback, -1, result)
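# A minimal sketch of the callback contract used by both threads above (handler names are
# hypothetical, not from this codebase): -1 signals success, -2 signals an error with details.
#
# def onImportDone(code, result=None):
#     if code == -1:
#         showFits(result)    # hypothetical: display the imported fits
#     elif code == -2:
#         showError(result)   # hypothetical: surface the error message
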
@@ -1,82 +1,87 @@
#===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================

import threading
import config
import os
import eos.types
import eos.db.migration as migration
from eos.db.saveddata.loadDefaultDatabaseValues import DefaultDatabaseValues
from eos.db.saveddata.databaseRepair import DatabaseCleanup
import logging

logger = logging.getLogger(__name__)


class PrefetchThread(threading.Thread):
def run(self):
# We're a daemon thread, as such, interpreter might get shut down while we do stuff
# Make sure we don't throw tracebacks to console
try:
eos.types.Character.setSkillList(eos.db.getItemsByCategory("Skill", eager=("effects", "attributes", "attributes.info.icon", "attributes.info.unit", "icon")))
except:
pass

prefetch = PrefetchThread()
prefetch.daemon = True
prefetch.start()

########
# The following code does not belong here, however until we rebuild skeletons
# to include modified pyfa.py, this is the best place to put it. See GH issue
# #176
# @ todo: move this to pyfa.py
########

#Make sure the saveddata db exists
if not os.path.exists(config.savePath):
os.mkdir(config.savePath)

if os.path.isfile(config.saveDB):
# If database exists, run migration after init'd database
eos.db.saveddata_meta.create_all()
migration.update(eos.db.saveddata_engine)
# Import default database values
# Import values that must exist otherwise Pyfa breaks
DefaultDatabaseValues.importRequiredDefaults()

logging.debug("Starting database validation.")
database_cleanup_instance = DatabaseCleanup()
database_cleanup_instance.OrphanedCharacterSkills(eos.db.saveddata_engine)
database_cleanup_instance.OrphanedFitCharacterIDs(eos.db.saveddata_engine)
database_cleanup_instance.OrphanedFitDamagePatterns(eos.db.saveddata_engine)
logging.debug("Completed database validation.")

else:
# If database does not exist, do not worry about migration. Simply
# create and set version
eos.db.saveddata_meta.create_all()
eos.db.saveddata_engine.execute('PRAGMA user_version = {}'.format(migration.getAppVersion()))
#Import default database values
# Import values that must exist otherwise Pyfa breaks
DefaultDatabaseValues.importRequiredDefaults()
# Import default values for damage profiles
DefaultDatabaseValues.importDamageProfileDefaults()
# Import default values for target resist profiles
DefaultDatabaseValues.importResistProfileDefaults()
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================

import threading
import os

import config
from eos import db
from eos.db import migration
from eos.db.saveddata.loadDefaultDatabaseValues import DefaultDatabaseValues
from eos.db.saveddata.databaseRepair import DatabaseCleanup
from eos.saveddata.character import Character as es_Character

import logging

logger = logging.getLogger(__name__)


class PrefetchThread(threading.Thread):
def run(self):
# We're a daemon thread, as such, interpreter might get shut down while we do stuff
# Make sure we don't throw tracebacks to console
try:
es_Character.setSkillList(db.getItemsByCategory(
"Skill",
eager=("effects", "attributes", "attributes.info.icon", "attributes.info.unit", "icon")
))
except:
pass


prefetch = PrefetchThread()
prefetch.daemon = True
prefetch.start()

# The following code does not belong here, however until we rebuild skeletons
# to include modified pyfa.py, this is the best place to put it. See GH issue
# #176
# @ todo: move this to pyfa.py

# Make sure the saveddata db exists
if config.savePath and not os.path.exists(config.savePath):
os.mkdir(config.savePath)

if config.saveDB and os.path.isfile(config.saveDB):
# If database exists, run migration after init'd database
db.saveddata_meta.create_all()
migration.update(db.saveddata_engine)
# Import default database values
# Import values that must exist otherwise Pyfa breaks
DefaultDatabaseValues.importRequiredDefaults()

logging.debug("Starting database validation.")
database_cleanup_instance = DatabaseCleanup()
database_cleanup_instance.OrphanedCharacterSkills(db.saveddata_engine)
database_cleanup_instance.OrphanedFitCharacterIDs(db.saveddata_engine)
database_cleanup_instance.OrphanedFitDamagePatterns(db.saveddata_engine)
logging.debug("Completed database validation.")

else:
# If database does not exist, do not worry about migration. Simply
# create and set version
db.saveddata_meta.create_all()
db.saveddata_engine.execute('PRAGMA user_version = {}'.format(migration.getAppVersion()))
# Import default database values
# Import values that must exist otherwise Pyfa breaks
DefaultDatabaseValues.importRequiredDefaults()
# Import default values for damage profiles
DefaultDatabaseValues.importDamageProfileDefaults()
# Import default values for target resist profiles
DefaultDatabaseValues.importResistProfileDefaults()
@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
@@ -15,20 +15,21 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import service
import eos.db
import eos.types
import time
from xml.dom import minidom

VALIDITY = 24*60*60 # Price validity period, 24 hours
REREQUEST = 4*60*60 # Re-request delay for failed fetches, 4 hours
TIMEOUT = 15*60 # Network timeout delay for connection issues, 15 minutes
from eos import db
from service.network import Network, TimeoutError

class Price():
VALIDITY = 24 * 60 * 60  # Price validity period, 24 hours
REREQUEST = 4 * 60 * 60  # Re-request delay for failed fetches, 4 hours
TIMEOUT = 15 * 60  # Network timeout delay for connection issues, 15 minutes


class Price(object):
@classmethod
def fetchPrices(cls, prices):
"""Fetch all prices passed to this method"""
@@ -49,7 +50,7 @@ class Price():
# Compose list of items we're going to request
for typeID in priceMap:
# Get item object
item = eos.db.getItem(typeID)
item = db.getItem(typeID)
# We're not going to request items only with market group, as eve-central
# doesn't provide any data for items not on the market
if item is not None and item.marketGroupID:
@@ -64,22 +65,22 @@ class Price():

# Base request URL
baseurl = "https://eve-central.com/api/marketstat"
data.append(("usesystem", 30000142)) # Use Jita for market
data.append(("usesystem", 30000142))  # Use Jita for market

for typeID in toRequest: # Add all typeID arguments
data.append(("typeid", typeID))

# Attempt to send request and process it
try:
network = service.Network.getInstance()
network = Network.getInstance()
data = network.request(baseurl, network.PRICES, data)
xml = minidom.parse(data)
types = xml.getElementsByTagName("marketstat").item(0).getElementsByTagName("type")
# Cycle through all types we've got from request
for type in types:
for type_ in types:
# Get data out of each typeID details tree
typeID = int(type.getAttribute("id"))
sell = type.getElementsByTagName("sell").item(0)
typeID = int(type_.getAttribute("id"))
sell = type_.getElementsByTagName("sell").item(0)
# If price data wasn't there, set price to zero
try:
percprice = float(sell.getElementsByTagName("percentile").item(0).firstChild.data)
@@ -96,7 +97,7 @@ class Price():
del priceMap[typeID]

# If getting or processing data returned any errors
except service.network.TimeoutError, e:
except TimeoutError:
# Timeout error deserves special treatment
for typeID in priceMap.keys():
priceobj = priceMap[typeID]
@@ -5,9 +5,8 @@ class NullHandler(logging.Handler):
def emit(self, record):
pass


logger = logging.getLogger('pycrest')
logger.addHandler(NullHandler())

version = "0.0.1"

from .eve import EVE
@@ -21,4 +21,4 @@ def text_(s, encoding='latin-1', errors='strict'): # pragma: no cover
def bytes_(s, encoding='latin-1', errors='strict'): # pragma: no cover
if isinstance(s, text_type):
return s.encode(encoding, errors)
return s
return s

@@ -1,2 +1,2 @@
class APIException(Exception):
pass
pass
@@ -9,14 +9,13 @@ import requests
from requests.adapters import HTTPAdapter

import config
from compat import bytes_, text_
from errors import APIException
from . import version
from service.pycrest.compat import bytes_, text_
from service.pycrest.errors import APIException

try:
from urllib.parse import urlparse, urlunparse, parse_qsl
from urllib.parse import urlparse, urlunparse, parse_qsl, quote
except ImportError: # pragma: no cover
from urlparse import urlparse, urlunparse, parse_qsl
from urlparse import urlparse, urlunparse, parse_qsl, quote

try:
import pickle
@@ -27,7 +26,9 @@ try:
from urllib.parse import quote
except ImportError: # pragma: no cover
from urllib import quote

import logging
import re
import config

logger = logging.getLogger("pycrest.eve")
cache_re = re.compile(r'max-age=([0-9]+)')
@@ -52,8 +53,7 @@ class FileCache(APICache):
os.mkdir(self.path, 0o700)

def _getpath(self, key):
path = config.parsePath(self.path, str(hash(key)) + '.cache')
return path
return config.parsePath(self.path, str(hash(key)) + '.cache')

def put(self, key, value):
with open(self._getpath(key), 'wb') as f:
@@ -112,8 +112,7 @@ class APIConnection(object):
"Accept": "application/json",
})
session.headers.update(additional_headers)
session.mount('https://public-crest.eveonline.com',
HTTPAdapter())
session.mount('https://public-crest.eveonline.com', HTTPAdapter())
self._session = session
if cache:
if isinstance(cache, APICache):
@@ -1,36 +1,26 @@
import datetime
import ssl
import sys
import warnings

from requests.adapters import HTTPAdapter

try:
from requests.packages import urllib3
from requests.packages.urllib3.util import ssl_

from requests.packages.urllib3.exceptions import (
SystemTimeWarning,
SecurityWarning,
)
from requests.packages.urllib3.packages.ssl_match_hostname import \
match_hostname
from requests.packages import urllib3
from requests.packages.urllib3.util import ssl_
from requests.packages.urllib3.exceptions import (
SystemTimeWarning,
SecurityWarning,
)
from requests.packages.urllib3.packages.ssl_match_hostname import \
match_hostname
except:
import urllib3
from urllib3.util import ssl_

from urllib3.exceptions import (
SystemTimeWarning,
SecurityWarning,
)
from urllib3.packages.ssl_match_hostname import \
match_hostname
import urllib3
from urllib3.util import ssl_
from urllib3.exceptions import SystemTimeWarning, SecurityWarning
from urllib3.packages.ssl_match_hostname import match_hostname


class WeakCiphersHTTPSConnection(
urllib3.connection.VerifiedHTTPSConnection): # pragma: no cover
class WeakCiphersHTTPSConnection(urllib3.connection.VerifiedHTTPSConnection): # pragma: no cover

# Python versions >=2.7.9 and >=3.4.1 do not (by default) allow ciphers
# with MD5. Unfortunately, the CREST public server _only_ supports
@@ -77,22 +67,26 @@ class WeakCiphersHTTPSConnection(
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(
urllib3.connection.RECENT_DATE),
urllib3.connection.RECENT_DATE),
SystemTimeWarning
)

# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_.ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
server_hostname=hostname,
ssl_version=resolved_ssl_version,
ciphers=self.ciphers)
self.sock = ssl_.ssl_wrap_socket(
conn,
self.key_file,
self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
server_hostname=hostname,
ssl_version=resolved_ssl_version,
ciphers=self.ciphers,
)

if self.assert_fingerprint:
ssl_.assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
cert = self.sock.getpeercert()
@@ -105,36 +99,34 @@ class WeakCiphersHTTPSConnection(
)
match_hostname(cert, self.assert_hostname or hostname)

self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
or self.assert_fingerprint is not None)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or self.assert_fingerprint is not None)


class WeakCiphersHTTPSConnectionPool(
urllib3.connectionpool.HTTPSConnectionPool):

class WeakCiphersHTTPSConnectionPool(urllib3.connectionpool.HTTPSConnectionPool):
ConnectionCls = WeakCiphersHTTPSConnection


class WeakCiphersPoolManager(urllib3.poolmanager.PoolManager):

def _new_pool(self, scheme, host, port):
if scheme == 'https':
return WeakCiphersHTTPSConnectionPool(host, port,
**(self.connection_pool_kw))
return super(WeakCiphersPoolManager, self)._new_pool(scheme, host,
port)
return WeakCiphersHTTPSConnectionPool(host, port, **(self.connection_pool_kw))
return super(WeakCiphersPoolManager, self)._new_pool(scheme, host, port)


class WeakCiphersAdapter(HTTPAdapter):
""""Transport adapter" that allows us to use TLS_RSA_WITH_RC4_128_MD5."""

def init_poolmanager(self, connections, maxsize, block=False,
**pool_kwargs):
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
# Rewrite of the requests.adapters.HTTPAdapter.init_poolmanager method
# to use WeakCiphersPoolManager instead of urllib3's PoolManager
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block

self.poolmanager = WeakCiphersPoolManager(num_pools=connections,
maxsize=maxsize, block=block, strict=True, **pool_kwargs)
self.poolmanager = WeakCiphersPoolManager(
num_pools=connections,
maxsize=maxsize,
block=block,
strict=True,
**pool_kwargs
)
@@ -2,13 +2,15 @@ import BaseHTTPServer
import urlparse
import socket
import thread
import wx
from service.settings import CRESTSettings

import logging

import wx

from service.settings import CRESTSettings

logger = logging.getLogger(__name__)

# noinspection PyPep8
HTML = '''
<!DOCTYPE html>
<html>
@@ -56,6 +58,7 @@ else {
</html>
'''


# https://github.com/fuzzysteve/CREST-Market-Downloader/
class AuthHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
@@ -72,9 +75,9 @@ class AuthHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def log_message(self, format, *args):
return


# http://code.activestate.com/recipes/425210-simple-stoppable-server-using-socket-timeout/
class StoppableHTTPServer(BaseHTTPServer.HTTPServer):

def server_bind(self):
BaseHTTPServer.HTTPServer.server_bind(self)
self.settings = CRESTSettings.getInstance()
@@ -101,7 +104,7 @@ class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
self.run = False

def handle_timeout(self):
#logger.debug("Number of tries: %d"%self.tries)
# logger.debug("Number of tries: %d"%self.tries)
self.tries += 1
if self.tries == self.max_tries:
logger.debug("Server timed out waiting for connection")
@@ -116,9 +119,9 @@ class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
pass
self.server_close()


if __name__ == "__main__":
httpd = StoppableHTTPServer(('', 6461), AuthHandler)
thread.start_new_thread(httpd.serve, ())
raw_input("Press <RETURN> to stop server\n")
httpd.stop()
@@ -1,4 +1,4 @@
# ===============================================================================
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
@@ -15,7 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
# =============================================================================

import cPickle
import os.path
@@ -24,7 +24,7 @@ import urllib2
import config


class SettingsProvider():
class SettingsProvider(object):
BASE_PATH = config.getSavePath("settings")
settings = {}
_instance = None
@@ -75,7 +75,7 @@ class SettingsProvider():
settings.save()


class Settings():
class Settings(object):
def __init__(self, location, info):
self.location = location
self.info = info
@@ -115,7 +115,7 @@ class Settings():
return self.info.items()


class NetworkSettings():
class NetworkSettings(object):
_instance = None

# constants for serviceNetworkDefaultSettings["mode"] parameter
@@ -197,7 +197,7 @@ class NetworkSettings():
validPrefixes = ("http", "https")

for prefix in validPrefixes:
if not prefix in proxydict:
if prefix not in proxydict:
continue
proxyline = proxydict[prefix]
proto = "{0}://".format(prefix)
@@ -240,12 +240,10 @@ class NetworkSettings():
self.serviceNetworkSettings["password"] = password


"""
Settings used by the HTML export feature.
"""


class HTMLExportSettings():
class HTMLExportSettings(object):
"""
Settings used by the HTML export feature.
"""
_instance = None

@classmethod
@@ -256,10 +254,15 @@ class HTMLExportSettings():
return cls._instance

def __init__(self):
serviceHTMLExportDefaultSettings = {"enabled": False, "path": config.pyfaPath + os.sep + 'pyfaFits.html',
"minimal": False}
self.serviceHTMLExportSettings = SettingsProvider.getInstance().getSettings("pyfaServiceHTMLExportSettings",
serviceHTMLExportDefaultSettings)
serviceHTMLExportDefaultSettings = {
"enabled": False,
"path": config.pyfaPath + os.sep + 'pyfaFits.html',
"minimal": False
}
self.serviceHTMLExportSettings = SettingsProvider.getInstance().getSettings(
"pyfaServiceHTMLExportSettings",
serviceHTMLExportDefaultSettings
)

def getEnabled(self):
return self.serviceHTMLExportSettings["enabled"]
@@ -280,12 +283,10 @@ class HTMLExportSettings():
self.serviceHTMLExportSettings["path"] = path


"""
Settings used by update notification
"""


class UpdateSettings():
class UpdateSettings(object):
"""
Settings used by update notification
"""
_instance = None

@classmethod
@@ -301,8 +302,10 @@ class UpdateSettings():
# prerelease - If True, suppress prerelease notifications
# version - Set to release tag that user does not want notifications for
serviceUpdateDefaultSettings = {"prerelease": True, 'version': None}
self.serviceUpdateSettings = SettingsProvider.getInstance().getSettings("pyfaServiceUpdateSettings",
serviceUpdateDefaultSettings)
self.serviceUpdateSettings = SettingsProvider.getInstance().getSettings(
"pyfaServiceUpdateSettings",
serviceUpdateDefaultSettings
)

def get(self, type):
return self.serviceUpdateSettings[type]
@@ -311,7 +314,7 @@ class UpdateSettings():
self.serviceUpdateSettings[type] = value


class CRESTSettings():
class CRESTSettings(object):
_instance = None

@classmethod
@@ -327,8 +330,10 @@ class CRESTSettings():
# 1 - User-supplied client details
serviceCRESTDefaultSettings = {"mode": 0, "server": 0, "clientID": "", "clientSecret": "", "timeout": 60}

self.serviceCRESTSettings = SettingsProvider.getInstance().getSettings("pyfaServiceCRESTSettings",
serviceCRESTDefaultSettings)
self.serviceCRESTSettings = SettingsProvider.getInstance().getSettings(
"pyfaServiceCRESTSettings",
serviceCRESTDefaultSettings
)

def get(self, type):
return self.serviceCRESTSettings[type]
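
# Usage sketch for the pattern repeated above (setting names and values here are examples
# only): each service asks SettingsProvider for a named, persisted settings bucket with
# defaults, then reads and writes it like a dict.
#
# mySettings = SettingsProvider.getInstance().getSettings(
#     "pyfaServiceExampleSettings",
#     {"enabled": True, "timeout": 60}
# )
# if mySettings["enabled"]:
#     mySettings["timeout"] = 30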

@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of pyfa.
@@ -15,17 +15,21 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import eos.db
import eos.types
import copy

from eos import db
from eos.saveddata.targetResists import TargetResists as es_TargetResists


class ImportError(Exception):
pass

class TargetResists():

class TargetResists(object):
instance = None

@classmethod
def getInstance(cls):
if cls.instance is None:
@@ -34,31 +38,31 @@ class TargetResists():
return cls.instance

def getTargetResistsList(self):
return eos.db.getTargetResistsList()
return db.getTargetResistsList()

def getTargetResists(self, name):
return eos.db.getTargetResists(name)
return db.getTargetResists(name)

def newPattern(self, name):
p = eos.types.TargetResists(0.0, 0.0, 0.0, 0.0)
p = es_TargetResists(0.0, 0.0, 0.0, 0.0)
p.name = name
eos.db.save(p)
db.save(p)
return p

def renamePattern(self, p, newName):
p.name = newName
eos.db.save(p)
db.save(p)

def deletePattern(self, p):
eos.db.remove(p)
db.remove(p)

def copyPattern(self, p):
newP = copy.deepcopy(p)
eos.db.save(newP)
db.save(newP)
return newP

def saveChanges(self, p):
eos.db.save(p)
db.save(p)

def importPatterns(self, text):
lookup = {}
@@ -66,23 +70,22 @@ class TargetResists():
for pattern in current:
lookup[pattern.name] = pattern

imports, num = eos.types.TargetResists.importPatterns(text)
imports, num = es_TargetResists.importPatterns(text)
for pattern in imports:
if pattern.name in lookup:
match = lookup[pattern.name]
match.__dict__.update(pattern.__dict__)
else:
eos.db.save(pattern)
eos.db.commit()
db.save(pattern)
db.commit()

lenImports = len(imports)
if lenImports == 0:
raise ImportError("No patterns found for import")
if lenImports != num:
raise ImportError("%d patterns imported from clipboard; %d had errors"%(num, num-lenImports))
raise ImportError("%d patterns imported from clipboard; %d had errors" % (num, num - lenImports))

def exportPatterns(self):
patterns = self.getTargetResistsList()
patterns.sort(key=lambda p: p.name)
return eos.types.TargetResists.exportPatterns(*patterns)

return es_TargetResists.exportPatterns(*patterns)
@@ -1,4 +1,4 @@
#===============================================================================
# =============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of pyfa.
@@ -15,31 +15,37 @@
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# =============================================================================

import threading
import wx
import urllib2
import json
import config
import service
import dateutil.parser
import calendar

import wx
import dateutil.parser

import config
from service.network import Network
from service.settings import UpdateSettings


class CheckUpdateThread(threading.Thread):
def __init__(self, callback):
threading.Thread.__init__(self)
self.callback = callback
self.settings = service.settings.UpdateSettings.getInstance()
self.network = service.Network.getInstance()
self.settings = UpdateSettings.getInstance()
self.network = Network.getInstance()

def run(self):
network = service.Network.getInstance()
network = Network.getInstance()

try:
response = network.request('https://api.github.com/repos/pyfa-org/Pyfa/releases', network.UPDATE)
jsonResponse = json.loads(response.read())
jsonResponse.sort(key=lambda x: calendar.timegm(dateutil.parser.parse(x['published_at']).utctimetuple()), reverse=True)
jsonResponse.sort(
key=lambda x: calendar.timegm(dateutil.parser.parse(x['published_at']).utctimetuple()),
reverse=True
)

for release in jsonResponse:
# Suppress pre releases
@@ -47,7 +53,7 @@ class CheckUpdateThread(threading.Thread):
continue

# Handle use-case of updating to suppressed version
if self.settings.get('version') == 'v'+config.version:
if self.settings.get('version') == 'v' + config.version:
self.settings.set('version', None)

# Suppress version
@@ -60,16 +66,18 @@ class CheckUpdateThread(threading.Thread):
else:
rVersion = release['tag_name'].replace('v', '', 1)

if config.tag is 'git' and not release['prerelease'] and self.versiontuple(rVersion) >= self.versiontuple(config.version):
wx.CallAfter(self.callback, release) # git (dev/Singularity) -> Stable
if config.tag is 'git' and \
not release['prerelease'] and \
self.versiontuple(rVersion) >= self.versiontuple(config.version):
wx.CallAfter(self.callback, release)  # git (dev/Singularity) -> Stable
elif config.expansionName is not "Singularity":
if release['prerelease']:
wx.CallAfter(self.callback, release) # Stable -> Singularity
wx.CallAfter(self.callback, release)  # Stable -> Singularity
elif self.versiontuple(rVersion) > self.versiontuple(config.version):
wx.CallAfter(self.callback, release) # Stable -> Stable
wx.CallAfter(self.callback, release)  # Stable -> Stable
else:
if release['prerelease'] and rVersion > config.expansionVersion:
wx.CallAfter(self.callback, release) # Singularity -> Singularity
wx.CallAfter(self.callback, release)  # Singularity -> Singularity
break
except:
pass
@@ -77,10 +85,9 @@ class CheckUpdateThread(threading.Thread):
def versiontuple(self, v):
return tuple(map(int, (v.split("."))))
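# Worked example (hypothetical version strings): versiontuple("1.29.4") -> (1, 29, 4), so the
# plain tuple comparisons above order releases numerically, e.g. (1, 29, 4) < (1, 30, 0).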


class Update():
instance = None
def __init__(self):
pass

def CheckUpdate(self, callback):
thread = CheckUpdateThread(callback)
@@ -88,8 +95,6 @@ class Update():

@classmethod
def getInstance(cls):
if cls.instance == None:
if cls.instance is None:
cls.instance = Update()
return cls.instance