Do not write non-ASCII comments to effect files
This commit is contained in:
180
config.py
180
config.py
@@ -1,90 +1,90 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Load variable overrides specific to distribution type
|
||||
try:
|
||||
import configforced
|
||||
except ImportError:
|
||||
configforced = None
|
||||
|
||||
# Turns on debug mode
|
||||
debug = False
|
||||
# Defines if our saveddata will be in pyfa root or not
|
||||
saveInRoot = False
|
||||
|
||||
# Version data
|
||||
version = "1.8.0"
|
||||
tag = "Stable"
|
||||
expansionName = "Proteus"
|
||||
expansionVersion = "1.0"
|
||||
evemonMinVersion = "4081"
|
||||
|
||||
# Database version (int ONLY)
|
||||
# Increment every time we need to flag for user database upgrade/modification
|
||||
dbversion = 3
|
||||
|
||||
pyfaPath = None
|
||||
savePath = None
|
||||
staticPath = None
|
||||
saveDB = None
|
||||
gameDB = None
|
||||
|
||||
def defPaths():
|
||||
global pyfaPath
|
||||
global savePath
|
||||
global staticPath
|
||||
global saveDB
|
||||
global gameDB
|
||||
global saveInRoot
|
||||
# The main pyfa directory which contains run.py
|
||||
# Python 2.X uses ANSI by default, so we need to convert the character encoding
|
||||
pyfaPath = getattr(configforced, "pyfaPath", pyfaPath)
|
||||
if pyfaPath is None:
|
||||
pyfaPath = unicode(os.path.dirname(os.path.realpath(os.path.abspath(
|
||||
sys.modules['__main__'].__file__))), sys.getfilesystemencoding())
|
||||
|
||||
# Where we store the saved fits etc, default is the current users home directory
|
||||
if saveInRoot is True:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
if savePath is None:
|
||||
savePath = os.path.join(pyfaPath, "saveddata")
|
||||
else:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
if savePath is None:
|
||||
savePath = unicode(os.path.expanduser(os.path.join("~", ".pyfa")),
|
||||
sys.getfilesystemencoding())
|
||||
|
||||
# Redirect stderr to file if we're requested to do so
|
||||
stderrToFile = getattr(configforced, "stderrToFile", None)
|
||||
if stderrToFile is True:
|
||||
if not os.path.exists(savePath):
|
||||
os.mkdir(savePath)
|
||||
sys.stderr = open(os.path.join(savePath, "error_log.txt"), "w")
|
||||
|
||||
# Same for stdout
|
||||
stdoutToFile = getattr(configforced, "stdoutToFile", None)
|
||||
if stdoutToFile is True:
|
||||
if not os.path.exists(savePath):
|
||||
os.mkdir(savePath)
|
||||
sys.stdout = open(os.path.join(savePath, "output_log.txt"), "w")
|
||||
|
||||
# Static EVE Data from the staticdata repository, should be in the staticdata
|
||||
# directory in our pyfa directory
|
||||
staticPath = os.path.join(pyfaPath, "staticdata")
|
||||
|
||||
# The database where we store all the fits etc
|
||||
saveDB = os.path.join(savePath, "saveddata.db")
|
||||
|
||||
# The database where the static EVE data from the datadump is kept.
|
||||
# This is not the standard sqlite datadump but a modified version created by eos
|
||||
# maintenance script
|
||||
gameDB = os.path.join(staticPath, "eve.db")
|
||||
|
||||
## DON'T MODIFY ANYTHING BELOW ##
|
||||
import eos.config
|
||||
|
||||
#Caching modifiers, disable all gamedata caching, its unneeded.
|
||||
eos.config.gamedataCache = False
|
||||
# saveddata db location modifier, shouldn't ever need to touch this
|
||||
eos.config.saveddata_connectionstring = "sqlite:///" + saveDB + "?check_same_thread=False"
|
||||
eos.config.gamedata_connectionstring = "sqlite:///" + gameDB + "?check_same_thread=False"
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Load variable overrides specific to distribution type
|
||||
try:
|
||||
import configforced
|
||||
except ImportError:
|
||||
configforced = None
|
||||
|
||||
# Turns on debug mode
|
||||
debug = False
|
||||
# Defines if our saveddata will be in pyfa root or not
|
||||
saveInRoot = False
|
||||
|
||||
# Version data
|
||||
version = "1.8.1"
|
||||
tag = "git"
|
||||
expansionName = "Proteus"
|
||||
expansionVersion = "1.0"
|
||||
evemonMinVersion = "4081"
|
||||
|
||||
# Database version (int ONLY)
|
||||
# Increment every time we need to flag for user database upgrade/modification
|
||||
dbversion = 3
|
||||
|
||||
pyfaPath = None
|
||||
savePath = None
|
||||
staticPath = None
|
||||
saveDB = None
|
||||
gameDB = None
|
||||
|
||||
def defPaths():
|
||||
global pyfaPath
|
||||
global savePath
|
||||
global staticPath
|
||||
global saveDB
|
||||
global gameDB
|
||||
global saveInRoot
|
||||
# The main pyfa directory which contains run.py
|
||||
# Python 2.X uses ANSI by default, so we need to convert the character encoding
|
||||
pyfaPath = getattr(configforced, "pyfaPath", pyfaPath)
|
||||
if pyfaPath is None:
|
||||
pyfaPath = unicode(os.path.dirname(os.path.realpath(os.path.abspath(
|
||||
sys.modules['__main__'].__file__))), sys.getfilesystemencoding())
|
||||
|
||||
# Where we store the saved fits etc, default is the current users home directory
|
||||
if saveInRoot is True:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
if savePath is None:
|
||||
savePath = os.path.join(pyfaPath, "saveddata")
|
||||
else:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
if savePath is None:
|
||||
savePath = unicode(os.path.expanduser(os.path.join("~", ".pyfa")),
|
||||
sys.getfilesystemencoding())
|
||||
|
||||
# Redirect stderr to file if we're requested to do so
|
||||
stderrToFile = getattr(configforced, "stderrToFile", None)
|
||||
if stderrToFile is True:
|
||||
if not os.path.exists(savePath):
|
||||
os.mkdir(savePath)
|
||||
sys.stderr = open(os.path.join(savePath, "error_log.txt"), "w")
|
||||
|
||||
# Same for stdout
|
||||
stdoutToFile = getattr(configforced, "stdoutToFile", None)
|
||||
if stdoutToFile is True:
|
||||
if not os.path.exists(savePath):
|
||||
os.mkdir(savePath)
|
||||
sys.stdout = open(os.path.join(savePath, "output_log.txt"), "w")
|
||||
|
||||
# Static EVE Data from the staticdata repository, should be in the staticdata
|
||||
# directory in our pyfa directory
|
||||
staticPath = os.path.join(pyfaPath, "staticdata")
|
||||
|
||||
# The database where we store all the fits etc
|
||||
saveDB = os.path.join(savePath, "saveddata.db")
|
||||
|
||||
# The database where the static EVE data from the datadump is kept.
|
||||
# This is not the standard sqlite datadump but a modified version created by eos
|
||||
# maintenance script
|
||||
gameDB = os.path.join(staticPath, "eve.db")
|
||||
|
||||
## DON'T MODIFY ANYTHING BELOW ##
|
||||
import eos.config
|
||||
|
||||
#Caching modifiers, disable all gamedata caching, its unneeded.
|
||||
eos.config.gamedataCache = False
|
||||
# saveddata db location modifier, shouldn't ever need to touch this
|
||||
eos.config.saveddata_connectionstring = "sqlite:///" + saveDB + "?check_same_thread=False"
|
||||
eos.config.gamedata_connectionstring = "sqlite:///" + gameDB + "?check_same_thread=False"
|
||||
|
||||
@@ -92,6 +92,7 @@ TYPENAMECOMBS_WEIGHT = 1.0
|
||||
# If score drops below this value, remaining items will be listed
|
||||
# without any grouping
|
||||
LOWEST_SCORE = 0.7
|
||||
|
||||
# Adjust scoring formulae
|
||||
def calc_innerscore(affected_decribed, affected_undescribed, total,
|
||||
pereffect_totalaffected, weight=1.0):
|
||||
@@ -109,6 +110,7 @@ def calc_innerscore(affected_decribed, affected_undescribed, total,
|
||||
innerscore = (coverage_total ** 0.23) * coverage_additionalfactor * \
|
||||
affected_total_factor * weight
|
||||
return innerscore
|
||||
|
||||
def calc_outerscore(innerscore_dict, pereffect_totalaffected, weight):
|
||||
"""Outer score calculation formula"""
|
||||
# Return just max of the inner scores, including weight factor
|
||||
@@ -118,6 +120,16 @@ def calc_outerscore(innerscore_dict, pereffect_totalaffected, weight):
|
||||
return outerscore
|
||||
else: return 0.0
|
||||
|
||||
def validate_string(s):
|
||||
try:
|
||||
s.encode('ascii')
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except Exception:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
# Connect to database and set up cursor
|
||||
db = sqlite3.connect(os.path.expanduser(options.database))
|
||||
cursor = db.cursor()
|
||||
@@ -968,8 +980,9 @@ inner score: {5:.3})"
|
||||
# Append line for printing to list
|
||||
catname = type[2]
|
||||
typename = type[1]
|
||||
printstr = "# {0}: {1}"
|
||||
printing_typelines.append(printstr.format(catname, typename))
|
||||
printstr = "# {0}: {1}".format(catname, typename)
|
||||
if validate_string(printstr):
|
||||
printing_typelines.append(printstr)
|
||||
# Do the same for groups
|
||||
printing_grouplines = []
|
||||
printing_groups = sorted(printing_groups, key=lambda tuple: tuple[1])
|
||||
@@ -979,9 +992,9 @@ inner score: {5:.3})"
|
||||
groupname = group[1]
|
||||
described = len(effectmap_groupid_typeid[group[0]][0])
|
||||
total = len(globalmap_groupid_typeid[group[0]])
|
||||
printstr = "# {0}s from group: {1} ({2} of {3})"
|
||||
printing_grouplines.append(printstr.format(catname, groupname,
|
||||
described, total))
|
||||
printstr = "# {0}s from group: {1} ({2} of {3})".format(catname, groupname, described, total)
|
||||
if validate_string(printstr):
|
||||
printing_grouplines.append(printstr)
|
||||
# Process categories
|
||||
printing_categorylines = []
|
||||
printing_categories = sorted(printing_categories,
|
||||
@@ -990,9 +1003,9 @@ inner score: {5:.3})"
|
||||
catname = category[1]
|
||||
described = len(effectmap_categoryid_typeid[category[0]][0])
|
||||
total = len(globalmap_categoryid_typeid[category[0]])
|
||||
printstr = "# Items from category: {0} ({1} of {2})"
|
||||
printing_categorylines.append(printstr.format(catname, described,
|
||||
total))
|
||||
printstr = "# Items from category: {0} ({1} of {2})".format(catname, described, total)
|
||||
if validate_string(printstr):
|
||||
printing_categorylines.append(printstr)
|
||||
# Process variations
|
||||
printing_basetypelines = []
|
||||
printing_basetypes = sorted(printing_basetypes,
|
||||
@@ -1004,9 +1017,9 @@ inner score: {5:.3})"
|
||||
basename = basetype[1]
|
||||
described = len(effectmap_basetypeid_typeid[basetype[0]][0])
|
||||
total = len(globalmap_basetypeid_typeid[basetype[0]])
|
||||
printstr = "# Variations of {0}: {1} ({2} of {3})"
|
||||
printing_basetypelines.append(printstr.format(catname, basename,
|
||||
described, total))
|
||||
printstr = "# Variations of {0}: {1} ({2} of {3})".format(catname, basename, described, total)
|
||||
if validate_string(printstr):
|
||||
printing_basetypelines.append(printstr)
|
||||
# Process market groups with variations
|
||||
printing_marketgroupwithvarslines = []
|
||||
printing_marketgroupswithvars = sorted(printing_marketgroupswithvars,
|
||||
@@ -1017,9 +1030,9 @@ inner score: {5:.3})"
|
||||
[marketgroup[0]][0])
|
||||
total = len(globalmap_marketgroupid_typeidwithvariations
|
||||
[marketgroup[0]])
|
||||
printstr = "# Items from market group: {0} ({1} of {2})"
|
||||
printing_marketgroupwithvarslines.append(printstr.
|
||||
format(marketgroupname, described, total))
|
||||
printstr = "# Items from market group: {0} ({1} of {2})".format(marketgroupname, described, total)
|
||||
if validate_string(printstr):
|
||||
printing_marketgroupwithvarslines.append(printstr)
|
||||
# Process type name combinations
|
||||
printing_typenamecombtuplelines = []
|
||||
printing_typenamecombtuples = sorted(printing_typenamecombtuples,
|
||||
@@ -1032,9 +1045,9 @@ inner score: {5:.3})"
|
||||
described = len(effectmap_typenamecombtuple_typeid
|
||||
[typenamecomb[0]][0])
|
||||
total = len(globalmap_typenamecombtuple_typeid[typenamecomb[0]])
|
||||
printstr = "# {0}s named like: {1} ({2} of {3})"
|
||||
printing_typenamecombtuplelines.append(printstr.format(catname,
|
||||
namedlike, described, total))
|
||||
printstr = "# {0}s named like: {1} ({2} of {3})".format(catname, namedlike, described, total)
|
||||
if validate_string(printstr):
|
||||
printing_typenamecombtuplelines.append(printstr)
|
||||
|
||||
# Compose single list of lines using custom sorting
|
||||
commentlines = printing_categorylines + printing_grouplines + \
|
||||
|
||||
Reference in New Issue
Block a user