Merge branch 'test-3'
@@ -7,7 +7,7 @@ import sqlite3
 import sys

 # Add eos root path to sys.path so we can import ourselves
-path = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
+path = os.path.dirname(str(__file__, sys.getfilesystemencoding()))
 sys.path.append(os.path.realpath(os.path.join(path, "..")))

 # change to correct conversion
@@ -275,14 +275,14 @@ def main(old, new):
             c = x.split(rename_phrase)
             container = renames
         else:
-            print "Unknown format: {}".format(x)
+            print("Unknown format: {}".format(x))
             sys.exit()

         old_name, new_name = c[0], c[1]
         old_item, new_item = None, None

         if "Blueprint" in old_name or "Blueprint" in new_name:
-            print "Blueprint: Skipping this line: %s"%x
+            print("Blueprint: Skipping this line: %s"%x)
             continue

         # gather item info
@@ -297,9 +297,9 @@ def main(old, new):
                 break

         if not old_item:
-            print "Error finding old item in {} -> {}".format(old_name, new_name)
+            print("Error finding old item in {} -> {}".format(old_name, new_name))
         if not new_item:
-            print "Error finding new item in {} -> {}".format(old_name, new_name)
+            print("Error finding new item in {} -> {}".format(old_name, new_name))

         if not container.get((new_item,new_name), None):
             container[(new_item,new_name)] = []
@@ -307,31 +307,31 @@ def main(old, new):

         container[(new_item,new_name)].append((old_item, old_name))

-    print " # Renamed items"
+    print(" # Renamed items")

-    for new, old in renames.iteritems():
+    for new, old in renames.items():
         if len(old) != 1:
-            print "Incorrect length, key: {}, value: {}".format(new, old)
+            print("Incorrect length, key: {}, value: {}".format(new, old))
             sys.exit()
         old = old[0]

-        print " \"{}\": \"{}\",".format(old[1], new[1])
+        print(" \"{}\": \"{}\",".format(old[1], new[1]))

     # Convert modules
-    print "\n # Converted items"
+    print("\n # Converted items")

-    for new, olds in conversions.iteritems():
+    for new, olds in conversions.items():
         for old in olds:
-            print " \"{}\": \"{}\",".format(old[1], new[1])
+            print(" \"{}\": \"{}\",".format(old[1], new[1]))

-    print
-    print
+    print()
+    print()

-    for new, old in conversions.iteritems():
-        print " {}: ( # {}".format(new[0], new[1])
+    for new, old in conversions.items():
+        print(" {}: ( # {}".format(new[0], new[1]))
         for item in old:
-            print " {}, # {}".format(item[0], item[1])
-        print " ),"
+            print(" {}, # {}".format(item[0], item[1]))
+        print(" ),")


 if __name__ == "__main__":
@@ -87,7 +87,7 @@ if __name__ == "__main__":
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if options.skeleton is None or options.base is None or options.destination is None:
|
||||
print "Need --skeleton argument as well as --base and --destination argument"
|
||||
print("Need --skeleton argument as well as --base and --destination argument")
|
||||
parser.print_help()
|
||||
sys.exit()
|
||||
|
||||
@@ -100,20 +100,20 @@ if __name__ == "__main__":
|
||||
if skel not in options.platforms:
|
||||
continue
|
||||
|
||||
print "\n======== %s ========"%skel
|
||||
print("\n======== %s ========"%skel)
|
||||
|
||||
info = {}
|
||||
config = {}
|
||||
setup = {}
|
||||
skeleton = os.path.expanduser(os.path.join(options.skeleton, skel))
|
||||
|
||||
execfile(os.path.join(options.base, "config.py"), config)
|
||||
execfile(os.path.join(skeleton, "info.py"), info)
|
||||
execfile(os.path.join(options.base, "setup.py"), setup)
|
||||
exec(compile(open(os.path.join(options.base, "config.py")).read(), os.path.join(options.base, "config.py"), 'exec'), config)
|
||||
exec(compile(open(os.path.join(skeleton, "info.py")).read(), os.path.join(skeleton, "info.py"), 'exec'), info)
|
||||
exec(compile(open(os.path.join(options.base, "setup.py")).read(), os.path.join(options.base, "setup.py"), 'exec'), setup)
|
||||
|
||||
destination = os.path.expanduser(options.destination)
|
||||
if not os.path.isdir(destination) or not os.access(destination, os.W_OK | os.X_OK):
|
||||
print "Destination directory does not exist or is not writable: {}".format(destination)
|
||||
print("Destination directory does not exist or is not writable: {}".format(destination))
|
||||
sys.exit()
|
||||
|
||||
dirName = info["arcname"]
|
||||
@@ -143,9 +143,9 @@ if __name__ == "__main__":
|
||||
tmpFile = os.path.join(os.getcwd(), archiveName)
|
||||
|
||||
try:
|
||||
print "Copying skeleton to ", tmpDir
|
||||
print("Copying skeleton to ", tmpDir)
|
||||
shutil.copytree(skeleton, tmpDir, ignore=loginfo)
|
||||
print
|
||||
print()
|
||||
source = os.path.expanduser(options.base)
|
||||
root = os.path.join(tmpDir, info["base"])
|
||||
|
||||
@@ -154,7 +154,7 @@ if __name__ == "__main__":
|
||||
os.chdir(source)
|
||||
|
||||
if info["library"]:
|
||||
print "Injecting files into", info["library"]
|
||||
print("Injecting files into", info["library"])
|
||||
libraryFile = os.path.join(root, info["library"])
|
||||
|
||||
with zipfile.ZipFile(libraryFile, 'a') as library:
|
||||
@@ -163,7 +163,7 @@ if __name__ == "__main__":
|
||||
library.write('pyfa.py', 'pyfa__main__.py')
|
||||
library.write('config.py')
|
||||
else: # platforms where we don't have a packaged library
|
||||
print "Copying modules into", root
|
||||
print("Copying modules into", root)
|
||||
for dir in setup['packages']:
|
||||
copyanything(dir, os.path.join(root, dir))
|
||||
|
||||
@@ -175,27 +175,27 @@ if __name__ == "__main__":
|
||||
if skel in ('src', 'mac-deprecated'):
|
||||
setup['include_files'] += ['pyfa.py', 'config.py']
|
||||
|
||||
print
|
||||
print "Copying included files:",
|
||||
print()
|
||||
print("Copying included files:", end=' ')
|
||||
|
||||
for file in setup['include_files']:
|
||||
if isinstance(file, basestring):
|
||||
print file,
|
||||
if isinstance(file, str):
|
||||
print(file, end=' ')
|
||||
copyanything(file, os.path.join(root, file))
|
||||
|
||||
print
|
||||
print "Creating images zipfile:",
|
||||
print()
|
||||
print("Creating images zipfile:", end=' ')
|
||||
os.chdir('imgs')
|
||||
imagesFile = os.path.join(root, "imgs.zip")
|
||||
|
||||
with zipfile.ZipFile(imagesFile, 'w') as images:
|
||||
for dir in setup['icon_dirs']:
|
||||
print dir,
|
||||
print(dir, end=' ')
|
||||
zipdir(dir, images)
|
||||
os.chdir(oldcwd)
|
||||
|
||||
print
|
||||
print "Creating archive"
|
||||
print()
|
||||
print("Creating archive")
|
||||
if options.zip:
|
||||
archive = zipfile.ZipFile(tmpFile, 'w', compression=zipfile.ZIP_DEFLATED)
|
||||
zipdir(dirName, archive)
|
||||
@@ -205,11 +205,11 @@ if __name__ == "__main__":
|
||||
archive.add(tmpDir, arcname=info["arcname"])
|
||||
archive.close()
|
||||
|
||||
print "Moving archive to ", destination
|
||||
print("Moving archive to ", destination)
|
||||
shutil.move(tmpFile, destination)
|
||||
|
||||
if "win" in skel and options.winexe:
|
||||
print "Compiling EXE"
|
||||
print("Compiling EXE")
|
||||
|
||||
if config['tag'].lower() == "git":
|
||||
if git: # if git repo info available, use git commit
|
||||
@@ -230,13 +230,13 @@ if __name__ == "__main__":
|
||||
"/dMyOutputDir=%s"%destination,
|
||||
"/dMyOutputFile=%s"%fileName]) #stdout=devnull, stderr=devnull
|
||||
|
||||
print "EXE completed"
|
||||
print("EXE completed")
|
||||
|
||||
except Exception as e:
|
||||
print "Encountered an error: \n\t", e
|
||||
print("Encountered an error: \n\t", e)
|
||||
raise
|
||||
finally:
|
||||
print "Deleting tmp files\n"
|
||||
print("Deleting tmp files\n")
|
||||
try:
|
||||
shutil.rmtree("dist") # Inno dir
|
||||
except:
|
||||
@@ -252,6 +252,6 @@ if __name__ == "__main__":
|
||||
|
||||
sys.stdout = oldstd
|
||||
if os.path.isdir(destination):
|
||||
print os.path.join(destination, os.path.split(tmpFile)[1])
|
||||
print(os.path.join(destination, os.path.split(tmpFile)[1]))
|
||||
else:
|
||||
print destination
|
||||
print(destination)
|
||||
|
||||
@@ -285,7 +285,7 @@ for marketgroupid in INITIALMARKETGROUPIDS:
            (globalmap_marketgroupid_typeid[marketgroupid])
    else: break
 # Now, make a reverse map
-for marketgroupid, typeidset in globalmap_marketgroupid_typeid.items():
+for marketgroupid, typeidset in list(globalmap_marketgroupid_typeid.items()):
    for typeid in typeidset:
        if not typeid in globalmap_typeid_marketgroupid:
            globalmap_typeid_marketgroupid[typeid] = set()
@@ -310,7 +310,7 @@ for marketgroupid in globalmap_marketgroupid_typeidwithvariations:
        (typestoadd)
 # Make reverse map using simple way too
 for marketgroupid, typeidwithvariationsset in \
-globalmap_marketgroupid_typeidwithvariations.items():
+list(globalmap_marketgroupid_typeidwithvariations.items()):
    for typeid in typeidwithvariationsset:
        if not typeid in globalmap_typeidwithvariations_marketgroupid:
            globalmap_typeidwithvariations_marketgroupid[typeid] = set()
@@ -423,12 +423,12 @@ for effect_name in effect_list:
        effectids = globalmap_effectnameeos_effectid[effect_name]
    else:
        if options.remove2:
-            print("Warning: effect file " + effect_name +
-                  " exists but is not in database, removing")
+            print(("Warning: effect file " + effect_name +
+                  " exists but is not in database, removing"))
            os.remove(os.path.join(effects_path, effect_file))
        else:
-            print("Warning: effect file " + effect_name +
-                  " exists but is not in database")
+            print(("Warning: effect file " + effect_name +
+                  " exists but is not in database"))
        continue
    for effectid in effectids:
        cursor.execute(QUERY_EFFECTID_TYPEID, (effectid,))
@@ -498,8 +498,8 @@ for effect_name in effect_list:

    stopdebugprints = False
    if DEBUG_LEVEL >= 1:
-        print("\nEffect:", effect_name)
-        print("Total items affected: {0}".format(pereffect_totalaffected))
+        print(("\nEffect:", effect_name))
+        print(("Total items affected: {0}".format(pereffect_totalaffected)))

    # Stage 2.2
    # This set holds all ids of already described items
@@ -554,18 +554,18 @@ for effect_name in effect_list:
            if DEBUG_LEVEL == 1:
                printstr = "Group: {0}: {1}/{2} ({3:.3}%, inner \
score: {4:.3})"
-                print(printstr.format(groupName,
+                print((printstr.format(groupName,
                      affected_undescribed, total, coverage,
-                      groupscore[groupid]))
+                      groupscore[groupid])))
            # If it's 2, print results for each
            # iteration, so we need to include number
            # of already described items
            if DEBUG_LEVEL == 2:
                printstr = "Group: {0}: {1}+{2}/{3} ({4:.3}%, \
inner score: {5:.3})"
-                print(printstr.format(groupName,
+                print((printstr.format(groupName,
                      affected_undescribed, affected_decribed,
-                      total, coverage, groupscore[groupid]))
+                      total, coverage, groupscore[groupid])))
            # Calculate outer score for this grouping type
            groupouterscore = calc_outerscore(groupscore,
                                              pereffect_totalaffected,
@@ -573,7 +573,7 @@ inner score: {5:.3})"
|
||||
# Debug print for outer data
|
||||
if DEBUG_LEVEL >= 1 and not stopdebugprints:
|
||||
printstr = "Groups outer score: {0:.3}"
|
||||
print(printstr.format(groupouterscore))
|
||||
print((printstr.format(groupouterscore)))
|
||||
|
||||
categoryscore = {}
|
||||
for categoryid in effectmap_categoryid_typeid:
|
||||
@@ -598,21 +598,21 @@ inner score: {5:.3})"
|
||||
if DEBUG_LEVEL == 1:
|
||||
printstr = "Category: {0}: {1}/{2} ({3:.3}%, \
|
||||
inner score: {4:.3})"
|
||||
print(printstr.format(categoryname,
|
||||
print((printstr.format(categoryname,
|
||||
affected_undescribed, total, coverage,
|
||||
categoryscore[categoryid]))
|
||||
categoryscore[categoryid])))
|
||||
if DEBUG_LEVEL == 2:
|
||||
printstr = "Category: {0}: {1}+{2}/{3} ({4:.3}%, \
|
||||
inner score: {5:.3})"
|
||||
print(printstr.format(categoryname,
|
||||
print((printstr.format(categoryname,
|
||||
affected_undescribed, affected_decribed,
|
||||
total, coverage, categoryscore[categoryid]))
|
||||
total, coverage, categoryscore[categoryid])))
|
||||
categoryouterscore = calc_outerscore(categoryscore,
|
||||
pereffect_totalaffected,
|
||||
CATEGORY_WEIGHT)
|
||||
if DEBUG_LEVEL >= 1 and not stopdebugprints:
|
||||
printstr = "Category outer score: {0:.3}"
|
||||
print(printstr.format(categoryouterscore))
|
||||
print((printstr.format(categoryouterscore)))
|
||||
|
||||
basetypescore = {}
|
||||
for basetypeid in effectmap_basetypeid_typeid:
|
||||
@@ -636,22 +636,22 @@ inner score: {5:.3})"
|
||||
if DEBUG_LEVEL == 1:
|
||||
printstr = "Base item: {0}: {1}/{2} ({3:.3}%, \
|
||||
inner score: {4:.3})"
|
||||
print(printstr.format(basetypename,
|
||||
print((printstr.format(basetypename,
|
||||
affected_undescribed, total, coverage,
|
||||
basetypescore[basetypeid]))
|
||||
basetypescore[basetypeid])))
|
||||
if DEBUG_LEVEL == 2:
|
||||
printstr = "Base item: {0}: {1}+{2}/{3} ({4:.3}%, \
|
||||
inner score: {5:.3})"
|
||||
print(printstr.format(basetypename,
|
||||
print((printstr.format(basetypename,
|
||||
affected_undescribed, affected_decribed,
|
||||
total, coverage, basetypescore[basetypeid]))
|
||||
total, coverage, basetypescore[basetypeid])))
|
||||
basetypeouterscore = calc_outerscore(basetypescore,
|
||||
pereffect_totalaffected,
|
||||
BASETYPE_WEIGHT)
|
||||
#Print outer data
|
||||
if DEBUG_LEVEL >= 1 and not stopdebugprints:
|
||||
printstr = "Base item outer score: {0:.3}"
|
||||
print(printstr.format(basetypeouterscore))
|
||||
print((printstr.format(basetypeouterscore)))
|
||||
|
||||
marketgroupwithvarsscore = {}
|
||||
for marketgroupid in effectmap_marketgroupid_typeidwithvars:
|
||||
@@ -699,22 +699,22 @@ inner score: {5:.3})"
|
||||
if DEBUG_LEVEL == 1:
|
||||
printstr = "Market group with variations: {0}: \
|
||||
{1}/{2} ({3:.3}%, inner score: {4:.3})"
|
||||
print(printstr.format(marketgroupname,
|
||||
print((printstr.format(marketgroupname,
|
||||
affected_undescribed, total, coverage,
|
||||
marketgroupwithvarsscore[marketgroupid]))
|
||||
marketgroupwithvarsscore[marketgroupid])))
|
||||
if DEBUG_LEVEL == 2:
|
||||
printstr = "Market group with variations: {0}: \
|
||||
{1}+{2}/{3} ({4:.3}%, inner score: {5:.3})"
|
||||
print(printstr.format(marketgroupname,
|
||||
print((printstr.format(marketgroupname,
|
||||
affected_undescribed,
|
||||
affected_decribed, total, coverage,
|
||||
marketgroupwithvarsscore[marketgroupid]))
|
||||
marketgroupwithvarsscore[marketgroupid])))
|
||||
marketgroupwithvarsouterscore = calc_outerscore\
|
||||
(marketgroupwithvarsscore, pereffect_totalaffected,
|
||||
MARKETGROUPWITHVARS_WEIGHT)
|
||||
if DEBUG_LEVEL >= 1 and not stopdebugprints:
|
||||
printstr = "Market group outer score: {0:.3}"
|
||||
print(printstr.format(marketgroupwithvarsouterscore))
|
||||
print((printstr.format(marketgroupwithvarsouterscore)))
|
||||
|
||||
typenamecombscore = {}
|
||||
for typenamecombtuple in effectmap_typenamecombtuple_typeid:
|
||||
@@ -756,22 +756,22 @@ inner score: {5:.3})"
|
||||
if DEBUG_LEVEL == 1:
|
||||
printstr = "Type name combination: \"{0}\": \
|
||||
{1}/{2} ({3:.3}%, inner score: {4:.3})"
|
||||
print(printstr.format(typenamecombprintable,
|
||||
print((printstr.format(typenamecombprintable,
|
||||
affected_undescribed, total, coverage,
|
||||
typenamecombscore[typenamecombtuple]))
|
||||
typenamecombscore[typenamecombtuple])))
|
||||
if DEBUG_LEVEL == 2:
|
||||
printstr = "Type name combination: \"{0}\": \
|
||||
{1}+{2}/{3} ({4:.3}%, inner score: {5:.3})"
|
||||
print(printstr.format(typenamecombprintable,
|
||||
print((printstr.format(typenamecombprintable,
|
||||
affected_undescribed, affected_decribed,
|
||||
total, coverage,
|
||||
typenamecombscore[typenamecombtuple]))
|
||||
typenamecombscore[typenamecombtuple])))
|
||||
typenamecombouterscore = calc_outerscore(typenamecombscore,
|
||||
pereffect_totalaffected,
|
||||
TYPENAMECOMBS_WEIGHT)
|
||||
if DEBUG_LEVEL >= 1 and not stopdebugprints:
|
||||
printstr = "Type name combination outer score: {0:.3}"
|
||||
print(printstr.format(typenamecombouterscore))
|
||||
print((printstr.format(typenamecombouterscore)))
|
||||
|
||||
# Don't print anything after 1st iteration at 1st debugging
|
||||
# level
|
||||
@@ -805,7 +805,7 @@ inner score: {5:.3})"
|
||||
effectmap_categoryid_typeid[categorywinner][1] = True
|
||||
if DEBUG_LEVEL >= 2:
|
||||
printstr = "Category winner: {0}"
|
||||
print(printstr.format(categorywinner))
|
||||
print((printstr.format(categorywinner)))
|
||||
elif maxouterscore == groupouterscore:
|
||||
groupwinner = max(groupscore, key=groupscore.get)
|
||||
describedbygroup.append(groupwinner)
|
||||
@@ -814,7 +814,7 @@ inner score: {5:.3})"
|
||||
effectmap_groupid_typeid[groupwinner][1] = True
|
||||
if DEBUG_LEVEL >= 2:
|
||||
printstr = "Group winner: {0}"
|
||||
print(printstr.format(groupwinner))
|
||||
print((printstr.format(groupwinner)))
|
||||
elif maxouterscore == typenamecombouterscore:
|
||||
typenamecombwinner = max(typenamecombscore,
|
||||
key=typenamecombscore.get)
|
||||
@@ -825,7 +825,7 @@ inner score: {5:.3})"
|
||||
[1] = True
|
||||
if DEBUG_LEVEL >= 2:
|
||||
printstr = "Named like winner: {0}"
|
||||
print(printstr.format(typenamecombwinner))
|
||||
print((printstr.format(typenamecombwinner)))
|
||||
elif maxouterscore == marketgroupwithvarsouterscore:
|
||||
marketgroupwithvarswinner = max(marketgroupwithvarsscore,
|
||||
key=marketgroupwithvarsscore.get)
|
||||
@@ -838,7 +838,7 @@ inner score: {5:.3})"
|
||||
[marketgroupwithvarswinner][1] = True
|
||||
if DEBUG_LEVEL >= 2:
|
||||
printstr = "Market group with variations winner: {0}"
|
||||
print(printstr.format(marketgroupwithvarswinner))
|
||||
print((printstr.format(marketgroupwithvarswinner)))
|
||||
elif maxouterscore == basetypeouterscore:
|
||||
basetypewinner = max(basetypescore, key=basetypescore.get)
|
||||
describedbybasetype.append(basetypewinner)
|
||||
@@ -847,7 +847,7 @@ inner score: {5:.3})"
|
||||
effectmap_basetypeid_typeid[basetypewinner][1] = True
|
||||
if DEBUG_LEVEL >= 2:
|
||||
printstr = "Base item winner: {0}"
|
||||
print(printstr.format(basetypewinner))
|
||||
print((printstr.format(basetypewinner)))
|
||||
# Stop if we have score less than some critical value,
|
||||
# all undescribed items will be provided as plain list
|
||||
else:
|
||||
@@ -865,13 +865,13 @@ inner score: {5:.3})"
|
||||
(effect_describedtypes)
|
||||
if DEBUG_LEVEL >= 1:
|
||||
print("Effect will be described by:")
|
||||
print("Single item IDs:", singleitems)
|
||||
print("Group IDs:", describedbygroup)
|
||||
print("Category IDs:", describedbycategory)
|
||||
print("Base item IDs:", describedbybasetype)
|
||||
print("Market group with variations IDs:",
|
||||
describedbymarketgroupwithvars)
|
||||
print("Type name combinations:", describedbytypenamecomb)
|
||||
print(("Single item IDs:", singleitems))
|
||||
print(("Group IDs:", describedbygroup))
|
||||
print(("Category IDs:", describedbycategory))
|
||||
print(("Base item IDs:", describedbybasetype))
|
||||
print(("Market group with variations IDs:",
|
||||
describedbymarketgroupwithvars))
|
||||
print(("Type name combinations:", describedbytypenamecomb))
|
||||
|
||||
# Stage 2.1
|
||||
# Read effect file and split it into lines
|
||||
@@ -1069,13 +1069,13 @@ inner score: {5:.3})"
|
||||
else:
|
||||
commentlines = ["# Not used by any item"]
|
||||
if options.remove:
|
||||
print("Warning: effect file " + effect_name +
|
||||
" is not used by any item, removing")
|
||||
print(("Warning: effect file " + effect_name +
|
||||
" is not used by any item, removing"))
|
||||
os.remove(os.path.join(effects_path, effect_file))
|
||||
continue
|
||||
else:
|
||||
print("Warning: effect file " + effect_name +
|
||||
" is not used by any item")
|
||||
print(("Warning: effect file " + effect_name +
|
||||
" is not used by any item"))
|
||||
# Combine "used by" comment lines and actual effect lines
|
||||
outputlines = commentlines + effectLines
|
||||
# Combine all lines into single string
|
||||
@@ -1089,4 +1089,4 @@ inner score: {5:.3})"
|
||||
effectfile.close()
|
||||
elif DEBUG_LEVEL >= 2:
|
||||
print("Comment to write to file:")
|
||||
print("\n".join(commentlines))
|
||||
print(("\n".join(commentlines)))
|
||||
|
||||
@@ -5,7 +5,7 @@ import os.path
 import re
 import sqlite3

-script_dir = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
+script_dir = os.path.dirname(str(__file__, sys.getfilesystemencoding()))

 # Connect to database and set up cursor
 db = sqlite3.connect(os.path.join(script_dir, "..", "eve.db"))
@@ -156,7 +156,7 @@ for marketgroupid in INITIALMARKETGROUPIDS:
            (globalmap_marketgroupid_typeid[marketgroupid])
    else: break
 # Now, make a reverse map
-for marketgroupid, typeidset in globalmap_marketgroupid_typeid.items():
+for marketgroupid, typeidset in list(globalmap_marketgroupid_typeid.items()):
    for typeid in typeidset:
        if not typeid in globalmap_typeid_marketgroupid:
            globalmap_typeid_marketgroupid[typeid] = set()
@@ -181,7 +181,7 @@ for marketgroupid in globalmap_marketgroupid_typeidwithvariations:
        (typestoadd)
 # Make reverse map using simple way too
 for marketgroupid, typeidwithvariationsset in \
-globalmap_marketgroupid_typeidwithvariations.items():
+list(globalmap_marketgroupid_typeidwithvariations.items()):
    for typeid in typeidwithvariationsset:
        if not typeid in globalmap_typeidwithvariations_marketgroupid:
            globalmap_typeidwithvariations_marketgroupid[typeid] = set()
@@ -310,7 +310,7 @@ def suggestMktGrp(typeid, mode="grp"):
|
||||
similarity_factor *= 0.01
|
||||
mktgrps_w_cos[marketgroupid] += similarity_factor
|
||||
if mktgrps_w_cos:
|
||||
winner = max(mktgrps_w_cos.keys(), key=lambda k: mktgrps_w_cos[k])
|
||||
winner = max(list(mktgrps_w_cos.keys()), key=lambda k: mktgrps_w_cos[k])
|
||||
else:
|
||||
winner = None
|
||||
return winner
|
||||
@@ -429,4 +429,4 @@ for typeid in nonmarket:
 #print("---\nItem: {0}\nGroup: {1}\nSuggested market group: {2} ({3})\nMeta group: {4}".format(typename, grpname, marketgroupname, mkt, metagroupname))

 #print("\n\nmap = {{ {0} }}".format(", ".join("{0}: ({1}, {2})".format(key, map_typeid_stuff[key][0], map_typeid_stuff[key][1]) for key in sorted(map_typeid_stuff))))
-print("---\n{0}".format("\n".join("\"{0}\": {1}, # {2}".format(key, map_typeid_stuff2[key][0], map_typeid_stuff2[key][1]) for key in sorted(map_typeid_stuff2))))
+print(("---\n{0}".format("\n".join("\"{0}\": {1}, # {2}".format(key, map_typeid_stuff2[key][0], map_typeid_stuff2[key][1]) for key in sorted(map_typeid_stuff2)))))
@@ -153,7 +153,7 @@ for fname in os.listdir(icons_dir):
    fname = strip_path(fname)
    # Get rid of "icon" prefix as well
    #fname = re.sub('^icon', '', fname)
-    print fname,"exists"
+    print(fname,"exists")
    existing.add(fname)

 # Get a list of all the icons currently available in export
@@ -232,12 +232,12 @@ if toremove:
    print('Some icons are not used and will be removed:')
    for fname in sorted(toremove):
        fullname = '{}.png'.format(fname)
-        print(' {}'.format(fullname))
+        print((' {}'.format(fullname)))
        fullpath = os.path.join(icons_dir, fullname)
        os.remove(fullpath)

 if toupdate:
-    print('Updating {} icons...'.format(len(toupdate)))
+    print(('Updating {} icons...'.format(len(toupdate))))
    missing = set()
    for fname in sorted(toupdate):
        icon = get_icon_file(fname)
@@ -248,12 +248,12 @@ if toupdate:
        fullpath = os.path.join(icons_dir, fullname)
        icon.save(fullpath, 'png')
    if missing:
-        print(' {} icons are missing in export:'.format(len(missing)))
+        print((' {} icons are missing in export:'.format(len(missing))))
        for fname in sorted(missing):
-            print(' {}'.format(fname))
+            print((' {}'.format(fname)))

 if toadd:
-    print('Adding {} icons...'.format(len(toadd)))
+    print(('Adding {} icons...'.format(len(toadd))))
    missing = set()
    for fname in sorted(toadd):
        icon = get_icon_file(fname)
@@ -264,6 +264,6 @@ if toadd:
        fullpath = os.path.join(icons_dir, fullname)
        icon.save(fullpath, 'png')
    if missing:
-        print(' {} icons are missing in export:'.format(len(missing)))
+        print((' {} icons are missing in export:'.format(len(missing))))
        for fname in sorted(missing):
-            print(' {}'.format(fname))
+            print((' {}'.format(fname)))
@@ -90,7 +90,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
            name = re.sub(stripspec, "", name)
            dictionary[id] = name

-    for id in set(old_namedata.keys()).intersection(new_namedata.keys()):
+    for id in set(old_namedata.keys()).intersection(list(new_namedata.keys())):
        oldname = old_namedata[id] if old_namedata[id] is not None else 'None'
        newname = new_namedata[id] if new_namedata[id] is not None else 'None'
        if oldname != newname:
@@ -103,9 +103,9 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
    for id in sorted(ren_dict):
        couple = ren_dict[id]
        if implementedtag:
-            print("\n[{0}] \"{1}\"\n[{2}] \"{3}\"".format(geteffst(couple[0]), couple[0], geteffst(couple[1]), couple[1]))
+            print(("\n[{0}] \"{1}\"\n[{2}] \"{3}\"".format(geteffst(couple[0]), couple[0], geteffst(couple[1]), couple[1])))
        else:
-            print(" \"{0}\": \"{1}\",".format(couple[0].encode('utf-8'), couple[1].encode('utf-8')))
+            print((" \"{0}\": \"{1}\",".format(couple[0].encode('utf-8'), couple[1].encode('utf-8'))))

    groupcats = {}
    def getgroupcat(grp):
@@ -349,7 +349,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
        if attributes:
            oldattrs = old_itmdata[item][2]
            newattrs = new_itmdata[item][2]
-            for attr in set(oldattrs.keys()).union(newattrs.keys()):
+            for attr in set(oldattrs.keys()).union(list(newattrs.keys())):
                # NULL will mean there's no such attribute in db
                oldattr = oldattrs.get(attr, "NULL")
                newattr = newattrs.get(attr, "NULL")
@@ -412,7 +412,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
    except:
        pass
    # Print jobs
-    print("Comparing databases:\n{0} -> {1}\n".format(old_meta.get("client_build"), new_meta.get("client_build")))
+    print(("Comparing databases:\n{0} -> {1}\n".format(old_meta.get("client_build"), new_meta.get("client_build"))))

    if renames:
        title = 'effects'
@@ -443,7 +443,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
    grpleg = "(x => y) - group changes\n" if groups else ""
    attreffleg = " [+] - effect or attribute has been added to item\n [-] - effect or attribute has been removed from item\n" if attributes or effects else ""
    effleg = " [y] - effect is implemented\n [n] - effect is not implemented\n" if effects else ""
-    print("{0}{1}{2}{3}\nItems:".format(genleg, grpleg, attreffleg, effleg))
+    print(("{0}{1}{2}{3}\nItems:".format(genleg, grpleg, attreffleg, effleg)))

    # Make sure our states are sorted
    stateorder = sorted(global_itmdata)
@@ -468,7 +468,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
        for item in itemorder:
            groupdata = items[item][0]
            groupstr = " ({0} => {1})".format(getgroupname(groupdata[1]), getgroupname(groupdata[2])) if groupdata[0] == S["changed"] else ""
-            print("\n[{0}] {1}{2}".format(TG[itmstate], getitemname(item).encode('utf-8'), groupstr))
+            print(("\n[{0}] {1}{2}".format(TG[itmstate], getitemname(item).encode('utf-8'), groupstr)))

            effdata = items[item][1]
            for effstate in stateorder:
@@ -481,7 +481,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
                for eff in efforder:
                    # Take tag from item if item was added or removed
                    tag = TG[effstate] if itmstate not in (S["removed"], S["added"]) else TG[itmstate]
-                    print(" [{0}|{1}] {2}".format(tag, "y" if geteffst(geteffectname(eff)) else "n", geteffectname(eff)))
+                    print((" [{0}|{1}] {2}".format(tag, "y" if geteffst(geteffectname(eff)) else "n", geteffectname(eff))))

            attrdata = items[item][2]
            for attrstate in stateorder:
@@ -498,7 +498,7 @@ def main(old, new, groups=True, effects=True, attributes=True, renames=True):
                        valline = "{0}".format(attrs[attr][0] or 0)
                    else:
                        valline = "{0} => {1}".format(attrs[attr][0] or 0, attrs[attr][1] or 0)
-                    print(" [{0}] {1}: {2}".format(TG[attrstate], getattrname(attr), valline))
+                    print((" [{0}] {1}: {2}".format(TG[attrstate], getattrname(attr), valline)))

 if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Compare two databases generated from eve dump to find eos-related differences")
@@ -23,7 +23,7 @@ import sys
 import re

 # Add eos root path to sys.path so we can import ourselves
-path = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
+path = os.path.dirname(str(__file__, sys.getfilesystemencoding()))
 sys.path.append(os.path.realpath(os.path.join(path, "..")))

 import json
@@ -105,7 +105,7 @@ def main(db, json_path):

    def convertIcons(data):
        new = []
-        for k, v in data.items():
+        for k, v in list(data.items()):
            v["iconID"] = k
            new.append(v)
        return new
@@ -144,13 +144,13 @@ def main(db, json_path):

    def convertSection(sectionData):
        sectionLines = []
-        headerText = u"<b>{}</b>".format(sectionData["header"])
+        headerText = "<b>{}</b>".format(sectionData["header"])
        sectionLines.append(headerText)
        for bonusData in sectionData["bonuses"]:
-            prefix = u"{} ".format(bonusData["number"]) if "number" in bonusData else ""
-            bonusText = u"{}{}".format(prefix, bonusData["text"].replace(u"\u00B7", u"\u2022 "))
+            prefix = "{} ".format(bonusData["number"]) if "number" in bonusData else ""
+            bonusText = "{}{}".format(prefix, bonusData["text"].replace("\u00B7", "\u2022 "))
            sectionLines.append(bonusText)
-        sectionLine = u"<br />\n".join(sectionLines)
+        sectionLine = "<br />\n".join(sectionLines)
        return sectionLine

    newData = []
@@ -164,7 +164,7 @@ def main(db, json_path):
            typeLines.append(convertSection(traitData["role"]))
        if "misc" in traitData:
            typeLines.append(convertSection(traitData["misc"]))
-        traitLine = u"<br />\n<br />\n".join(typeLines)
+        traitLine = "<br />\n<br />\n".join(typeLines)
        newRow = {"typeID": typeId, "traitText": traitLine}
        newData.append(newRow)
    return newData
@@ -176,7 +176,7 @@ def main(db, json_path):
    factionMap = {}
    with open(os.path.join(jsonPath, "fsdTypeOverrides.json")) as f:
        overridesData = json.load(f)
-        for typeID, typeData in overridesData.items():
+        for typeID, typeData in list(overridesData.items()):
            factionID = typeData.get("factionID")
            if factionID is not None:
                factionMap[int(typeID)] = factionID
@@ -187,7 +187,7 @@ def main(db, json_path):
    data = {}

    # Dump all data to memory so we can easely cross check ignored rows
-    for jsonName, cls in tables.iteritems():
+    for jsonName, cls in tables.items():
        with open(os.path.join(jsonPath, "{}.json".format(jsonName))) as f:
            tableData = json.load(f)
            if jsonName in rowsInValues:
@@ -221,11 +221,11 @@ def main(db, json_path):
            return False

    # Loop through each json file and write it away, checking ignored rows
-    for jsonName, table in data.iteritems():
+    for jsonName, table in data.items():
        fieldMap = fieldMapping.get(jsonName, {})
        tmp = []

-        print "processing {}".format(jsonName)
+        print("processing {}".format(jsonName))

        for row in table:
            # We don't care about some kind of rows, filter it out if so
@@ -253,8 +253,8 @@ def main(db, json_path):
                eos.db.gamedata_session.add(cloneParent)
                tmp.append(row['alphaCloneID'])

-            for k, v in row.iteritems():
-                if (isinstance(v, basestring)):
+            for k, v in row.items():
+                if (isinstance(v, str)):
                    v = v.strip()
                setattr(instance, fieldMap.get(k, k), v)

@@ -25,23 +25,23 @@ parser.add_argument("-s", "--singularity", action="store_true", help="Singularit
|
||||
parser.add_argument("-j", "--nojson", dest="nojson", action="store_true", help="Skip Phobos JSON data dump.")
|
||||
|
||||
args = parser.parse_args()
|
||||
eve_path = os.path.expanduser(unicode(args.eve_path, sys.getfilesystemencoding()))
|
||||
cache_path = os.path.expanduser(unicode(args.cache_path, sys.getfilesystemencoding())) if args.cache_path else None
|
||||
res_path = os.path.expanduser(unicode(args.res_path, sys.getfilesystemencoding())) if args.res_path else None
|
||||
dump_path = os.path.expanduser(unicode(args.dump_path, sys.getfilesystemencoding()))
|
||||
script_path = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
|
||||
eve_path = os.path.expanduser(str(args.eve_path, sys.getfilesystemencoding()))
|
||||
cache_path = os.path.expanduser(str(args.cache_path, sys.getfilesystemencoding())) if args.cache_path else None
|
||||
res_path = os.path.expanduser(str(args.res_path, sys.getfilesystemencoding())) if args.res_path else None
|
||||
dump_path = os.path.expanduser(str(args.dump_path, sys.getfilesystemencoding()))
|
||||
script_path = os.path.dirname(str(__file__, sys.getfilesystemencoding()))
|
||||
|
||||
### Append Phobos to path
|
||||
sys.path.append(os.path.expanduser(unicode(args.phb_path, sys.getfilesystemencoding())))
|
||||
sys.path.append(os.path.expanduser(str(args.phb_path, sys.getfilesystemencoding())))
|
||||
|
||||
def header(text, subtext=None):
|
||||
print
|
||||
print "* "*30
|
||||
print text.center(60)
|
||||
print()
|
||||
print("* "*30)
|
||||
print(text.center(60))
|
||||
if subtext:
|
||||
print subtext.center(60)
|
||||
print "* "*30
|
||||
print
|
||||
print(subtext.center(60))
|
||||
print("* "*30)
|
||||
print()
|
||||
|
||||
### Data dump
|
||||
if not args.nojson:
|
||||
@@ -54,10 +54,10 @@ if not args.nojson:
    from writer import *

    rvr = reverence.blue.EVE(eve_path, cachepath=args.cache_path, sharedcachepath=res_path, server="singularity" if args.singularity else "tranquility")
-    print "EVE Directory: {}".format(rvr.paths.root)
-    print "Cache Directory: {}".format(rvr.paths.cache)
-    print "Shared Resource Directory: {}".format(rvr.paths.sharedcache)
-    print
+    print("EVE Directory: {}".format(rvr.paths.root))
+    print("Cache Directory: {}".format(rvr.paths.cache))
+    print("Shared Resource Directory: {}".format(rvr.paths.sharedcache))
+    print()

    pickle_miner = ResourcePickleMiner(rvr)
    trans = Translator(pickle_miner)
@@ -106,4 +106,4 @@ sys.stdout = open(diff_file, 'w')
 itemDiff.main(old=old_db, new=db_file)
 sys.stdout = old_stdout

-print "\nAll done."
+print("\nAll done.")
@@ -90,12 +90,12 @@ if toremove:
    print('Some renders are not used and will be removed:')
    for type_id in sorted(toremove):
        fullname = '{}.png'.format(type_id)
-        print(' {}'.format(fullname))
+        print((' {}'.format(fullname)))
        fullpath = os.path.join(icons_dir, fullname)
        os.remove(fullpath)

 if toupdate:
-    print('Updating {} renders...'.format(len(toupdate)))
+    print(('Updating {} renders...'.format(len(toupdate))))
    missing = toupdate.difference(export)
    toupdate.intersection_update(export)
    for type_id in sorted(toupdate):
@@ -104,12 +104,12 @@ if toupdate:
        fullpath = os.path.join(icons_dir, fname)
        render.save(fullpath, 'png')
    if missing:
-        print(' {} renders are missing in export:'.format(len(missing)))
+        print((' {} renders are missing in export:'.format(len(missing))))
        for type_id in sorted(missing):
-            print(' {}.png'.format(type_id))
+            print((' {}.png'.format(type_id)))

 if toadd:
-    print('Adding {} renders...'.format(len(toadd)))
+    print(('Adding {} renders...'.format(len(toadd))))
    missing = toadd.difference(export)
    toadd.intersection_update(export)
    for type_id in sorted(toadd):
@@ -118,6 +118,6 @@ if toadd:
        fullpath = os.path.join(icons_dir, fname)
        render.save(fullpath, 'png')
    if missing:
-        print(' {} renders are missing in export:'.format(len(missing)))
+        print((' {} renders are missing in export:'.format(len(missing))))
        for type_id in sorted(missing):
-            print(' {}.png'.format(type_id))
+            print((' {}.png'.format(type_id)))
@@ -13,10 +13,10 @@ stream = open(r"C:\Users\Ryan\Sync\Git\blitzmann\Pyfa\scripts\iconIDs.yaml", "r"
|
||||
docs = yaml.load_all(stream)
|
||||
|
||||
for doc in docs:
|
||||
for k,v in doc.items():
|
||||
for k,v in list(doc.items()):
|
||||
iconDict[str(k)] = {"iconFile": v['iconFile']}
|
||||
|
||||
with open('icons.json', 'w') as outfile:
|
||||
json.dump(iconDict, outfile)
|
||||
|
||||
print "done"
|
||||
print("done")
|
||||