Some fixes to DB conversion script
@@ -25,7 +25,7 @@ import re
 
 # Add eos root path to sys.path so we can import ourselves
 path = os.path.dirname(__file__)
-sys.path.append(os.path.realpath(os.path.join(path, "..")))
+sys.path.insert(0, os.path.realpath(os.path.join(path, '..')))
 
 import json
 import argparse
@@ -50,66 +50,66 @@ def main(db, json_path):
 
     # Config dict
     tables = {
-        "clonegrades": eos.gamedata.AlphaCloneSkill,
-        "dgmattribs": eos.gamedata.AttributeInfo,
-        "dgmeffects": eos.gamedata.Effect,
-        "dgmtypeattribs": eos.gamedata.Attribute,
-        "dgmtypeeffects": eos.gamedata.ItemEffect,
-        "dgmunits": eos.gamedata.Unit,
-        "icons": eos.gamedata.Icon,
-        "evecategories": eos.gamedata.Category,
-        "evegroups": eos.gamedata.Group,
-        "invmetagroups": eos.gamedata.MetaGroup,
-        "invmetatypes": eos.gamedata.MetaType,
-        "evetypes": eos.gamedata.Item,
-        "phbtraits": eos.gamedata.Traits,
-        "phbmetadata": eos.gamedata.MetaData,
-        "mapbulk_marketGroups": eos.gamedata.MarketGroup,
+        'clonegrades': eos.gamedata.AlphaCloneSkill,
+        'dgmattribs': eos.gamedata.AttributeInfo,
+        'dgmeffects': eos.gamedata.Effect,
+        'dgmtypeattribs': eos.gamedata.Attribute,
+        'dgmtypeeffects': eos.gamedata.ItemEffect,
+        'dgmunits': eos.gamedata.Unit,
+        'icons': eos.gamedata.Icon,
+        'evecategories': eos.gamedata.Category,
+        'evegroups': eos.gamedata.Group,
+        'invmetagroups': eos.gamedata.MetaGroup,
+        'invmetatypes': eos.gamedata.MetaType,
+        'evetypes': eos.gamedata.Item,
+        'phbtraits': eos.gamedata.Traits,
+        'phbmetadata': eos.gamedata.MetaData,
+        'mapbulk_marketGroups': eos.gamedata.MarketGroup,
     }
 
     fieldMapping = {
-        "dgmattribs": {
-            "displayName_en-us": "displayName"
+        'dgmattribs': {
+            'displayName_en-us': 'displayName'
         },
-        "dgmeffects": {
-            "displayName_en-us": "displayName",
-            "description_en-us": "description"
+        'dgmeffects': {
+            'displayName_en-us': 'displayName',
+            'description_en-us': 'description'
         },
-        "dgmunits": {
-            "displayName_en-us": "displayName"
+        'dgmunits': {
+            'displayName_en-us': 'displayName'
         },
         #icons???
-        "evecategories": {
-            "categoryName_en-us": "categoryName"
+        'evecategories': {
+            'categoryName_en-us': 'categoryName'
         },
-        "evegroups": {
-            "groupName_en-us": "groupName"
+        'evegroups': {
+            'groupName_en-us': 'groupName'
         },
-        "invmetagroups": {
-            "metaGroupName_en-us": "metaGroupName"
+        'invmetagroups': {
+            'metaGroupName_en-us': 'metaGroupName'
        },
-        "evetypes": {
-            "typeName_en-us": "typeName",
-            "description_en-us": "description"
+        'evetypes': {
+            'typeName_en-us': 'typeName',
+            'description_en-us': 'description'
         },
         #phbtraits???
-        "mapbulk_marketGroups": {
-            "marketGroupName_en-us": "marketGroupName",
-            "description_en-us": "description"
+        'mapbulk_marketGroups': {
+            'marketGroupName_en-us': 'marketGroupName',
+            'description_en-us': 'description'
         }
 
     }
 
     rowsInValues = (
-        "evetypes",
-        "evegroups",
-        "evecategories"
+        'evetypes',
+        'evegroups',
+        'evecategories'
     )
 
     def convertIcons(data):
         new = []
         for k, v in list(data.items()):
-            v["iconID"] = k
+            v['iconID'] = k
             new.append(v)
         return new
 
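Note on fieldMapping: the newer JSON dumps ship localized column names (typeName_en-us, description_en-us, and so on) while the database columns keep the plain names, which is also why the traits and typeName lookups later in this diff move to the _en-us keys. The write loop that applies the mapping sits outside these hunks; as a rough, hypothetical illustration of the renaming it enables (row values made up for the example):

    fieldMap = {'typeName_en-us': 'typeName', 'description_en-us': 'description'}
    row = {'typeID': 587, 'typeName_en-us': 'Rifter', 'description_en-us': 'A fast frigate.'}
    # Keys with a mapping get renamed, everything else passes through unchanged.
    renamed = {fieldMap.get(k, k): v for k, v in row.items()}
    # -> {'typeID': 587, 'typeName': 'Rifter', 'description': 'A fast frigate.'}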
@@ -123,23 +123,23 @@ def main(db, json_path):
         check = {}
 
         for ID in data:
-            for skill in data[ID]["skills"]:
+            for skill in data[ID]['skills']:
                 newData.append({
-                    "alphaCloneID": int(ID),
-                    "alphaCloneName": "Alpha Clone",
-                    "typeID": skill["typeID"],
-                    "level": skill["level"]})
+                    'alphaCloneID': int(ID),
+                    'alphaCloneName': 'Alpha Clone',
+                    'typeID': skill['typeID'],
+                    'level': skill['level']})
                 if ID not in check:
                     check[ID] = {}
-                check[ID][int(skill["typeID"])] = int(skill["level"])
+                check[ID][int(skill['typeID'])] = int(skill['level'])
 
         if not functools.reduce(lambda a, b: a if a == b else False, [v for _, v in check.items()]):
-            raise Exception("Alpha Clones not all equal")
+            raise Exception('Alpha Clones not all equal')
 
         newData = [x for x in newData if x['alphaCloneID'] == 1]
 
         if len(newData) == 0:
-            raise Exception("Alpha Clone processing failed")
+            raise Exception('Alpha Clone processing failed')
 
         return newData
 
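The functools.reduce guard above compares neighbouring per-clone skill dicts and collapses to False at the first mismatch, so a truthy result means every Alpha Clone grade lists the same skills, which is why the script can then keep only alphaCloneID 1. A self-contained sketch of the same idiom with toy skill data (typeIDs and levels are illustrative only):

    import functools

    check = {
        1: {3327: 3, 3328: 2},  # grade 1: typeID -> level
        2: {3327: 3, 3328: 2},  # grade 2: identical skills
        3: {3327: 3, 3328: 2},  # grade 3: identical skills
    }

    # Same idiom as the script: reduce compares neighbours and collapses to
    # False on the first mismatch, so a truthy result means "all equal".
    all_equal = functools.reduce(lambda a, b: a if a == b else False,
                                 [v for _, v in check.items()])
    print(bool(all_equal))  # True; change any level above and this prints False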
@@ -147,61 +147,44 @@ def main(db, json_path):
 
         def convertSection(sectionData):
             sectionLines = []
-            headerText = "<b>{}</b>".format(sectionData["header"])
+            headerText = '<b>{}</b>'.format(sectionData['header'])
             sectionLines.append(headerText)
-            for bonusData in sectionData["bonuses"]:
-                prefix = "{} ".format(bonusData["number"]) if "number" in bonusData else ""
-                bonusText = "{}{}".format(prefix, bonusData["text"].replace("\u00B7", "\u2022 "))
+            for bonusData in sectionData['bonuses']:
+                prefix = '{} '.format(bonusData['number']) if 'number' in bonusData else ''
+                bonusText = '{}{}'.format(prefix, bonusData['text'].replace('\u00B7', '\u2022 '))
                 sectionLines.append(bonusText)
-            sectionLine = "<br />\n".join(sectionLines)
+            sectionLine = '<br />\n'.join(sectionLines)
             return sectionLine
 
         newData = []
         for row in data:
             typeLines = []
-            typeId = row["typeID"]
-            traitData = row["traits"]
-            for skillData in sorted(traitData.get("skills", ()), key=lambda i: i["header"]):
+            typeId = row['typeID']
+            traitData = row['traits_en-us']
+            for skillData in sorted(traitData.get('skills', ()), key=lambda i: i['header']):
                 typeLines.append(convertSection(skillData))
-            if "role" in traitData:
-                typeLines.append(convertSection(traitData["role"]))
-            if "misc" in traitData:
-                typeLines.append(convertSection(traitData["misc"]))
-            traitLine = "<br />\n<br />\n".join(typeLines)
-            newRow = {"typeID": typeId, "traitText": traitLine}
+            if 'role' in traitData:
+                typeLines.append(convertSection(traitData['role']))
+            if 'misc' in traitData:
+                typeLines.append(convertSection(traitData['misc']))
+            traitLine = '<br />\n<br />\n'.join(typeLines)
+            newRow = {'typeID': typeId, 'traitText': traitLine}
             newData.append(newRow)
         return newData
 
-    def convertTypes(typesData):
-        """
-        Add factionID column to evetypes table.
-        """
-        factionMap = {}
-        with open(os.path.join(jsonPath, "fsdTypeOverrides.json")) as f:
-            overridesData = json.load(f)
-        for typeID, typeData in list(overridesData.items()):
-            factionID = typeData.get("factionID")
-            if factionID is not None:
-                factionMap[int(typeID)] = factionID
-        for row in typesData:
-            row['factionID'] = factionMap.get(int(row['typeID']))
-        return typesData
-
     data = {}
 
     # Dump all data to memory so we can easely cross check ignored rows
     for jsonName, cls in tables.items():
-        with open(os.path.join(jsonPath, "{}.json".format(jsonName)), encoding="utf-8") as f:
+        with open(os.path.join(jsonPath, '{}.json'.format(jsonName)), encoding='utf-8') as f:
             tableData = json.load(f)
         if jsonName in rowsInValues:
             tableData = list(tableData.values())
-        if jsonName == "icons":
+        if jsonName == 'icons':
             tableData = convertIcons(tableData)
-        if jsonName == "phbtraits":
+        if jsonName == 'phbtraits':
             tableData = convertTraits(tableData)
-        if jsonName == "evetypes":
-            tableData = convertTypes(tableData)
-        if jsonName == "clonegrades":
+        if jsonName == 'clonegrades':
             tableData = convertClones(tableData)
         data[jsonName] = tableData
 
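convertSection, untouched here apart from the quote style, flattens one trait section of the JSON into an HTML-ish block joined with <br /> tags; the only functional change in convertTraits is reading from the localized traits_en-us key. A standalone sketch of the same transformation on a made-up bonus section (header and bonus values are invented):

    def convertSection(sectionData):
        sectionLines = ['<b>{}</b>'.format(sectionData['header'])]
        for bonusData in sectionData['bonuses']:
            prefix = '{} '.format(bonusData['number']) if 'number' in bonusData else ''
            sectionLines.append('{}{}'.format(prefix, bonusData['text'].replace('\u00B7', '\u2022 ')))
        return '<br />\n'.join(sectionLines)

    section = {'header': 'Minmatar Frigate bonuses (per skill level):',
               'bonuses': [{'number': '5%', 'text': 'bonus to Small Projectile Turret damage'}]}
    print(convertSection(section))
    # <b>Minmatar Frigate bonuses (per skill level):</b><br />
    # 5% bonus to Small Projectile Turret damage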
@@ -209,23 +192,23 @@ def main(db, json_path):
     # Sometimes CCP unpublishes some items we want to have published, we
     # can do it here - just add them to initial set
     eveTypes = set()
-    for row in data["evetypes"]:
-        if (row["published"]
+    for row in data['evetypes']:
+        if (row['published']
                 or row['groupID'] == 1306 # group Ship Modifiers, for items like tactical t3 ship modes
-                or row['typeName'].startswith('Civilian') # Civilian weapons
+                or row['typeName_en-us'].startswith('Civilian') # Civilian weapons
                 or row['typeID'] in (41549, 41548, 41551, 41550) # Micro Bombs (Fighters)
                 or row['groupID'] in (
                     1882,
                     1975,
                     1971,
-                    1983 # the "container" for the abysmal environments
-                ) # Abysmal weather (environment)
+                    1983 # the "container" for the abyssal environments
+                ) # Abyssal weather (environment)
         ):
-            eveTypes.add(row["typeID"])
+            eveTypes.add(row['typeID'])
 
     # ignore checker
     def isIgnored(file, row):
-        if file in ("evetypes", "dgmtypeeffects", "dgmtypeattribs", "invmetatypes") and row['typeID'] not in eveTypes:
+        if file in ('evetypes', 'dgmtypeeffects', 'dgmtypeattribs', 'invmetatypes') and row['typeID'] not in eveTypes:
             return True
         return False
 
@@ -234,31 +217,31 @@ def main(db, json_path):
         fieldMap = fieldMapping.get(jsonName, {})
         tmp = []
 
-        print("processing {}".format(jsonName))
+        print('processing {}'.format(jsonName))
 
         for row in table:
             # We don't care about some kind of rows, filter it out if so
             if not isIgnored(jsonName, row):
-                if jsonName == 'evetypes' and row["typeName"].startswith('Civilian'): # Apparently people really want Civilian modules available
-                    row["published"] = True
+                if jsonName == 'evetypes' and row['typeName_en-us'].startswith('Civilian'): # Apparently people really want Civilian modules available
+                    row['published'] = True
 
                 instance = tables[jsonName]()
                 # fix for issue 80
-                if jsonName is "icons" and "res:/ui/texture/icons/" in str(row["iconFile"]).lower():
-                    row["iconFile"] = row["iconFile"].lower().replace("res:/ui/texture/icons/", "").replace(".png", "")
+                if jsonName is 'icons' and 'res:/ui/texture/icons/' in str(row['iconFile']).lower():
+                    row['iconFile'] = row['iconFile'].lower().replace('res:/ui/texture/icons/', '').replace('.png', '')
                     # with res:/ui... references, it points to the actual icon file (including it's size variation of #_size_#)
                     # strip this info out and get the identifying info
                     split = row['iconFile'].split('_')
                     if len(split) == 3:
-                        row['iconFile'] = "{}_{}".format(split[0], split[2])
-                if jsonName is "icons" and "modules/" in str(row["iconFile"]).lower():
-                    row["iconFile"] = row["iconFile"].lower().replace("modules/", "").replace(".png", "")
+                        row['iconFile'] = '{}_{}'.format(split[0], split[2])
+                if jsonName is 'icons' and 'modules/' in str(row['iconFile']).lower():
+                    row['iconFile'] = row['iconFile'].lower().replace('modules/', '').replace('.png', '')
 
-                if jsonName is "clonegrades":
-                    if (row["alphaCloneID"] not in tmp):
+                if jsonName is 'clonegrades':
+                    if (row['alphaCloneID'] not in tmp):
                         cloneParent = eos.gamedata.AlphaClone()
-                        setattr(cloneParent, "alphaCloneID", row["alphaCloneID"])
-                        setattr(cloneParent, "alphaCloneName", row["alphaCloneName"])
+                        setattr(cloneParent, 'alphaCloneID', row['alphaCloneID'])
+                        setattr(cloneParent, 'alphaCloneName', row['alphaCloneName'])
                         eos.db.gamedata_session.add(cloneParent)
                         tmp.append(row['alphaCloneID'])
 
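The issue 80 fix-up above normalises res:/ui/texture/icons/ references down to their identifying pieces, dropping the #_size_# component mentioned in the code comment. Traced on a hypothetical icon path (the file name is invented for the example):

    iconFile = 'res:/UI/Texture/Icons/73_16_4.png'
    iconFile = iconFile.lower().replace('res:/ui/texture/icons/', '').replace('.png', '')
    # -> '73_16_4'
    split = iconFile.split('_')
    if len(split) == 3:
        iconFile = '{}_{}'.format(split[0], split[2])
    # -> '73_4'  (the middle, size-variation piece is stripped)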
@@ -274,15 +257,15 @@ def main(db, json_path):
     # CCP still has 5 subsystems assigned to T3Cs, even though only 4 are available / usable. They probably have some
     # old legacy requirement or assumption that makes it difficult for them to change this value in the data. But for
     # pyfa, we can do it here as a post-processing step
-    eos.db.gamedata_engine.execute("UPDATE dgmtypeattribs SET value = 4.0 WHERE attributeID = ?", (1367,))
+    eos.db.gamedata_engine.execute('UPDATE dgmtypeattribs SET value = 4.0 WHERE attributeID = ?', (1367,))
 
-    eos.db.gamedata_engine.execute("UPDATE invtypes SET published = 0 WHERE typeName LIKE '%abyssal%'")
-    print("done")
+    eos.db.gamedata_engine.execute('UPDATE invtypes SET published = 0 WHERE typeName LIKE \'%abyssal%\'')
+    print('done')
 
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="This scripts dumps effects from an sqlite cache dump to mongo")
-    parser.add_argument("-d", "--db", required=True, type=str, help="The sqlalchemy connectionstring, example: sqlite:///c:/tq.db")
-    parser.add_argument("-j", "--json", required=True, type=str, help="The path to the json dump")
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='This scripts dumps effects from an sqlite cache dump to mongo')
+    parser.add_argument('-d', '--db', required=True, type=str, help='The sqlalchemy connectionstring, example: sqlite:///c:/tq.db')
+    parser.add_argument('-j', '--json', required=True, type=str, help='The path to the json dump')
     args = parser.parse_args()
 
     main(args.db, args.json)
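The one spot where the blanket quote conversion needs extra care is the abyssal UPDATE: with single quotes on the outside, the quotes around the SQL LIKE pattern have to be escaped (\'%abyssal%\'). An equivalent way to avoid the escaping altogether would be to bind the pattern as a parameter, mirroring the dgmtypeattribs call; this is only an alternative sketch, not what the commit does:

    # Alternative sketch: pass the LIKE pattern as a bound parameter instead of escaping quotes.
    eos.db.gamedata_engine.execute('UPDATE invtypes SET published = 0 WHERE typeName LIKE ?', ('%abyssal%',))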