Remove obsolete scripts
This commit is contained in:
@@ -1,350 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
#===============================================================================
|
|
||||||
# Copyright (C) 2010 Anton Vorobyov
|
|
||||||
# Copyright (C) 2010 Diego Duclos
|
|
||||||
#
|
|
||||||
# This file is part of eos.
|
|
||||||
#
|
|
||||||
# eos is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Lesser General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 2 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# eos is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Lesser General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU Lesser General Public License
|
|
||||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
#===============================================================================
|
|
||||||
|
|
||||||
'''
|
|
||||||
This script pulls data out of EVE cache and makes a database dump. To get most of the data,
|
|
||||||
you need to just log into game; however, for some special data sometimes you need to dump
|
|
||||||
it by executing corresponding action in game, for example - open market tree to get data for
|
|
||||||
invmarketgroups table.
|
|
||||||
Reverence library by Entity is used, check http://wiki.github.com/ntt/reverence/ for info
|
|
||||||
As reverence uses the same Python version as EVE client (2.x series), script cannot be converted to python3
|
|
||||||
Example commands to run the script under Linux with default eve paths for getting SQLite dump:
|
|
||||||
Tranquility: python eveCacheToDb.py --eve="~/.wine/drive_c/Program Files/CCP/EVE" --cache="~/.wine/drive_c/users/"$USER"/Local Settings/Application Data/CCP/EVE/c_program_files_ccp_eve_tranquility/cache" --dump="sqlite:////home/"$USER"/Desktop/eve.db"
|
|
||||||
Singularity: python eveCacheToDb.py --eve="~/.wine/drive_c/Program Files/CCP/Singularity" --cache="~/.wine/drive_c/users/"$USER"/Local Settings/Application Data/CCP/EVE/c_program_files_ccp_singularity_singularity/cache" --sisi --dump="sqlite:////home/"$USER"/Desktop/evetest.db"
|
|
||||||
'''
|
|
||||||
|
|
||||||
import os
import sys

# Add eos root path to sys.path so "import eos.*" works when this script
# is run from its own directory: the repository root is three levels up
# from this file. __file__ is decoded with the filesystem encoding since
# this is a Python 2 script and the path may be non-ASCII.
path = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
sys.path.append(os.path.realpath(os.path.join(path, "..", "..", "..")))
|
|
||||||
|
|
||||||
def get_map():
    """
    Return table name - table class map.

    Tables mapped to None are recognized in the cache but deliberately
    not converted.
    """
    # Tables we actually dump, keyed by their cache name
    tablemap = {
        "dgmattribs": AttributeInfo,
        "dgmeffects": EffectInfo,
        "dgmtypeattribs": Attribute,
        "dgmtypeeffects": Effect,
        "eveunits": Unit,
        "icons": Icon,
        "invcategories": Category,
        "invgroups": Group,
        "invmetagroups": MetaGroup,
        "invmarketgroups": MarketGroup,
        "invmetatypes": MetaType,
        "invtypes": Item,
    }
    # Cache tables we know about but intentionally skip
    skipped = (
        "allianceshortnames", "billtypes", "certificaterelationships",
        "certificates", "corptickernames", "evegraphics", "evelocations",
        "eveowners", "groupsByCategories", "invbptypes",
        "invcontrabandTypesByFaction", "invcontrabandTypesByType",
        "invmetatypesByTypeID", "invreactiontypes", "locationscenes",
        "locationwormholeclasses", "mapcelestialdescriptions", "ownericons",
        "ramactivities", "ramaltypes", "ramaltypesdetailpercategory",
        "ramaltypesdetailpergroup", "ramcompletedstatuses",
        "ramtyperequirements", "schematics", "schematicsByPin",
        "schematicsByType", "schematicspinmap", "schematicstypemap",
        "shiptypes", "sounds", "typesByGroups", "typesByMarketGroups",
    )
    for name in skipped:
        tablemap[name] = None
    return tablemap
|
|
||||||
|
|
||||||
def get_order():
    """
    Return the order in which cache tables must be processed.

    Referenced tables (icons, units, groups, ...) come before the
    tables whose rows point at them.
    """
    order = [
        "icons",
        "invmarketgroups",
        "eveunits",
        "dgmattribs",
        "dgmeffects",
        "invcategories",
        "invgroups",
        "invmetagroups",
        "invtypes",
        "invmetatypes",
        "dgmtypeattribs",
        "dgmtypeeffects",
    ]
    return tuple(order)
|
|
||||||
|
|
||||||
def get_customcalls():
    """
    Return custom table - call to get data for it map.

    Used for tables whose data is not in the regular cache config and
    must be fetched through a remote service instead.
    """
    marketgroups = eve.RemoteSvc("marketProxy").GetMarketGroups()
    return {"invmarketgroups": marketgroups}
|
|
||||||
|
|
||||||
def process_table(sourcetable, tablename, tableclass):
    """
    Convert a single table: read all rows from the cache source table
    and write them into the database.
    """
    headers = get_source_headers(sourcetable)
    rows = get_table_data(sourcetable, tablename, headers)
    insert_table_values(rows, tableclass)
    return
|
|
||||||
|
|
||||||
def get_source_headers(sourcetable):
    """
    Return a tuple with the column names of a Reverence source table,
    or None for container types we do not know how to introspect.
    """
    guid = getattr(sourcetable, "__guid__", "None")
    # IndexRowset and FilterRowset expose their header list directly
    if guid in ("util.IndexRowset", "util.FilterRowset"):
        return tuple(sourcetable.header)
    # IndexedRowLists have no header list; collect keys from every row
    if guid == "util.IndexedRowLists":
        headerset = set()
        for key in sourcetable:
            for row in sourcetable[key]:
                headerset.update(row.__header__.Keys())
        return tuple(headerset)
    return None
|
|
||||||
|
|
||||||
def get_table_data(sourcetable, tablename, headers):
|
|
||||||
"""
|
|
||||||
Pull data out of source table
|
|
||||||
"""
|
|
||||||
# Each row is enclosed into dictionary, full table is list of these dictionaries
|
|
||||||
datarows = []
|
|
||||||
guid = getattr(sourcetable, "__guid__", "None")
|
|
||||||
# We have Select method for IndexRowset tables
|
|
||||||
if guid == "util.IndexRowset":
|
|
||||||
for values in sourcetable.Select(*headers):
|
|
||||||
# When Select is asked to find single value, it is returned in its raw
|
|
||||||
# form. Convert is to tuple for proper further processing
|
|
||||||
if not isinstance(values, (list, tuple, set)):
|
|
||||||
values = (values,)
|
|
||||||
headerslen = len(headers)
|
|
||||||
datarow = {}
|
|
||||||
# 1 row value should correspond to 1 header, if number or values doesn't
|
|
||||||
# correspond to number of headers then something went wrong
|
|
||||||
if headerslen != len(values):
|
|
||||||
print "Error: malformed data in source table {0}".format(tablename)
|
|
||||||
return None
|
|
||||||
# Fill row dictionary with values and append it to list
|
|
||||||
for i in xrange(headerslen):
|
|
||||||
# If we've got ASCII string, convert it to Unicode
|
|
||||||
if isinstance(values[i], str):
|
|
||||||
datarow[headers[i]] = unicode(values[i], 'ISO-8859-1')
|
|
||||||
else:
|
|
||||||
datarow[headers[i]] = values[i]
|
|
||||||
datarows.append(datarow)
|
|
||||||
# FilterRowset and IndexedRowLists are accessible almost like dictionaries
|
|
||||||
elif guid in ("util.FilterRowset", "util.IndexedRowLists"):
|
|
||||||
# Go through all source table elements
|
|
||||||
for element in sourcetable.iterkeys():
|
|
||||||
# Go through all rows of an element
|
|
||||||
for row in sourcetable[element]:
|
|
||||||
datarow = {}
|
|
||||||
# Fill row dictionary with values we need and append it to the list
|
|
||||||
for header in headers:
|
|
||||||
value = getattr(row, header, None)
|
|
||||||
# None and zero values are different, and we want to write zero
|
|
||||||
# values to database
|
|
||||||
if value or value in (0, 0.0):
|
|
||||||
datarow[header] = value
|
|
||||||
datarows.append(datarow)
|
|
||||||
|
|
||||||
return datarows
|
|
||||||
|
|
||||||
def insert_table_values(tabledata, tableclass):
|
|
||||||
"""
|
|
||||||
Insert values into tables and show progress
|
|
||||||
"""
|
|
||||||
rows = 0
|
|
||||||
rows_skipped = 0
|
|
||||||
# Go through all table rows
|
|
||||||
for row in tabledata:
|
|
||||||
instance = tableclass()
|
|
||||||
# Print dot each 1k inserted rows
|
|
||||||
if rows / 1000.0 == int(rows / 1000.0):
|
|
||||||
sys.stdout.write(".")
|
|
||||||
sys.stdout.flush()
|
|
||||||
try:
|
|
||||||
# Go through all fields of a row, process them and insert
|
|
||||||
for header in row:
|
|
||||||
setattr(instance, header, process_value(row[header], tableclass, header))
|
|
||||||
eos.db.gamedata_session.add(instance)
|
|
||||||
rows += 1
|
|
||||||
except ValueError:
|
|
||||||
rows_skipped += 1
|
|
||||||
# Print out results and actually commit results to database
|
|
||||||
print "\nInserted {0} rows. skipped {1} rows".format(rows, rows_skipped)
|
|
||||||
eos.db.gamedata_session.commit()
|
|
||||||
|
|
||||||
def process_value(value, tableclass, header):
    """
    Prepare a single cache value for insertion into a table column.

    Returns None for columns eos does not define and for nullable broken
    foreign keys; raises ValueError when a non-nullable foreign key
    points at a missing row. Integer-typed booleans are converted to
    real booleans.
    """
    # Get column info; unknown columns are silently dropped
    info = tableclass._sa_class_manager.mapper.c.get(header)
    if info is None:
        return

    # Null out non-existent foreign key relations
    if info.foreign_keys:
        for key in info.foreign_keys:
            col = key.column
            if not query_existence(col, value) and not key.deferrable:
                if info.nullable:
                    return None
                else:
                    raise ValueError("Integrity check failed")
            else:
                return value
    # Turn booleans into actual booleans, don't leave them as integers;
    # isinstance replaces the type(...) == Boolean anti-pattern
    elif isinstance(info.type, Boolean):
        return bool(value)
    else:
        return value
|
|
||||||
|
|
||||||
# Cache of (column, table, value) -> bool lookups so each distinct
# foreign key value hits the database only once
existence_cache = {}

def query_existence(col, value):
    """Return True when a row with col == value exists (memoized)."""
    key = (col, col.table, value)
    if key not in existence_cache:
        count = eos.db.gamedata_session.query(col.table).filter(col == value).count()
        existence_cache[key] = count > 0
    return existence_cache[key]
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
from ConfigParser import ConfigParser
|
|
||||||
from optparse import OptionParser
|
|
||||||
|
|
||||||
from reverence import blue
|
|
||||||
from sqlalchemy import Boolean
|
|
||||||
from sqlalchemy.orm import class_mapper, ColumnProperty
|
|
||||||
|
|
||||||
import eos.config
|
|
||||||
|
|
||||||
# Parse command line options
|
|
||||||
usage = "usage: %prog --eve=EVE --cache=CACHE --dump=DUMP [--release=RELEASE --sisi]"
|
|
||||||
parser = OptionParser(usage=usage)
|
|
||||||
parser.add_option("-e", "--eve", help="path to eve folder")
|
|
||||||
parser.add_option("-c", "--cache", help="path to eve cache folder")
|
|
||||||
parser.add_option("-d", "--dump", help="the SQL Alchemy connection string of where we should place our final dump")
|
|
||||||
parser.add_option("-r", "--release", help="database release number, defaults to 1", default="1")
|
|
||||||
parser.add_option("-s", "--sisi", action="store_true", dest="singularity", help="if you're going to work with Singularity test server data, use this option", default=False)
|
|
||||||
(options, args) = parser.parse_args()
|
|
||||||
|
|
||||||
|
|
||||||
# Exit if we do not have any of required options
|
|
||||||
if not options.eve or not options.cache or not options.dump:
|
|
||||||
sys.stderr.write("You need to specify paths to eve folder, cache folder and SQL Alchemy connection string. Run script with --help option for further info.\n")
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
# We can deal either with singularity or tranquility servers
|
|
||||||
if options.singularity: server = "singularity"
|
|
||||||
else: server = "tranquility"
|
|
||||||
|
|
||||||
# Set static variables for paths
|
|
||||||
PATH_EVE = os.path.expanduser(options.eve)
|
|
||||||
PATH_CACHE = os.path.expanduser(options.cache)
|
|
||||||
|
|
||||||
eos.config.gamedata_connectionstring = options.dump
|
|
||||||
eos.config.debug = False
|
|
||||||
|
|
||||||
from eos.gamedata import *
|
|
||||||
import eos.db
|
|
||||||
|
|
||||||
# Get version of EVE client
|
|
||||||
config = ConfigParser()
|
|
||||||
config.read(os.path.join(PATH_EVE, "common.ini"))
|
|
||||||
|
|
||||||
# Form metadata dictionary for corresponding table
|
|
||||||
metadata = {}
|
|
||||||
metadata["version"] = config.getint("main", "build")
|
|
||||||
metadata["release"] = options.release
|
|
||||||
|
|
||||||
# Initialize Reverence cache manager
|
|
||||||
eve = blue.EVE(PATH_EVE, cachepath=PATH_CACHE, server=server)
|
|
||||||
cfg = eve.getconfigmgr()
|
|
||||||
|
|
||||||
# Create all tables we need
|
|
||||||
eos.db.gamedata_meta.create_all()
|
|
||||||
|
|
||||||
# Add versioning info to the metadata table
|
|
||||||
for fieldname in metadata:
|
|
||||||
eos.db.gamedata_session.add(MetaData(fieldname, metadata[fieldname]))
|
|
||||||
|
|
||||||
eos.db.gamedata_session.commit()
|
|
||||||
|
|
||||||
# Get table map, processing order and special table data
|
|
||||||
TABLE_MAP = get_map()
|
|
||||||
TABLE_ORDER = get_order()
|
|
||||||
CUSTOM_CALLS = get_customcalls()
|
|
||||||
|
|
||||||
# Warn about various stuff
|
|
||||||
for table in cfg.tables:
|
|
||||||
if not table in TABLE_MAP:
|
|
||||||
# Warn about new tables in cache which are still not described by table map
|
|
||||||
print "Warning: unmapped table {0} found in cache".format(table)
|
|
||||||
for table in TABLE_MAP:
|
|
||||||
if not table in cfg.tables and not table in CUSTOM_CALLS:
|
|
||||||
# Warn about mapped tables which are missing in cache
|
|
||||||
print "Warning: mapped table {0} cannot be found in cache".format(table)
|
|
||||||
if not table in TABLE_ORDER and TABLE_MAP[table] is not None:
|
|
||||||
# Warn about mapped tables not specified in processing order
|
|
||||||
print "Warning: mapped table {0} is missing in processing order".format(table)
|
|
||||||
for table in TABLE_ORDER:
|
|
||||||
if not table in TABLE_MAP:
|
|
||||||
# Warn about unmapped tables in processing order
|
|
||||||
print "Warning: unmapped table {0} is specified in processing order".format(table)
|
|
||||||
|
|
||||||
# Get data from reverence and write it
|
|
||||||
for tablename in TABLE_ORDER:
|
|
||||||
tableclass = TABLE_MAP[tablename]
|
|
||||||
if tableclass is not None:
|
|
||||||
# Print currently processed table name
|
|
||||||
print "Processing: {0}".format(tablename)
|
|
||||||
# Get table object from the Reverence and process it
|
|
||||||
source_table = getattr(cfg, tablename) if tablename not in CUSTOM_CALLS else CUSTOM_CALLS[tablename]
|
|
||||||
# Gather data regarding columns for current table in cache and eos
|
|
||||||
cols_eos = set(prop.key for prop in class_mapper(TABLE_MAP[tablename]).iterate_properties if isinstance(prop, ColumnProperty))
|
|
||||||
cols_rev = set(get_source_headers(source_table))
|
|
||||||
notineos = cols_rev.difference(cols_eos)
|
|
||||||
notinrev = cols_eos.difference(cols_rev)
|
|
||||||
if notineos:
|
|
||||||
print "Warning: {0} found in cache but missing in eos definitions: {1}".format("column" if len(notineos) == 1 else "columns", ", ".join(sorted(notineos)))
|
|
||||||
if notinrev:
|
|
||||||
print "Warning: {0} found in eos definitions but missing in cache: {1}".format("column" if len(notinrev) == 1 else "columns", ", ".join(sorted(notinrev)))
|
|
||||||
process_table(source_table, tablename, tableclass)
|
|
||||||
@@ -1,565 +0,0 @@
|
|||||||
"""
|
|
||||||
This is ugly, tricky and unreadable script which helps to detect which items should be tested,
|
|
||||||
based on how its current effects work.
|
|
||||||
"""
|
|
||||||
import sqlite3
import os.path
import copy
from optparse import OptionParser

# Command-line interface
parser = OptionParser()
parser.add_option("-d", "--database", type="string",
                  default=os.path.join("~", ".pyfa", "eve.db"),
                  help="path to eve cache data dump in sqlite format, default eos database path is used if none specified")
parser.add_option("-a", "--attr", type="string", default="",
                  help="find items with all of these attributes")
parser.add_option("-s", "--srq", type="string", default="",
                  help="find items with any of these skill requirements")
parser.add_option("-g", "--grp", type="string", default="",
                  help="find items from any of these groups")
parser.add_option("-z", "--nozero", action="store_true", default=False,
                  help="ignore attributes with zero values")
parser.add_option("-o", "--noone", action="store_true", default=False,
                  help="ignore attributes with value equal to 1")
parser.add_option("-t", "--tech12", action="store_true", default=False,
                  help="show only t12 items (with exception for items with no t1 variations)")
(options, args) = parser.parse_args()

# The attribute list is the one mandatory argument
if not options.attr:
    import sys

    sys.stderr.write("You need to specify an attribute name.\n")
    sys.exit()
|
|
||||||
|
|
||||||
# Connect to database and set up cursor
db = sqlite3.connect(os.path.expanduser(options.database))
cursor = db.cursor()

# As we don't rely on eos's overrides, we need to set them manually
OVERRIDES = '''
UPDATE invtypes SET published = '1' WHERE typeName = 'Freki';
UPDATE invtypes SET published = '1' WHERE typeName = 'Mimir';
UPDATE invtypes SET published = '1' WHERE typeName = 'Utu';
UPDATE invtypes SET published = '1' WHERE typeName = 'Adrestia';
'''
# executescript runs the whole multi-statement batch in one call,
# replacing the hand-rolled split(";\n") loop that also tried to
# execute the trailing empty fragment
cursor.executescript(OVERRIDES)
|
|
||||||
|
|
||||||
# Queries to get raw data.
# Categories are limited to Celestials (2, only for wormhole effects),
# Ships (6), Modules (7), Charges (8), Skills (16), Drones (18),
# Implants (20), Subsystems (32).
QUERY_PUBLISHEDTYPEIDS = (
    'SELECT it.typeID FROM invtypes AS it INNER JOIN '
    'invgroups AS ig ON it.groupID = ig.groupID INNER JOIN invcategories AS ic ON '
    'ig.categoryID = ic.categoryID WHERE it.published = 1 AND ic.categoryID IN '
    '(2, 6, 7, 8, 16, 18, 20, 32)')
QUERY_ATTRIBUTEID_TYPEID = (
    "SELECT it.typeID, dta.value FROM invtypes AS it INNER JOIN "
    "dgmtypeattribs AS dta ON it.typeID = dta.typeID INNER JOIN dgmattribs AS da "
    "ON dta.attributeID = da.attributeID WHERE da.attributeID = ?")
QUERY_TYPEID_GROUPID = 'SELECT groupID FROM invtypes WHERE typeID = ? LIMIT 1'
QUERY_GROUPID_CATEGORYID = (
    'SELECT categoryID FROM invgroups WHERE '
    'groupID = ? LIMIT 1')
QUERY_TYPEID_PARENTTYPEID = (
    'SELECT parentTypeID FROM invmetatypes WHERE '
    'typeID = ? LIMIT 1')
QUERY_TYPEID_METAGROUPID = (
    'SELECT metaGroupID FROM invmetatypes WHERE '
    'typeID = ? LIMIT 1')
QUERY_TYPEID_SKILLRQ = (
    'SELECT dta.value FROM dgmtypeattribs AS dta INNER JOIN '
    'dgmattribs AS da ON da.attributeID = dta.attributeID WHERE (da.attributeName = '
    '"requiredSkill1" OR da.attributeName = "requiredSkill2" OR da.attributeName = '
    '"requiredSkill3") AND dta.typeID = ?')
QUERY_TYPEID_MARKETGROUPID = (
    'SELECT marketGroupID FROM invtypes WHERE '
    'typeID = ? LIMIT 1')
QUERY_TYPEID_TYPENAME = (
    'SELECT typeName FROM invtypes WHERE typeID = ? '
    'LIMIT 1')
QUERY_MARKETGROUPID_PARENTGROUPID = (
    'SELECT parentGroupID FROM '
    'invmarketgroups WHERE marketGroupID = ? LIMIT 1')
QUERY_EFFECTID_TYPEID = 'SELECT typeID FROM dgmtypeeffects WHERE effectID = ?'
# Queries used only for printing human-readable names
QUERY_GROUPID_GROUPNAME = (
    'SELECT groupName FROM invgroups WHERE groupID = ? '
    'LIMIT 1')
QUERY_CATEGORYID_CATEGORYNAME = (
    'SELECT categoryName FROM invcategories '
    'WHERE categoryID = ? LIMIT 1')
QUERY_MARKETGROUPID_MARKETGROUPNAME = (
    'SELECT marketGroupName FROM '
    'invmarketgroups WHERE marketGroupID = ? LIMIT 1')

QUERY_ATTRIBUTENAME_ATTRIBUTEID = 'SELECT attributeID FROM dgmattribs WHERE attributeName = ?'
QUERY_TYPENAME_TYPEID = 'SELECT typeID FROM invtypes WHERE typeName = ?'
QUERY_GROUPNAME_GROUPID = 'SELECT groupID FROM invgroups WHERE groupName = ?'
|
|
||||||
|
|
||||||
# Resolve skill requirement names given on the command line to typeIDs
if options.srq:
    global_skillrqids = set()
    for srq in options.srq.split(","):
        srqid = 0
        cursor.execute(QUERY_TYPENAME_TYPEID, (srq,))
        for row in cursor:
            srqid = row[0]
        if srqid:
            global_skillrqids.add(srqid)
        else:
            # Name did not resolve to any typeID - abort
            import sys
            sys.stderr.write("You need to specify proper skill requirement name.\n")
            sys.exit()
|
|
||||||
|
|
||||||
# Resolve group names given on the command line to groupIDs
if options.grp:
    global_groupids = set()
    for grp in options.grp.split(","):
        grouplist = []
        cursor.execute(QUERY_GROUPNAME_GROUPID, (grp,))
        for row in cursor:
            grouplist.append(row[0])
        if len(grouplist) > 1:
            # str.format renders identically under python 3 and fixes the
            # python 2 case, where multi-argument print(...) emits a tuple
            print("Warning: multiple groups found, using ID {0}".format(grouplist[0]))
        elif len(grouplist) == 0:
            import sys
            sys.stderr.write("You need to specify proper group name.\n")
            sys.exit()
        global_groupids.add(grouplist[0])
|
|
||||||
|
|
||||||
# Set of published typeIDs in the relevant categories
publishedtypes = set()
cursor.execute(QUERY_PUBLISHEDTYPEIDS)
for row in cursor:
    publishedtypes.add(row[0])

# Items carrying ALL requested attributes form the working set for
# everything below; "item" below means an item with the given attributes
typeswithattr = set()
first = True
for attr in options.attr.split(","):
    # Resolve the attribute name to its ID
    cursor.execute(QUERY_ATTRIBUTENAME_ATTRIBUTEID, (attr,))
    noattr = True
    for row in cursor:
        noattr = False
        attrid = row[0]
    if noattr:
        import sys
        sys.stderr.write("No \"{0}\" attribute found.\n".format(attr))
        sys.exit()
    # Collect published items holding this attribute, honoring the value
    # filters; --nozero takes precedence over --noone, as before
    tmp = set()
    cursor.execute(QUERY_ATTRIBUTEID_TYPEID, (attrid,))
    for row in cursor:
        if options.nozero:
            keep = row[1] not in (None, 0, 0.0)
        elif options.noone:
            keep = row[1] != 1.0
        else:
            keep = True
        if keep and row[0] in publishedtypes:
            tmp.add(row[0])
    # First attribute seeds the set, subsequent ones narrow it
    if first:
        first = False
        typeswithattr = set(tmp)
    else:
        typeswithattr.intersection_update(tmp)
if not typeswithattr:
    import sys
    sys.stderr.write("No items found with all of supplied attributes.\n")
    sys.exit()
|
|
||||||
|
|
||||||
# Base type maps
# { basetypeid : set(typeid) }
map_basetypeid_typeid = {}
# { typeid : basetypeid }
map_typeid_basetypeid = {}
for typeid in typeswithattr:
    # Not all typeIDs in the database have a parent, so default first
    basetypeid = 0
    cursor.execute(QUERY_TYPEID_PARENTTYPEID, (typeid,))
    for row in cursor:
        basetypeid = row[0]
    # If the base type is unpublished or unset, the item is treated as a
    # variation of itself
    if basetypeid not in typeswithattr:
        basetypeid = typeid
    map_basetypeid_typeid.setdefault(basetypeid, set()).add(typeid)
    map_typeid_basetypeid[typeid] = basetypeid
|
|
||||||
|
|
||||||
# Meta group maps
# { metagroupid : set(typeid) }
map_metagroupid_typeid = {}
# { typeid : metagroupid }
map_typeid_metagroupid = {}
for typeid in typeswithattr:
    # Items without an invmetatypes row are assumed to be tech 1
    metagroupid = 1
    cursor.execute(QUERY_TYPEID_METAGROUPID, (typeid,))
    for row in cursor:
        metagroupid = row[0]
    map_metagroupid_typeid.setdefault(metagroupid, set()).add(typeid)
    map_typeid_metagroupid[typeid] = metagroupid
|
|
||||||
|
|
||||||
# Filter out non-t1/t2 items if we're asked to do so; items that are
# their own base type are always kept (they have no t1 variation)
if options.tech12:
    toremove = set(typeid for typeid in typeswithattr
                   if map_typeid_basetypeid[typeid] != typeid
                   and map_typeid_metagroupid[typeid] != 2)
    typeswithattr.difference_update(toremove)

# Echo the attribute list we worked from
print("Attributes:")
for attr in sorted(options.attr.split(",")):
    print(attr)
print("")
|
|
||||||
|
|
||||||
# Compose group maps
# { groupid : set(typeid) }
map_groupid_typeid = {}
# { typeid : groupid }
map_typeid_groupid = {}
for typeid in typeswithattr:
    groupid = 0
    cursor.execute(QUERY_TYPEID_GROUPID, (typeid,))
    for row in cursor:
        groupid = row[0]
    map_groupid_typeid.setdefault(groupid, set()).add(typeid)
    map_typeid_groupid[typeid] = groupid
|
|
||||||
|
|
||||||
# Category maps
# { categoryid : set(typeid) }
map_categoryid_typeid = {}
# { typeid : categoryid }
map_typeid_categoryid = {}
for typeid in typeswithattr:
    categoryid = 0
    cursor.execute(QUERY_GROUPID_CATEGORYID, (map_typeid_groupid[typeid],))
    for row in cursor:
        categoryid = row[0]
    map_categoryid_typeid.setdefault(categoryid, set()).add(typeid)
    map_typeid_categoryid[typeid] = categoryid

# Category <-> group maps
# { categoryid : set(groupid) }
map_categoryid_groupid = {}
# { groupid : categoryid }
map_groupid_categoryid = {}
for groupid in map_groupid_typeid:
    categoryid = 0
    cursor.execute(QUERY_GROUPID_CATEGORYID, (groupid,))
    for row in cursor:
        categoryid = row[0]
    map_categoryid_groupid.setdefault(categoryid, set()).add(groupid)
    map_groupid_categoryid[groupid] = categoryid
|
|
||||||
|
|
||||||
# Skill required maps
# { skillid : set(typeid) }
map_skillrq_typeid = {}
# { typeid : set(skillid) }
map_typeid_skillrq = {}
# Items without any skill requirements
set_typeid_noskillrq = set()
for typeid in typeswithattr:
    skills = set()
    cursor.execute(QUERY_TYPEID_SKILLRQ, (typeid,))
    for row in cursor:
        skills.add(row[0])
    map_typeid_skillrq[typeid] = skills
    if not skills:
        set_typeid_noskillrq.add(typeid)
    for skillid in skills:
        map_skillrq_typeid.setdefault(skillid, set()).add(typeid)
|
|
||||||
|
|
||||||
def gettypename(typeid):
    """Return typeName for given typeID, or "" when not found."""
    name = ""
    cursor.execute(QUERY_TYPEID_TYPENAME, (typeid,))
    for row in cursor:
        name = row[0]
    return name
|
|
||||||
|
|
||||||
def getgroupname(grpid):
    """Return groupName for given groupID, or "" when not found."""
    name = ""
    cursor.execute(QUERY_GROUPID_GROUPNAME, (grpid,))
    for row in cursor:
        name = row[0]
    return name
|
|
||||||
|
|
||||||
def getcatname(catid):
    """Return categoryName for given categoryID, or "" when not found."""
    name = ""
    cursor.execute(QUERY_CATEGORYID_CATEGORYNAME, (catid,))
    for row in cursor:
        name = row[0]
    return name
|
|
||||||
|
|
||||||
if options.grp and options.srq:
|
|
||||||
# Set of items which are supposed to be affected
|
|
||||||
targetitems = set()
|
|
||||||
for groupid in global_groupids:
|
|
||||||
for srqid in global_skillrqids:
|
|
||||||
if groupid in map_groupid_typeid and srqid in map_skillrq_typeid:
|
|
||||||
targetitems.update(map_groupid_typeid[groupid].intersection(map_skillrq_typeid[srqid]))
|
|
||||||
targetitems_noskillrqs = targetitems.intersection(set_typeid_noskillrq)
|
|
||||||
# All skill requirements of items which are supposed to be affected
|
|
||||||
targetitems_skillrqs = set()
|
|
||||||
for itemid in targetitems:
|
|
||||||
targetitems_skillrqs.update(map_typeid_skillrq[itemid])
|
|
||||||
# Remove skill requirement supplied as argument to script
|
|
||||||
# we can use that argument when needed manually, and it
|
|
||||||
# covers all targetitems which we don't want to do with single skill
|
|
||||||
for srqid in global_skillrqids:
|
|
||||||
targetitems_skillrqs.remove(srqid)
|
|
||||||
|
|
||||||
if targetitems:
|
|
||||||
# Print items which are supposed to be affected
|
|
||||||
print("Affected items:")
|
|
||||||
for groupid in sorted(global_groupids, key=lambda grid: getgroupname(grid)):
|
|
||||||
targetitems_grp = targetitems.intersection(map_groupid_typeid[groupid])
|
|
||||||
print(" Items from {0} group:".format(getgroupname(groupid)))
|
|
||||||
# Cycle through all required skills
|
|
||||||
targetitems_skillrqs_withgiven = copy.deepcopy(targetitems_skillrqs)
|
|
||||||
for srqid in global_skillrqids:
|
|
||||||
targetitems_skillrqs_withgiven.add(srqid)
|
|
||||||
for skillrq in sorted(targetitems_skillrqs_withgiven, key=lambda sk: gettypename(sk)):
|
|
||||||
targetitems_grp_srq = targetitems_grp.intersection(map_skillrq_typeid[skillrq])
|
|
||||||
if targetitems_grp_srq:
|
|
||||||
print(" Items requiring {0} skill:".format(gettypename(skillrq)))
|
|
||||||
for item in sorted(targetitems_grp_srq, key=lambda item: gettypename(item)):
|
|
||||||
# If item has 3rd skill requirement (besides supplied as argument and
|
|
||||||
# included into header of current section), mention it
|
|
||||||
if len(map_typeid_skillrq[item]) in (2, 3):
|
|
||||||
otherskillrq = copy.deepcopy(map_typeid_skillrq[item])
|
|
||||||
otherskillrq.discard(skillrq)
|
|
||||||
print(" {0} ({1})".format(gettypename(item), ", ".join(sorted(gettypename(id) for id in otherskillrq))))
|
|
||||||
# Just print item names if there's only 1 skill requirement
|
|
||||||
elif len(map_typeid_skillrq[item]) == 1:
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
else:
|
|
||||||
print("WARNING: Bad things happened, we never should get here")
|
|
||||||
|
|
||||||
print("\nUnaffected items")
|
|
||||||
|
|
||||||
|
|
||||||
items_in_groups = set()
|
|
||||||
for groupid in global_groupids:
|
|
||||||
items_in_groups.update(map_groupid_typeid[groupid])
|
|
||||||
items_with_skillrqs = set()
|
|
||||||
for srqid in global_skillrqids:
|
|
||||||
items_with_skillrqs.update(map_skillrq_typeid[srqid])
|
|
||||||
# List items which do not belong to given group, but have given skill requirement
|
|
||||||
wskill = typeswithattr.intersection(items_with_skillrqs)
|
|
||||||
wogroup = typeswithattr.difference(items_in_groups)
|
|
||||||
nontarget_wskill_wogroup = wskill.intersection(wogroup)
|
|
||||||
if nontarget_wskill_wogroup:
|
|
||||||
print(" With {0} skill requirements, not belonging to {1} groups:".format(", ".join(sorted(gettypename(id) for id in global_skillrqids)), ", ".join(sorted(getgroupname(grid) for grid in global_groupids))))
|
|
||||||
for item in sorted(nontarget_wskill_wogroup, key=lambda item: gettypename(item)):
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
|
|
||||||
# List items which belong to given group, but do not have given skill requirement
|
|
||||||
woskill = typeswithattr.difference(items_with_skillrqs)
|
|
||||||
wgroup = typeswithattr.intersection(items_in_groups)
|
|
||||||
nontarget_woskill_wgroup = woskill.intersection(wgroup)
|
|
||||||
if nontarget_woskill_wgroup:
|
|
||||||
print(" Without {0} skill requirement, belonging to {1} group:".format(", ".join(sorted(gettypename(id) for id in global_skillrqids)), ", ".join(sorted(getgroupname(grid) for grid in global_groupids))))
|
|
||||||
for item in sorted(nontarget_woskill_wgroup, key=lambda item: gettypename(item)):
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
|
|
||||||
# If any of the above lists is missing, list all unaffected items
|
|
||||||
if not nontarget_wskill_wogroup or not nontarget_woskill_wgroup:
|
|
||||||
nontarget = typeswithattr.difference(items_in_groups)
|
|
||||||
for srqid in global_skillrqids:
|
|
||||||
nontarget.difference_update(map_skillrq_typeid[srqid])
|
|
||||||
if nontarget_wskill_wogroup:
|
|
||||||
nontarget.difference_update(nontarget_wskill_wogroup)
|
|
||||||
if nontarget_woskill_wgroup:
|
|
||||||
nontarget.difference_update(nontarget_woskill_wgroup)
|
|
||||||
nontarget_groups = set()
|
|
||||||
nontarget_cats = set()
|
|
||||||
print(" Plain list:")
|
|
||||||
for item in sorted(nontarget, key=lambda item: gettypename(item)):
|
|
||||||
nontarget_groups.add(map_typeid_groupid[item])
|
|
||||||
print(" {0} ({1})".format(gettypename(item), getgroupname(map_typeid_groupid[item])))
|
|
||||||
#print(" Groups:")
|
|
||||||
#for group in sorted(nontarget_groups, key=lambda grp: getgroupname(grp)):
|
|
||||||
# nontarget_cats.add(map_groupid_categoryid[group])
|
|
||||||
# print(" {0} ({1})".format(getgroupname(group), getcatname(map_groupid_categoryid[group])))
|
|
||||||
#print(" Categories:")
|
|
||||||
#for cat in sorted(nontarget_cats, key=lambda cat: getcatname(cat)):
|
|
||||||
# print(" {0}".format(getcatname(cat)))
|
|
||||||
|
|
||||||
elif options.grp:
|
|
||||||
# Set of items which are supposed to be affected
|
|
||||||
targetitems = set()
|
|
||||||
for groupid in global_groupids:
|
|
||||||
if groupid in map_groupid_typeid:
|
|
||||||
targetitems.update(map_groupid_typeid[groupid])
|
|
||||||
# All skill requirements of items which are supposed to be affected
|
|
||||||
targetitems_skillrqs = set()
|
|
||||||
for itemid in targetitems:
|
|
||||||
targetitems_skillrqs.update(map_typeid_skillrq[itemid])
|
|
||||||
targetitems_noskillrqs = targetitems.intersection(set_typeid_noskillrq)
|
|
||||||
if targetitems:
|
|
||||||
# Print items which are supposed to be affected
|
|
||||||
print("Affected items:")
|
|
||||||
for groupid in sorted(global_groupids, key=lambda grid: getgroupname(grid)):
|
|
||||||
print(" From {0} group:".format(getgroupname(groupid)))
|
|
||||||
targetitems_grp = targetitems.intersection(map_groupid_typeid[groupid])
|
|
||||||
targetitems_noskillrqs_grp = targetitems_noskillrqs.intersection(map_groupid_typeid[groupid])
|
|
||||||
# Cycle through all required skills
|
|
||||||
for skillrq in sorted(targetitems_skillrqs, key=lambda sk: gettypename(sk)):
|
|
||||||
items_grpsrq = targetitems_grp.intersection(map_skillrq_typeid[skillrq])
|
|
||||||
if items_grpsrq:
|
|
||||||
print(" Requiring {0} skill:".format(gettypename(skillrq)))
|
|
||||||
for item in sorted(items_grpsrq, key=lambda item: gettypename(item)):
|
|
||||||
# If item has other skill requirements, print them
|
|
||||||
if len(map_typeid_skillrq[item]) == 3 or len(map_typeid_skillrq[item]) == 2:
|
|
||||||
otherskillrq = copy.deepcopy(map_typeid_skillrq[item])
|
|
||||||
otherskillrq.discard(skillrq)
|
|
||||||
print(" {0} ({1})".format(gettypename(item), ", ".join(sorted(gettypename(id) for id in otherskillrq))))
|
|
||||||
# Just print item names if there're only 2 skill requirements
|
|
||||||
elif len(map_typeid_skillrq[item]) == 1:
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
else:
|
|
||||||
print("WARNING: Bad things happened, we never should get here")
|
|
||||||
if targetitems_noskillrqs:
|
|
||||||
print(" Requiring no skills:")
|
|
||||||
for item in sorted(targetitems_noskillrqs_grp, key=lambda item: gettypename(item)):
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
|
|
||||||
print("\nUnaffected items")
|
|
||||||
|
|
||||||
# List items which are supposed to be unaffected
|
|
||||||
nontarget = typeswithattr.difference(targetitems)
|
|
||||||
nontarget_groups = set()
|
|
||||||
nontarget_cats = set()
|
|
||||||
print(" Not belonging to groups {0}:".format(", ".join(getgroupname(id) for id in global_groupids)))
|
|
||||||
|
|
||||||
removeitms = set()
|
|
||||||
# Check 1 unaffected item with each skill requirement, if some items with it were affected
|
|
||||||
for skillrq in sorted(targetitems_skillrqs, key=lambda srq: gettypename(srq)):
|
|
||||||
if nontarget.intersection(map_skillrq_typeid[skillrq]):
|
|
||||||
print(" With {0} skill requirement:".format(gettypename(skillrq)))
|
|
||||||
for item in sorted(nontarget.intersection(map_skillrq_typeid[skillrq]), key=lambda item: gettypename(item)):
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
removeitms.update(map_skillrq_typeid[skillrq])
|
|
||||||
nontarget.difference_update(removeitms)
|
|
||||||
print(" With other or no skill requirements:")
|
|
||||||
for item in sorted(nontarget, key=lambda item: gettypename(item)):
|
|
||||||
nontarget_groups.add(map_typeid_groupid[item])
|
|
||||||
print(" {0} ({1})".format(gettypename(item), getgroupname(map_typeid_groupid[item])))
|
|
||||||
|
|
||||||
#print(" Groups:")
|
|
||||||
#for group in sorted(nontarget_groups, key=lambda grp: getgroupname(grp)):
|
|
||||||
# nontarget_cats.add(map_groupid_categoryid[group])
|
|
||||||
# print(" {0} ({1})".format(getgroupname(group), getcatname(map_groupid_categoryid[group])))
|
|
||||||
#print(" Categories:")
|
|
||||||
#for cat in sorted(nontarget_cats, key=lambda cat: getcatname(cat)):
|
|
||||||
# print(" {0}".format(getcatname(cat)))
|
|
||||||
|
|
||||||
elif options.srq:
|
|
||||||
# Set of items which are supposed to be affected
|
|
||||||
targetitems = set()
|
|
||||||
for srqid in global_skillrqids:
|
|
||||||
if srqid in map_skillrq_typeid:
|
|
||||||
targetitems.update(map_skillrq_typeid[srqid])
|
|
||||||
|
|
||||||
# All groups of items which are supposed to be affected
|
|
||||||
targetitems_groups = set()
|
|
||||||
targetitems_srqs = set()
|
|
||||||
targetitems_cats = set()
|
|
||||||
for itemid in targetitems:
|
|
||||||
targetitems_groups.add(map_typeid_groupid[itemid])
|
|
||||||
targetitems_srqs.update(map_typeid_skillrq[itemid])
|
|
||||||
targetitems_cats.add(map_typeid_categoryid[itemid])
|
|
||||||
if targetitems:
|
|
||||||
# Print items which are supposed to be affected
|
|
||||||
print("Affected items:")
|
|
||||||
for srqid in sorted(global_skillrqids, key=lambda itm: gettypename(itm)):
|
|
||||||
print(" With {0} skill requirements:".format(gettypename(srqid)))
|
|
||||||
targetitems_srq = targetitems.intersection(map_skillrq_typeid[srqid])
|
|
||||||
targetitems_srq_groups = set()
|
|
||||||
targetitems_srq_cats = set()
|
|
||||||
for itemid in targetitems_srq:
|
|
||||||
targetitems_srq_groups.add(map_typeid_groupid[itemid])
|
|
||||||
targetitems_srq_cats.add(map_typeid_categoryid[itemid])
|
|
||||||
# Cycle through groups
|
|
||||||
for groupid in sorted(targetitems_srq_groups, key=lambda grp: getgroupname(grp)):
|
|
||||||
print(" From {0} group:".format(getgroupname(groupid)))
|
|
||||||
for item in sorted(targetitems_srq.intersection(map_groupid_typeid[groupid]), key=lambda item: gettypename(item)):
|
|
||||||
print(" {0} ({1})".format(gettypename(item), ", ".join(sorted(gettypename(itm) for itm in map_typeid_skillrq[item].difference(global_skillrqids))) or "None"))
|
|
||||||
|
|
||||||
print("\nUnaffected items")
|
|
||||||
|
|
||||||
# List items which are supposed to be unaffected
|
|
||||||
nontarget = typeswithattr.difference(targetitems)
|
|
||||||
nontarget_groups = set()
|
|
||||||
nontarget_cats = set()
|
|
||||||
print(" Without {0} skills requirement:".format(", ".join(gettypename(id) for id in global_skillrqids)))
|
|
||||||
removeitms = set()
|
|
||||||
# Check 1 unaffected item from each group where some items were affected
|
|
||||||
for groupid in sorted(targetitems_groups, key=lambda grp: getgroupname(grp)):
|
|
||||||
if nontarget.intersection(map_groupid_typeid[groupid]):
|
|
||||||
print(" From {0} group:".format(getgroupname(groupid)))
|
|
||||||
for skillrqid in sorted(targetitems_srqs.difference(global_skillrqids), key=lambda srq: gettypename(srq)):
|
|
||||||
itmset = nontarget.intersection(map_groupid_typeid[groupid]).intersection(map_skillrq_typeid[skillrqid])
|
|
||||||
if itmset:
|
|
||||||
print(" Items with {0} skill requirement:".format(gettypename(skillrqid)))
|
|
||||||
for item in sorted(itmset, key=lambda itm: gettypename(itm)):
|
|
||||||
otherskrqs = map_typeid_skillrq[item].difference(global_skillrqids)
|
|
||||||
otherskrqs.remove(skillrqid)
|
|
||||||
print(" {0} ({1})".format(gettypename(item), ", ".join(sorted(gettypename(itm) for itm in otherskrqs)) or "None"))
|
|
||||||
removeitms.update(itmset)
|
|
||||||
nontarget.difference_update(removeitms)
|
|
||||||
otsk = nontarget.intersection(map_groupid_typeid[groupid]).difference(set_typeid_noskillrq)
|
|
||||||
if otsk:
|
|
||||||
print(" Items with other skill requirements:")
|
|
||||||
for item in sorted(otsk, key=lambda itm: gettypename(itm)):
|
|
||||||
print(" {0} (None)".format(gettypename(item)))
|
|
||||||
removeitms.update(otsk)
|
|
||||||
nosk = nontarget.intersection(map_groupid_typeid[groupid]).intersection(set_typeid_noskillrq)
|
|
||||||
if nosk:
|
|
||||||
print(" Items with no skill requirement:")
|
|
||||||
for item in sorted(nosk, key=lambda itm: gettypename(itm)):
|
|
||||||
print(" {0} (None)".format(gettypename(item)))
|
|
||||||
removeitms.update(nosk)
|
|
||||||
nontarget.difference_update(removeitms)
|
|
||||||
for catid in sorted(targetitems_cats, key=lambda cat: getcatname(cat)):
|
|
||||||
if nontarget.intersection(map_categoryid_typeid[catid]):
|
|
||||||
print(" From {0} category:".format(getcatname(catid)))
|
|
||||||
for item in sorted(nontarget.intersection(map_categoryid_typeid[catid]), key=lambda item: gettypename(item)):
|
|
||||||
print(" {0}".format(gettypename(item)))
|
|
||||||
removeitms.update(map_categoryid_typeid[catid])
|
|
||||||
nontarget.difference_update(removeitms)
|
|
||||||
if nontarget:
|
|
||||||
# Check any other unaffected item
|
|
||||||
print(" Remaining items:")
|
|
||||||
for item in sorted(nontarget, key=lambda item: gettypename(item)):
|
|
||||||
nontarget_groups.add(map_typeid_groupid[item])
|
|
||||||
print(" {0} ({1})".format(gettypename(item), getgroupname(map_typeid_groupid[item])))
|
|
||||||
#print(" Groups:")
|
|
||||||
#for group in sorted(nontarget_groups, key=lambda grp: getgroupname(grp)):
|
|
||||||
# nontarget_cats.add(map_groupid_categoryid[group])
|
|
||||||
# print(" {0} ({1})".format(getgroupname(group), getcatname(map_groupid_categoryid[group])))
|
|
||||||
#print(" Categories:")
|
|
||||||
#for cat in sorted(nontarget_cats, key=lambda cat: getcatname(cat)):
|
|
||||||
# print(" {0}".format(getcatname(cat)))
|
|
||||||
|
|
||||||
else:
|
|
||||||
print("Affected items")
|
|
||||||
targetitems = typeswithattr
|
|
||||||
targetitems_groups = set()
|
|
||||||
targetitems_cats = set()
|
|
||||||
print(" Assumed set of items:")
|
|
||||||
for item in sorted(targetitems, key=lambda item: gettypename(item)):
|
|
||||||
targetitems_groups.add(map_typeid_groupid[item])
|
|
||||||
print(" {0} ({1})".format(gettypename(item), getgroupname(map_typeid_groupid[item])))
|
|
||||||
print(" Groups:")
|
|
||||||
for group in sorted(targetitems_groups, key=lambda grp: getgroupname(grp)):
|
|
||||||
targetitems_cats.add(map_groupid_categoryid[group])
|
|
||||||
print(" {0} ({1})".format(getgroupname(group), getcatname(map_groupid_categoryid[group])))
|
|
||||||
print(" Categories:")
|
|
||||||
for cat in sorted(targetitems_cats, key=lambda cat: getcatname(cat)):
|
|
||||||
print(" {0}".format(getcatname(cat)))
|
|
||||||
Reference in New Issue
Block a user