1 changed file with 237 additions and 0 deletions
@@ -0,0 +1,237 @@
import argparse
import psycopg2
from time import time
import traceback
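

# Thin wrapper around a single psycopg2 connection. With dryrun=True every
# statement is printed instead of executed, so an import can be previewed safely.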
class DB_Connector:
    def __init__(self, host, port, db, user, password, schema, dryrun=False):
        self.dryrun = dryrun
        self.connector = psycopg2.connect(host=host, port=port, database=db, user=user, password=password, options="-c search_path=" + schema)
        #self.connector = pymysql.connect(host=host, port=port, database=db, user=user, password=password, cursorclass=pymysql.cursors.DictCursor)

    def execute(self, sql, payload=()):
        if self.dryrun:
            print(sql, payload)
            return

        with self.connector.cursor() as cursor:
            print(sql, payload)
            cursor.execute(sql, payload)

    def commit(self):
        if self.dryrun:
            print("--commit data")
            return

        self.connector.commit()
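

# Parses items_game.txt with the vdf module and rebuilds the tf2idb_* lookup
# tables. All rows are written to new_* staging tables first and swapped into
# place at the end, so readers never see a half-built table.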
class tf2idb:
    def __init__(self, TF_FOLDER, DB_CONNECTOR, ITEMS_GAME="scripts/items/items_game.txt"):
        import vdf
        import collections.abc  # dict_merge checks against collections.abc.Mapping
        import copy

        def dict_merge(dct, merge_dct):
            """ Recursive dict merge. Inspired by :meth:`dict.update()`; instead of
            updating only top-level keys, dict_merge recurses down into dicts nested
            to an arbitrary depth, updating keys. The merge_dct is merged into dct.
            :param dct: dict onto which the merge is executed
            :param merge_dct: dict merged into dct
            :return: None
            """
            for k, v in merge_dct.items():
                if k in ('used_by_classes', 'model_player_per_class'):  # handles Demoman vs demoman... Valve pls
                    v = dict((k2.lower(), v2) for k2, v2 in v.items())
                if k in dct and isinstance(dct[k], dict) and isinstance(v, collections.abc.Mapping):
                    dict_merge(dct[k], v)
                else:
                    dct[k] = copy.deepcopy(v)

        def resolve_prefabs(item, prefabs):
            # generate the full prefab list, following nested prefab references
            prefab_list = item.get('prefab', '').split()
            for prefab in prefab_list:
                subprefabs = prefabs[prefab].get('prefab', '').split()
                prefab_list.extend(p for p in subprefabs if p not in prefab_list)

            # iterate over the prefab list and merge, nested prefabs first
            result = {}
            for prefab in (prefabs[p] for p in reversed(prefab_list)):
                dict_merge(result, prefab)

            dict_merge(result, item)
            return result, prefab_list

        ITEMS_GAME = TF_FOLDER + "/" + ITEMS_GAME
        dbc = DB_CONNECTOR

        with open(ITEMS_GAME) as f:
            data = vdf.parse(f)
        data = data['items_game']
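
        # Drop any staging tables left over from a failed run, then rebuild them;
        # replace_table() below renames them over the live tf2idb_* tables.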
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_class')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item_attributes')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_particles')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_conflicts')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_regions')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_capabilities')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_attributes')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_qualities')

        dbc.execute('CREATE TABLE new_tf2idb_class (id INT NOT NULL, class VARCHAR(64) NOT NULL, slot VARCHAR(64) NULL, PRIMARY KEY (id, class))')
        dbc.execute('CREATE TABLE new_tf2idb_item_attributes (id INT NOT NULL, attribute INT NOT NULL, value VARCHAR(128) NOT NULL, static INT NULL, PRIMARY KEY (id, attribute))')
        dbc.execute('CREATE TABLE new_tf2idb_item (id INT PRIMARY KEY NOT NULL, name VARCHAR(128) NOT NULL, item_name VARCHAR(128) NULL, class VARCHAR(128) NOT NULL, slot VARCHAR(128) NULL, quality VARCHAR(128) NOT NULL, tool_type VARCHAR(128) NULL, min_ilevel INT NULL, max_ilevel INT NULL, baseitem INT NULL, holiday_restriction VARCHAR(128) NULL, has_string_attribute INT NULL, propername INT NULL)')
        dbc.execute('CREATE TABLE new_tf2idb_particles (id INT PRIMARY KEY NOT NULL, name VARCHAR(128) NOT NULL)')
        dbc.execute('CREATE TABLE new_tf2idb_equip_conflicts (name VARCHAR(128) NOT NULL, region VARCHAR(128) NOT NULL, PRIMARY KEY (name, region))')
        dbc.execute('CREATE TABLE new_tf2idb_equip_regions (id INT NOT NULL, region VARCHAR(128) NOT NULL, PRIMARY KEY (id, region))')
        dbc.execute('CREATE TABLE new_tf2idb_capabilities (id INT NOT NULL, capability VARCHAR(128) NOT NULL)')
        dbc.execute('CREATE TABLE new_tf2idb_attributes ('
            'id INT PRIMARY KEY NOT NULL,'
            'name VARCHAR(128) NOT NULL,'
            'attribute_class VARCHAR(128) NULL,'
            'attribute_type VARCHAR(128) NULL,'
            'description_string VARCHAR(128) NULL,'
            'description_format VARCHAR(128) NULL,'
            'effect_type VARCHAR(128) NULL,'
            'hidden INT NULL,'
            'stored_as_integer INT NULL,'
            'armory_desc VARCHAR(128) NULL,'
            'is_set_bonus INT NULL,'
            'is_user_generated INT NULL,'
            'can_affect_recipe_component_name INT NULL,'
            'apply_tag_to_item_definition VARCHAR(128) NULL'
            ')')
        dbc.execute('CREATE TABLE new_tf2idb_qualities (name VARCHAR(128) PRIMARY KEY NOT NULL, value INT NOT NULL)')
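
        # Index names share one namespace per schema in PostgreSQL, so suffix them
        # with a timestamp to avoid colliding with indexes on the live tables.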
        nonce = int(time())
        dbc.execute('CREATE INDEX tf2idb_item_attributes_%i ON new_tf2idb_item_attributes (attribute ASC)' % nonce)
        dbc.execute('CREATE INDEX tf2idb_class_%i ON new_tf2idb_class (class ASC)' % nonce)
        dbc.execute('CREATE INDEX tf2idb_item_%i ON new_tf2idb_item (slot ASC)' % nonce)

        # qualities
        for qname, qdata in data['qualities'].items():
            dbc.execute('INSERT INTO new_tf2idb_qualities (name, value) VALUES (%s,%s)', (qname, qdata['value']))

        # particles
        for particle_type, particle_list in data['attribute_controlled_attached_particles'].items():
            for k, v in particle_list.items():
                dbc.execute('INSERT INTO new_tf2idb_particles (id,name) VALUES (%s,%s)', (k, v['system']))  # TODO: add the other fields too
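
        # attribute_type maps lowercased attribute names to (id, type); the item
        # loop below uses it to resolve attribute ids and flag string attributes.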
        # attributes
        attribute_type = {}
        for k, v in data['attributes'].items():
            at = v.get('attribute_type')
            if at:
                atype = at
            elif v.get('stored_as_integer'):
                atype = 'integer'
            else:
                atype = 'float'
            attribute_type[v['name'].lower()] = (k, atype)
            dbc.execute('INSERT INTO new_tf2idb_attributes (id,name,attribute_class,attribute_type,description_string,description_format,effect_type,hidden,stored_as_integer,armory_desc,is_set_bonus,is_user_generated,can_affect_recipe_component_name,apply_tag_to_item_definition) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
                (k, v.get('name'), v.get('attribute_class'), v.get('attribute_type'), v.get('description_string'), v.get('description_format'),
                 v.get('effect_type'), v.get('hidden'), v.get('stored_as_integer'), v.get('armory_desc'), v.get('is_set_bonus'),
                 v.get('is_user_generated'), v.get('can_affect_recipe_component_name'), v.get('apply_tag_to_item_definition'))
            )

        # conflicts
        for k, v in data['equip_conflicts'].items():
            for region in v.keys():
                dbc.execute('INSERT INTO new_tf2idb_equip_conflicts (name,region) VALUES (%s,%s)', (k, region))
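
        # Every item is flattened against its (possibly nested) prefabs before
        # insertion; a failure on any item prints the offending id and aborts.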
        # items
        for id, v in data['items'].items():
            if id == 'default':
                continue
            i, prefabs_used = resolve_prefabs(v, data['prefabs'])
            baseitem = 1 if 'baseitem' in i else 0

            try:
                tool = None
                if 'tool' in i:
                    tool = i['tool'].get('type')

                has_string_attribute = 0
                if 'static_attrs' in i:
                    for name, value in i['static_attrs'].items():
                        aid, atype = attribute_type[name.lower()]
                        if atype == 'string':
                            has_string_attribute = 1
                        dbc.execute('INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (%s,%s,%s,%s)', (id, aid, value, 1))

                if 'attributes' in i:
                    for name, info in i['attributes'].items():
                        aid, atype = attribute_type[name.lower()]
                        if atype == 'string':
                            has_string_attribute = 1
                        dbc.execute('INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (%s,%s,%s,%s)', (id, aid, info['value'], 0))

                dbc.execute('INSERT INTO new_tf2idb_item '
                    '(id,name,item_name,class,slot,quality,tool_type,min_ilevel,max_ilevel,baseitem,holiday_restriction,has_string_attribute,propername) '
                    'VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
                    (id, i['name'], i.get('item_name'), i['item_class'], i.get('item_slot'), i.get('item_quality', ''), tool, i.get('min_ilevel'), i.get('max_ilevel'), baseitem,
                     i.get('holiday_restriction'), has_string_attribute, i.get('propername'))
                )

                if 'used_by_classes' in i:
                    for prof, val in i['used_by_classes'].items():
                        dbc.execute('INSERT INTO new_tf2idb_class (id,class,slot) VALUES (%s,%s,%s)', (id, prof.lower(), val if val != '1' else None))

                # 'equip_region' may be a single region name or a dict of regions
                region_field = i.get('equip_region') or i.get('equip_regions')
                if region_field:
                    if isinstance(region_field, str):
                        region_field = {region_field: 1}
                    for region in region_field.keys():
                        dbc.execute('INSERT INTO new_tf2idb_equip_regions (id,region) VALUES (%s,%s)', (id, region))

                # capabilities; a disabled capability is stored with a '!' prefix
                for capability, val in i.get('capabilities', {}).items():
                    dbc.execute('INSERT INTO new_tf2idb_capabilities (id,capability) VALUES (%s,%s)', (id, (capability if val != '0' else '!' + capability)))

            except Exception:
                traceback.print_exc()
                print(id)
                raise
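
        # Swap each staging table into place. The DROP and RENAME run in the same
        # implicit psycopg2 transaction, which only becomes visible at commit().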
        def replace_table(name):
            dbc.execute('DROP TABLE IF EXISTS %s' % name)
            dbc.execute('ALTER TABLE new_%s RENAME TO %s' % (name, name))

        replace_table('tf2idb_class')
        replace_table('tf2idb_item_attributes')
        replace_table('tf2idb_item')
        replace_table('tf2idb_particles')
        replace_table('tf2idb_equip_conflicts')
        replace_table('tf2idb_equip_regions')
        replace_table('tf2idb_capabilities')
        replace_table('tf2idb_attributes')
        replace_table('tf2idb_qualities')

        dbc.commit()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("tf_directory", type=str)
    parser.add_argument("--host", type=str, default="localhost")
    parser.add_argument("--port", type=int, default=5432)
    parser.add_argument("db", type=str)
    parser.add_argument("schema", type=str)
    parser.add_argument("user", type=str)
    parser.add_argument("password", type=str)
    parser.add_argument("--dry-run", action="store_true", default=False)
    args = parser.parse_args()

    c = DB_Connector(args.host, args.port, args.db, args.user, args.password, args.schema, args.dry_run)
    tf2idb(args.tf_directory, c)
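
# Example invocation (script name, paths, and credentials are hypothetical):
#   python tf2idb.py /path/to/tf tf2idb public tf2idb_user secret --dry-run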