gsd 12 months ago
parent
commit
86f0d8534c
  1. 94
      SourceManager.py
  2. 224
      tf2idb_mysql.py

94
SourceManager.py

@ -89,6 +89,22 @@ def create_symbolic_file(source_file, symbolic_file):
print(f"Execute system cmd: {cmd}")
SCall(cmd.split())
def compress_files(files, place, archive_name = None):
    """Bundle *files* into a gzipped tarball written inside *place*.

    :param files: iterable of file paths to add to the archive
    :param place: directory where the archive file is created
    :param archive_name: base name without extension; defaults to the current
        unix timestamp.  ".tar.gz" is always appended.
    """
    import tarfile  # local import, hoisted to the top of the function
    if archive_name is None:
        archive_name = f"{time()}.tar.gz"
    else:
        archive_name += ".tar.gz"
    with tarfile.open(f"{place}/{archive_name}", "w:gz") as tar:
        for file in files:
            # Paths are stored as given (tarfile strips any leading "/").
            tar.add(file)
class tf2idb:
def __init__(self, TF_FOLDER, ITEMS_GAME, DB_FILE):
import vdf
@ -108,7 +124,7 @@ class tf2idb:
for k, v in merge_dct.items():
if (k == 'used_by_classes' or k == 'model_player_per_class'): #handles Demoman vs demoman... Valve pls
v = dict((k2.lower(), v2) for k2, v2 in v.items())
if (k in dct and isinstance(dct[k], dict) and isinstance(v, collections.Mapping)):
if (k in dct and isinstance(dct[k], dict) and isinstance(v, collections.abc.Mapping)):
dict_merge(dct[k], v)
else:
dct[k] = copy.deepcopy(v)
@ -487,6 +503,66 @@ class Server:
self.o.info(f"Deleted {count} cache files")
return True
def clear_logs(self):
    """Compress and then delete *.log files older than one day.

    Scans three locations (server default logs, SourceMod logs, LGSM console
    logs), asks the operator for confirmation, archives each non-empty group
    next to its own log directory, then removes the originals.

    :return: True (also when the operator declines at the prompt)
    """
    touch_if_date_lt = 60 * 60 * 24  # age threshold: one day, in seconds
    default_logs_place = f"{self.root}/logs/*.log"
    sourcemod_logs_place = f"{self.root}/addons/sourcemod/logs/*.log"
    lgsm_logs_place = f"{self.root}/../../log/console/*.log"

    def find_old_logs(pattern, label):
        # Glob `pattern` and keep only files whose mtime is past the threshold.
        try:
            found = [f for f in glob(pattern) if time() - os.path.getmtime(f) > touch_if_date_lt]
        except OSError:
            # Narrowed from a bare except: filesystem errors are handled,
            # SystemExit/KeyboardInterrupt are no longer swallowed.
            self.o.error(f"Cannot find logs in {pattern}")
            return []
        self.o.info(f"Found {len(found)} logs in {label} directory")
        return found

    ###search stage (was three copy-pasted try blocks)
    default_logs = find_old_logs(default_logs_place, "default")
    sourcemod_logs = find_old_logs(sourcemod_logs_place, "sourcemod")
    lgsm_logs = find_old_logs(lgsm_logs_place, "lgsm")
    logs_files = default_logs + sourcemod_logs + lgsm_logs
    self.o.info(f"Compress {len(logs_files)} logs files and delete?")
    if not self.wait_input():
        return True
    ###compress stage — one archive name shared by every location
    archive_name = f"dl_{time()}"
    for group, place in ((default_logs, default_logs_place),
                         (sourcemod_logs, sourcemod_logs_place),
                         (lgsm_logs, lgsm_logs_place)):
        if group:
            self.o.info(f"Compress {place}")
            compress_files(group, place.replace("/*.log", ""), archive_name)
    ###delete stage
    count = 0
    for log_file in logs_files:
        try:
            remove_file(log_file)
            count += 1
        except Exception as delete_error:
            print(log_file, delete_error)
            # NOTE(review): hard process exit preserved from the original;
            # confirm callers really want the whole tool to die here.
            sys.exit(1)
    self.o.info(f"Deleted {count} logs files")
    return True
def upgrade_plugin(self, fresh_path, need_reloads_plugins = True):
new_hash = hashfile(fresh_path)
fresh_plugin_name = fresh_path.split("/")[-1:][0]
@ -603,6 +679,11 @@ class Server:
print(f"{self} | Line: {line}\nNot append in: {config}")
return False
def executemap_config(self):
    """Execute the per-map SourceMod config (sm_execcfg <map>.cfg), if any.

    Does nothing when no map config name can be determined.
    """
    map_config = self.map()
    if not map_config:
        return
    self.rcon(f"sm_execcfg {map_config}.cfg", hide_server_name=True)
def map(self):
current_map = self.current_map()
if current_map:
@ -860,6 +941,7 @@ if __name__ == "__main__":
# Inspection / map-config CLI flags.
parser.add_argument("--netstatus","-ns", help = "Show json net_status", default = False, action = "store_true")
parser.add_argument("--map","-m", help = "Show map on server", default = False, action = "store_true")
parser.add_argument("--add_map_config", "-amc", help = "Add line to config map", default = "", type = str, nargs="+")
# NOTE(review): unlike its siblings this flag has no short alias and no help text.
parser.add_argument("--execute_map_config", default=False, action="store_true")
################################################################################################################################################
parser.add_argument("--CopyFile", "-cp", help = "Path of file to copy in root directory\nNeed second argument: --DestDir", type = str, default = "")
parser.add_argument("--SymLinkFile", "-ln", help = "Path of file to create symbolic link in root directory\nNeed second argument: --DestDir", type = str, default = "")
@ -873,6 +955,7 @@ if __name__ == "__main__":
################################################################################################################################################
# Cleanup CLI flags.
parser.add_argument("--DeleteDownloadCache", "-ddc", help = "Clear download cache", default = False, action = "store_true")
parser.add_argument("--DeleteUnusedMaps", "-dum", help = "Delete maps from maps folder", default = False, action = "store_true")
# NOTE(review): siblings use a single-dash short alias ("-ddc", "-dum");
# "--dlf" here registers a second LONG option instead — probably meant "-dlf".
# Left unchanged because "--dlf" is already part of the public CLI.
parser.add_argument("--DeleteLogsFiles", "--dlf", help = "Delete logs file if older 1 day", default = False, action = "store_true")
################################################################################################################################################
parser.add_argument("--UpdateTF2IDB", help = "Update tf2idb database", default = False, action = "store_true")
################################################################################################################################################
@ -903,6 +986,9 @@ if __name__ == "__main__":
append_line = append_line[:-1]
manager.execute("add2map_config", append_line)
sys.exit(0)
if args.execute_map_config:
manager.execute("executemap_config")
sys.exit(0)
##################################
if args.netstatus:
manager.execute("net_status_json")
@ -976,4 +1062,8 @@ if __name__ == "__main__":
###################################
if args.UpdateTF2IDB:
manager.execute("update_tf2idb")
sys.exit(0)
sys.exit(0)
###################################
if args.DeleteLogsFiles:
manager.execute("clear_logs")
sys.exit(0)

224
tf2idb_mysql.py

@ -0,0 +1,224 @@
import argparse
import pymysql
from time import time
import traceback
class DB_Connector:
    """Minimal pymysql wrapper used by the tf2idb importer.

    Rows come back as dicts (DictCursor); execute() discards results, so this
    wrapper is intended for DDL/INSERT statements followed by one commit().
    """
    def __init__(self, host, port, db, user, password):
        self.connector = pymysql.connect(host = host, port = port, database=db, user = user, password=password, cursorclass=pymysql.cursors.DictCursor)
    def execute(self, sql, payload = ()):
        # One short-lived cursor per statement; `payload` goes through
        # pymysql's parameter escaping (%s placeholders).
        with self.connector.cursor() as cursor:
            cursor.execute(sql, payload)
    def commit(self):
        # BUGFIX: this method was named cursor() although it commits the
        # transaction; tf2idb calls dbc.commit(), which previously raised
        # AttributeError and left the import uncommitted.
        self.connector.commit()
    # Backward-compatible alias for the old (misleading) method name.
    cursor = commit
class tf2idb:
    """Rebuild the tf2idb_* MySQL tables from TF2's items_game.txt.

    All work happens in __init__: the VDF item schema is parsed, fresh
    new_tf2idb_* staging tables are created and filled, then each staging
    table replaces its live counterpart (drop + rename) and the transaction
    is committed.  DB_CONNECTOR must provide execute(sql, payload) and
    commit().
    """
    def __init__(self, TF_FOLDER, DB_CONNECTOR, ITEMS_GAME = "scripts/items/items_game.txt"):
        # Local imports: vdf is a third-party parser for Valve's KeyValues format.
        import vdf
        import collections
        import copy
        def dict_merge(dct, merge_dct):
            """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
            updating only top-level keys, dict_merge recurses down into dicts nested
            to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
            ``dct``.
            :param dct: dict onto which the merge is executed
            :param merge_dct: dct merged into dct
            :return: None
            """
            for k, v in merge_dct.items():
                if (k == 'used_by_classes' or k == 'model_player_per_class'): #handles Demoman vs demoman... Valve pls
                    v = dict((k2.lower(), v2) for k2, v2 in v.items())
                if (k in dct and isinstance(dct[k], dict) and isinstance(v, collections.abc.Mapping)):
                    dict_merge(dct[k], v)
                else:
                    dct[k] = copy.deepcopy(v)
        def resolve_prefabs(item, prefabs):
            # Flatten every (possibly nested) prefab referenced by `item` into
            # one merged dict; the item's own keys win over prefab keys.
            # generate list of prefabs
            prefab_list = item.get('prefab', '').split()
            for prefab in prefab_list:
                subprefabs = prefabs[prefab].get('prefab', '').split()
                prefab_list.extend(p for p in subprefabs if p not in prefab_list)
            # iterate over prefab list and merge, nested prefabs first
            result = {}
            for prefab in ( prefabs[p] for p in reversed(prefab_list) ):
                dict_merge(result, prefab)
            dict_merge(result, item)
            return result, prefab_list
        data = None
        ITEMS_GAME = TF_FOLDER + "/" + ITEMS_GAME
        dbc = DB_CONNECTOR
        with open(ITEMS_GAME) as f:
            data = vdf.parse(f)
            data = data['items_game']
        # Drop any leftover staging tables, then create them from scratch.
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_class')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item_attributes')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_particles')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_conflicts')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_regions')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_capabilities')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_attributes')
        dbc.execute('DROP TABLE IF EXISTS new_tf2idb_qualities')
        dbc.execute('CREATE TABLE `new_tf2idb_class` (`id` INT NOT NULL , `class` VARCHAR(64) NOT NULL , `slot` VARCHAR(64) NULL, PRIMARY KEY (`id`, `class`)) ENGINE = InnoDB;')
        dbc.execute('CREATE TABLE `new_tf2idb_item_attributes` (`id` INT NOT NULL, `attribute` INT NOT NULL, `value` VARCHAR(128) NOT NULL,`static` INT NULL, PRIMARY KEY (`id`, `attribute`)) ENGINE = InnoDB;')
        dbc.execute('CREATE TABLE `new_tf2idb_item` (`id` INT PRIMARY KEY NOT NULL, `name` VARCHAR(128) NOT NULL, `item_name` VARCHAR(64) NULL, `class` VARCHAR(128) NOT NULL, `slot` VARCHAR(128) NULL, `quality` VARCHAR(128) NOT NULL, `tool_type` VARCHAR(128) NULL, `min_ilevel` INT NULL, `max_ilevel` INT NULL, `baseitem` INT NULL, `holiday_restriction` VARCHAR(128) NULL, `has_string_attribute` INT NULL, `propername` INT NULL )')
        dbc.execute('CREATE TABLE `new_tf2idb_particles` (`id` INT PRIMARY KEY NOT NULL , `name` VARCHAR(128) NOT NULL )')
        dbc.execute('CREATE TABLE `new_tf2idb_equip_conflicts` (`name` VARCHAR(128) NOT NULL , `region` VARCHAR(128) NOT NULL , PRIMARY KEY (`name`, `region`))')
        dbc.execute('CREATE TABLE `new_tf2idb_equip_regions` (`id` INT NOT NULL , `region` VARCHAR(128) NOT NULL , PRIMARY KEY (`id`, `region`))')
        dbc.execute('CREATE TABLE `new_tf2idb_capabilities` (`id` INT NOT NULL , `capability` VARCHAR(128) NOT NULL )')
        dbc.execute('CREATE TABLE `new_tf2idb_attributes` ('
            '`id` INT PRIMARY KEY NOT NULL,'
            '`name` VARCHAR(128) NOT NULL,'
            '`attribute_class` VARCHAR(128) NULL,'
            '`attribute_type` VARCHAR(128) NULL,'
            '`description_string` VARCHAR(128) NULL,'
            '`description_format` VARCHAR(128) NULL,'
            '`effect_type` VARCHAR(128) NULL,'
            '`hidden` INT NULL,'
            '`stored_as_integer` INT NULL,'
            '`armory_desc` VARCHAR(128) NULL,'
            '`is_set_bonus` INT NULL,'
            '`is_user_generated` INT NULL,'
            '`can_affect_recipe_component_name` INT NULL,'
            '`apply_tag_to_item_definition` VARCHAR(128) NULL'
            ')'
        )
        dbc.execute('CREATE TABLE `new_tf2idb_qualities` (`name` VARCHAR(128) PRIMARY KEY NOT NULL , `value` INT NOT NULL )')
        # Per-run suffix so index names never collide with leftovers from a
        # previous import run.
        nonce = int(time())
        dbc.execute('CREATE INDEX `tf2idb_item_attributes_%i` ON `new_tf2idb_item_attributes` (`attribute` ASC)' % nonce)
        dbc.execute('CREATE INDEX `tf2idb_class_%i` ON `new_tf2idb_class` (`class` ASC)' % nonce)
        dbc.execute('CREATE INDEX `tf2idb_item_%i` ON `new_tf2idb_item` (`slot` ASC)' % nonce)
        # qualities
        for qname,qdata in data['qualities'].items():
            dbc.execute('INSERT INTO new_tf2idb_qualities (name, value) VALUES (%s,%s)', (qname, qdata['value']))
        # particles
        for particle_type, particle_list in data['attribute_controlled_attached_particles'].items():
            for k,v in particle_list.items():
                dbc.execute('INSERT INTO new_tf2idb_particles (id,name) VALUES (%s,%s)', (k, v['system']) ) #TODO add the other fields too
        # attributes — also build name -> (id, type) lookup for the item pass below
        attribute_type = {}
        for k,v in data['attributes'].items():
            at = v.get('attribute_type')
            if at:
                atype = at
            else:
                if v.get('stored_as_integer'):
                    atype = 'integer'
                else:
                    atype = 'float'
            attribute_type[v['name'].lower()] = (k, atype)
            dbc.execute('INSERT INTO new_tf2idb_attributes (id,name,attribute_class,attribute_type,description_string,description_format,effect_type,hidden,stored_as_integer,armory_desc,is_set_bonus, is_user_generated,can_affect_recipe_component_name,apply_tag_to_item_definition) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
                (k,v.get('name'),v.get('attribute_class'),v.get('attribute_type'),v.get('description_string'),v.get('description_format'),
                v.get('effect_type'),v.get('hidden'),v.get('stored_as_integer'),v.get('armory_desc'),v.get('is_set_bonus'),
                v.get('is_user_generated'),v.get('can_affect_recipe_component_name'),v.get('apply_tag_to_item_definition')
                )
            )
        # conflicts
        for k,v in data['equip_conflicts'].items():
            for region in v.keys():
                dbc.execute('INSERT INTO new_tf2idb_equip_conflicts (name,region) VALUES (%s,%s)', (k, region))
        # items
        for id,v in data['items'].items():
            if id == 'default':
                continue
            i, prefabs_used = resolve_prefabs(v, data['prefabs'])
            baseitem = 'baseitem' in i
            try:
                tool = None
                if 'tool' in i:
                    tool = i['tool'].get('type')
                has_string_attribute = False
                if 'static_attrs' in i:
                    for name,value in i['static_attrs'].items():
                        aid,atype = attribute_type[name.lower()]
                        if atype == 'string':
                            has_string_attribute = True
                        dbc.execute('INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (%s,%s,%s,%s)', (id,aid,value,1))
                if 'attributes' in i:
                    for name,info in i['attributes'].items():
                        aid,atype = attribute_type[name.lower()]
                        if atype == 'string':
                            has_string_attribute = True
                        dbc.execute('INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (%s,%s,%s,%s)', (id,aid,info['value'],0))
                dbc.execute('INSERT INTO new_tf2idb_item '
                    '(id,name,item_name,class,slot,quality,tool_type,min_ilevel,max_ilevel,baseitem,holiday_restriction,has_string_attribute,propername) '
                    'VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
                    (id,i['name'],i.get('item_name'),i['item_class'],i.get('item_slot'),i.get('item_quality', ''), tool, i.get('min_ilevel'), i.get('max_ilevel'),baseitem,
                    i.get('holiday_restriction'), has_string_attribute, i.get('propername'))
                )
                if 'used_by_classes' in i:
                    for prof, val in i['used_by_classes'].items():
                        dbc.execute('INSERT INTO new_tf2idb_class (id,class,slot) VALUES (%s,%s,%s)', (id, prof.lower(), val if val != '1' else None))
                region_field = i.get('equip_region') or i.get('equip_regions')
                if region_field:
                    if type(region_field) is str:
                        region_field = {region_field: 1}
                    for region in region_field.keys():
                        dbc.execute('INSERT INTO new_tf2idb_equip_regions (id,region) VALUES (%s,%s)', (id, region))
                # capabilities — a "!" prefix marks a capability explicitly disabled on the item
                for capability,val in i.get('capabilities', {}).items():
                    dbc.execute('INSERT INTO new_tf2idb_capabilities (id,capability) VALUES (%s,%s)', (id, (capability if val != '0' else '!'+capability)))
            except:
                # Report which item id failed before re-raising.
                traceback.print_exc()
                print(id)
                raise
        def replace_table(name):
            # Swap a staging table into place: drop the old, rename new_<name>.
            dbc.execute('DROP TABLE IF EXISTS %s' % name)
            dbc.execute('ALTER TABLE new_%s RENAME TO %s' % (name,name))
        replace_table('tf2idb_class')
        replace_table('tf2idb_item_attributes')
        replace_table('tf2idb_item')
        replace_table('tf2idb_particles')
        replace_table('tf2idb_equip_conflicts')
        replace_table('tf2idb_equip_regions')
        replace_table('tf2idb_capabilities')
        replace_table('tf2idb_attributes')
        replace_table('tf2idb_qualities')
        # NOTE(review): requires DB_CONNECTOR to expose commit(); verify the
        # connector wrapper actually provides that method.
        dbc.commit()
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # argparse ignores default= on required positionals, so the original
    # dead `default=""` values are removed; parsing behavior is unchanged.
    parser.add_argument("tf_directory", type=str)
    parser.add_argument("--host", type=str, default="localhost")
    parser.add_argument("--port", type=int, default=3306)
    parser.add_argument("db", type=str)
    parser.add_argument("user", type=str)
    # NOTE(review): a password on the command line is visible in `ps` output
    # and shell history; consider an env var or interactive prompt instead.
    parser.add_argument("password", type=str)
    args = parser.parse_args()
    c = DB_Connector(args.host, args.port, args.db, args.user, args.password)
    tf2idb(args.tf_directory, c)  # side-effecting constructor rebuilds the tables
Loading…
Cancel
Save