diff --git a/app/classes/controllers/servers_controller.py b/app/classes/controllers/servers_controller.py index c6596cd1..ad17494d 100644 --- a/app/classes/controllers/servers_controller.py +++ b/app/classes/controllers/servers_controller.py @@ -36,6 +36,14 @@ class Servers_Controller: def create_server(name: str, server_uuid: str, server_dir: str, backup_path: str, server_command: str, server_file: str, server_log_file: str, server_stop: str, server_port=25565): return servers_helper.create_server(name, server_uuid, server_dir, backup_path, server_command, server_file, server_log_file, server_stop, server_port) + @staticmethod + def get_server_obj(server_id): + return servers_helper.get_server_obj(server_id) + + @staticmethod + def update_server(server_obj): + return servers_helper.update_server(server_obj) + @staticmethod def remove_server(server_id): roles_list = server_permissions.get_roles_from_server(server_id) @@ -146,7 +154,7 @@ class Servers_Controller: path = os.path.join(server_path, 'banned-players.json') try: - with open(path) as file: + with open(helper.get_os_understandable_path(path)) as file: content = file.read() file.close() except Exception as ex: @@ -170,7 +178,7 @@ class Servers_Controller: )) for log_file in log_files: log_file_path = os.path.join(logs_path, log_file) - if self.check_file_exists(log_file_path) and \ - self.is_file_older_than_x_days(log_file_path, logs_delete_after): + if helper.check_file_exists(log_file_path) and \ + helper.is_file_older_than_x_days(log_file_path, logs_delete_after): os.remove(log_file_path) diff --git a/app/classes/minecraft/server_props.py b/app/classes/minecraft/server_props.py index 89c24c99..356fac67 100644 --- a/app/classes/minecraft/server_props.py +++ b/app/classes/minecraft/server_props.py @@ -1,66 +1,66 @@ -import pprint -import os - -class ServerProps: - - def __init__(self, filepath): - self.filepath = filepath - self.props = self._parse() - - def _parse(self): - """Loads and parses the file speified in self.filepath""" - with open(self.filepath) as fp: - line = fp.readline() - d = {} - if os.path.exists(".header"): - os.remove(".header") - while line: - if '#' != line[0]: - s = line - s1 = s[:s.find('=')] - if '\n' in s: - s2 = s[s.find('=')+1:s.find('\n')] - else: - s2 = s[s.find('=')+1:] - d[s1] = s2 - else: - with open(".header", "a+") as h: - h.write(line) - line = fp.readline() - return d - - def print(self): - """Prints the properties dictionary (using pprint)""" - pprint.pprint(self.props) - - def get(self): - """Returns the properties dictionary""" - return self.props - - def update(self, key, val): - """Updates property in the properties dictionary [ update("pvp", "true") ] and returns boolean condition""" - if key in self.props.keys(): - self.props[key] = val - return True - else: - return False - - def save(self): - """Writes to the new file""" - with open(self.filepath, "a+") as f: - f.truncate(0) - with open(".header") as header: - line = header.readline() - while line: - f.write(line) - line = header.readline() - header.close() - for key, value in self.props.items(): - f.write(key + "=" + value + "\n") - if os.path.exists(".header"): - os.remove(".header") - - @staticmethod - def cleanup(): - if os.path.exists(".header"): - os.remove(".header") +import pprint +import os + +class ServerProps: + + def __init__(self, filepath): + self.filepath = filepath + self.props = self._parse() + + def _parse(self): + """Loads and parses the file specified in self.filepath""" + with open(self.filepath) as fp: + line = 
fp.readline() + d = {} + if os.path.exists(".header"): + os.remove(".header") + while line: + if '#' != line[0]: + s = line + s1 = s[:s.find('=')] + if '\n' in s: + s2 = s[s.find('=')+1:s.find('\n')] + else: + s2 = s[s.find('=')+1:] + d[s1] = s2 + else: + with open(".header", "a+") as h: + h.write(line) + line = fp.readline() + return d + + def print(self): + """Prints the properties dictionary (using pprint)""" + pprint.pprint(self.props) + + def get(self): + """Returns the properties dictionary""" + return self.props + + def update(self, key, val): + """Updates property in the properties dictionary [ update("pvp", "true") ] and returns boolean condition""" + if key in self.props.keys(): + self.props[key] = val + return True + else: + return False + + def save(self): + """Writes to the new file""" + with open(self.filepath, "a+") as f: + f.truncate(0) + with open(".header") as header: + line = header.readline() + while line: + f.write(line) + line = header.readline() + header.close() + for key, value in self.props.items(): + f.write(key + "=" + value + "\n") + if os.path.exists(".header"): + os.remove(".header") + + @staticmethod + def cleanup(): + if os.path.exists(".header"): + os.remove(".header") diff --git a/app/classes/minecraft/stats.py b/app/classes/minecraft/stats.py index 4b9c07fa..ee632cf6 100644 --- a/app/classes/minecraft/stats.py +++ b/app/classes/minecraft/stats.py @@ -92,7 +92,7 @@ class Stats: # print(templ % ("Device", "Total", "Used", "Free", "Use ", "Type","Mount")) for part in psutil.disk_partitions(all=False): - if os.name == 'nt': + if helper.is_os_windows(): if 'cdrom' in part.opts or part.fstype == '': # skip cd-rom drives with no disk in it; they may raise # ENOENT, pop-up a Windows GUI error for a non-ready @@ -163,7 +163,7 @@ class Stats: } return ping_data - + def get_server_players(self, server_id): server = servers_helper.get_server_data_by_id(server_id) @@ -257,7 +257,7 @@ class Stats: server_stats_list.append(server_stats) return server_stats_list - + def get_raw_server_stats(self, server_id): server_stats = {} diff --git a/app/classes/models/management.py b/app/classes/models/management.py index 4e2fedef..1146068b 100644 --- a/app/classes/models/management.py +++ b/app/classes/models/management.py @@ -132,7 +132,7 @@ class Backups(Model): database = database class helpers_management: - + #************************************************************************************************ # Host_Stats Methods #************************************************************************************************ @@ -165,7 +165,7 @@ class helpers_management: Commands.update({ Commands.executed: True }).where(Commands.command_id == command_id).execute() - + #************************************************************************************************ # Audit_Log Methods #************************************************************************************************ @@ -173,7 +173,7 @@ class helpers_management: def get_actity_log(): q = Audit_Log.select() return db_helper.return_db_rows(q) - + @staticmethod def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None): logger.debug("Adding to audit log User:{} - Message: {} ".format(user_id, log_msg)) diff --git a/app/classes/models/servers.py b/app/classes/models/servers.py index 3d80ae54..24ad7391 100644 --- a/app/classes/models/servers.py +++ b/app/classes/models/servers.py @@ -88,7 +88,7 @@ class Server_Stats(Model): # Servers Class 
#************************************************************************************************ class helper_servers: - + #************************************************************************************************ # Generic Servers Methods #************************************************************************************************ @@ -109,6 +109,15 @@ class helper_servers: Servers.backup_path: backup_path }).execute() + + @staticmethod + def get_server_obj(server_id): + return Servers.get_by_id(server_id) + + @staticmethod + def update_server(server_obj): + return server_obj.save() + @staticmethod def remove_server(server_id): with database.atomic(): @@ -186,7 +195,7 @@ class helper_servers: if (time_limit == -1) or (ttl_no_players > time_limit): can = True return can - + @staticmethod def set_waiting_start(server_id, value): try: diff --git a/app/classes/shared/helpers.py b/app/classes/shared/helpers.py index b02e1d8e..cf89088a 100644 --- a/app/classes/shared/helpers.py +++ b/app/classes/shared/helpers.py @@ -463,7 +463,7 @@ class Helpers: except Exception as e: logger.error("Failed to locate existing session.lock with error: {} ".format(e)) console.error("Failed to locate existing session.lock with error: {} ".format(e)) - + sys.exit(1) @@ -601,6 +601,20 @@ class Helpers: else: return False + @staticmethod + def wtol_path(w_path): + l_path = w_path.replace('\\', '/') + return l_path + + @staticmethod + def ltow_path(l_path): + w_path = l_path.replace('/', '\\') + return w_path + + @staticmethod + def get_os_understandable_path(path): + return os.path.normpath(path) + def find_default_password(self): default_file = os.path.join(self.root_dir, "default.json") data = {} diff --git a/app/classes/shared/main_controller.py b/app/classes/shared/main_controller.py index c777fb0c..ef5bc17f 100644 --- a/app/classes/shared/main_controller.py +++ b/app/classes/shared/main_controller.py @@ -69,7 +69,7 @@ class Controller: continue # if this server path no longer exists - let's warn and bomb out - if not helper.check_path_exists(s['path']): + if not helper.check_path_exists(helper.get_os_understandable_path(s['path'])): logger.warning("Unable to find server {} at path {}. 
Skipping this server".format(s['server_name'], s['path'])) @@ -77,7 +77,7 @@ class Controller: s['path'])) continue - settings_file = os.path.join(s['path'], 'server.properties') + settings_file = os.path.join(helper.get_os_understandable_path(s['path']), 'server.properties') # if the properties file isn't there, let's warn if not helper.check_file_exists(settings_file): @@ -228,6 +228,7 @@ class Controller: @staticmethod def verify_jar_server( server_path: str, server_jar: str): + server_path = helper.get_os_understandable_path(server_path) path_check = helper.check_path_exists(server_path) jar_check = helper.check_file_exists(os.path.join(server_path, server_jar)) if not path_check or not jar_check: @@ -236,6 +237,7 @@ class Controller: @staticmethod def verify_zip_server(zip_path: str): + zip_path = helper.get_os_understandable_path(zip_path) zip_check = helper.check_file_exists(zip_path) if not zip_check: return False @@ -248,6 +250,7 @@ class Controller: helper.ensure_dir_exists(new_server_dir) helper.ensure_dir_exists(backup_path) + server_path = helper.get_os_understandable_path(server_path) dir_util.copy_tree(server_path, new_server_dir) full_jar_path = os.path.join(new_server_dir, server_jar) @@ -265,6 +268,7 @@ class Controller: server_id = helper.create_uuid() new_server_dir = os.path.join(helper.servers_dir, server_id) backup_path = os.path.join(helper.backup_path, server_id) + zip_path = helper.get_os_understandable_path(zip_path) if helper.check_file_perms(zip_path): helper.ensure_dir_exists(new_server_dir) @@ -351,7 +355,7 @@ class Controller: if running: self.stop_server(server_id) if files: - shutil.rmtree(self.servers.get_server_data_by_id(server_id)['path']) + shutil.rmtree(helper.get_os_understandable_path(self.servers.get_server_data_by_id(server_id)['path'])) # remove the server from the DB self.servers.remove_server(server_id) diff --git a/app/classes/shared/migration.py b/app/classes/shared/migration.py index 9a9d821e..8baa4673 100644 --- a/app/classes/shared/migration.py +++ b/app/classes/shared/migration.py @@ -1,536 +1,536 @@ -from datetime import datetime -import logging -import typing as t -import sys -import os -import re -from importlib import import_module -from functools import wraps - -try: - from functools import cached_property -except ImportError: - from cached_property import cached_property - -from app.classes.shared.helpers import helper -from app.classes.shared.console import console - -logger = logging.getLogger(__name__) - -try: - import peewee - from playhouse.migrate import ( - SchemaMigrator as ScM, - SqliteMigrator as SqM, - Operation, SQL, operation, SqliteDatabase, - make_index_name, Context - ) - -except ModuleNotFoundError as e: - logger.critical("Import Error: Unable to load {} module".format( - e.name), exc_info=True) - console.critical("Import Error: Unable to load {} module".format(e.name)) - sys.exit(1) - - -class MigrateHistory(peewee.Model): - """ - Presents the migration history in a database. - """ - - name = peewee.CharField(unique=True) - migrated_at = peewee.DateTimeField(default=datetime.utcnow) - - def __unicode__(self) -> str: - """ - String representation of this migration - """ - return self.name - - -MIGRATE_TABLE = 'migratehistory' -MIGRATE_TEMPLATE = '''# Generated by database migrator - - -def migrate(migrator, database, **kwargs): - """ - Write your migrations here. - """ -{migrate} - - -def rollback(migrator, database, **kwargs): - """ - Write your rollback migrations here. 
- """ -{rollback}''' -VOID: t.Callable = lambda m, d: None - - -def get_model(method): - """ - Convert string to model class. - """ - - @wraps(method) - def wrapper(migrator, model, *args, **kwargs): - if isinstance(model, str): - return method(migrator, migrator.orm[model], *args, **kwargs) - return method(migrator, model, *args, **kwargs) - return wrapper - - -class Migrator(object): - def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]): - """ - Initializes the migrator - """ - if isinstance(database, peewee.Proxy): - database = database.obj - self.database: SqliteDatabase = database - self.orm: t.Dict[str, peewee.Model] = {} - self.operations: t.List[Operation] = [] - self.migrator = SqliteMigrator(database) - - def run(self): - """ - Runs operations. - """ - for op in self.operations: - if isinstance(op, Operation): - op.run() - else: - op() - self.clean() - - def clean(self): - """ - Cleans the operations. - """ - self.operations = list() - - def sql(self, sql: str, *params): - """ - Executes raw SQL. - """ - self.operations.append(self.migrator.sql(sql, *params)) - - def create_table(self, model: peewee.Model) -> peewee.Model: - """ - Creates model and table in database. - """ - self.orm[model._meta.table_name] = model - model._meta.database = self.database - self.operations.append(model.create_table) - return model - - @get_model - def drop_table(self, model: peewee.Model): - """ - Drops model and table from database. - """ - del self.orm[model._meta.table_name] - self.operations.append(self.migrator.drop_table(model)) - - @get_model - def add_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model: - """ - Creates new fields. - """ - for name, field in fields.items(): - model._meta.add_field(name, field) - self.operations.append(self.migrator.add_column( - model._meta.table_name, field.column_name, field)) - if field.unique: - self.operations.append(self.migrator.add_index( - model._meta.table_name, (field.column_name,), unique=True)) - return model - - @get_model - def change_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model: - """ - Changes fields. 
- """ - for name, field in fields.items(): - old_field = model._meta.fields.get(name, field) - old_column_name = old_field and old_field.column_name - - model._meta.add_field(name, field) - - if isinstance(old_field, peewee.ForeignKeyField): - self.operations.append(self.migrator.drop_foreign_key_constraint( - model._meta.table_name, old_column_name)) - - if old_column_name != field.column_name: - self.operations.append( - self.migrator.rename_column( - model._meta.table_name, old_column_name, field.column_name)) - - if isinstance(field, peewee.ForeignKeyField): - on_delete = field.on_delete if field.on_delete else 'RESTRICT' - on_update = field.on_update if field.on_update else 'RESTRICT' - self.operations.append(self.migrator.add_foreign_key_constraint( - model._meta.table_name, field.column_name, - field.rel_model._meta.table_name, field.rel_field.name, - on_delete, on_update)) - continue - - self.operations.append(self.migrator.change_column( - model._meta.table_name, field.column_name, field)) - - if field.unique == old_field.unique: - continue - - if field.unique: - index = (field.column_name,), field.unique - self.operations.append(self.migrator.add_index( - model._meta.table_name, *index)) - model._meta.indexes.append(index) - else: - index = (field.column_name,), old_field.unique - self.operations.append(self.migrator.drop_index( - model._meta.table_name, *index)) - model._meta.indexes.remove(index) - - return model - - @get_model - def drop_columns(self, model: peewee.Model, names: str, **kwargs) -> peewee.Model: - """ - Removes fields from model. - """ - fields = [field for field in model._meta.fields.values() - if field.name in names] - cascade = kwargs.pop('cascade', True) - for field in fields: - self.__del_field__(model, field) - if field.unique: - index_name = make_index_name( - model._meta.table_name, [field.column_name]) - self.operations.append(self.migrator.drop_index( - model._meta.table_name, index_name)) - self.operations.append( - self.migrator.drop_column( - model._meta.table_name, field.column_name, cascade=False)) - return model - - def __del_field__(self, model: peewee.Model, field: peewee.Field): - """ - Deletes field from model. - """ - model._meta.remove_field(field.name) - delattr(model, field.name) - if isinstance(field, peewee.ForeignKeyField): - obj_id_name = field.column_name - if field.column_name == field.name: - obj_id_name += '_id' - delattr(model, obj_id_name) - delattr(field.rel_model, field.backref) - - @get_model - def rename_column(self, model: peewee.Model, old_name: str, new_name: str) -> peewee.Model: - """ - Renames field in model. - """ - field = model._meta.fields[old_name] - if isinstance(field, peewee.ForeignKeyField): - old_name = field.column_name - self.__del_field__(model, field) - field.name = field.column_name = new_name - model._meta.add_field(new_name, field) - if isinstance(field, peewee.ForeignKeyField): - field.column_name = new_name = field.column_name + '_id' - self.operations.append(self.migrator.rename_column( - model._meta.table_name, old_name, new_name)) - return model - - @get_model - def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model: - """ - Renames table in database. 
- """ - old_name = model._meta.table_name - del self.orm[model._meta.table_name] - model._meta.table_name = new_name - self.orm[model._meta.table_name] = model - self.operations.append(self.migrator.rename_table(old_name, new_name)) - return model - - @get_model - def add_index(self, model: peewee.Model, *columns: str, **kwargs) -> peewee.Model: - """Create indexes.""" - unique = kwargs.pop('unique', False) - model._meta.indexes.append((columns, unique)) - columns_ = [] - for col in columns: - field = model._meta.fields.get(col) - - if len(columns) == 1: - field.unique = unique - field.index = not unique - - if isinstance(field, peewee.ForeignKeyField): - col = col + '_id' - - columns_.append(col) - self.operations.append(self.migrator.add_index( - model._meta.table_name, columns_, unique=unique)) - return model - - @get_model - def drop_index(self, model: peewee.Model, *columns: str) -> peewee.Model: - """Drop indexes.""" - columns_ = [] - for col in columns: - field = model._meta.fields.get(col) - if not field: - continue - - if len(columns) == 1: - field.unique = field.index = False - - if isinstance(field, peewee.ForeignKeyField): - col = col + '_id' - columns_.append(col) - index_name = make_index_name(model._meta.table_name, columns_) - model._meta.indexes = [(cols, _) for ( - cols, _) in model._meta.indexes if columns != cols] - self.operations.append(self.migrator.drop_index( - model._meta.table_name, index_name)) - return model - - @get_model - def add_not_null(self, model: peewee.Model, *names: str) -> peewee.Model: - """Add not null.""" - for name in names: - field = model._meta.fields[name] - field.null = False - self.operations.append(self.migrator.add_not_null( - model._meta.table_name, field.column_name)) - return model - - @get_model - def drop_not_null(self, model: peewee.Model, *names: str) -> peewee.Model: - """Drop not null.""" - for name in names: - field = model._meta.fields[name] - field.null = True - self.operations.append(self.migrator.drop_not_null( - model._meta.table_name, field.column_name)) - return model - - @get_model - def add_default(self, model: peewee.Model, name: str, default: t.Any) -> peewee.Model: - """Add default.""" - field = model._meta.fields[name] - model._meta.defaults[field] = field.default = default - self.operations.append(self.migrator.apply_default( - model._meta.table_name, name, field)) - return model - - -class SqliteMigrator(SqM): - def drop_table(self, model): - return lambda: model.drop_table(cascade=False) - - @operation - def change_column(self, table: str, column_name: str, field: peewee.Field): - operations = [self.alter_change_column(table, column_name, field)] - if not field.null: - operations.extend([self.add_not_null(table, column_name)]) - return operations - - def alter_change_column(self, table: str, column_name: str, field: peewee.Field) -> Operation: - return self._update_column(table, column_name, lambda x, y: y) - - @operation - def sql(self, sql: str, *params) -> SQL: - """ - Executes raw SQL. - """ - return SQL(sql, *params) - - def alter_add_column( - self, table: str, column_name: str, field: peewee.Field, **kwargs) -> Operation: - """ - Fixes field name for ForeignKeys. 
- """ - name = field.name - op = super().alter_add_column( - table, column_name, field, **kwargs) - if isinstance(field, peewee.ForeignKeyField): - field.name = name - return op - - -class MigrationManager(object): - - filemask = re.compile(r"[\d]+_[^\.]+\.py$") - - def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]): - """ - Initializes the migration manager. - """ - if not isinstance(database, (peewee.Database, peewee.Proxy)): - raise RuntimeError('Invalid database: {}'.format(database)) - self.database = database - - @cached_property - def model(self) -> peewee.Model: - """ - Initialize and cache the MigrationHistory model. - """ - MigrateHistory._meta.database = self.database - MigrateHistory._meta.table_name = 'migratehistory' - MigrateHistory._meta.schema = None - MigrateHistory.create_table(True) - return MigrateHistory - - @property - def done(self) -> t.List[str]: - """ - Scans migrations in the database. - """ - return [mm.name for mm in self.model.select().order_by(self.model.id)] - - @property - def todo(self): - """ - Scans migrations in the file system. - """ - if not os.path.exists(helper.migration_dir): - logger.warning('Migration directory: {} does not exist.'.format( - helper.migration_dir)) - os.makedirs(helper.migration_dir) - return sorted(f[:-3] for f in os.listdir(helper.migration_dir) if self.filemask.match(f)) - - @property - def diff(self) -> t.List[str]: - """ - Calculates difference between the filesystem and the database. - """ - done = set(self.done) - return [name for name in self.todo if name not in done] - - @cached_property - def migrator(self) -> Migrator: - """ - Create migrator and setup it with fake migrations. - """ - migrator = Migrator(self.database) - for name in self.done: - self.up_one(name, migrator, True) - return migrator - - def compile(self, name, migrate='', rollback=''): - """ - Compiles a migration. - """ - name = datetime.utcnow().strftime('%Y%m%d%H%M%S') + '_' + name - filename = name + '.py' - path = os.path.join(helper.migration_dir, filename) - with open(path, 'w') as f: - f.write(MIGRATE_TEMPLATE.format( - migrate=migrate, rollback=rollback, name=filename)) - - return name - - def create(self, name: str = 'auto', auto: bool = False) -> t.Optional[str]: - """ - Creates a migration. - """ - migrate = rollback = '' - if auto: - raise NotImplementedError - - logger.info('Creating migration "{}"'.format(name)) - name = self.compile(name, migrate, rollback) - logger.info('Migration has been created as "{}"'.format(name)) - return name - - def clear(self): - """Clear migrations.""" - self.model.delete().execute() - - def up(self, name: t.Optional[str] = None): - """ - Runs all unapplied migrations. - """ - logger.info('Starting migrations') - console.info('Starting migrations') - - done = [] - diff = self.diff - if not diff: - logger.info('There is nothing to migrate') - console.info('There is nothing to migrate') - return done - - migrator = self.migrator - for mname in diff: - done.append(self.up_one(mname, self.migrator)) - if name and name == mname: - break - - return done - - def read(self, name: str): - """ - Reads a migration from a file. 
- """ - call_params = dict() - if os.name == 'nt' and sys.version_info >= (3, 0): - # if system is windows - force utf-8 encoding - call_params['encoding'] = 'utf-8' - with open(os.path.join(helper.migration_dir, name + '.py'), **call_params) as f: - code = f.read() - scope = {} - code = compile(code, '', 'exec', dont_inherit=True) - exec(code, scope, None) - return scope.get('migrate', VOID), scope.get('rollback', VOID) - - def up_one(self, name: str, migrator: Migrator, - fake: bool = False, rollback: bool = False) -> str: - """ - Runs a migration with a given name. - """ - try: - migrate_fn, rollback_fn = self.read(name) - if fake: - migrate_fn(migrator, self.database) - migrator.clean() - return name - with self.database.transaction(): - if rollback: - logger.info('Rolling back "{}"'.format(name)) - rollback_fn(migrator, self.database) - migrator.run() - self.model.delete().where(self.model.name == name).execute() - else: - logger.info('Migrate "{}"'.format(name)) - migrate_fn(migrator, self.database) - migrator.run() - if name not in self.done: - self.model.create(name=name) - - logger.info('Done "{}"'.format(name)) - return name - - except Exception: - self.database.rollback() - operation = 'Rollback' if rollback else 'Migration' - logger.exception('{} failed: {}'.format(operation, name)) - raise - - def down(self, name: t.Optional[str] = None): - """ - Rolls back migrations. - """ - if not self.done: - raise RuntimeError('No migrations are found.') - - name = self.done[-1] - - migrator = self.migrator - self.up_one(name, migrator, False, True) - logger.warning('Rolled back migration: {}'.format(name)) +from datetime import datetime +import logging +import typing as t +import sys +import os +import re +from importlib import import_module +from functools import wraps + +try: + from functools import cached_property +except ImportError: + from cached_property import cached_property + +from app.classes.shared.helpers import helper +from app.classes.shared.console import console + +logger = logging.getLogger(__name__) + +try: + import peewee + from playhouse.migrate import ( + SchemaMigrator as ScM, + SqliteMigrator as SqM, + Operation, SQL, operation, SqliteDatabase, + make_index_name, Context + ) + +except ModuleNotFoundError as e: + logger.critical("Import Error: Unable to load {} module".format( + e.name), exc_info=True) + console.critical("Import Error: Unable to load {} module".format(e.name)) + sys.exit(1) + + +class MigrateHistory(peewee.Model): + """ + Presents the migration history in a database. + """ + + name = peewee.CharField(unique=True) + migrated_at = peewee.DateTimeField(default=datetime.utcnow) + + def __unicode__(self) -> str: + """ + String representation of this migration + """ + return self.name + + +MIGRATE_TABLE = 'migratehistory' +MIGRATE_TEMPLATE = '''# Generated by database migrator + + +def migrate(migrator, database, **kwargs): + """ + Write your migrations here. + """ +{migrate} + + +def rollback(migrator, database, **kwargs): + """ + Write your rollback migrations here. + """ +{rollback}''' +VOID: t.Callable = lambda m, d: None + + +def get_model(method): + """ + Convert string to model class. 
+ """ + + @wraps(method) + def wrapper(migrator, model, *args, **kwargs): + if isinstance(model, str): + return method(migrator, migrator.orm[model], *args, **kwargs) + return method(migrator, model, *args, **kwargs) + return wrapper + + +class Migrator(object): + def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]): + """ + Initializes the migrator + """ + if isinstance(database, peewee.Proxy): + database = database.obj + self.database: SqliteDatabase = database + self.orm: t.Dict[str, peewee.Model] = {} + self.operations: t.List[Operation] = [] + self.migrator = SqliteMigrator(database) + + def run(self): + """ + Runs operations. + """ + for op in self.operations: + if isinstance(op, Operation): + op.run() + else: + op() + self.clean() + + def clean(self): + """ + Cleans the operations. + """ + self.operations = list() + + def sql(self, sql: str, *params): + """ + Executes raw SQL. + """ + self.operations.append(self.migrator.sql(sql, *params)) + + def create_table(self, model: peewee.Model) -> peewee.Model: + """ + Creates model and table in database. + """ + self.orm[model._meta.table_name] = model + model._meta.database = self.database + self.operations.append(model.create_table) + return model + + @get_model + def drop_table(self, model: peewee.Model): + """ + Drops model and table from database. + """ + del self.orm[model._meta.table_name] + self.operations.append(self.migrator.drop_table(model)) + + @get_model + def add_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model: + """ + Creates new fields. + """ + for name, field in fields.items(): + model._meta.add_field(name, field) + self.operations.append(self.migrator.add_column( + model._meta.table_name, field.column_name, field)) + if field.unique: + self.operations.append(self.migrator.add_index( + model._meta.table_name, (field.column_name,), unique=True)) + return model + + @get_model + def change_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model: + """ + Changes fields. 
+ """ + for name, field in fields.items(): + old_field = model._meta.fields.get(name, field) + old_column_name = old_field and old_field.column_name + + model._meta.add_field(name, field) + + if isinstance(old_field, peewee.ForeignKeyField): + self.operations.append(self.migrator.drop_foreign_key_constraint( + model._meta.table_name, old_column_name)) + + if old_column_name != field.column_name: + self.operations.append( + self.migrator.rename_column( + model._meta.table_name, old_column_name, field.column_name)) + + if isinstance(field, peewee.ForeignKeyField): + on_delete = field.on_delete if field.on_delete else 'RESTRICT' + on_update = field.on_update if field.on_update else 'RESTRICT' + self.operations.append(self.migrator.add_foreign_key_constraint( + model._meta.table_name, field.column_name, + field.rel_model._meta.table_name, field.rel_field.name, + on_delete, on_update)) + continue + + self.operations.append(self.migrator.change_column( + model._meta.table_name, field.column_name, field)) + + if field.unique == old_field.unique: + continue + + if field.unique: + index = (field.column_name,), field.unique + self.operations.append(self.migrator.add_index( + model._meta.table_name, *index)) + model._meta.indexes.append(index) + else: + index = (field.column_name,), old_field.unique + self.operations.append(self.migrator.drop_index( + model._meta.table_name, *index)) + model._meta.indexes.remove(index) + + return model + + @get_model + def drop_columns(self, model: peewee.Model, names: str, **kwargs) -> peewee.Model: + """ + Removes fields from model. + """ + fields = [field for field in model._meta.fields.values() + if field.name in names] + cascade = kwargs.pop('cascade', True) + for field in fields: + self.__del_field__(model, field) + if field.unique: + index_name = make_index_name( + model._meta.table_name, [field.column_name]) + self.operations.append(self.migrator.drop_index( + model._meta.table_name, index_name)) + self.operations.append( + self.migrator.drop_column( + model._meta.table_name, field.column_name, cascade=False)) + return model + + def __del_field__(self, model: peewee.Model, field: peewee.Field): + """ + Deletes field from model. + """ + model._meta.remove_field(field.name) + delattr(model, field.name) + if isinstance(field, peewee.ForeignKeyField): + obj_id_name = field.column_name + if field.column_name == field.name: + obj_id_name += '_id' + delattr(model, obj_id_name) + delattr(field.rel_model, field.backref) + + @get_model + def rename_column(self, model: peewee.Model, old_name: str, new_name: str) -> peewee.Model: + """ + Renames field in model. + """ + field = model._meta.fields[old_name] + if isinstance(field, peewee.ForeignKeyField): + old_name = field.column_name + self.__del_field__(model, field) + field.name = field.column_name = new_name + model._meta.add_field(new_name, field) + if isinstance(field, peewee.ForeignKeyField): + field.column_name = new_name = field.column_name + '_id' + self.operations.append(self.migrator.rename_column( + model._meta.table_name, old_name, new_name)) + return model + + @get_model + def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model: + """ + Renames table in database. 
+ """ + old_name = model._meta.table_name + del self.orm[model._meta.table_name] + model._meta.table_name = new_name + self.orm[model._meta.table_name] = model + self.operations.append(self.migrator.rename_table(old_name, new_name)) + return model + + @get_model + def add_index(self, model: peewee.Model, *columns: str, **kwargs) -> peewee.Model: + """Create indexes.""" + unique = kwargs.pop('unique', False) + model._meta.indexes.append((columns, unique)) + columns_ = [] + for col in columns: + field = model._meta.fields.get(col) + + if len(columns) == 1: + field.unique = unique + field.index = not unique + + if isinstance(field, peewee.ForeignKeyField): + col = col + '_id' + + columns_.append(col) + self.operations.append(self.migrator.add_index( + model._meta.table_name, columns_, unique=unique)) + return model + + @get_model + def drop_index(self, model: peewee.Model, *columns: str) -> peewee.Model: + """Drop indexes.""" + columns_ = [] + for col in columns: + field = model._meta.fields.get(col) + if not field: + continue + + if len(columns) == 1: + field.unique = field.index = False + + if isinstance(field, peewee.ForeignKeyField): + col = col + '_id' + columns_.append(col) + index_name = make_index_name(model._meta.table_name, columns_) + model._meta.indexes = [(cols, _) for ( + cols, _) in model._meta.indexes if columns != cols] + self.operations.append(self.migrator.drop_index( + model._meta.table_name, index_name)) + return model + + @get_model + def add_not_null(self, model: peewee.Model, *names: str) -> peewee.Model: + """Add not null.""" + for name in names: + field = model._meta.fields[name] + field.null = False + self.operations.append(self.migrator.add_not_null( + model._meta.table_name, field.column_name)) + return model + + @get_model + def drop_not_null(self, model: peewee.Model, *names: str) -> peewee.Model: + """Drop not null.""" + for name in names: + field = model._meta.fields[name] + field.null = True + self.operations.append(self.migrator.drop_not_null( + model._meta.table_name, field.column_name)) + return model + + @get_model + def add_default(self, model: peewee.Model, name: str, default: t.Any) -> peewee.Model: + """Add default.""" + field = model._meta.fields[name] + model._meta.defaults[field] = field.default = default + self.operations.append(self.migrator.apply_default( + model._meta.table_name, name, field)) + return model + + +class SqliteMigrator(SqM): + def drop_table(self, model): + return lambda: model.drop_table(cascade=False) + + @operation + def change_column(self, table: str, column_name: str, field: peewee.Field): + operations = [self.alter_change_column(table, column_name, field)] + if not field.null: + operations.extend([self.add_not_null(table, column_name)]) + return operations + + def alter_change_column(self, table: str, column_name: str, field: peewee.Field) -> Operation: + return self._update_column(table, column_name, lambda x, y: y) + + @operation + def sql(self, sql: str, *params) -> SQL: + """ + Executes raw SQL. + """ + return SQL(sql, *params) + + def alter_add_column( + self, table: str, column_name: str, field: peewee.Field, **kwargs) -> Operation: + """ + Fixes field name for ForeignKeys. 
+ """ + name = field.name + op = super().alter_add_column( + table, column_name, field, **kwargs) + if isinstance(field, peewee.ForeignKeyField): + field.name = name + return op + + +class MigrationManager(object): + + filemask = re.compile(r"[\d]+_[^\.]+\.py$") + + def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]): + """ + Initializes the migration manager. + """ + if not isinstance(database, (peewee.Database, peewee.Proxy)): + raise RuntimeError('Invalid database: {}'.format(database)) + self.database = database + + @cached_property + def model(self) -> peewee.Model: + """ + Initialize and cache the MigrationHistory model. + """ + MigrateHistory._meta.database = self.database + MigrateHistory._meta.table_name = 'migratehistory' + MigrateHistory._meta.schema = None + MigrateHistory.create_table(True) + return MigrateHistory + + @property + def done(self) -> t.List[str]: + """ + Scans migrations in the database. + """ + return [mm.name for mm in self.model.select().order_by(self.model.id)] + + @property + def todo(self): + """ + Scans migrations in the file system. + """ + if not os.path.exists(helper.migration_dir): + logger.warning('Migration directory: {} does not exist.'.format( + helper.migration_dir)) + os.makedirs(helper.migration_dir) + return sorted(f[:-3] for f in os.listdir(helper.migration_dir) if self.filemask.match(f)) + + @property + def diff(self) -> t.List[str]: + """ + Calculates difference between the filesystem and the database. + """ + done = set(self.done) + return [name for name in self.todo if name not in done] + + @cached_property + def migrator(self) -> Migrator: + """ + Create migrator and setup it with fake migrations. + """ + migrator = Migrator(self.database) + for name in self.done: + self.up_one(name, migrator, True) + return migrator + + def compile(self, name, migrate='', rollback=''): + """ + Compiles a migration. + """ + name = datetime.utcnow().strftime('%Y%m%d%H%M%S') + '_' + name + filename = name + '.py' + path = os.path.join(helper.migration_dir, filename) + with open(path, 'w') as f: + f.write(MIGRATE_TEMPLATE.format( + migrate=migrate, rollback=rollback, name=filename)) + + return name + + def create(self, name: str = 'auto', auto: bool = False) -> t.Optional[str]: + """ + Creates a migration. + """ + migrate = rollback = '' + if auto: + raise NotImplementedError + + logger.info('Creating migration "{}"'.format(name)) + name = self.compile(name, migrate, rollback) + logger.info('Migration has been created as "{}"'.format(name)) + return name + + def clear(self): + """Clear migrations.""" + self.model.delete().execute() + + def up(self, name: t.Optional[str] = None): + """ + Runs all unapplied migrations. + """ + logger.info('Starting migrations') + console.info('Starting migrations') + + done = [] + diff = self.diff + if not diff: + logger.info('There is nothing to migrate') + console.info('There is nothing to migrate') + return done + + migrator = self.migrator + for mname in diff: + done.append(self.up_one(mname, self.migrator)) + if name and name == mname: + break + + return done + + def read(self, name: str): + """ + Reads a migration from a file. 
+ """ + call_params = dict() + if helper.is_os_windows() and sys.version_info >= (3, 0): + # if system is windows - force utf-8 encoding + call_params['encoding'] = 'utf-8' + with open(os.path.join(helper.migration_dir, name + '.py'), **call_params) as f: + code = f.read() + scope = {} + code = compile(code, '', 'exec', dont_inherit=True) + exec(code, scope, None) + return scope.get('migrate', VOID), scope.get('rollback', VOID) + + def up_one(self, name: str, migrator: Migrator, + fake: bool = False, rollback: bool = False) -> str: + """ + Runs a migration with a given name. + """ + try: + migrate_fn, rollback_fn = self.read(name) + if fake: + migrate_fn(migrator, self.database) + migrator.clean() + return name + with self.database.transaction(): + if rollback: + logger.info('Rolling back "{}"'.format(name)) + rollback_fn(migrator, self.database) + migrator.run() + self.model.delete().where(self.model.name == name).execute() + else: + logger.info('Migrate "{}"'.format(name)) + migrate_fn(migrator, self.database) + migrator.run() + if name not in self.done: + self.model.create(name=name) + + logger.info('Done "{}"'.format(name)) + return name + + except Exception: + self.database.rollback() + operation = 'Rollback' if rollback else 'Migration' + logger.exception('{} failed: {}'.format(operation, name)) + raise + + def down(self, name: t.Optional[str] = None): + """ + Rolls back migrations. + """ + if not self.done: + raise RuntimeError('No migrations are found.') + + name = self.done[-1] + + migrator = self.migrator + self.up_one(name, migrator, False, True) + logger.warning('Rolled back migration: {}'.format(name)) diff --git a/app/classes/shared/server.py b/app/classes/shared/server.py index 1b4ad0c0..1e1de11f 100644 --- a/app/classes/shared/server.py +++ b/app/classes/shared/server.py @@ -157,9 +157,9 @@ class Server: def setup_server_run_command(self): # configure the server - server_exec_path = self.settings['executable'] + server_exec_path = helper.get_os_understandable_path(self.settings['executable']) self.server_command = helper.cmdparse(self.settings['execution_command']) - self.server_path = self.settings['path'] + self.server_path = helper.get_os_understandable_path(self.settings['path']) # let's do some quick checking to make sure things actually exists full_path = os.path.join(self.server_path, server_exec_path) @@ -193,7 +193,7 @@ class Server: logger.info("Launching Server {} with command {}".format(self.name, self.server_command)) console.info("Launching Server {} with command {}".format(self.name, self.server_command)) - if os.name == "nt": + if helper.is_os_windows(): logger.info("Windows Detected") creationflags=subprocess.CREATE_NEW_CONSOLE else: @@ -201,7 +201,7 @@ class Server: creationflags=None logger.info("Starting server in {p} with command: {c}".format(p=self.server_path, c=self.server_command)) - + servers_helper.set_waiting_start(self.server_id, False) try: self.process = subprocess.Popen(self.server_command, cwd=self.server_path, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) @@ -379,7 +379,7 @@ class Server: return self.process.pid else: return None - + def detect_crash(self): logger.info("Detecting possible crash for server: {} ".format(self.name)) @@ -430,7 +430,7 @@ class Server: backup_thread = threading.Thread(target=self.a_backup_server, daemon=True, name=f"backup_{self.name}") logger.info("Starting Backup Thread for server {}.".format(self.settings['server_name'])) if self.server_path == None: - self.server_path = 
self.settings['path'] + self.server_path = helper.get_os_understandable_path(self.settings['path']) logger.info("Backup Thread - Local server path not defined. Setting local server path variable.") #checks if the backup thread is currently alive for this server if not self.is_backingup: @@ -451,13 +451,13 @@ class Server: try: backup_filename = "{}/{}".format(self.settings['backup_path'], datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')) logger.info("Creating backup of server '{}' (ID#{}) at '{}'".format(self.settings['server_name'], self.server_id, backup_filename)) - shutil.make_archive(backup_filename, 'zip', self.server_path) + shutil.make_archive(helper.get_os_understandable_path(backup_filename), 'zip', self.server_path) while len(self.list_backups()) > conf["max_backups"] and conf["max_backups"] > 0: backup_list = self.list_backups() oldfile = backup_list[0] oldfile_path = "{}/{}".format(conf['backup_path'], oldfile['path']) logger.info("Removing old backup '{}'".format(oldfile['path'])) - os.remove(oldfile_path) + os.remove(helper.get_os_understandable_path(oldfile_path)) self.is_backingup = False logger.info("Backup of server: {} completed".format(self.name)) return @@ -468,9 +468,9 @@ class Server: def list_backups(self): conf = management_helper.get_backup_config(self.server_id) - if helper.check_path_exists(self.settings['backup_path']): - files = helper.get_human_readable_files_sizes(helper.list_dir_by_date(self.settings['backup_path'])) - return [{"path": os.path.relpath(f['path'], start=conf['backup_path']), "size": f["size"]} for f in files] + if helper.check_path_exists(helper.get_os_understandable_path(self.settings['backup_path'])): + files = helper.get_human_readable_files_sizes(helper.list_dir_by_date(helper.get_os_understandable_path(self.settings['backup_path']))) + return [{"path": os.path.relpath(f['path'], start=helper.get_os_understandable_path(conf['backup_path'])), "size": f["size"]} for f in files] else: return [] @@ -507,7 +507,7 @@ class Server: 'wasRunning': wasStarted, 'string': message }) - backup_dir = os.path.join(self.settings['path'], 'crafty_executable_backups') + backup_dir = os.path.join(helper.get_os_understandable_path(self.settings['path']), 'crafty_executable_backups') #checks if backup directory already exists if os.path.isdir(backup_dir): backup_executable = os.path.join(backup_dir, 'old_server.jar') @@ -524,7 +524,7 @@ class Server: else: logger.info("No old backups found for server: {}".format(self.name)) - current_executable = os.path.join(self.settings['path'], self.settings['executable']) + current_executable = os.path.join(helper.get_os_understandable_path(self.settings['path']), self.settings['executable']) #copies to backup dir helper.copy_files(current_executable, backup_executable) diff --git a/app/classes/shared/translation.py b/app/classes/shared/translation.py index 6d0a0a1b..837a5a1c 100644 --- a/app/classes/shared/translation.py +++ b/app/classes/shared/translation.py @@ -14,6 +14,7 @@ class Translation(): self.cached_translation = None self.cached_translation_lang = None self.lang_file_exists = [] + def translate(self, page, word, lang): translated_word = None fallback_lang = 'en_EN' @@ -30,6 +31,7 @@ class Translation(): elif iter(translated_word) and not isinstance(translated_word, str): return '\n'.join(translated_word) return translated_word return 'Error while getting translation' + def translate_inner(self, page, word, lang): lang_file = os.path.join( self.translations_path, diff --git 
a/app/classes/web/ajax_handler.py b/app/classes/web/ajax_handler.py index 78ad91d0..8e8ec156 100644 --- a/app/classes/web/ajax_handler.py +++ b/app/classes/web/ajax_handler.py @@ -69,7 +69,7 @@ class AjaxHandler(BaseHandler): if full_log: log_lines = helper.get_setting('max_log_lines') - data = helper.tail_file(server_data['log_path'], log_lines) + data = helper.tail_file(helper.get_os_understandable_path(server_data['log_path']), log_lines) else: data = ServerOutBuf.lines.get(server_id, []) @@ -92,21 +92,21 @@ class AjaxHandler(BaseHandler): self.render_page('ajax/notify.html', page_data) elif page == "get_file": - file_path = self.get_argument('file_path', None) + file_path = helper.get_os_understandable_path(self.get_argument('file_path', None)) server_id = self.get_argument('id', None) if not self.check_server_id(server_id, 'get_file'): return else: server_id = bleach.clean(server_id) - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], file_path)\ + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\ or not helper.check_file_exists(os.path.abspath(file_path)): logger.warning("Invalid path in get_file ajax call ({})".format(file_path)) console.warning("Invalid path in get_file ajax call ({})".format(file_path)) return - - + + error = None - + try: with open(file_path) as file: file_contents = file.read() @@ -126,8 +126,8 @@ class AjaxHandler(BaseHandler): if not self.check_server_id(server_id, 'get_tree'): return else: server_id = bleach.clean(server_id) - self.write(self.controller.servers.get_server_data_by_id(server_id)['path'] + '\n' + - helper.generate_tree(self.controller.servers.get_server_data_by_id(server_id)['path'])) + self.write(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']) + '\n' + + helper.generate_tree(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']))) self.finish() @tornado.web.authenticated @@ -157,7 +157,7 @@ class AjaxHandler(BaseHandler): self.controller.management.add_to_audit_log(user_data['user_id'], "Sent command to {} terminal: {}".format(self.controller.servers.get_server_friendly_name(server_id), command), server_id, self.get_remote_ip()) elif page == "create_file": - file_parent = self.get_body_argument('file_parent', default=None, strip=True) + file_parent = helper.get_os_understandable_path(self.get_body_argument('file_parent', default=None, strip=True)) file_name = self.get_body_argument('file_name', default=None, strip=True) file_path = os.path.join(file_parent, file_name) server_id = self.get_argument('id', None) @@ -165,7 +165,7 @@ class AjaxHandler(BaseHandler): if not self.check_server_id(server_id, 'create_file'): return else: server_id = bleach.clean(server_id) - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], file_path) \ + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path) \ or helper.check_file_exists(os.path.abspath(file_path)): logger.warning("Invalid path in create_file ajax call ({})".format(file_path)) console.warning("Invalid path in create_file ajax call ({})".format(file_path)) @@ -176,7 +176,7 @@ class AjaxHandler(BaseHandler): file_object.close() elif page == "create_dir": - dir_parent = self.get_body_argument('dir_parent', default=None, strip=True) + dir_parent = 
helper.get_os_understandable_path(self.get_body_argument('dir_parent', default=None, strip=True)) dir_name = self.get_body_argument('dir_name', default=None, strip=True) dir_path = os.path.join(dir_parent, dir_name) server_id = self.get_argument('id', None) @@ -184,7 +184,7 @@ class AjaxHandler(BaseHandler): if not self.check_server_id(server_id, 'create_dir'): return else: server_id = bleach.clean(server_id) - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], dir_path) \ + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), dir_path) \ or helper.check_path_exists(os.path.abspath(dir_path)): logger.warning("Invalid path in create_dir ajax call ({})".format(dir_path)) console.warning("Invalid path in create_dir ajax call ({})".format(dir_path)) @@ -194,7 +194,7 @@ class AjaxHandler(BaseHandler): elif page == "unzip_file": server_id = self.get_argument('id', None) - path = self.get_argument('path', None) + path = helper.get_os_understandable_path(self.get_argument('path', None)) helper.unzipFile(path) self.redirect("/panel/server_detail?id={}&subpage=files".format(server_id)) return @@ -211,21 +211,18 @@ class AjaxHandler(BaseHandler): @tornado.web.authenticated def delete(self, page): if page == "del_file": - file_path = self.get_body_argument('file_path', default=None, strip=True) + file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True)) server_id = self.get_argument('id', None) - if os.name == "nt": - file_path = file_path.replace('/', "\\") - console.warning("delete {} for server {}".format(file_path, server_id)) - if not self.check_server_id(server_id, 'del_file'): + if not self.check_server_id(server_id, 'del_file'): return else: server_id = bleach.clean(server_id) server_info = self.controller.servers.get_server_data_by_id(server_id) - if not (helper.in_path(server_info['path'], file_path) \ - or helper.in_path(server_info['backup_path'], file_path)) \ + if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \ + or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \ or not helper.check_file_exists(os.path.abspath(file_path)): logger.warning("Invalid path in del_file ajax call ({})".format(file_path)) console.warning("Invalid path in del_file ajax call ({})".format(file_path)) @@ -235,7 +232,7 @@ class AjaxHandler(BaseHandler): os.remove(file_path) elif page == "del_dir": - dir_path = self.get_body_argument('dir_path', default=None, strip=True) + dir_path = helper.get_os_understandable_path(self.get_body_argument('dir_path', default=None, strip=True)) server_id = self.get_argument('id', None) console.warning("delete {} for server {}".format(dir_path, server_id)) @@ -244,7 +241,7 @@ class AjaxHandler(BaseHandler): else: server_id = bleach.clean(server_id) server_info = self.controller.servers.get_server_data_by_id(server_id) - if not helper.in_path(server_info['path'], dir_path) \ + if not helper.in_path(helper.get_os_understandable_path(server_info['path']), dir_path) \ or not helper.check_path_exists(os.path.abspath(dir_path)): logger.warning("Invalid path in del_file ajax call ({})".format(dir_path)) console.warning("Invalid path in del_file ajax call ({})".format(dir_path)) @@ -270,13 +267,13 @@ class AjaxHandler(BaseHandler): def put(self, page): if page == "save_file": file_contents = self.get_body_argument('file_contents', default=None, 
strip=True) - file_path = self.get_body_argument('file_path', default=None, strip=True) + file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True)) server_id = self.get_argument('id', None) if not self.check_server_id(server_id, 'save_file'): return else: server_id = bleach.clean(server_id) - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], file_path)\ + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\ or not helper.check_file_exists(os.path.abspath(file_path)): logger.warning("Invalid path in save_file ajax call ({})".format(file_path)) console.warning("Invalid path in save_file ajax call ({})".format(file_path)) @@ -287,7 +284,7 @@ class AjaxHandler(BaseHandler): file_object.write(file_contents) elif page == "rename_item": - item_path = self.get_body_argument('item_path', default=None, strip=True) + item_path = helper.get_os_understandable_path(self.get_body_argument('item_path', default=None, strip=True)) new_item_name = self.get_body_argument('new_item_name', default=None, strip=True) server_id = self.get_argument('id', None) @@ -299,7 +296,7 @@ class AjaxHandler(BaseHandler): console.warning("Invalid path(s) in rename_item ajax call") return - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], item_path) \ + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), item_path) \ or not helper.check_path_exists(os.path.abspath(item_path)): logger.warning("Invalid old name path in rename_item ajax call ({})".format(server_id)) console.warning("Invalid old name path in rename_item ajax call ({})".format(server_id)) @@ -307,7 +304,7 @@ class AjaxHandler(BaseHandler): new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name) - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], new_item_path) \ + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), new_item_path) \ or helper.check_path_exists(os.path.abspath(new_item_path)): logger.warning("Invalid new name path in rename_item ajax call ({})".format(server_id)) console.warning("Invalid new name path in rename_item ajax call ({})".format(server_id)) @@ -315,6 +312,7 @@ class AjaxHandler(BaseHandler): # RENAME os.rename(item_path, new_item_path) + def check_server_id(self, server_id, page_name): if server_id is None: logger.warning("Server ID not defined in {} ajax call ({})".format(page_name, server_id)) diff --git a/app/classes/web/panel_handler.py b/app/classes/web/panel_handler.py index 1f6c5471..14dd2951 100644 --- a/app/classes/web/panel_handler.py +++ b/app/classes/web/panel_handler.py @@ -209,7 +209,8 @@ class PanelHandler(BaseHandler): server_info = self.controller.servers.get_server_data_by_id(server_id) page_data['backup_config'] = self.controller.management.get_backup_config(server_id) page_data['backup_list'] = server.list_backups() - page_data['backup_path'] = server_info["backup_path"].replace('\\', '/') + page_data['backup_path'] = helper.wtol_path(server_info["backup_path"]) + print(page_data['backup_path']) def get_banned_players_html(): banned_players = self.controller.servers.get_banned_players(server_id) @@ -256,8 +257,8 @@ class PanelHandler(BaseHandler): return server_info = 
self.controller.servers.get_server_data_by_id(server_id) - backup_file = os.path.abspath(os.path.join(server_info["backup_path"], file)) - if not helper.in_path(server_info["backup_path"], backup_file) \ + backup_file = os.path.abspath(os.path.join(helper.get_os_understandable_path(server_info["backup_path"]), file)) + if not helper.in_path(helper.get_os_understandable_path(server_info["backup_path"]), backup_file) \ or not os.path.isfile(backup_file): self.redirect("/panel/error?error=Invalid path detected") return @@ -540,7 +541,7 @@ class PanelHandler(BaseHandler): elif page == 'download_file': server_id = self.get_argument('id', None) - file = self.get_argument('path', "") + file = helper.get_os_understandable_path(self.get_argument('path', "")) name = self.get_argument('name', "") if server_id is None: @@ -559,7 +560,7 @@ class PanelHandler(BaseHandler): server_info = self.controller.servers.get_server_data_by_id(server_id) - if not helper.in_path(server_info["path"], file) \ + if not helper.in_path(helper.get_os_understandable_path(server_info["path"]), file) \ or not os.path.isfile(file): self.redirect("/panel/error?error=Invalid path detected") return @@ -645,22 +646,21 @@ class PanelHandler(BaseHandler): self.redirect("/panel/error?error=Invalid Server ID") return - #TODO use controller method - Servers.update({ - Servers.server_name: server_name, - Servers.path: server_path, - Servers.log_path: log_path, - Servers.executable: executable, - Servers.execution_command: execution_command, - Servers.stop_command: stop_command, - Servers.auto_start_delay: auto_start_delay, - Servers.server_ip: server_ip, - Servers.server_port: server_port, - Servers.auto_start: auto_start, - Servers.executable_update_url: executable_update_url, - Servers.crash_detection: crash_detection, - Servers.logs_delete_after: logs_delete_after, - }).where(Servers.server_id == server_id).execute() + server_obj = self.controller.servers.get_server_obj(server_id) + server_obj.server_name = server_name + server_obj.path = server_path + server_obj.log_path = log_path + server_obj.executable = executable + server_obj.execution_command = execution_command + server_obj.stop_command = stop_command + server_obj.auto_start_delay = auto_start_delay + server_obj.server_ip = server_ip + server_obj.server_port = server_port + server_obj.auto_start = auto_start + server_obj.executable_update_url = executable_update_url + server_obj.crash_detection = crash_detection + server_obj.logs_delete_after = logs_delete_after + self.controller.servers.update_server(server_obj) self.controller.refresh_server_settings(server_id) @@ -695,14 +695,16 @@ class PanelHandler(BaseHandler): if backup_path is not None: if enabled == '0': - Servers.update({ - Servers.backup_path: backup_path - }).where(Servers.server_id == server_id).execute() + #TODO Use Controller method + server_obj = self.controller.servers.get_server_obj(server_id) + server_obj.backup_path = backup_path + self.controller.servers.update_server(server_obj) self.controller.management.set_backup_config(server_id, max_backups=max_backups, auto_enabled=False) else: - Servers.update({ - Servers.backup_path: backup_path - }).where(Servers.server_id == server_id).execute() + #TODO Use Controller method + server_obj = self.controller.servers.get_server_obj(server_id) + server_obj.backup_path = backup_path + self.controller.servers.update_server(server_obj) self.controller.management.set_backup_config(server_id, max_backups=max_backups, auto_enabled=True) 
self.controller.management.add_to_audit_log(exec_user['user_id'], diff --git a/app/classes/web/server_handler.py b/app/classes/web/server_handler.py index 37bf1848..faaf849d 100644 --- a/app/classes/web/server_handler.py +++ b/app/classes/web/server_handler.py @@ -139,7 +139,7 @@ class ServerHandler(BaseHandler): stop_command = server_data.get('stop_command') new_server_command = str(server_data.get('execution_command')).replace(server_uuid, new_server_uuid) new_executable = server_data.get('executable') - new_server_log_file = str(server_data.get('log_path')).replace(server_uuid, new_server_uuid) + new_server_log_file = str(helper.get_os_understandable_path(server_data.get('log_path'))).replace(server_uuid, new_server_uuid) auto_start = server_data.get('auto_start') auto_start_delay = server_data.get('auto_start_delay') crash_detection = server_data.get('crash_detection') diff --git a/app/classes/web/upload_handler.py b/app/classes/web/upload_handler.py index 0d0af62d..719df844 100644 --- a/app/classes/web/upload_handler.py +++ b/app/classes/web/upload_handler.py @@ -52,8 +52,8 @@ class UploadHandler(tornado.web.RequestHandler): filename = self.request.headers.get('X-FileName', None) full_path = os.path.join(path, filename) - if not helper.in_path(self.controller.servers.get_server_data_by_id(server_id)['path'], full_path): - print(user_id, server_id, self.controller.servers.get_server_data_by_id(server_id)['path'], full_path) + if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), full_path): + print(user_id, server_id, helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), full_path) logger.warning(f'User {user_id} tried to upload a file to {server_id} but the path is not inside of the server!') console.warning(f'User {user_id} tried to upload a file to {server_id} but the path is not inside of the server!') self.do_upload = False
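
Reviewer note: a minimal, self-contained sketch of the path helpers this change leans on, for anyone reading the diff without the full tree checked out. The bodies of wtol_path, ltow_path and get_os_understandable_path are copied from the app/classes/shared/helpers.py hunk above; is_os_windows() is not part of this diff, so its body below is only an assumption inferred from the os.name == 'nt' checks it replaces, and the example paths and server id are hypothetical.

    # Sketch of the cross-platform path helpers used throughout this diff.
    import os

    def is_os_windows() -> bool:
        # Assumed implementation: the diff swaps direct `os.name == 'nt'`
        # checks (stats.py, server.py, migration.py, ajax_handler.py) for
        # this helper, so it presumably wraps the same test.
        return os.name == 'nt'

    def wtol_path(w_path: str) -> str:
        # Windows-style to POSIX-style separators, as added in helpers.py.
        return w_path.replace('\\', '/')

    def ltow_path(l_path: str) -> str:
        # POSIX-style to Windows-style separators, as added in helpers.py.
        return l_path.replace('/', '\\')

    def get_os_understandable_path(path: str) -> str:
        # Normalize a stored or user-supplied path for the current OS before
        # any filesystem call; this is what the call sites above now wrap
        # around database paths, upload targets and backup locations.
        return os.path.normpath(path)

    if __name__ == '__main__':
        # On Windows, normpath flips '/' to '\\'; on POSIX it only collapses
        # redundant separators, so forward-slash paths pass through unchanged.
        print(get_os_understandable_path('servers/7f3c/../7f3c/server.properties'))
        # wtol_path is what panel_handler.py now uses to render backup_path
        # with forward slashes for display, however it was stored.
        print(wtol_path('backups\\7f3c'))
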