diff --git a/.gitlab/scripts/lang_sort.sh b/.gitlab/scripts/lang_sort.sh index 5710ce1b..9a1e1cf0 100644 --- a/.gitlab/scripts/lang_sort.sh +++ b/.gitlab/scripts/lang_sort.sh @@ -56,8 +56,8 @@ get_keys "${DIR}/en_EN.json" | sort > "${ref_keys}" # Iterate over each .json file in the directory for file in "${DIR}"/*.json; do - # Check if file is a regular file and not en_EN.json, and does not contain "_incomplete" in its name - if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && ! "${file}" =~ _incomplete ]]; then + # Check if file is a regular file and not en_EN.json, humanized index and does not contain "_incomplete" in its name + if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && "${file}" != "${DIR}/humanized_index.json" && ! "${file}" =~ _incomplete ]]; then # Get keys and subkeys from the current file current_keys=$(mktemp) diff --git a/CHANGELOG.md b/CHANGELOG.md index 04f207ea..cc3695db 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,17 +2,34 @@ ## --- [4.4.1] - 2024/TBD ### New features TBD +### Refactor +- Backups | Allow multiple backup configurations ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/711)) +- UploadAPI | Use Crafty's JWT authentication for file uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762)) +- UploadAPI | Splice files on the frontend to allow chunked uploads as well as bulk uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762)) +- UploadAPI | Enhance upload progress feedback on all upload pages ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762)) +- UploadAPI | Consolidate and improve speed on uploads, supporting 100mb+ uploads through Cloudflare(Free) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762)) ### Bug fixes - Fix zip imports so the root dir selection is functional ([Merge 
Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/764)) - Fix bug where full access gives minimal access ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/768)) +- Bump tornado & requests for sec advisories ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/774)) - Ensure audit.log exists or create it on Crafty startup ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/771)) +- Fix typing issue on ID comparison causing general users to not be able to delete their own API keys ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/775)) +- Fix user creation bug where it would fail when a role was selected ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763)) +- Security improvements for general user creations on roles page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763)) +- Security improvements for general user creations on user page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763)) +- Use UTC for tokens_valid_from in user config, to resolve token invalidation on instance TZ change ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/765)) ### Tweaks - Add info note to default creds file ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/760)) - Remove navigation label from sidebar ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/766)) +- Do not allow slashes in server names ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/767)) - Add a thread dump to support logs ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/769)) - Remove text from status page and use symbols ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/770)) +- Add better feedback on when 
errors appear on user creation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763)) +- Workaround cpu_freq call catching on obscure cpu architectures ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/776)) ### Lang +- Show natural language name instead of country code in User Config Lang select list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/773)) - Add remaining `he_IL`, `th_TH` translations for 4.4.0 Release ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/761)) +- Mark `he_IL` incomplete ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))

## --- [4.4.0] - 2024/05/11 diff --git a/app/classes/controllers/management_controller.py b/app/classes/controllers/management_controller.py index cab755b6..fc17b8be 100644 --- a/app/classes/controllers/management_controller.py +++ b/app/classes/controllers/management_controller.py @@ -5,6 +5,7 @@ from prometheus_client import CollectorRegistry, Gauge from app.classes.models.management import HelpersManagement, HelpersWebhooks from app.classes.models.servers import HelperServers +from app.classes.shared.helpers import Helpers logger = logging.getLogger(__name__) @@ -75,7 +76,7 @@ class ManagementController: # Commands Methods # ********************************************************************************** - def send_command(self, user_id, server_id, remote_ip, command): + def send_command(self, user_id, server_id, remote_ip, command, action_id=None): server_name = HelperServers.get_server_friendly_name(server_id) # Example: Admin issued command start_server for server Survival @@ -86,7 +87,12 @@ class ManagementController: remote_ip, ) self.queue_command( - {"server_id": server_id, "user_id": user_id, "command": command} + { + "server_id": server_id, + "user_id": user_id, + "command": command, + "action_id": action_id, + } ) def queue_command(self, command_data): @@ -123,6 +129,7 @@ class ManagementController: cron_string="* * * * *", parent=None, delay=0, + action_id=None, ): return HelpersManagement.create_scheduled_task( server_id, @@ -137,6 +144,7 @@ class ManagementController: cron_string, parent, delay, + action_id, ) @staticmethod @@ -175,34 +183,47 @@ class ManagementController: # Backups Methods # ********************************************************************************** @staticmethod - def get_backup_config(server_id): - return HelpersManagement.get_backup_config(server_id) + def get_backup_config(backup_id): + return HelpersManagement.get_backup_config(backup_id) - def set_backup_config( - self, - server_id: int, - backup_path: str = None, - 
max_backups: int = None, - excluded_dirs: list = None, - compress: bool = False, - shutdown: bool = False, - before: str = "", - after: str = "", - ): - return self.management_helper.set_backup_config( - server_id, - backup_path, - max_backups, - excluded_dirs, - compress, - shutdown, - before, - after, + @staticmethod + def get_backups_by_server(server_id, model=False): + return HelpersManagement.get_backups_by_server(server_id, model) + + @staticmethod + def delete_backup_config(backup_id): + HelpersManagement.remove_backup_config(backup_id) + + @staticmethod + def update_backup_config(backup_id, updates): + if "backup_location" in updates: + updates["backup_location"] = Helpers.wtol_path(updates["backup_location"]) + return HelpersManagement.update_backup_config(backup_id, updates) + + def add_backup_config(self, data) -> str: + if "backup_location" in data: + data["backup_location"] = Helpers.wtol_path(data["backup_location"]) + return self.management_helper.add_backup_config(data) + + def add_default_backup_config(self, server_id, backup_path): + return self.management_helper.add_backup_config( + { + "backup_name": "Default Backup", + "backup_location": Helpers.wtol_path(backup_path), + "max_backups": 0, + "before": "", + "after": "", + "compress": False, + "shutdown": False, + "server_id": server_id, + "excluded_dirs": [], + "default": True, + } ) @staticmethod - def get_excluded_backup_dirs(server_id: int): - return HelpersManagement.get_excluded_backup_dirs(server_id) + def get_excluded_backup_dirs(backup_id: int): + return HelpersManagement.get_excluded_backup_dirs(backup_id) def add_excluded_backup_dir(self, server_id: int, dir_to_add: str): self.management_helper.add_excluded_backup_dir(server_id, dir_to_add) diff --git a/app/classes/controllers/servers_controller.py b/app/classes/controllers/servers_controller.py index 6a5cce4e..7743ad84 100644 --- a/app/classes/controllers/servers_controller.py +++ b/app/classes/controllers/servers_controller.py @@ 
-48,7 +48,6 @@ class ServersController(metaclass=Singleton): name: str, server_uuid: str, server_dir: str, - backup_path: str, server_command: str, server_file: str, server_log_file: str, @@ -83,7 +82,6 @@ class ServersController(metaclass=Singleton): server_uuid, name, server_dir, - backup_path, server_command, server_file, server_log_file, @@ -148,8 +146,7 @@ class ServersController(metaclass=Singleton): PermissionsServers.delete_roles_permissions(role_id, role_data["servers"]) # Remove roles from server PermissionsServers.remove_roles_of_server(server_id) - # Remove backup configs tied to server - self.management_helper.remove_backup_config(server_id) + self.management_helper.remove_all_server_backups(server_id) # Finally remove server self.servers_helper.remove_server(server_id) diff --git a/app/classes/controllers/users_controller.py b/app/classes/controllers/users_controller.py index 5425fbf8..d45797bd 100644 --- a/app/classes/controllers/users_controller.py +++ b/app/classes/controllers/users_controller.py @@ -55,6 +55,7 @@ class UsersController: "minLength": self.helper.minimum_password_length, "examples": ["crafty"], "title": "Password", + "error": "passLength", }, "email": { "type": "string", diff --git a/app/classes/minecraft/stats.py b/app/classes/minecraft/stats.py index a3f85c05..c3474d0d 100644 --- a/app/classes/minecraft/stats.py +++ b/app/classes/minecraft/stats.py @@ -86,7 +86,7 @@ class Stats: def get_node_stats(self) -> NodeStatsReturnDict: try: cpu_freq = psutil.cpu_freq() - except (NotImplementedError, FileNotFoundError): + except (NotImplementedError, AttributeError, FileNotFoundError): cpu_freq = None if cpu_freq is None: cpu_freq = psutil._common.scpufreq(current=-1, min=-1, max=-1) diff --git a/app/classes/models/management.py b/app/classes/models/management.py index e59dd39c..1a3e4a11 100644 --- a/app/classes/models/management.py +++ b/app/classes/models/management.py @@ -16,6 +16,7 @@ from app.classes.models.base_model import BaseModel 
from app.classes.models.users import HelperUsers from app.classes.models.servers import Servers from app.classes.models.server_permissions import PermissionsServers +from app.classes.shared.helpers import Helpers from app.classes.shared.websocket_manager import WebSocketManager logger = logging.getLogger(__name__) @@ -87,6 +88,7 @@ class Schedules(BaseModel): interval_type = CharField() start_time = CharField(null=True) command = CharField(null=True) + action_id = CharField(null=True) name = CharField() one_time = BooleanField(default=False) cron_string = CharField(default="") @@ -102,13 +104,19 @@ class Schedules(BaseModel): # Backups Class # ********************************************************************************** class Backups(BaseModel): + backup_id = CharField(primary_key=True, default=Helpers.create_uuid) + backup_name = CharField(default="New Backup") + backup_location = CharField(default="") excluded_dirs = CharField(null=True) - max_backups = IntegerField() + max_backups = IntegerField(default=0) server_id = ForeignKeyField(Servers, backref="backups_server") compress = BooleanField(default=False) shutdown = BooleanField(default=False) before = CharField(default="") after = CharField(default="") + default = BooleanField(default=False) + status = CharField(default='{"status": "Standby", "message": ""}') + enabled = BooleanField(default=True) class Meta: table_name = "backups" @@ -263,6 +271,7 @@ class HelpersManagement: cron_string="* * * * *", parent=None, delay=0, + action_id=None, ): sch_id = Schedules.insert( { @@ -273,6 +282,7 @@ class HelpersManagement: Schedules.interval_type: interval_type, Schedules.start_time: start_time, Schedules.command: command, + Schedules.action_id: action_id, Schedules.name: name, Schedules.one_time: one_time, Schedules.cron_string: cron_string, @@ -335,133 +345,81 @@ class HelpersManagement: # Backups Methods # ********************************************************************************** @staticmethod - def 
get_backup_config(server_id): - try: - row = ( - Backups.select().where(Backups.server_id == server_id).join(Servers)[0] - ) - conf = { - "backup_path": row.server_id.backup_path, - "excluded_dirs": row.excluded_dirs, - "max_backups": row.max_backups, - "server_id": row.server_id_id, - "compress": row.compress, - "shutdown": row.shutdown, - "before": row.before, - "after": row.after, - } - except IndexError: - conf = { - "backup_path": None, - "excluded_dirs": None, - "max_backups": 0, - "server_id": server_id, - "compress": False, - "shutdown": False, - "before": "", - "after": "", - } - return conf + def get_backup_config(backup_id): + return model_to_dict(Backups.get(Backups.backup_id == backup_id)) @staticmethod - def remove_backup_config(server_id): + def get_backups_by_server(server_id, model=False): + if not model: + data = {} + for backup in ( + Backups.select().where(Backups.server_id == server_id).execute() + ): + data[str(backup.backup_id)] = { + "backup_id": backup.backup_id, + "backup_name": backup.backup_name, + "backup_location": backup.backup_location, + "excluded_dirs": backup.excluded_dirs, + "max_backups": backup.max_backups, + "server_id": backup.server_id_id, + "compress": backup.compress, + "shutdown": backup.shutdown, + "before": backup.before, + "after": backup.after, + "default": backup.default, + "enabled": backup.enabled, + } + else: + data = Backups.select().where(Backups.server_id == server_id).execute() + return data + + @staticmethod + def get_default_server_backup(server_id: str) -> dict: + print(server_id) + bu_query = Backups.select().where( + Backups.server_id == server_id, + Backups.default == True, # pylint: disable=singleton-comparison + ) + for item in bu_query: + print("HI", item) + backup_model = bu_query.first() + + if backup_model: + return model_to_dict(backup_model) + raise IndexError + + @staticmethod + def remove_all_server_backups(server_id): Backups.delete().where(Backups.server_id == server_id).execute() - def 
set_backup_config( - self, - server_id: int, - backup_path: str = None, - max_backups: int = None, - excluded_dirs: list = None, - compress: bool = False, - shutdown: bool = False, - before: str = "", - after: str = "", - ): - logger.debug(f"Updating server {server_id} backup config with {locals()}") - if Backups.select().where(Backups.server_id == server_id).exists(): - new_row = False - conf = {} - else: - conf = { - "excluded_dirs": None, - "max_backups": 0, - "server_id": server_id, - "compress": False, - "shutdown": False, - "before": "", - "after": "", - } - new_row = True - if max_backups is not None: - conf["max_backups"] = max_backups - if excluded_dirs is not None: - dirs_to_exclude = ",".join(excluded_dirs) + @staticmethod + def remove_backup_config(backup_id): + Backups.delete().where(Backups.backup_id == backup_id).execute() + + def add_backup_config(self, conf) -> str: + if "excluded_dirs" in conf: + dirs_to_exclude = ",".join(conf["excluded_dirs"]) conf["excluded_dirs"] = dirs_to_exclude - conf["compress"] = compress - conf["shutdown"] = shutdown - conf["before"] = before - conf["after"] = after - if not new_row: - with self.database.atomic(): - if backup_path is not None: - server_rows = ( - Servers.update(backup_path=backup_path) - .where(Servers.server_id == server_id) - .execute() - ) - else: - server_rows = 0 - backup_rows = ( - Backups.update(conf).where(Backups.server_id == server_id).execute() - ) - logger.debug( - f"Updating existing backup record. 
" - f"{server_rows}+{backup_rows} rows affected" - ) - else: - with self.database.atomic(): - conf["server_id"] = server_id - if backup_path is not None: - Servers.update(backup_path=backup_path).where( - Servers.server_id == server_id - ) - Backups.create(**conf) - logger.debug("Creating new backup record.") + backup = Backups.create(**conf) + logger.debug("Creating new backup record.") + return backup.backup_id @staticmethod - def get_excluded_backup_dirs(server_id: int): - excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"] + def update_backup_config(backup_id, data): + if "excluded_dirs" in data: + dirs_to_exclude = ",".join(data["excluded_dirs"]) + data["excluded_dirs"] = dirs_to_exclude + Backups.update(**data).where(Backups.backup_id == backup_id).execute() + + @staticmethod + def get_excluded_backup_dirs(backup_id: int): + excluded_dirs = HelpersManagement.get_backup_config(backup_id)["excluded_dirs"] if excluded_dirs is not None and excluded_dirs != "": dir_list = excluded_dirs.split(",") else: dir_list = [] return dir_list - def add_excluded_backup_dir(self, server_id: int, dir_to_add: str): - dir_list = self.get_excluded_backup_dirs(server_id) - if dir_to_add not in dir_list: - dir_list.append(dir_to_add) - excluded_dirs = ",".join(dir_list) - self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs) - else: - logger.debug( - f"Not adding {dir_to_add} to excluded directories - " - f"already in the excluded directory list for server ID {server_id}" - ) - - def del_excluded_backup_dir(self, server_id: int, dir_to_del: str): - dir_list = self.get_excluded_backup_dirs(server_id) - if dir_to_del in dir_list: - dir_list.remove(dir_to_del) - excluded_dirs = ",".join(dir_list) - self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs) - else: - logger.debug( - f"Not removing {dir_to_del} from excluded directories - " - f"not in the excluded directory list for server ID {server_id}" - ) - # 
********************************************************************************** # Webhooks Class diff --git a/app/classes/models/servers.py b/app/classes/models/servers.py index 13d9096a..e5d85c69 100644 --- a/app/classes/models/servers.py +++ b/app/classes/models/servers.py @@ -26,7 +26,6 @@ class Servers(BaseModel): created = DateTimeField(default=datetime.datetime.now) server_name = CharField(default="Server", index=True) path = CharField(default="") - backup_path = CharField(default="") executable = CharField(default="") log_path = CharField(default="") execution_command = CharField(default="") @@ -65,7 +64,6 @@ class HelperServers: server_id: str, name: str, server_dir: str, - backup_path: str, server_command: str, server_file: str, server_log_file: str, @@ -81,7 +79,6 @@ class HelperServers: name: The name of the server server_uuid: This is the UUID of the server server_dir: The directory where the server is located - backup_path: The path to the backup folder server_command: The command to start the server server_file: The name of the server file server_log_file: The path to the server log file @@ -111,7 +108,6 @@ class HelperServers: server_port=server_port, server_ip=server_host, stop_command=server_stop, - backup_path=backup_path, type=server_type, created_by=created_by, ).server_id diff --git a/app/classes/models/users.py b/app/classes/models/users.py index 3f96e651..6f6a6bde 100644 --- a/app/classes/models/users.py +++ b/app/classes/models/users.py @@ -38,7 +38,7 @@ class Users(BaseModel): superuser = BooleanField(default=False) lang = CharField(default="en_EN") support_logs = CharField(default="") - valid_tokens_from = DateTimeField(default=datetime.datetime.now) + valid_tokens_from = DateTimeField(default=Helpers.get_utc_now) server_order = CharField(default="") preparing = BooleanField(default=False) hints = BooleanField(default=True) @@ -119,7 +119,6 @@ class HelperUsers: @staticmethod def get_user_total(): count = 
Users.select().where(Users.username != "system").count() - print(count) return count @staticmethod diff --git a/app/classes/shared/authentication.py b/app/classes/shared/authentication.py index fad8b730..94db5532 100644 --- a/app/classes/shared/authentication.py +++ b/app/classes/shared/authentication.py @@ -1,5 +1,6 @@ import logging import time +from datetime import datetime from typing import Optional, Dict, Any, Tuple import jwt from jwt import PyJWTError @@ -62,7 +63,17 @@ class Authentication: user = HelperUsers.get_user(user_id) # TODO: Have a cache or something so we don't constantly # have to query the database - if int(user.get("valid_tokens_from").timestamp()) < iat: + valid_tokens_from_str = user.get("valid_tokens_from") + # It's possible this will be a string or a dt coming from the DB + # We need to account for that + try: + valid_tokens_from_dt = datetime.strptime( + valid_tokens_from_str, "%Y-%m-%d %H:%M:%S.%f%z" + ) + except TypeError: + valid_tokens_from_dt = valid_tokens_from_str + # Convert the string to a datetime object + if int(valid_tokens_from_dt.timestamp()) < iat: # Success! return key, data, user return None diff --git a/app/classes/shared/file_helpers.py b/app/classes/shared/file_helpers.py index 90d8e65c..23bf01dd 100644 --- a/app/classes/shared/file_helpers.py +++ b/app/classes/shared/file_helpers.py @@ -4,7 +4,10 @@ import logging import pathlib import tempfile import zipfile -from zipfile import ZipFile, ZIP_DEFLATED +import hashlib +from typing import BinaryIO +import mimetypes +from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED import urllib.request import ssl import time @@ -22,6 +25,7 @@ class FileHelpers: def __init__(self, helper): self.helper: Helpers = helper + self.mime_types = mimetypes.MimeTypes() @staticmethod def ssl_get_file( @@ -142,6 +146,32 @@ class FileHelpers: logger.error(f"Path specified is not a file or does not exist. 
{path}") return e + def check_mime_types(self, file_path): + m_type, _value = self.mime_types.guess_type(file_path) + return m_type + + @staticmethod + def calculate_file_hash(file_path: str) -> str: + """ + Takes one parameter of file path. + It will generate a SHA256 hash for the path and return it. + """ + sha256_hash = hashlib.sha256() + with open(file_path, "rb") as f: + for byte_block in iter(lambda: f.read(4096), b""): + sha256_hash.update(byte_block) + return sha256_hash.hexdigest() + + @staticmethod + def calculate_buffer_hash(buffer: BinaryIO) -> str: + """ + Takes one argument of a stream buffer. Will return a + sha256 hash of the buffer + """ + sha256_hash = hashlib.sha256() + sha256_hash.update(buffer) + return sha256_hash.hexdigest() + @staticmethod def copy_dir(src_path, dest_path, dirs_exist_ok=False): # pylint: disable=unexpected-keyword-arg @@ -229,74 +259,15 @@ class FileHelpers: return True - def make_compressed_backup( - self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment="" - ): - # create a ZipFile object - path_to_destination += ".zip" - ex_replace = [p.replace("\\", "/") for p in excluded_dirs] - total_bytes = 0 - dir_bytes = Helpers.get_dir_size(path_to_zip) - results = { - "percent": 0, - "total_files": self.helper.human_readable_file_size(dir_bytes), - } - WebSocketManager().broadcast_page_params( - "/panel/server_detail", - {"id": str(server_id)}, - "backup_status", - results, - ) - with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file: - zip_file.comment = bytes( - comment, "utf-8" - ) # comments over 65535 bytes will be truncated - for root, dirs, files in os.walk(path_to_zip, topdown=True): - for l_dir in dirs: - if str(os.path.join(root, l_dir)).replace("\\", "/") in ex_replace: - dirs.remove(l_dir) - ziproot = path_to_zip - for file in files: - if ( - str(os.path.join(root, file)).replace("\\", "/") - not in ex_replace - and file != "crafty.sqlite" - ): - try: - logger.info(f"backing up: 
{os.path.join(root, file)}") - if os.name == "nt": - zip_file.write( - os.path.join(root, file), - os.path.join(root.replace(ziproot, ""), file), - ) - else: - zip_file.write( - os.path.join(root, file), - os.path.join(root.replace(ziproot, "/"), file), - ) - - except Exception as e: - logger.warning( - f"Error backing up: {os.path.join(root, file)}!" - f" - Error was: {e}" - ) - total_bytes += os.path.getsize(os.path.join(root, file)) - percent = round((total_bytes / dir_bytes) * 100, 2) - results = { - "percent": percent, - "total_files": self.helper.human_readable_file_size(dir_bytes), - } - WebSocketManager().broadcast_page_params( - "/panel/server_detail", - {"id": str(server_id)}, - "backup_status", - results, - ) - - return True - def make_backup( - self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment="" + self, + path_to_destination, + path_to_zip, + excluded_dirs, + server_id, + backup_id, + comment="", + compressed=None, ): # create a ZipFile object path_to_destination += ".zip" @@ -313,7 +284,15 @@ class FileHelpers: "backup_status", results, ) - with ZipFile(path_to_destination, "w") as zip_file: + WebSocketManager().broadcast_page_params( + "/panel/edit_backup", + {"id": str(server_id)}, + "backup_status", + results, + ) + # Set the compression mode based on the `compressed` parameter + compression_mode = ZIP_DEFLATED if compressed else ZIP_STORED + with ZipFile(path_to_destination, "w", compression_mode) as zip_file: zip_file.comment = bytes( comment, "utf-8" ) # comments over 65535 bytes will be truncated @@ -364,6 +343,7 @@ class FileHelpers: results = { "percent": percent, "total_files": self.helper.human_readable_file_size(dir_bytes), + "backup_id": backup_id, } # send status results to page. 
WebSocketManager().broadcast_page_params( @@ -372,6 +352,12 @@ class FileHelpers: "backup_status", results, ) + WebSocketManager().broadcast_page_params( + "/panel/edit_backup", + {"id": str(server_id)}, + "backup_status", + results, + ) return True @staticmethod diff --git a/app/classes/shared/helpers.py b/app/classes/shared/helpers.py index 55a588fc..e827d5b2 100644 --- a/app/classes/shared/helpers.py +++ b/app/classes/shared/helpers.py @@ -19,7 +19,7 @@ import shutil import shlex import subprocess import itertools -from datetime import datetime +from datetime import datetime, timezone from socket import gethostname from contextlib import redirect_stderr, suppress import libgravatar @@ -508,7 +508,6 @@ class Helpers: "max_log_lines": 700, "max_audit_entries": 300, "disabled_language_files": [], - "stream_size_GB": 1, "keywords": ["help", "chunk"], "allow_nsfw_profile_pictures": False, "enable_user_self_delete": False, @@ -640,6 +639,10 @@ class Helpers: version = f"{major}.{minor}.{sub}" return str(version) + @staticmethod + def get_utc_now() -> datetime: + return datetime.fromtimestamp(time.time(), tz=timezone.utc) + def encode_pass(self, password): return self.passhasher.hash(password) @@ -1006,6 +1009,11 @@ class Helpers: except PermissionError as e: logger.critical(f"Check generated exception due to permssion error: {e}") return False + except FileNotFoundError as e: + logger.critical( + f"Check generated exception due to file does not exist error: {e}" + ) + return False def create_self_signed_cert(self, cert_dir=None): if cert_dir is None: diff --git a/app/classes/shared/main_controller.py b/app/classes/shared/main_controller.py index e31486ea..0d90a967 100644 --- a/app/classes/shared/main_controller.py +++ b/app/classes/shared/main_controller.py @@ -566,7 +566,6 @@ class Controller: name=data["name"], server_uuid=server_fs_uuid, server_dir=new_server_path, - backup_path=backup_path, server_command=server_command, server_file=server_file, 
server_log_file=log_location, @@ -576,7 +575,7 @@ class Controller: server_host=monitoring_host, server_type=monitoring_type, ) - self.management.set_backup_config( + self.management.add_default_backup_config( new_server_id, backup_path, ) @@ -722,7 +721,6 @@ class Controller: server_name, server_id, new_server_dir, - backup_path, server_command, server_jar, server_log_file, @@ -776,7 +774,6 @@ class Controller: server_name, server_id, new_server_dir, - backup_path, server_command, server_exe, server_log_file, @@ -821,7 +818,6 @@ class Controller: server_name, server_id, new_server_dir, - backup_path, server_command, server_exe, server_log_file, @@ -869,7 +865,6 @@ class Controller: server_name, server_id, new_server_dir, - backup_path, server_command, server_exe, server_log_file, @@ -893,16 +888,13 @@ class Controller: # ********************************************************************************** def rename_backup_dir(self, old_server_id, new_server_id, new_uuid): - server_data = self.servers.get_server_data_by_id(old_server_id) server_obj = self.servers.get_server_obj(new_server_id) - old_bu_path = server_data["backup_path"] ServerPermsController.backup_role_swap(old_server_id, new_server_id) - backup_path = old_bu_path + backup_path = os.path.join(self.helper.backup_path, old_server_id) backup_path = Path(backup_path) backup_path_components = list(backup_path.parts) backup_path_components[-1] = new_uuid new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components)) - server_obj.backup_path = new_bu_path default_backup_dir = os.path.join(self.helper.backup_path, new_uuid) try: os.rmdir(default_backup_dir) @@ -916,7 +908,6 @@ class Controller: name: str, server_uuid: str, server_dir: str, - backup_path: str, server_command: str, server_file: str, server_log_file: str, @@ -931,7 +922,6 @@ class Controller: name, server_uuid, server_dir, - backup_path, server_command, server_file, server_log_file, @@ -996,16 +986,16 @@ class Controller: f"Unable to 
delete server files for server with ID: " f"{server_id} with error logged: {e}" ) - if Helpers.check_path_exists( - self.servers.get_server_data_by_id(server_id)["backup_path"] - ): - FileHelpers.del_dirs( - Helpers.get_os_understandable_path( - self.servers.get_server_data_by_id(server_id)[ - "backup_path" - ] + backup_configs = HelpersManagement.get_backups_by_server( + server_id, True + ) + for config in backup_configs: + if Helpers.check_path_exists(config.backup_location): + FileHelpers.del_dirs( + Helpers.get_os_understandable_path( + config.backup_location + ) ) - ) # Cleanup scheduled tasks try: diff --git a/app/classes/shared/server.py b/app/classes/shared/server.py index a6c98b89..ab8ca54a 100644 --- a/app/classes/shared/server.py +++ b/app/classes/shared/server.py @@ -207,9 +207,6 @@ class ServerInstance: self.server_scheduler.start() self.dir_scheduler.start() self.start_dir_calc_task() - self.backup_thread = threading.Thread( - target=self.backup_server, daemon=True, name=f"backup_{self.name}" - ) self.is_backingup = False # Reset crash and update at initialization self.stats_helper.server_crash_reset() @@ -940,8 +937,7 @@ class ServerInstance: WebSocketManager().broadcast_user(user, "send_start_reload", {}) def restart_threaded_server(self, user_id): - bu_conf = HelpersManagement.get_backup_config(self.server_id) - if self.is_backingup and bu_conf["shutdown"]: + if self.is_backingup: logger.info( "Restart command detected. Supressing - server has" " backup shutdown enabled and server is currently backing up." @@ -1111,12 +1107,16 @@ class ServerInstance: f.write("eula=true") self.run_threaded_server(user_id) - def a_backup_server(self): - if self.settings["backup_path"] == "": - logger.critical("Backup path is None. 
Canceling Backup!") - return + def server_backup_threader(self, backup_id, update=False): + # Check to see if we're already backing up + if self.check_backup_by_id(backup_id): + return False + backup_thread = threading.Thread( - target=self.backup_server, daemon=True, name=f"backup_{self.name}" + target=self.backup_server, + daemon=True, + name=f"backup_{backup_id}", + args=[backup_id, update], ) logger.info( f"Starting Backup Thread for server {self.settings['server_name']}." @@ -1127,27 +1127,20 @@ class ServerInstance: "Backup Thread - Local server path not defined. " "Setting local server path variable." ) - # checks if the backup thread is currently alive for this server - if not self.is_backingup: - try: - backup_thread.start() - self.is_backingup = True - except Exception as ex: - logger.error(f"Failed to start backup: {ex}") - return False - else: - logger.error( - f"Backup is already being processed for server " - f"{self.settings['server_name']}. Canceling backup request" - ) + + try: + backup_thread.start() + except Exception as ex: + logger.error(f"Failed to start backup: {ex}") return False logger.info(f"Backup Thread started for server {self.settings['server_name']}.") @callback - def backup_server(self): + def backup_server(self, backup_id, update): was_server_running = None logger.info(f"Starting server {self.name} (ID {self.server_id}) backup") server_users = PermissionsServers.get_server_user_list(self.server_id) + # Alert the start of the backup to the authorized users. for user in server_users: WebSocketManager().broadcast_user( user, @@ -1157,30 +1150,40 @@ class ServerInstance: ).format(self.name), ) time.sleep(3) - conf = HelpersManagement.get_backup_config(self.server_id) + + # Get the backup config + conf = HelpersManagement.get_backup_config(backup_id) + # Adjust the location to include the backup ID for destination. + backup_location = os.path.join(conf["backup_location"], conf["backup_id"]) + + # Check if the backup location even exists. 
+ if not backup_location: + Console.critical("No backup path found. Canceling") + return None if conf["before"]: - if self.check_running(): - logger.debug( - "Found running server and send command option. Sending command" - ) - self.send_command(conf["before"]) + logger.debug( + "Found running server and send command option. Sending command" + ) + self.send_command(conf["before"]) + # Pause to let command run + time.sleep(5) if conf["shutdown"]: - if conf["before"]: - # pause to let people read message. - time.sleep(5) logger.info( "Found shutdown preference. Delaying" + "backup start. Shutting down server." ) - if self.check_running(): - self.stop_server() - was_server_running = True + if not update: + was_server_running = False + if self.check_running(): + self.stop_server() + was_server_running = True + + self.helper.ensure_dir_exists(backup_location) - self.helper.ensure_dir_exists(self.settings["backup_path"]) try: backup_filename = ( - f"{self.settings['backup_path']}/" + f"{backup_location}/" f"{datetime.datetime.now().astimezone(self.tz).strftime('%Y-%m-%d_%H-%M-%S')}" # pylint: disable=line-too-long ) logger.info( @@ -1188,42 +1191,36 @@ class ServerInstance: f" (ID#{self.server_id}, path={self.server_path}) " f"at '{backup_filename}'" ) - excluded_dirs = HelpersManagement.get_excluded_backup_dirs(self.server_id) + excluded_dirs = HelpersManagement.get_excluded_backup_dirs(backup_id) server_dir = Helpers.get_os_understandable_path(self.settings["path"]) - if conf["compress"]: - logger.debug( - "Found compress backup to be true. Calling compressed archive" - ) - self.file_helper.make_compressed_backup( - Helpers.get_os_understandable_path(backup_filename), - server_dir, - excluded_dirs, - self.server_id, - ) - else: - logger.debug( - "Found compress backup to be false. 
Calling NON-compressed archive" - ) - self.file_helper.make_backup( - Helpers.get_os_understandable_path(backup_filename), - server_dir, - excluded_dirs, - self.server_id, - ) + + self.file_helper.make_backup( + Helpers.get_os_understandable_path(backup_filename), + server_dir, + excluded_dirs, + self.server_id, + backup_id, + conf["backup_name"], + conf["compress"], + ) while ( - len(self.list_backups()) > conf["max_backups"] + len(self.list_backups(conf)) > conf["max_backups"] and conf["max_backups"] > 0 ): - backup_list = self.list_backups() + backup_list = self.list_backups(conf) oldfile = backup_list[0] - oldfile_path = f"{conf['backup_path']}/{oldfile['path']}" + oldfile_path = f"{backup_location}/{oldfile['path']}" logger.info(f"Removing old backup '{oldfile['path']}'") os.remove(Helpers.get_os_understandable_path(oldfile_path)) - self.is_backingup = False logger.info(f"Backup of server: {self.name} completed") - results = {"percent": 100, "total_files": 0, "current_file": 0} + results = { + "percent": 100, + "total_files": 0, + "current_file": 0, + "backup_id": backup_id, + } if len(WebSocketManager().clients) > 0: WebSocketManager().broadcast_page_params( "/panel/server_detail", @@ -1248,7 +1245,6 @@ class ServerInstance: ) self.run_threaded_server(HelperUsers.get_user_id_by_name("system")) time.sleep(3) - self.last_backup_failed = False if conf["after"]: if self.check_running(): logger.debug( @@ -1256,12 +1252,21 @@ class ServerInstance: ) self.send_command(conf["after"]) # pause to let people read message. 
+ HelpersManagement.update_backup_config( + backup_id, + {"status": json.dumps({"status": "Standby", "message": ""})}, + ) time.sleep(5) - except: + except Exception as e: logger.exception( f"Failed to create backup of server {self.name} (ID {self.server_id})" ) - results = {"percent": 100, "total_files": 0, "current_file": 0} + results = { + "percent": 100, + "total_files": 0, + "current_file": 0, + "backup_id": backup_id, + } if len(WebSocketManager().clients) > 0: WebSocketManager().broadcast_page_params( "/panel/server_detail", @@ -1269,56 +1274,51 @@ class ServerInstance: "backup_status", results, ) - self.is_backingup = False if was_server_running: logger.info( "Backup complete. User had shutdown preference. Starting server." ) self.run_threaded_server(HelperUsers.get_user_id_by_name("system")) - self.last_backup_failed = True - - def backup_status(self, source_path, dest_path): - results = Helpers.calc_percent(source_path, dest_path) - self.backup_stats = results - if len(WebSocketManager().clients) > 0: - WebSocketManager().broadcast_page_params( - "/panel/server_detail", - {"id": str(self.server_id)}, - "backup_status", - results, + HelpersManagement.update_backup_config( + backup_id, + {"status": json.dumps({"status": "Failed", "message": f"{e}"})}, ) + self.set_backup_status() def last_backup_status(self): return self.last_backup_failed - def send_backup_status(self): - try: - return self.backup_stats - except: - return {"percent": 0, "total_files": 0} + def set_backup_status(self): + backups = HelpersManagement.get_backups_by_server(self.server_id, True) + alert = False + for backup in backups: + if json.loads(backup.status)["status"] == "Failed": + alert = True + self.last_backup_failed = alert - def list_backups(self): - if not self.settings["backup_path"]: + def list_backups(self, backup_config: dict) -> list: + if not backup_config: logger.info( f"Error putting backup file list for server with ID: {self.server_id}" ) return [] + backup_location = 
os.path.join( + backup_config["backup_location"], backup_config["backup_id"] + ) if not Helpers.check_path_exists( - Helpers.get_os_understandable_path(self.settings["backup_path"]) + Helpers.get_os_understandable_path(backup_location) ): return [] files = Helpers.get_human_readable_files_sizes( Helpers.list_dir_by_date( - Helpers.get_os_understandable_path(self.settings["backup_path"]) + Helpers.get_os_understandable_path(backup_location) ) ) return [ { "path": os.path.relpath( f["path"], - start=Helpers.get_os_understandable_path( - self.settings["backup_path"] - ), + start=Helpers.get_os_understandable_path(backup_location), ), "size": f["size"], } @@ -1330,7 +1330,7 @@ class ServerInstance: def jar_update(self): self.stats_helper.set_update(True) update_thread = threading.Thread( - target=self.a_jar_update, daemon=True, name=f"exe_update_{self.name}" + target=self.threaded_jar_update, daemon=True, name=f"exe_update_{self.name}" ) update_thread.start() @@ -1371,10 +1371,13 @@ class ServerInstance: def check_update(self): return self.stats_helper.get_server_stats()["updating"] - def a_jar_update(self): + def threaded_jar_update(self): server_users = PermissionsServers.get_server_user_list(self.server_id) was_started = "-1" - self.a_backup_server() + # Get default backup configuration + backup_config = HelpersManagement.get_default_server_backup(self.server_id) + # start threaded backup + self.server_backup_threader(backup_config["backup_id"], True) # checks if server is running. Calls shutdown if it is running. 
if self.check_running(): was_started = True @@ -1403,47 +1406,22 @@ class ServerInstance: "string": message, }, ) - backup_dir = os.path.join( - Helpers.get_os_understandable_path(self.settings["path"]), - "crafty_executable_backups", - ) - # checks if backup directory already exists - if os.path.isdir(backup_dir): - backup_executable = os.path.join(backup_dir, self.settings["executable"]) - else: - logger.info( - f"Executable backup directory not found for Server: {self.name}." - f" Creating one." - ) - os.mkdir(backup_dir) - backup_executable = os.path.join(backup_dir, self.settings["executable"]) - - if len(os.listdir(backup_dir)) > 0: - # removes old backup - logger.info(f"Old backups found for server: {self.name}. Removing...") - for item in os.listdir(backup_dir): - os.remove(os.path.join(backup_dir, item)) - logger.info(f"Old backups removed for server: {self.name}.") - else: - logger.info(f"No old backups found for server: {self.name}") - current_executable = os.path.join( Helpers.get_os_understandable_path(self.settings["path"]), self.settings["executable"], ) - - try: - # copies to backup dir - FileHelpers.copy_file(current_executable, backup_executable) - except FileNotFoundError: - logger.error("Could not create backup of jarfile. 
File not found.") - + backing_up = True # wait for backup - while self.is_backingup: - time.sleep(10) + while backing_up: + # Check to see if we're already backing up + backing_up = self.check_backup_by_id(backup_config["backup_id"]) + time.sleep(2) # check if backup was successful - if self.last_backup_failed: + backup_status = json.loads( + HelpersManagement.get_backup_config(backup_config["backup_id"])["status"] + )["status"] + if backup_status == "Failed": for user in server_users: WebSocketManager().broadcast_user( user, @@ -1528,12 +1506,6 @@ class ServerInstance: WebSocketManager().broadcast_user_page( user, "/panel/dashboard", "send_start_reload", {} ) - WebSocketManager().broadcast_user( - user, - "notification", - "Executable update finished for " + self.name, - ) - self.management_helper.add_to_audit_log_raw( "Alert", "-1", @@ -1656,6 +1628,14 @@ class ServerInstance: except: Console.critical("Can't broadcast server status to websocket") + def check_backup_by_id(self, backup_id: str) -> bool: + # Check to see if we're already backing up + for thread in threading.enumerate(): + if thread.getName() == f"backup_{backup_id}": + Console.debug(f"Backup with id {backup_id} already running!") + return True + return False + def get_servers_stats(self): server_stats = {} diff --git a/app/classes/shared/tasks.py b/app/classes/shared/tasks.py index b9513441..da9735a9 100644 --- a/app/classes/shared/tasks.py +++ b/app/classes/shared/tasks.py @@ -140,7 +140,7 @@ class TasksManager: ) elif command == "backup_server": - svr.a_backup_server() + svr.server_backup_threader(cmd["action_id"]) elif command == "update_executable": svr.jar_update() @@ -240,6 +240,7 @@ class TasksManager: "system" ), "command": schedule.command, + "action_id": schedule.action_id, } ], ) @@ -268,6 +269,7 @@ class TasksManager: "system" ), "command": schedule.command, + "action_id": schedule.action_id, } ], ) @@ -284,6 +286,7 @@ class TasksManager: "system" ), "command": schedule.command, + 
"action_id": schedule.action_id, } ], ) @@ -303,6 +306,7 @@ class TasksManager: "system" ), "command": schedule.command, + "action_id": schedule.action_id, } ], ) @@ -337,6 +341,7 @@ class TasksManager: job_data["cron_string"], job_data["parent"], job_data["delay"], + job_data["action_id"], ) # Checks to make sure some doofus didn't actually make the newly @@ -367,6 +372,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -393,6 +399,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -409,6 +416,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -428,6 +436,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -520,6 +529,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -543,6 +553,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -559,6 +570,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -578,6 +590,7 @@ class TasksManager: "system" ), "command": job_data["command"], + "action_id": job_data["action_id"], } ], ) @@ -653,6 +666,7 @@ class TasksManager: "system" ), "command": schedule.command, + "action_id": schedule.action_id, } ], ) @@ -785,6 +799,18 @@ class TasksManager: self.helper.ensure_dir_exists( os.path.join(self.controller.project_root, "import", "upload") ) + self.helper.ensure_dir_exists( + os.path.join(self.controller.project_root, "temp") + ) + for file in os.listdir(os.path.join(self.controller.project_root, "temp")): + if self.helper.is_file_older_than_x_days( + os.path.join(self.controller.project_root, "temp", file) + ): + try: + os.remove(os.path.join(file)) + except FileNotFoundError: + 
logger.debug("Could not clear out file from temp directory") + for file in os.listdir( os.path.join(self.controller.project_root, "import", "upload") ): @@ -793,7 +819,7 @@ class TasksManager: ): try: os.remove(os.path.join(file)) - except: + except FileNotFoundError: logger.debug("Could not clear out file from import directory") def log_watcher(self): diff --git a/app/classes/shared/translation.py b/app/classes/shared/translation.py index 0e441808..538856a8 100644 --- a/app/classes/shared/translation.py +++ b/app/classes/shared/translation.py @@ -20,7 +20,7 @@ class Translation: def get_language_file(self, language: str): return os.path.join(self.translations_path, str(language) + ".json") - def translate(self, page, word, language): + def translate(self, page, word, language, error=True): fallback_language = "en_EN" translated_word = self.translate_inner(page, word, language) @@ -37,7 +37,9 @@ class Translation: if hasattr(translated_word, "__iter__"): # Multiline strings return "\n".join(translated_word) - return "Error while getting translation" + if error: + return "Error while getting translation" + return word def translate_inner(self, page, word, language) -> t.Union[t.Any, None]: language_file = self.get_language_file(language) diff --git a/app/classes/web/panel_handler.py b/app/classes/web/panel_handler.py index bbbc9d9e..8df48431 100644 --- a/app/classes/web/panel_handler.py +++ b/app/classes/web/panel_handler.py @@ -41,6 +41,8 @@ SUBPAGE_PERMS = { "webhooks": EnumPermissionsServer.CONFIG, } +SCHEDULE_AUTH_ERROR_URL = "/panel/error?error=Unauthorized access To Schedules" + class PanelHandler(BaseHandler): def get_user_roles(self) -> t.Dict[str, list]: @@ -677,36 +679,18 @@ class PanelHandler(BaseHandler): page_data["java_versions"] = page_java if subpage == "backup": server_info = self.controller.servers.get_server_data_by_id(server_id) - page_data["backup_config"] = ( - self.controller.management.get_backup_config(server_id) - ) - exclusions = [] - 
page_data["exclusions"] = ( - self.controller.management.get_excluded_backup_dirs(server_id) + + page_data["backups"] = self.controller.management.get_backups_by_server( + server_id, model=True ) page_data["backing_up"] = ( self.controller.servers.get_server_instance_by_id( server_id ).is_backingup ) - page_data["backup_stats"] = ( - self.controller.servers.get_server_instance_by_id( - server_id - ).send_backup_status() - ) # makes it so relative path is the only thing shown - for file in page_data["exclusions"]: - if Helpers.is_os_windows(): - exclusions.append(file.replace(server_info["path"] + "\\", "")) - else: - exclusions.append(file.replace(server_info["path"] + "/", "")) - page_data["exclusions"] = exclusions + self.controller.servers.refresh_server_settings(server_id) - try: - page_data["backup_list"] = server.list_backups() - except: - page_data["backup_list"] = [] - page_data["backup_path"] = Helpers.wtol_path(server_info["backup_path"]) if subpage == "metrics": try: @@ -780,20 +764,23 @@ class PanelHandler(BaseHandler): elif page == "download_backup": file = self.get_argument("file", "") + backup_id = self.get_argument("backup_id", "") server_id = self.check_server_id() if server_id is None: return - + backup_config = self.controller.management.get_backup_config(backup_id) server_info = self.controller.servers.get_server_data_by_id(server_id) + backup_location = os.path.join(backup_config["backup_location"], backup_id) backup_file = os.path.abspath( os.path.join( - Helpers.get_os_understandable_path(server_info["backup_path"]), file + Helpers.get_os_understandable_path(backup_location), + file, ) ) if not self.helper.is_subdir( backup_file, - Helpers.get_os_understandable_path(server_info["backup_path"]), + Helpers.get_os_understandable_path(backup_location), ) or not os.path.isfile(backup_file): self.redirect("/panel/error?error=Invalid path detected") return @@ -892,6 +879,8 @@ class PanelHandler(BaseHandler): os.path.join(self.helper.root_dir, "app", 
"translations") ) ): + if file == "humanized_index.json": + continue if file.endswith(".json"): if file.split(".")[0] not in self.helper.get_setting( "disabled_language_files" @@ -1130,6 +1119,9 @@ class PanelHandler(BaseHandler): page_data["server_data"] = self.controller.servers.get_server_data_by_id( server_id ) + page_data["backups"] = self.controller.management.get_backups_by_server( + server_id, True + ) page_data["server_stats"] = self.controller.servers.get_server_stats_by_id( server_id ) @@ -1150,6 +1142,7 @@ class PanelHandler(BaseHandler): page_data["schedule"]["delay"] = 0 page_data["schedule"]["time"] = "" page_data["schedule"]["interval"] = 1 + page_data["schedule"]["action_id"] = "" # we don't need to check difficulty here. # We'll just default to basic for new schedules page_data["schedule"]["difficulty"] = "basic" @@ -1158,7 +1151,7 @@ class PanelHandler(BaseHandler): if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]: if not superuser: - self.redirect("/panel/error?error=Unauthorized access To Schedules") + self.redirect(SCHEDULE_AUTH_ERROR_URL) return template = "panel/server_schedule_edit.html" @@ -1195,6 +1188,9 @@ class PanelHandler(BaseHandler): exec_user["user_id"], server_id ) ) + page_data["backups"] = self.controller.management.get_backups_by_server( + server_id, True + ) page_data["server_data"] = self.controller.servers.get_server_data_by_id( server_id ) @@ -1209,6 +1205,7 @@ class PanelHandler(BaseHandler): page_data["schedule"]["server_id"] = server_id page_data["schedule"]["schedule_id"] = schedule.schedule_id page_data["schedule"]["action"] = schedule.action + page_data["schedule"]["action_id"] = schedule.action_id if schedule.name: page_data["schedule"]["name"] = schedule.name else: @@ -1252,11 +1249,141 @@ class PanelHandler(BaseHandler): if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]: if not superuser: - self.redirect("/panel/error?error=Unauthorized access To Schedules") + 
self.redirect(SCHEDULE_AUTH_ERROR_URL) return template = "panel/server_schedule_edit.html" + elif page == "edit_backup": + server_id = self.get_argument("id", None) + backup_id = self.get_argument("backup_id", None) + page_data["active_link"] = "backups" + page_data["permissions"] = { + "Commands": EnumPermissionsServer.COMMANDS, + "Terminal": EnumPermissionsServer.TERMINAL, + "Logs": EnumPermissionsServer.LOGS, + "Schedule": EnumPermissionsServer.SCHEDULE, + "Backup": EnumPermissionsServer.BACKUP, + "Files": EnumPermissionsServer.FILES, + "Config": EnumPermissionsServer.CONFIG, + "Players": EnumPermissionsServer.PLAYERS, + } + if not self.failed_server: + server_obj = self.controller.servers.get_server_instance_by_id( + server_id + ) + page_data["backup_failed"] = server_obj.last_backup_status() + page_data["user_permissions"] = ( + self.controller.server_perms.get_user_id_permissions_list( + exec_user["user_id"], server_id + ) + ) + server_info = self.controller.servers.get_server_data_by_id(server_id) + page_data["backup_config"] = self.controller.management.get_backup_config( + backup_id + ) + page_data["backups"] = self.controller.management.get_backups_by_server( + server_id, model=True + ) + exclusions = [] + page_data["backing_up"] = self.controller.servers.get_server_instance_by_id( + server_id + ).is_backingup + self.controller.servers.refresh_server_settings(server_id) + try: + page_data["backup_list"] = server.list_backups( + page_data["backup_config"] + ) + except: + page_data["backup_list"] = [] + page_data["backup_path"] = Helpers.wtol_path( + page_data["backup_config"]["backup_location"] + ) + page_data["server_data"] = self.controller.servers.get_server_data_by_id( + server_id + ) + page_data["server_stats"] = self.controller.servers.get_server_stats_by_id( + server_id + ) + page_data["server_stats"]["server_type"] = ( + self.controller.servers.get_server_type_by_id(server_id) + ) + page_data["exclusions"] = ( + 
self.controller.management.get_excluded_backup_dirs(backup_id) + ) + # Make exclusion paths relative for page + for file in page_data["exclusions"]: + if Helpers.is_os_windows(): + exclusions.append(file.replace(server_info["path"] + "\\", "")) + else: + exclusions.append(file.replace(server_info["path"] + "/", "")) + page_data["exclusions"] = exclusions + + if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]: + if not superuser: + self.redirect(SCHEDULE_AUTH_ERROR_URL) + return + template = "panel/server_backup_edit.html" + + elif page == "add_backup": + server_id = self.get_argument("id", None) + backup_id = self.get_argument("backup_id", None) + page_data["active_link"] = "backups" + page_data["permissions"] = { + "Commands": EnumPermissionsServer.COMMANDS, + "Terminal": EnumPermissionsServer.TERMINAL, + "Logs": EnumPermissionsServer.LOGS, + "Schedule": EnumPermissionsServer.SCHEDULE, + "Backup": EnumPermissionsServer.BACKUP, + "Files": EnumPermissionsServer.FILES, + "Config": EnumPermissionsServer.CONFIG, + "Players": EnumPermissionsServer.PLAYERS, + } + if not self.failed_server: + server_obj = self.controller.servers.get_server_instance_by_id( + server_id + ) + page_data["backup_failed"] = server_obj.last_backup_status() + page_data["user_permissions"] = ( + self.controller.server_perms.get_user_id_permissions_list( + exec_user["user_id"], server_id + ) + ) + server_info = self.controller.servers.get_server_data_by_id(server_id) + page_data["backup_config"] = { + "excluded_dirs": [], + "max_backups": 0, + "server_id": server_id, + "backup_location": os.path.join(self.helper.backup_path, server_id), + "compress": False, + "shutdown": False, + "before": "", + "after": "", + } + page_data["backing_up"] = False + self.controller.servers.refresh_server_settings(server_id) + + page_data["backup_list"] = [] + page_data["backup_path"] = Helpers.wtol_path( + page_data["backup_config"]["backup_location"] + ) + page_data["server_data"] = 
self.controller.servers.get_server_data_by_id( + server_id + ) + page_data["server_stats"] = self.controller.servers.get_server_stats_by_id( + server_id + ) + page_data["server_stats"]["server_type"] = ( + self.controller.servers.get_server_type_by_id(server_id) + ) + page_data["exclusions"] = [] + + if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]: + if not superuser: + self.redirect(SCHEDULE_AUTH_ERROR_URL) + return + template = "panel/server_backup_edit.html" + elif page == "edit_user": user_id = self.get_argument("id", None) role_servers = self.controller.servers.get_authorized_servers(user_id) @@ -1307,6 +1434,8 @@ class PanelHandler(BaseHandler): for file in sorted( os.listdir(os.path.join(self.helper.root_dir, "app", "translations")) ): + if file == "humanized_index.json": + continue if file.endswith(".json"): if file.split(".")[0] not in self.helper.get_setting( "disabled_language_files" diff --git a/app/classes/web/routes/api/api_handlers.py b/app/classes/web/routes/api/api_handlers.py index a30350a5..78223efe 100644 --- a/app/classes/web/routes/api/api_handlers.py +++ b/app/classes/web/routes/api/api_handlers.py @@ -38,12 +38,14 @@ from app.classes.web.routes.api.servers.server.backups.index import ( ) from app.classes.web.routes.api.servers.server.backups.backup.index import ( ApiServersServerBackupsBackupIndexHandler, + ApiServersServerBackupsBackupFilesIndexHandler, ) from app.classes.web.routes.api.servers.server.files import ( ApiServersServerFilesIndexHandler, ApiServersServerFilesCreateHandler, ApiServersServerFilesZipHandler, ) +from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler from app.classes.web.routes.api.servers.server.tasks.task.children import ( ApiServersServerTasksTaskChildrenHandler, ) @@ -218,13 +220,13 @@ def api_handlers(handler_args): handler_args, ), ( - r"/api/v2/servers/([a-z0-9-]+)/backups/backup/?", + r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/?", 
ApiServersServerBackupsBackupIndexHandler, handler_args, ), ( - r"/api/v2/servers/([a-z0-9-]+)/files/?", - ApiServersServerFilesIndexHandler, + r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/files/?", + ApiServersServerBackupsBackupFilesIndexHandler, handler_args, ), ( @@ -237,6 +239,26 @@ def api_handlers(handler_args): ApiServersServerFilesZipHandler, handler_args, ), + ( + r"/api/v2/crafty/admin/upload/?", + ApiFilesUploadHandler, + handler_args, + ), + ( + r"/api/v2/servers/import/upload/?", + ApiFilesUploadHandler, + handler_args, + ), + ( + r"/api/v2/servers/([a-z0-9-]+)/files/upload/?", + ApiFilesUploadHandler, + handler_args, + ), + ( + r"/api/v2/servers/([a-z0-9-]+)/files(?:/([a-zA-Z0-9-]+))?/?", + ApiServersServerFilesIndexHandler, + handler_args, + ), ( r"/api/v2/servers/([a-z0-9-]+)/tasks/?", ApiServersServerTasksIndexHandler, @@ -273,7 +295,8 @@ def api_handlers(handler_args): handler_args, ), ( - r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)/?", + # optional third argument when we need a action ID + r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?", ApiServersServerActionHandler, handler_args, ), diff --git a/app/classes/web/routes/api/auth/invalidate_tokens.py b/app/classes/web/routes/api/auth/invalidate_tokens.py index f15bf60d..9e38670a 100644 --- a/app/classes/web/routes/api/auth/invalidate_tokens.py +++ b/app/classes/web/routes/api/auth/invalidate_tokens.py @@ -1,6 +1,6 @@ -import datetime import logging from app.classes.web.base_api_handler import BaseApiHandler +from app.classes.shared.helpers import Helpers logger = logging.getLogger(__name__) @@ -13,7 +13,7 @@ class ApiAuthInvalidateTokensHandler(BaseApiHandler): logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}") self.controller.users.raw_update_user( - auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()} + auth_data[4]["user_id"], {"valid_tokens_from": Helpers.get_utc_now()} ) self.finish_json(200, {"status": "ok"}) diff 
--git a/app/classes/web/routes/api/crafty/upload/index.py b/app/classes/web/routes/api/crafty/upload/index.py new file mode 100644 index 00000000..b37ef796 --- /dev/null +++ b/app/classes/web/routes/api/crafty/upload/index.py @@ -0,0 +1,308 @@ +import os +import logging +import shutil +from app.classes.models.server_permissions import EnumPermissionsServer +from app.classes.shared.helpers import Helpers +from app.classes.web.base_api_handler import BaseApiHandler + +logger = logging.getLogger(__name__) +IMAGE_MIME_TYPES = [ + "image/bmp", + "image/cis-cod", + "image/gif", + "image/ief", + "image/jpeg", + "image/pipeg", + "image/svg+xml", + "image/tiff", + "image/x-cmu-raster", + "image/x-cmx", + "image/x-icon", + "image/x-portable-anymap", + "image/x-portable-bitmap", + "image/x-portable-graymap", + "image/x-portable-pixmap", + "image/x-rgb", + "image/x-xbitmap", + "image/x-xpixmap", + "image/x-xwindowdump", + "image/png", + "image/webp", +] + +ARCHIVE_MIME_TYPES = ["application/zip"] + + +class ApiFilesUploadHandler(BaseApiHandler): + async def post(self, server_id=None): + auth_data = self.authenticate_user() + if not auth_data: + return + + upload_type = self.request.headers.get("type") + accepted_types = [] + + if server_id: + # Check to make sure user is authorized for the server + if server_id not in [str(x["server_id"]) for x in auth_data[0]]: + # if the user doesn't have access to the server, return an error + return self.finish_json( + 400, {"status": "error", "error": "NOT_AUTHORIZED"} + ) + mask = self.controller.server_perms.get_lowest_api_perm_mask( + self.controller.server_perms.get_user_permissions_mask( + auth_data[4]["user_id"], server_id + ), + auth_data[5], + ) + # Make sure user has file access for the server + server_permissions = self.controller.server_perms.get_permissions(mask) + if EnumPermissionsServer.FILES not in server_permissions: + # if the user doesn't have Files permission, return an error + return self.finish_json( + 400, 
{"status": "error", "error": "NOT_AUTHORIZED"} + ) + + u_type = "server_upload" + # Make sure user is a super user if they're changing panel settings + elif auth_data[4]["superuser"] and upload_type == "background": + u_type = "admin_config" + self.upload_dir = os.path.join( + self.controller.project_root, + "app/frontend/static/assets/images/auth/custom", + ) + accepted_types = IMAGE_MIME_TYPES + elif upload_type == "import": + # Check that user can make servers + if ( + not self.controller.crafty_perms.can_create_server( + auth_data[4]["user_id"] + ) + and not auth_data[4]["superuser"] + ): + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "data": {"message": ""}, + }, + ) + # Set directory to upload import dir + self.upload_dir = os.path.join( + self.controller.project_root, "import", "upload" + ) + u_type = "server_import" + accepted_types = ARCHIVE_MIME_TYPES + else: + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "data": {"message": ""}, + }, + ) + # Get the headers from the request + self.chunk_hash = self.request.headers.get("chunkHash", 0) + self.file_id = self.request.headers.get("fileId") + self.chunked = self.request.headers.get("chunked", False) + self.filename = self.request.headers.get("fileName", None) + try: + file_size = int(self.request.headers.get("fileSize", None)) + total_chunks = int(self.request.headers.get("totalChunks", 0)) + except TypeError: + return self.finish_json( + 400, {"status": "error", "error": "TYPE ERROR", "data": {}} + ) + self.chunk_index = self.request.headers.get("chunkId") + if u_type == "server_upload": + self.upload_dir = self.request.headers.get("location", None) + self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id) + + if u_type == "server_upload": + # If this is an upload from a server the path will be what + # Is requested + full_path = os.path.join(self.upload_dir, self.filename) + + # Check to make sure 
the requested path is inside the server's directory + if not self.helper.is_subdir( + full_path, + Helpers.get_os_understandable_path( + self.controller.servers.get_server_data_by_id(server_id)["path"] + ), + ): + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT AUTHORIZED", + "data": {"message": "Traversal detected"}, + }, + ) + # Check to make sure the file type we're being sent is what we're expecting + if ( + self.file_helper.check_mime_types(self.filename) not in accepted_types + and u_type != "server_upload" + ): + return self.finish_json( + 422, + { + "status": "error", + "error": "INVALID FILE TYPE", + "data": { + "message": f"Invalid File Type only accepts {accepted_types}" + }, + }, + ) + _total, _used, free = shutil.disk_usage(self.upload_dir) + + # Check to see if we have enough space + if free <= file_size: + return self.finish_json( + 507, + { + "status": "error", + "error": "NO STORAGE SPACE", + "data": {"message": "Out Of Space!"}, + }, + ) + + # If this has no chunk index we know it's the inital request + if self.chunked and not self.chunk_index: + return self.finish_json( + 200, {"status": "ok", "data": {"file-id": self.file_id}} + ) + # Create the upload and temp directories if they don't exist + os.makedirs(self.upload_dir, exist_ok=True) + + # Check for chunked header. We will handle this request differently + # if it doesn't exist + if not self.chunked: + # Write the file directly to the upload dir + with open(os.path.join(self.upload_dir, self.filename), "wb") as file: + chunk = self.request.body + if chunk: + file.write(chunk) + # We'll check the file hash against the sent hash once the file is + # written. We cannot check this buffer. + calculated_hash = self.file_helper.calculate_file_hash( + os.path.join(self.upload_dir, self.filename) + ) + logger.info( + f"File upload completed. 
Filename: {self.filename} Type: {u_type}" + ) + return self.finish_json( + 200, + { + "status": "completed", + "data": {"message": "File uploaded successfully"}, + }, + ) + # Since this is a chunked upload we'll create the temp dir for parts. + os.makedirs(self.temp_dir, exist_ok=True) + + # Read headers and query parameters + content_length = int(self.request.headers.get("Content-Length")) + if content_length <= 0: + logger.error( + f"File upload failed. Filename: {self.filename}" + f"Type: {u_type} Error: INVALID CONTENT LENGTH" + ) + return self.finish_json( + 400, + { + "status": "error", + "error": "INVALID CONTENT LENGTH", + "data": {"message": "Invalid content length"}, + }, + ) + + # At this point filename, chunk index and total chunks are required + # in the request + if not self.filename or self.chunk_index is None: + logger.error( + f"File upload failed. Filename: {self.filename}" + f"Type: {u_type} Error: CHUNK INDEX NOT FOUND" + ) + return self.finish_json( + 400, + { + "status": "error", + "error": "INDEX ERROR", + "data": { + "message": "Filename, chunk_index," + " and total_chunks are required" + }, + }, + ) + + # Calculate the hash of the buffer and compare it against the expected hash + calculated_hash = self.file_helper.calculate_buffer_hash(self.request.body) + if str(self.chunk_hash) != str(calculated_hash): + logger.error( + f"File upload failed. 
Filename: {self.filename}" + f"Type: {u_type} Error: INVALID HASH" + ) + return self.finish_json( + 400, + { + "status": "error", + "error": "INVALID_HASH", + "data": { + "message": "Hash recieved does not match reported sent hash.", + "chunk_id": self.chunk_index, + }, + }, + ) + + # File paths + file_path = os.path.join(self.upload_dir, self.filename) + chunk_path = os.path.join( + self.temp_dir, f"{self.filename}.part{self.chunk_index}" + ) + + # Save the chunk + with open(chunk_path, "wb") as f: + f.write(self.request.body) + + # Check if all chunks are received + received_chunks = [ + f + for f in os.listdir(self.temp_dir) + if f.startswith(f"{self.filename}.part") + ] + # When we've reached the total chunks we'll + # Compare the hash and write the file + if len(received_chunks) == total_chunks: + with open(file_path, "wb") as outfile: + for i in range(total_chunks): + chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}") + with open(chunk_file, "rb") as infile: + outfile.write(infile.read()) + os.remove(chunk_file) + logger.info( + f"File upload completed. 
Filename: {self.filename}" + f" Path: {file_path} Type: {u_type}" + ) + self.controller.management.add_to_audit_log( + auth_data[4]["user_id"], + f"Uploaded file {self.filename}", + server_id, + self.request.remote_ip, + ) + self.finish_json( + 200, + { + "status": "completed", + "data": {"message": "File uploaded successfully"}, + }, + ) + else: + self.finish_json( + 200, + { + "status": "partial", + "data": {"message": f"Chunk {self.chunk_index} received"}, + }, + ) diff --git a/app/classes/web/routes/api/roles/index.py b/app/classes/web/routes/api/roles/index.py index b0ddd834..45a00bf0 100644 --- a/app/classes/web/routes/api/roles/index.py +++ b/app/classes/web/routes/api/roles/index.py @@ -2,6 +2,7 @@ import typing as t from jsonschema import ValidationError, validate import orjson from playhouse.shortcuts import model_to_dict +from app.classes.models.crafty_permissions import EnumPermissionsCrafty from app.classes.web.base_api_handler import BaseApiHandler create_role_schema = { @@ -72,7 +73,7 @@ class ApiRolesIndexHandler(BaseApiHandler): return ( _, - _, + exec_user_permissions_crafty, _, superuser, _, @@ -82,7 +83,10 @@ class ApiRolesIndexHandler(BaseApiHandler): # GET /api/v2/roles?ids=true get_only_ids = self.get_query_argument("ids", None) == "true" - if not superuser: + if ( + not superuser + and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty + ): return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) self.finish_json( @@ -105,14 +109,17 @@ class ApiRolesIndexHandler(BaseApiHandler): return ( _, - _, + exec_user_permissions_crafty, _, superuser, user, _, ) = auth_data - if not superuser: + if ( + not superuser + and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty + ): return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) try: @@ -139,6 +146,8 @@ class ApiRolesIndexHandler(BaseApiHandler): role_name = data["name"] manager = data.get("manager", None) + if not 
superuser and not manager: + manager = auth_data[4]["user_id"] if manager == self.controller.users.get_id_by_name("SYSTEM") or manager == 0: manager = None diff --git a/app/classes/web/routes/api/roles/role/index.py b/app/classes/web/routes/api/roles/role/index.py index 54b20aaa..1eab6183 100644 --- a/app/classes/web/routes/api/roles/role/index.py +++ b/app/classes/web/routes/api/roles/role/index.py @@ -1,6 +1,7 @@ from jsonschema import ValidationError, validate import orjson -from peewee import DoesNotExist +from peewee import DoesNotExist, IntegrityError +from app.classes.models.crafty_permissions import EnumPermissionsCrafty from app.classes.web.base_api_handler import BaseApiHandler modify_role_schema = { @@ -71,14 +72,17 @@ class ApiRolesRoleIndexHandler(BaseApiHandler): return ( _, - _, + exec_user_permissions_crafty, _, superuser, _, _, ) = auth_data - if not superuser: + if ( + not superuser + and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty + ): return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) try: @@ -101,8 +105,11 @@ class ApiRolesRoleIndexHandler(BaseApiHandler): user, _, ) = auth_data - - if not superuser: + role = self.controller.roles.get_role(role_id) + if ( + str(role.get("manager", "no manager found")) != str(auth_data[4]["user_id"]) + and not superuser + ): return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) self.controller.roles.remove_role(role_id) @@ -125,7 +132,7 @@ class ApiRolesRoleIndexHandler(BaseApiHandler): return ( _, - _, + exec_user_permissions_crafty, _, superuser, user, @@ -133,7 +140,10 @@ class ApiRolesRoleIndexHandler(BaseApiHandler): ) = auth_data role = self.controller.roles.get_role(role_id) - if not superuser and user["user_id"] != role["manager"]: + if not superuser and ( + user["user_id"] != role["manager"] + or EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty + ): return self.finish_json( 400, { @@ -180,7 +190,10 @@ class 
ApiRolesRoleIndexHandler(BaseApiHandler): ) except DoesNotExist: return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"}) - + except IntegrityError: + return self.finish_json( + 404, {"status": "error", "error": "ROLE_NAME_EXISTS"} + ) self.controller.management.add_to_audit_log( user["user_id"], f"modified role with ID {role_id}", diff --git a/app/classes/web/routes/api/servers/index.py b/app/classes/web/routes/api/servers/index.py index 43cf01e2..ca551326 100644 --- a/app/classes/web/routes/api/servers/index.py +++ b/app/classes/web/routes/api/servers/index.py @@ -23,6 +23,7 @@ new_server_schema = { "type": "string", "examples": ["My Server"], "minLength": 2, + "pattern": "^[^/\\\\]*$", }, "roles": {"title": "Roles to add", "type": "array", "examples": [1, 2, 3]}, "stop_command": { diff --git a/app/classes/web/routes/api/servers/server/action.py b/app/classes/web/routes/api/servers/server/action.py index aba06da3..d8e58b2f 100644 --- a/app/classes/web/routes/api/servers/server/action.py +++ b/app/classes/web/routes/api/servers/server/action.py @@ -1,5 +1,6 @@ import logging import os +import json from app.classes.models.server_permissions import EnumPermissionsServer from app.classes.models.servers import Servers from app.classes.shared.file_helpers import FileHelpers @@ -10,7 +11,7 @@ logger = logging.getLogger(__name__) class ApiServersServerActionHandler(BaseApiHandler): - def post(self, server_id: str, action: str): + def post(self, server_id: str, action: str, action_id=None): auth_data = self.authenticate_user() if not auth_data: return @@ -54,7 +55,7 @@ class ApiServersServerActionHandler(BaseApiHandler): return self._agree_eula(server_id, auth_data[4]["user_id"]) self.controller.management.send_command( - auth_data[4]["user_id"], server_id, self.get_remote_ip(), action + auth_data[4]["user_id"], server_id, self.get_remote_ip(), action, action_id ) self.finish_json( @@ -82,6 +83,20 @@ class ApiServersServerActionHandler(BaseApiHandler): 
new_server_id = self.helper.create_uuid() new_server_path = os.path.join(self.helper.servers_dir, new_server_id) new_backup_path = os.path.join(self.helper.backup_path, new_server_id) + backup_data = { + "backup_name": f"{new_server_name} Backup", + "backup_location": new_backup_path, + "excluded_dirs": "", + "max_backups": 0, + "server_id": new_server_id, + "compress": False, + "shutdown": False, + "before": "", + "after": "", + "default": True, + "status": json.dumps({"status": "Standby", "message": ""}), + "enabled": True, + } new_server_command = str(server_data.get("execution_command")).replace( server_id, new_server_id ) @@ -93,7 +108,6 @@ class ApiServersServerActionHandler(BaseApiHandler): new_server_name, new_server_id, new_server_path, - new_backup_path, new_server_command, server_data.get("executable"), new_server_log_path, @@ -103,6 +117,8 @@ class ApiServersServerActionHandler(BaseApiHandler): server_data.get("type"), ) + self.controller.management.add_backup_config(backup_data) + self.controller.management.add_to_audit_log( user_id, f"is cloning server {server_id} named {server_data.get('server_name')}", diff --git a/app/classes/web/routes/api/servers/server/backups/backup/index.py b/app/classes/web/routes/api/servers/server/backups/backup/index.py index 1b9ff915..5d8fd2b5 100644 --- a/app/classes/web/routes/api/servers/server/backups/backup/index.py +++ b/app/classes/web/routes/api/servers/server/backups/backup/index.py @@ -11,7 +11,7 @@ from app.classes.shared.helpers import Helpers logger = logging.getLogger(__name__) -backup_schema = { +BACKUP_SCHEMA = { "type": "object", "properties": { "filename": {"type": "string", "minLength": 5}, @@ -19,11 +19,44 @@ backup_schema = { "additionalProperties": False, "minProperties": 1, } +BACKUP_PATCH_SCHEMA = { + "type": "object", + "properties": { + "backup_name": {"type": "string", "minLength": 3}, + "backup_location": {"type": "string", "minLength": 1}, + "max_backups": {"type": "integer"}, + "compress": 
{"type": "boolean"}, + "shutdown": {"type": "boolean"}, + "before": {"type": "string"}, + "after": {"type": "string"}, + "excluded_dirs": {"type": "array"}, + }, + "additionalProperties": False, + "minProperties": 1, +} + +BASIC_BACKUP_PATCH_SCHEMA = { + "type": "object", + "properties": { + "backup_name": {"type": "string", "minLength": 3}, + "max_backups": {"type": "integer"}, + "compress": {"type": "boolean"}, + "shutdown": {"type": "boolean"}, + "before": {"type": "string"}, + "after": {"type": "string"}, + "excluded_dirs": {"type": "array"}, + }, + "additionalProperties": False, + "minProperties": 1, +} +ID_MISMATCH = "Server ID backup server ID different" +GENERAL_AUTH_ERROR = "Authorization Error" class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler): - def get(self, server_id: str): + def get(self, server_id: str, backup_id: str): auth_data = self.authenticate_user() + backup_conf = self.controller.management.get_backup_config(backup_id) if not auth_data: return mask = self.controller.server_perms.get_lowest_api_perm_mask( @@ -32,15 +65,40 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler): ), auth_data[5], ) + if backup_conf["server_id"]["server_id"] != server_id: + return self.finish_json( + 400, + { + "status": "error", + "error": "ID_MISMATCH", + "error_data": ID_MISMATCH, + }, + ) server_permissions = self.controller.server_perms.get_permissions(mask) if EnumPermissionsServer.BACKUP not in server_permissions: # if the user doesn't have Schedule permission, return an error - return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) - self.finish_json(200, self.controller.management.get_backup_config(server_id)) + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "error_data": GENERAL_AUTH_ERROR, + }, + ) + self.finish_json(200, backup_conf) - def delete(self, server_id: str): + def delete(self, server_id: str, backup_id: str): auth_data = self.authenticate_user() - 
backup_conf = self.controller.management.get_backup_config(server_id) + backup_conf = self.controller.management.get_backup_config(backup_id) + if backup_conf["server_id"]["server_id"] != server_id: + return self.finish_json( + 400, + { + "status": "error", + "error": "ID_MISMATCH", + "error_data": ID_MISMATCH, + }, + ) if not auth_data: return mask = self.controller.server_perms.get_lowest_api_perm_mask( @@ -52,7 +110,66 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler): server_permissions = self.controller.server_perms.get_permissions(mask) if EnumPermissionsServer.BACKUP not in server_permissions: # if the user doesn't have Schedule permission, return an error - return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "error_data": GENERAL_AUTH_ERROR, + }, + ) + + self.controller.management.add_to_audit_log( + auth_data[4]["user_id"], + f"Edited server {server_id}: removed backup config" + f" {backup_conf['backup_name']}", + server_id, + self.get_remote_ip(), + ) + if backup_conf["default"]: + return self.finish_json( + 405, + { + "status": "error", + "error": "NOT_ALLOWED", + "error_data": "Cannot delete default backup", + }, + ) + self.controller.management.delete_backup_config(backup_id) + + return self.finish_json(200, {"status": "ok"}) + + def post(self, server_id: str, backup_id: str): + auth_data = self.authenticate_user() + if not auth_data: + return + mask = self.controller.server_perms.get_lowest_api_perm_mask( + self.controller.server_perms.get_user_permissions_mask( + auth_data[4]["user_id"], server_id + ), + auth_data[5], + ) + server_permissions = self.controller.server_perms.get_permissions(mask) + if EnumPermissionsServer.BACKUP not in server_permissions: + # if the user doesn't have Schedule permission, return an error + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "error_data": 
GENERAL_AUTH_ERROR, + }, + ) + backup_config = self.controller.management.get_backup_config(backup_id) + if backup_config["server_id"]["server_id"] != server_id: + return self.finish_json( + 400, + { + "status": "error", + "error": "ID_MISMATCH", + "error_data": ID_MISMATCH, + }, + ) try: data = json.loads(self.request.body) @@ -61,7 +178,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler): 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)} ) try: - validate(data, backup_schema) + validate(data, BACKUP_SCHEMA) except ValidationError as e: return self.finish_json( 400, @@ -72,9 +189,246 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler): }, ) + svr_obj = self.controller.servers.get_server_obj(server_id) + server_data = self.controller.servers.get_server_data_by_id(server_id) + zip_name = data["filename"] + # import the server again based on zipfile + backup_config = self.controller.management.get_backup_config(backup_id) + backup_location = os.path.join( + backup_config["backup_location"], backup_config["backup_id"] + ) + if Helpers.validate_traversal(backup_location, zip_name): + try: + temp_dir = Helpers.unzip_backup_archive(backup_location, zip_name) + except (FileNotFoundError, NotADirectoryError) as e: + return self.finish_json( + 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"} + ) + if server_data["type"] == "minecraft-java": + new_server = self.controller.restore_java_zip_server( + svr_obj.server_name, + temp_dir, + server_data["executable"], + "1", + "2", + server_data["server_port"], + server_data["created_by"], + ) + elif server_data["type"] == "minecraft-bedrock": + new_server = self.controller.restore_bedrock_zip_server( + svr_obj.server_name, + temp_dir, + server_data["executable"], + server_data["server_port"], + server_data["created_by"], + ) + new_server_id = new_server + new_server = self.controller.servers.get_server_data(new_server) + self.controller.rename_backup_dir( + server_id, + 
new_server_id, + new_server["server_id"], + ) + # preserve current schedules + for schedule in self.controller.management.get_schedules_by_server( + server_id + ): + job_data = self.controller.management.get_scheduled_task( + schedule.schedule_id + ) + job_data["server_id"] = new_server_id + del job_data["schedule_id"] + self.tasks_manager.update_job(schedule.schedule_id, job_data) + # preserve execution command + new_server_obj = self.controller.servers.get_server_obj(new_server_id) + new_server_obj.execution_command = server_data["execution_command"] + # reset executable path + if svr_obj.path in svr_obj.executable: + new_server_obj.executable = str(svr_obj.executable).replace( + svr_obj.path, new_server_obj.path + ) + # reset run command path + if svr_obj.path in svr_obj.execution_command: + new_server_obj.execution_command = str( + svr_obj.execution_command + ).replace(svr_obj.path, new_server_obj.path) + # reset log path + if svr_obj.path in svr_obj.log_path: + new_server_obj.log_path = str(svr_obj.log_path).replace( + svr_obj.path, new_server_obj.path + ) + self.controller.servers.update_server(new_server_obj) + + # preserve backup config + server_backups = self.controller.management.get_backups_by_server(server_id) + for backup in server_backups: + old_backup_id = server_backups[backup]["backup_id"] + del server_backups[backup]["backup_id"] + server_backups[backup]["server_id"] = new_server_id + if str(server_id) in (server_backups[backup]["backup_location"]): + server_backups[backup]["backup_location"] = str( + server_backups[backup]["backup_location"] + ).replace(str(server_id), str(new_server_id)) + new_backup_id = self.controller.management.add_backup_config( + server_backups[backup] + ) + os.listdir(server_backups[backup]["backup_location"]) + FileHelpers.move_dir( + os.path.join( + server_backups[backup]["backup_location"], old_backup_id + ), + os.path.join( + server_backups[backup]["backup_location"], new_backup_id + ), + ) + # remove old server's 
tasks + try: + self.tasks_manager.remove_all_server_tasks(server_id) + except JobLookupError as e: + logger.info("No active tasks found for server: {e}") + self.controller.remove_server(server_id, True) + + self.controller.management.add_to_audit_log( + auth_data[4]["user_id"], + f"Restored server {server_id} backup {data['filename']}", + server_id, + self.get_remote_ip(), + ) + + return self.finish_json(200, {"status": "ok"}) + + def patch(self, server_id: str, backup_id: str): + auth_data = self.authenticate_user() + if not auth_data: + return + + try: + data = json.loads(self.request.body) + except json.decoder.JSONDecodeError as e: + return self.finish_json( + 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)} + ) + + try: + if auth_data[4]["superuser"]: + validate(data, BACKUP_PATCH_SCHEMA) + else: + validate(data, BASIC_BACKUP_PATCH_SCHEMA) + except ValidationError as e: + return self.finish_json( + 400, + { + "status": "error", + "error": "INVALID_JSON_SCHEMA", + "error_data": str(e), + }, + ) + backup_conf = self.controller.management.get_backup_config(backup_id) + if server_id not in [str(x["server_id"]) for x in auth_data[0]]: + # if the user doesn't have access to the server, return an error + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "error_data": GENERAL_AUTH_ERROR, + }, + ) + if backup_conf["server_id"]["server_id"] != server_id: + return self.finish_json( + 400, + { + "status": "error", + "error": "ID_MISMATCH", + "error_data": ID_MISMATCH, + }, + ) + mask = self.controller.server_perms.get_lowest_api_perm_mask( + self.controller.server_perms.get_user_permissions_mask( + auth_data[4]["user_id"], server_id + ), + auth_data[5], + ) + server_permissions = self.controller.server_perms.get_permissions(mask) + if EnumPermissionsServer.BACKUP not in server_permissions: + # if the user doesn't have Schedule permission, return an error + return self.finish_json( + 400, + { + "status": "error", 
+ "error": "NOT_AUTHORIZED", + "error_data": GENERAL_AUTH_ERROR, + }, + ) + self.controller.management.update_backup_config(backup_id, data) + return self.finish_json(200, {"status": "ok"}) + + +class ApiServersServerBackupsBackupFilesIndexHandler(BaseApiHandler): + def delete(self, server_id: str, backup_id: str): + auth_data = self.authenticate_user() + backup_conf = self.controller.management.get_backup_config(backup_id) + if backup_conf["server_id"]["server_id"] != server_id: + return self.finish_json( + 400, + { + "status": "error", + "error": "ID_MISMATCH", + "error_data": ID_MISMATCH, + }, + ) + if not auth_data: + return + mask = self.controller.server_perms.get_lowest_api_perm_mask( + self.controller.server_perms.get_user_permissions_mask( + auth_data[4]["user_id"], server_id + ), + auth_data[5], + ) + server_permissions = self.controller.server_perms.get_permissions(mask) + if EnumPermissionsServer.BACKUP not in server_permissions: + # if the user doesn't have Schedule permission, return an error + return self.finish_json( + 400, + { + "status": "error", + "error": "NOT_AUTHORIZED", + "error_data": GENERAL_AUTH_ERROR, + }, + ) + + try: + data = json.loads(self.request.body) + except json.decoder.JSONDecodeError as e: + return self.finish_json( + 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)} + ) + try: + validate(data, BACKUP_SCHEMA) + except ValidationError as e: + return self.finish_json( + 400, + { + "status": "error", + "error": "INVALID_JSON_SCHEMA", + "error_data": str(e), + }, + ) + self.helper.validate_traversal( + os.path.join(backup_conf["backup_location"], backup_conf["backup_id"]), + os.path.join( + backup_conf["backup_location"], + backup_conf["backup_id"], + data["filename"], + ), + ) try: FileHelpers.del_file( - os.path.join(backup_conf["backup_path"], data["filename"]) + os.path.join( + backup_conf["backup_location"], + backup_conf["backup_id"], + data["filename"], + ) ) except Exception as e: return 
self.finish_json( @@ -88,136 +442,3 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler): ) return self.finish_json(200, {"status": "ok"}) - - def post(self, server_id: str): - auth_data = self.authenticate_user() - if not auth_data: - return - mask = self.controller.server_perms.get_lowest_api_perm_mask( - self.controller.server_perms.get_user_permissions_mask( - auth_data[4]["user_id"], server_id - ), - auth_data[5], - ) - server_permissions = self.controller.server_perms.get_permissions(mask) - if EnumPermissionsServer.BACKUP not in server_permissions: - # if the user doesn't have Schedule permission, return an error - return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) - - try: - data = json.loads(self.request.body) - except json.decoder.JSONDecodeError as e: - return self.finish_json( - 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)} - ) - try: - validate(data, backup_schema) - except ValidationError as e: - return self.finish_json( - 400, - { - "status": "error", - "error": "INVALID_JSON_SCHEMA", - "error_data": str(e), - }, - ) - - try: - svr_obj = self.controller.servers.get_server_obj(server_id) - server_data = self.controller.servers.get_server_data_by_id(server_id) - zip_name = data["filename"] - # import the server again based on zipfile - backup_path = svr_obj.backup_path - if Helpers.validate_traversal(backup_path, zip_name): - temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name) - if server_data["type"] == "minecraft-java": - new_server = self.controller.restore_java_zip_server( - svr_obj.server_name, - temp_dir, - server_data["executable"], - "1", - "2", - server_data["server_port"], - server_data["created_by"], - ) - elif server_data["type"] == "minecraft-bedrock": - new_server = self.controller.restore_bedrock_zip_server( - svr_obj.server_name, - temp_dir, - server_data["executable"], - server_data["server_port"], - server_data["created_by"], - ) - new_server_id = new_server 
- new_server = self.controller.servers.get_server_data(new_server) - self.controller.rename_backup_dir( - server_id, new_server_id, new_server["server_id"] - ) - # preserve current schedules - for schedule in self.controller.management.get_schedules_by_server( - server_id - ): - job_data = self.controller.management.get_scheduled_task( - schedule.schedule_id - ) - job_data["server_id"] = new_server_id - del job_data["schedule_id"] - self.tasks_manager.update_job(schedule.schedule_id, job_data) - # preserve execution command - new_server_obj = self.controller.servers.get_server_obj(new_server_id) - new_server_obj.execution_command = server_data["execution_command"] - # reset executable path - if svr_obj.path in svr_obj.executable: - new_server_obj.executable = str(svr_obj.executable).replace( - svr_obj.path, new_server_obj.path - ) - # reset run command path - if svr_obj.path in svr_obj.execution_command: - new_server_obj.execution_command = str( - svr_obj.execution_command - ).replace(svr_obj.path, new_server_obj.path) - # reset log path - if svr_obj.path in svr_obj.log_path: - new_server_obj.log_path = str(svr_obj.log_path).replace( - svr_obj.path, new_server_obj.path - ) - self.controller.servers.update_server(new_server_obj) - - # preserve backup config - backup_config = self.controller.management.get_backup_config(server_id) - excluded_dirs = [] - server_obj = self.controller.servers.get_server_obj(server_id) - loop_backup_path = self.helper.wtol_path(server_obj.path) - for item in self.controller.management.get_excluded_backup_dirs( - server_id - ): - item_path = self.helper.wtol_path(item) - bu_path = os.path.relpath(item_path, loop_backup_path) - bu_path = os.path.join(new_server_obj.path, bu_path) - excluded_dirs.append(bu_path) - self.controller.management.set_backup_config( - new_server_id, - new_server_obj.backup_path, - backup_config["max_backups"], - excluded_dirs, - backup_config["compress"], - backup_config["shutdown"], - ) - # remove old server's 
tasks - try: - self.tasks_manager.remove_all_server_tasks(server_id) - except JobLookupError as e: - logger.info("No active tasks found for server: {e}") - self.controller.remove_server(server_id, True) - except (FileNotFoundError, NotADirectoryError) as e: - return self.finish_json( - 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"} - ) - self.controller.management.add_to_audit_log( - auth_data[4]["user_id"], - f"Restored server {server_id} backup {data['filename']}", - server_id, - self.get_remote_ip(), - ) - - return self.finish_json(200, {"status": "ok"}) diff --git a/app/classes/web/routes/api/servers/server/backups/index.py b/app/classes/web/routes/api/servers/server/backups/index.py index 865fe25a..a155f943 100644 --- a/app/classes/web/routes/api/servers/server/backups/index.py +++ b/app/classes/web/routes/api/servers/server/backups/index.py @@ -1,3 +1,4 @@ +import os import logging import json from jsonschema import validate @@ -10,13 +11,14 @@ logger = logging.getLogger(__name__) backup_patch_schema = { "type": "object", "properties": { - "backup_path": {"type": "string", "minLength": 1}, + "backup_name": {"type": "string", "minLength": 3}, + "backup_location": {"type": "string", "minLength": 1}, "max_backups": {"type": "integer"}, "compress": {"type": "boolean"}, "shutdown": {"type": "boolean"}, - "backup_before": {"type": "string"}, - "backup_after": {"type": "string"}, - "exclusions": {"type": "array"}, + "before": {"type": "string"}, + "after": {"type": "string"}, + "excluded_dirs": {"type": "array"}, }, "additionalProperties": False, "minProperties": 1, @@ -25,12 +27,13 @@ backup_patch_schema = { basic_backup_patch_schema = { "type": "object", "properties": { + "backup_name": {"type": "string", "minLength": 3}, "max_backups": {"type": "integer"}, "compress": {"type": "boolean"}, "shutdown": {"type": "boolean"}, - "backup_before": {"type": "string"}, - "backup_after": {"type": "string"}, - "exclusions": {"type": "array"}, + "before": {"type": 
"string"}, + "after": {"type": "string"}, + "excluded_dirs": {"type": "array"}, }, "additionalProperties": False, "minProperties": 1, @@ -52,9 +55,11 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler): if EnumPermissionsServer.BACKUP not in server_permissions: # if the user doesn't have Schedule permission, return an error return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) - self.finish_json(200, self.controller.management.get_backup_config(server_id)) + self.finish_json( + 200, self.controller.management.get_backups_by_server(server_id) + ) - def patch(self, server_id: str): + def post(self, server_id: str): auth_data = self.authenticate_user() if not auth_data: return @@ -80,7 +85,6 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler): "error_data": str(e), }, ) - if server_id not in [str(x["server_id"]) for x in auth_data[0]]: # if the user doesn't have access to the server, return an error return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) @@ -94,33 +98,12 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler): if EnumPermissionsServer.BACKUP not in server_permissions: # if the user doesn't have Schedule permission, return an error return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"}) - - self.controller.management.set_backup_config( - server_id, - data.get( - "backup_path", - self.controller.management.get_backup_config(server_id)["backup_path"], - ), - data.get( - "max_backups", - self.controller.management.get_backup_config(server_id)["max_backups"], - ), - data.get("exclusions"), - data.get( - "compress", - self.controller.management.get_backup_config(server_id)["compress"], - ), - data.get( - "shutdown", - self.controller.management.get_backup_config(server_id)["shutdown"], - ), - data.get( - "backup_before", - self.controller.management.get_backup_config(server_id)["before"], - ), - data.get( - "backup_after", - 
self.controller.management.get_backup_config(server_id)["after"], - ), - ) + # Set the backup location automatically for non-super users. We should probably + # make the default location configurable for SU eventually + if not auth_data[4]["superuser"]: + data["backup_location"] = os.path.join(self.helper.backup_path, server_id) + data["server_id"] = server_id + if not data.get("excluded_dirs", None): + data["excluded_dirs"] = [] + self.controller.management.add_backup_config(data) return self.finish_json(200, {"status": "ok"}) diff --git a/app/classes/web/routes/api/servers/server/files.py b/app/classes/web/routes/api/servers/server/files.py index 2951ff25..2699ae0c 100644 --- a/app/classes/web/routes/api/servers/server/files.py +++ b/app/classes/web/routes/api/servers/server/files.py @@ -72,7 +72,7 @@ file_delete_schema = { class ApiServersServerFilesIndexHandler(BaseApiHandler): - def post(self, server_id: str): + def post(self, server_id: str, backup_id=None): auth_data = self.authenticate_user() if not auth_data: return @@ -149,21 +149,35 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler): filename = html.escape(raw_filename) rel = os.path.join(folder, raw_filename) dpath = os.path.join(folder, filename) - if str(dpath) in self.controller.management.get_excluded_backup_dirs( - server_id - ): - if os.path.isdir(rel): - return_json[filename] = { - "path": dpath, - "dir": True, - "excluded": True, - } + if backup_id: + if str( + dpath + ) in self.controller.management.get_excluded_backup_dirs(backup_id): + if os.path.isdir(rel): + return_json[filename] = { + "path": dpath, + "dir": True, + "excluded": True, + } + else: + return_json[filename] = { + "path": dpath, + "dir": False, + "excluded": True, + } else: - return_json[filename] = { - "path": dpath, - "dir": False, - "excluded": True, - } + if os.path.isdir(rel): + return_json[filename] = { + "path": dpath, + "dir": True, + "excluded": False, + } + else: + return_json[filename] = { + "path": dpath, + 
"dir": False, + "excluded": False, + } else: if os.path.isdir(rel): return_json[filename] = { @@ -189,7 +203,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler): ) self.finish_json(200, {"status": "ok", "data": file_contents}) - def delete(self, server_id: str): + def delete(self, server_id: str, _backup_id=None): auth_data = self.authenticate_user() if not auth_data: return @@ -247,7 +261,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler): return self.finish_json(200, {"status": "ok"}) return self.finish_json(500, {"status": "error", "error": str(proc)}) - def patch(self, server_id: str): + def patch(self, server_id: str, _backup_id): auth_data = self.authenticate_user() if not auth_data: return @@ -301,7 +315,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler): file_object.write(file_contents) return self.finish_json(200, {"status": "ok"}) - def put(self, server_id: str): + def put(self, server_id: str, _backup_id): auth_data = self.authenticate_user() if not auth_data: return diff --git a/app/classes/web/routes/api/servers/server/index.py b/app/classes/web/routes/api/servers/server/index.py index 9bfc3a9a..3562334c 100644 --- a/app/classes/web/routes/api/servers/server/index.py +++ b/app/classes/web/routes/api/servers/server/index.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) server_patch_schema = { "type": "object", "properties": { - "server_name": {"type": "string", "minLength": 1}, + "server_name": {"type": "string", "minLength": 2, "pattern": "^[^/\\\\]*$"}, "backup_path": {"type": "string"}, "executable": {"type": "string"}, "log_path": {"type": "string", "minLength": 1}, diff --git a/app/classes/web/routes/api/servers/server/tasks/index.py b/app/classes/web/routes/api/servers/server/tasks/index.py index 0c03319c..ed8b9df9 100644 --- a/app/classes/web/routes/api/servers/server/tasks/index.py +++ b/app/classes/web/routes/api/servers/server/tasks/index.py @@ -21,6 +21,9 @@ new_task_schema = { "action": { "type": 
"string", }, + "action_id": { + "type": "string", + }, "interval": {"type": "integer"}, "interval_type": { "type": "string", @@ -110,6 +113,18 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler): ) if "parent" not in data: data["parent"] = None + if data.get("action_id"): + backup_config = self.controller.management.get_backup_config( + data["action_id"] + ) + if backup_config["server_id"]["server_id"] != server_id: + return self.finish_json( + 405, + { + "status": "error", + "error": "Server ID Mismatch", + }, + ) task_id = self.tasks_manager.schedule_job(data) self.controller.management.add_to_audit_log( diff --git a/app/classes/web/routes/api/servers/server/tasks/task/index.py b/app/classes/web/routes/api/servers/server/tasks/task/index.py index dac60762..05c8cee9 100644 --- a/app/classes/web/routes/api/servers/server/tasks/task/index.py +++ b/app/classes/web/routes/api/servers/server/tasks/task/index.py @@ -22,6 +22,9 @@ task_patch_schema = { "action": { "type": "string", }, + "action_id": { + "type": "string", + }, "interval": {"type": "integer"}, "interval_type": { "type": "string", diff --git a/app/classes/web/routes/api/users/index.py b/app/classes/web/routes/api/users/index.py index dbdb1ac0..32ebd283 100644 --- a/app/classes/web/routes/api/users/index.py +++ b/app/classes/web/routes/api/users/index.py @@ -2,6 +2,7 @@ import logging import json from jsonschema import validate from jsonschema.exceptions import ValidationError +from app.classes.shared.translation import Translation from app.classes.models.crafty_permissions import EnumPermissionsCrafty from app.classes.models.roles import Roles, HelperRoles from app.classes.models.users import PUBLIC_USER_ATTRS @@ -54,6 +55,7 @@ class ApiUsersIndexHandler(BaseApiHandler): ) def post(self): + self.translator = Translation(self.helper) new_user_schema = { "type": "object", "properties": { @@ -87,12 +89,17 @@ class ApiUsersIndexHandler(BaseApiHandler): try: validate(data, new_user_schema) except 
ValidationError as e: + err = self.translator.translate( + "validators", + e.schema["error"], + self.controller.users.get_user_lang_by_id(auth_data[4]["user_id"]), + ) return self.finish_json( 400, { "status": "error", "error": "INVALID_JSON_SCHEMA", - "error_data": str(e), + "error_data": f"{str(err)}", }, ) username = data["username"] @@ -153,7 +160,11 @@ class ApiUsersIndexHandler(BaseApiHandler): for role in roles: role = self.controller.roles.get_role(role) - if int(role["manager"]) != int(auth_data[4]["user_id"]) and not superuser: + if ( + str(role.get("manager", "no manager found")) + != str(auth_data[4]["user_id"]) + and not superuser + ): return self.finish_json( 400, {"status": "error", "error": "INVALID_ROLES_CREATE"} ) diff --git a/app/classes/web/routes/api/users/user/api.py b/app/classes/web/routes/api/users/user/api.py index 3891ef83..4baac898 100644 --- a/app/classes/web/routes/api/users/user/api.py +++ b/app/classes/web/routes/api/users/user/api.py @@ -217,7 +217,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler): ) if ( - target_key.user_id != auth_data[4]["user_id"] + str(target_key.user_id) != str(auth_data[4]["user_id"]) and not auth_data[4]["superuser"] ): return self.finish_json( diff --git a/app/classes/web/routes/api/users/user/index.py b/app/classes/web/routes/api/users/user/index.py index 9fa46200..b05e4ac3 100644 --- a/app/classes/web/routes/api/users/user/index.py +++ b/app/classes/web/routes/api/users/user/index.py @@ -132,7 +132,6 @@ class ApiUsersUserIndexHandler(BaseApiHandler): return self.finish_json( 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)} ) - try: validate(data, user_patch_schema) except ValidationError as e: @@ -144,10 +143,8 @@ class ApiUsersUserIndexHandler(BaseApiHandler): "error_data": str(e), }, ) - if user_id == "@me": user_id = user["user_id"] - if ( EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions and str(user["user_id"]) != str(user_id) @@ -215,6 +212,25 @@ class 
ApiUsersUserIndexHandler(BaseApiHandler): return self.finish_json( 400, {"status": "error", "error": "INVALID_ROLES_MODIFY"} ) + user_modify = self.controller.users.get_user_roles_id(user_id) + + for role in data["roles"]: + # Check if user is not a super user and that the exec user is the role + # manager or that the role already exists in the user's list + if not superuser and ( + str( + self.controller.roles.get_role(role).get( + "manager", "no manager found" + ) + ) + != str(auth_data[4]["user_id"]) + and role not in user_modify + ): + for item in user_modify: + print(type(role), type(item)) + return self.finish_json( + 400, {"status": "error", "error": "INVALID_ROLES_MODIFY"} + ) user_obj = HelperUsers.get_user_model(user_id) if "password" in data and str(user["user_id"]) != str(user_id): diff --git a/app/classes/web/tornado_handler.py b/app/classes/web/tornado_handler.py index 6285edfc..0b8140e3 100644 --- a/app/classes/web/tornado_handler.py +++ b/app/classes/web/tornado_handler.py @@ -24,7 +24,6 @@ from app.classes.web.routes.metrics.metrics_handlers import metrics_handlers from app.classes.web.server_handler import ServerHandler from app.classes.web.websocket_handler import WebSocketHandler from app.classes.web.static_handler import CustomStaticHandler -from app.classes.web.upload_handler import UploadHandler from app.classes.web.status_handler import StatusHandler @@ -142,7 +141,6 @@ class Webserver: (r"/panel/(.*)", PanelHandler, handler_args), (r"/server/(.*)", ServerHandler, handler_args), (r"/ws", WebSocketHandler, handler_args), - (r"/upload", UploadHandler, handler_args), (r"/status", StatusHandler, handler_args), # API Routes V2 *api_handlers(handler_args), diff --git a/app/classes/web/upload_handler.py b/app/classes/web/upload_handler.py deleted file mode 100644 index 747fa63b..00000000 --- a/app/classes/web/upload_handler.py +++ /dev/null @@ -1,331 +0,0 @@ -import logging -import os -import time -import urllib.parse -import tornado.web -import 
tornado.options -import tornado.httpserver -from app.classes.models.crafty_permissions import EnumPermissionsCrafty - -from app.classes.models.server_permissions import EnumPermissionsServer -from app.classes.shared.console import Console -from app.classes.shared.helpers import Helpers -from app.classes.shared.main_controller import Controller -from app.classes.web.base_handler import BaseHandler -from app.classes.shared.websocket_manager import WebSocketManager - -logger = logging.getLogger(__name__) - - -@tornado.web.stream_request_body -class UploadHandler(BaseHandler): - # noinspection PyAttributeOutsideInit - def initialize( - self, - helper: Helpers = None, - controller: Controller = None, - tasks_manager=None, - translator=None, - file_helper=None, - ): - self.helper = helper - self.controller = controller - self.tasks_manager = tasks_manager - self.translator = translator - self.file_helper = file_helper - - def prepare(self): - # Class & Function Defination - api_key, _token_data, exec_user = self.current_user - self.upload_type = str(self.request.headers.get("X-Content-Upload-Type")) - - if self.upload_type == "server_import": - superuser = exec_user["superuser"] - if api_key is not None: - superuser = superuser and api_key.full_access - user_id = exec_user["user_id"] - stream_size_value = self.helper.get_setting("stream_size_GB") - - max_streamed_size = (1024 * 1024 * 1024) * stream_size_value - - self.content_len = int(self.request.headers.get("Content-Length")) - if self.content_len > max_streamed_size: - logger.error( - f"User with ID {user_id} attempted to upload a file that" - f" exceeded the max body size." 
- ) - - return self.finish_json( - 413, - { - "status": "error", - "error": "TOO LARGE", - "info": self.helper.translation.translate( - "error", - "fileTooLarge", - self.controller.users.get_user_lang_by_id(user_id), - ), - }, - ) - self.do_upload = True - - if superuser: - exec_user_server_permissions = ( - self.controller.server_perms.list_defined_permissions() - ) - elif api_key is not None: - exec_user_server_permissions = ( - self.controller.crafty_perms.get_api_key_permissions_list(api_key) - ) - else: - exec_user_server_permissions = ( - self.controller.crafty_perms.get_crafty_permissions_list( - exec_user["user_id"] - ) - ) - - if user_id is None: - logger.warning("User ID not found in upload handler call") - Console.warning("User ID not found in upload handler call") - self.do_upload = False - - if ( - EnumPermissionsCrafty.SERVER_CREATION - not in exec_user_server_permissions - and not exec_user["superuser"] - ): - logger.warning( - f"User {user_id} tried to upload a server" " without permissions!" - ) - Console.warning( - f"User {user_id} tried to upload a server" " without permissions!" 
- ) - self.do_upload = False - - path = os.path.join(self.controller.project_root, "import", "upload") - self.helper.ensure_dir_exists(path) - # Delete existing files - if len(os.listdir(path)) > 0: - for item in os.listdir(): - try: - os.remove(os.path.join(path, item)) - except: - logger.debug("Could not delete file on user server upload") - - self.helper.ensure_dir_exists(path) - filename = urllib.parse.unquote( - self.request.headers.get("X-FileName", None) - ) - if not str(filename).endswith(".zip"): - WebSocketManager().broadcast("close_upload_box", "error") - self.finish("error") - full_path = os.path.join(path, filename) - - if self.do_upload: - try: - self.f = open(full_path, "wb") - except Exception as e: - logger.error(f"Upload failed with error: {e}") - self.do_upload = False - # If max_body_size is not set, you cannot upload files > 100MB - self.request.connection.set_max_body_size(max_streamed_size) - - elif self.upload_type == "background": - superuser = exec_user["superuser"] - if api_key is not None: - superuser = superuser and api_key.full_access - user_id = exec_user["user_id"] - stream_size_value = self.helper.get_setting("stream_size_GB") - - max_streamed_size = (1024 * 1024 * 1024) * stream_size_value - - self.content_len = int(self.request.headers.get("Content-Length")) - if self.content_len > max_streamed_size: - logger.error( - f"User with ID {user_id} attempted to upload a file that" - f" exceeded the max body size." 
- ) - - return self.finish_json( - 413, - { - "status": "error", - "error": "TOO LARGE", - "info": self.helper.translation.translate( - "error", - "fileTooLarge", - self.controller.users.get_user_lang_by_id(user_id), - ), - }, - ) - self.do_upload = True - - if not superuser: - return self.finish_json( - 401, - { - "status": "error", - "error": "UNAUTHORIZED ACCESS", - "info": self.helper.translation.translate( - "error", - "superError", - self.controller.users.get_user_lang_by_id(user_id), - ), - }, - ) - if not self.request.headers.get("X-Content-Type", None).startswith( - "image/" - ): - return self.finish_json( - 415, - { - "status": "error", - "error": "TYPE ERROR", - "info": self.helper.translation.translate( - "error", - "fileError", - self.controller.users.get_user_lang_by_id(user_id), - ), - }, - ) - if user_id is None: - logger.warning("User ID not found in upload handler call") - Console.warning("User ID not found in upload handler call") - self.do_upload = False - - path = os.path.join( - self.controller.project_root, - "app/frontend/static/assets/images/auth/custom", - ) - filename = self.request.headers.get("X-FileName", None) - full_path = os.path.join(path, filename) - - if self.do_upload: - try: - self.f = open(full_path, "wb") - except Exception as e: - logger.error(f"Upload failed with error: {e}") - self.do_upload = False - # If max_body_size is not set, you cannot upload files > 100MB - self.request.connection.set_max_body_size(max_streamed_size) - else: - server_id = self.get_argument("server_id", None) - superuser = exec_user["superuser"] - if api_key is not None: - superuser = superuser and api_key.full_access - user_id = exec_user["user_id"] - stream_size_value = self.helper.get_setting("stream_size_GB") - - max_streamed_size = (1024 * 1024 * 1024) * stream_size_value - - self.content_len = int(self.request.headers.get("Content-Length")) - if self.content_len > max_streamed_size: - logger.error( - f"User with ID {user_id} attempted to 
upload a file that" - f" exceeded the max body size." - ) - - return self.finish_json( - 413, - { - "status": "error", - "error": "TOO LARGE", - "info": self.helper.translation.translate( - "error", - "fileTooLarge", - self.controller.users.get_user_lang_by_id(user_id), - ), - }, - ) - self.do_upload = True - - if superuser: - exec_user_server_permissions = ( - self.controller.server_perms.list_defined_permissions() - ) - elif api_key is not None: - exec_user_server_permissions = ( - self.controller.server_perms.get_api_key_permissions_list( - api_key, server_id - ) - ) - else: - exec_user_server_permissions = ( - self.controller.server_perms.get_user_id_permissions_list( - exec_user["user_id"], server_id - ) - ) - - server_id = self.request.headers.get("X-ServerId", None) - if server_id is None: - logger.warning("Server ID not found in upload handler call") - Console.warning("Server ID not found in upload handler call") - self.do_upload = False - - if user_id is None: - logger.warning("User ID not found in upload handler call") - Console.warning("User ID not found in upload handler call") - self.do_upload = False - - if EnumPermissionsServer.FILES not in exec_user_server_permissions: - logger.warning( - f"User {user_id} tried to upload a file to " - f"{server_id} without permissions!" - ) - Console.warning( - f"User {user_id} tried to upload a file to " - f"{server_id} without permissions!" - ) - self.do_upload = False - - path = self.request.headers.get("X-Path", None) - filename = self.request.headers.get("X-FileName", None) - full_path = os.path.join(path, filename) - - if not self.helper.is_subdir( - full_path, - Helpers.get_os_understandable_path( - self.controller.servers.get_server_data_by_id(server_id)["path"] - ), - ): - logger.warning( - f"User {user_id} tried to upload a file to {server_id} " - f"but the path is not inside of the server!" 
- ) - Console.warning( - f"User {user_id} tried to upload a file to {server_id} " - f"but the path is not inside of the server!" - ) - self.do_upload = False - - if self.do_upload: - try: - self.f = open(full_path, "wb") - except Exception as e: - logger.error(f"Upload failed with error: {e}") - self.do_upload = False - # If max_body_size is not set, you cannot upload files > 100MB - self.request.connection.set_max_body_size(max_streamed_size) - - def post(self): - logger.info("Upload completed") - if self.upload_type == "server_files": - files_left = int(self.request.headers.get("X-Files-Left", None)) - else: - files_left = 0 - - if self.do_upload: - time.sleep(5) - if files_left == 0: - WebSocketManager().broadcast("close_upload_box", "success") - self.finish("success") # Nope, I'm sending "success" - self.f.close() - else: - time.sleep(5) - if files_left == 0: - WebSocketManager().broadcast("close_upload_box", "error") - self.finish("error") - - def data_received(self, chunk): - if self.do_upload: - self.f.write(chunk) diff --git a/app/frontend/static/assets/css/crafty.css b/app/frontend/static/assets/css/crafty.css index caf87654..be688657 100644 --- a/app/frontend/static/assets/css/crafty.css +++ b/app/frontend/static/assets/css/crafty.css @@ -12,6 +12,16 @@ nav.sidebar { position: fixed; } +td { + -ms-overflow-style: none; + /* IE and Edge */ + scrollbar-width: none; + /* Firefox */ +} + +td::-webkit-scrollbar { + display: none; +} @media (min-width: 992px) { nav.sidebar { @@ -270,4 +280,8 @@ div.warnings div.wssError a:hover { /**************************************************************/ .dropdown-menu { max-width: 100%; +} + +.hidden-input { + margin-left: -40px; } \ No newline at end of file diff --git a/app/frontend/static/assets/js/shared/upload.js b/app/frontend/static/assets/js/shared/upload.js new file mode 100644 index 00000000..d31d4e23 --- /dev/null +++ b/app/frontend/static/assets/js/shared/upload.js @@ -0,0 +1,208 @@ +async function 
uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) { + if (file == null) { + try { + file = $("#file")[0].files[0]; + } catch { + bootbox.alert("Please select a file first.") + return; + } + + } + const fileId = uuidv4(); + const token = getCookie("_xsrf"); + if (type !== "server_upload") { + document.getElementById("upload_input").innerHTML = '
 
'; + } + + let url = `` + if (type === "server_upload") { + url = `/api/v2/servers/${serverId}/files/upload/`; + } else if (type === "background") { + url = `/api/v2/crafty/admin/upload/` + } else if (type === "import") { + url = `/api/v2/servers/import/upload/` + } + console.log(url) + const chunkSize = 1024 * 1024 * 10; // 10MB + const totalChunks = Math.ceil(file.size / chunkSize); + + const uploadPromises = []; + let errors = []; // Array to store errors + try { + let res = await fetch(url, { + method: 'POST', + headers: { + 'X-XSRFToken': token, + 'chunked': true, + 'fileSize': file.size, + 'type': type, + 'totalChunks': totalChunks, + 'fileName': file.name, + 'location': path, + 'fileId': fileId, + }, + body: null, + }); + + if (!res.ok) { + let errorResponse = await res.json(); + throw new Error(JSON.stringify(errorResponse)); + } + + let responseData = await res.json(); + + if (responseData.status !== "ok") { + throw new Error(JSON.stringify(responseData)); + } + + for (let i = 0; i < totalChunks; i++) { + const start = i * chunkSize; + const end = Math.min(start + chunkSize, file.size); + const chunk = file.slice(start, end); + const chunk_hash = await calculateFileHash(chunk); + + const uploadPromise = fetch(url, { + method: 'POST', + body: chunk, + headers: { + 'Content-Range': `bytes ${start}-${end - 1}/${file.size}`, + 'Content-Length': chunk.size, + 'fileSize': file.size, + 'chunkHash': chunk_hash, + 'chunked': true, + 'type': type, + 'totalChunks': totalChunks, + 'fileName': file.name, + 'location': path, + 'fileId': fileId, + 'chunkId': i, + }, + }) + .then(async response => { + if (!response.ok) { + const errorData = await response.json(); + throw new Error(JSON.stringify(errorData) || 'Unknown error occurred'); + } + return response.json(); // Return the JSON data + }) + .then(data => { + if (data.status !== "completed" && data.status !== "partial") { + throw new Error(data.message || 'Unknown error occurred'); + } + // Update progress bar + const 
progress = (i + 1) / totalChunks * 100; + updateProgressBar(Math.round(progress), type, file_num); + }) + .catch(error => { + errors.push(error); // Store the error + }); + + uploadPromises.push(uploadPromise); + } + + await Promise.all(uploadPromises); + } catch (error) { + errors.push(error); // Store the error + } + + if (errors.length > 0) { + const errorMessage = errors.map(error => JSON.parse(error.message).data.message || 'Unknown error occurred').join('
'); + console.log(errorMessage) + bootbox.alert({ + title: 'Error', + message: errorMessage, + callback: function () { + window.location.reload(); + }, + }); + } else if (type !== "server_upload") { + // All promises resolved successfully + $("#upload_input").html(`
🔒
`); + if (type === "import") { + document.getElementById("lower_half").style.visibility = "visible"; + document.getElementById("lower_half").hidden = false; + } else if (type === "background") { + setTimeout(function () { + location.href = `/panel/custom_login`; + }, 2000); + } + } else { + let caught = false; + let expanded = false; + try { + expanded = document.getElementById(path).classList.contains("clicked"); + } catch { } + + let par_el; + let items; + try { + par_el = document.getElementById(path + "ul"); + items = par_el.children; + } catch (err) { + console.log(err); + caught = true; + par_el = document.getElementById("files-tree"); + items = par_el.children; + } + + let name = file.name; + let full_path = path + '/' + name; + let flag = false; + + for (let item of items) { + if ($(item).attr("data-name") === name) { + flag = true; + } + } + + if (!flag) { + if (caught && !expanded) { + $(par_el).append(`
  • ${name}
  • `); + } else if (expanded) { + $(par_el).append(`
  • ${name}
  • `); + } + setTreeViewContext(); + } + + $(`#upload-progress-bar-${file_num + 1}`).removeClass("progress-bar-striped"); + $(`#upload-progress-bar-${file_num + 1}`).addClass("bg-success"); + $(`#upload-progress-bar-${file_num + 1}`).html(''); + } +} + +async function calculateFileHash(file) { + const arrayBuffer = await file.arrayBuffer(); + const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); + + return hashHex; +} + +function updateProgressBar(progress, type, i) { + if (type !== "server_upload") { + if (progress === 100) { + $(`#upload-progress-bar`).removeClass("progress-bar-striped") + + $(`#upload-progress-bar`).removeClass("progress-bar-animated") + } + $(`#upload-progress-bar`).css('width', progress + '%'); + $(`#upload-progress-bar`).html(progress + '%'); + } else { + if (progress === 100) { + $(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped") + + $(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-animated") + } + $(`#upload-progress-bar-${i + 1}`).css('width', progress + '%'); + $(`#upload-progress-bar-${i + 1}`).html(progress + '%'); + } +} + +function uuidv4() { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) { + const r = Math.random() * 16 | 0, + v = c === 'x' ? r : (r & 0x3 | 0x8); + return v.toString(16); + }); +} \ No newline at end of file diff --git a/app/frontend/templates/panel/custom_login.html b/app/frontend/templates/panel/custom_login.html index 4eb22ca8..84af79e6 100644 --- a/app/frontend/templates/panel/custom_login.html +++ b/app/frontend/templates/panel/custom_login.html @@ -69,7 +69,7 @@
    + onclick="uploadFile('background')" disabled>UPLOAD
    @@ -381,61 +381,6 @@ } img.src = src_path; } - - var file; - function sendFile() { - file = $("#file")[0].files[0] - document.getElementById("upload_input").innerHTML = '
     
    '; - let xmlHttpRequest = new XMLHttpRequest(); - let token = getCookie("_xsrf") - let fileName = file.name - let target = '/upload' - let mimeType = file.type - let size = file.size - let type = 'background' - - xmlHttpRequest.upload.addEventListener('progress', function (e) { - - if (e.loaded <= size) { - var percent = Math.round(e.loaded / size * 100); - $(`#upload-progress-bar`).css('width', percent + '%'); - $(`#upload-progress-bar`).html(percent + '%'); - } - }); - - xmlHttpRequest.open('POST', target, true); - xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType); - xmlHttpRequest.setRequestHeader('X-XSRFToken', token); - xmlHttpRequest.setRequestHeader('X-Content-Length', size); - xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"'); - xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', type); - xmlHttpRequest.setRequestHeader('X-FileName', fileName); - xmlHttpRequest.addEventListener('load', (event) => { - if (event.target.responseText == 'success') { - console.log('Upload for file', file.name, 'was successful!') - document.getElementById("upload_input").innerHTML = '
    ' + fileName + ' 🔒
    '; - setTimeout(function () { - window.location.reload(); - }, 2000); - } - else { - let response_text = JSON.parse(event.target.responseText); - var x = document.querySelector('.bootbox'); - console.log(JSON.parse(event.target.responseText).info) - bootbox.alert({ - message: JSON.parse(event.target.responseText).info, - callback: function () { - window.location.reload(); - } - }); - doUpload = false; - } - }, false); - xmlHttpRequest.addEventListener('error', (e) => { - console.error('Error while uploading file', file.name + '.', 'Event:', e) - }, false); - xmlHttpRequest.send(file); - } - + {% end %} \ No newline at end of file diff --git a/app/frontend/templates/panel/panel_edit_role.html b/app/frontend/templates/panel/panel_edit_role.html index df065bf9..b72d3a2a 100644 --- a/app/frontend/templates/panel/panel_edit_role.html +++ b/app/frontend/templates/panel/panel_edit_role.html @@ -428,10 +428,13 @@ if (responseData.status === "ok") { window.location.href = "/panel/panel_config"; } else { - + let errordata = responseData.error; + if (responseData.error_data){ + errordata = responseData.error + } bootbox.alert({ title: responseData.error, - message: responseData.error_data + message: errordata }); } }); diff --git a/app/frontend/templates/panel/panel_edit_user.html b/app/frontend/templates/panel/panel_edit_user.html index 87631219..fdb5afd8 100644 --- a/app/frontend/templates/panel/panel_edit_user.html +++ b/app/frontend/templates/panel/panel_edit_user.html @@ -122,7 +122,7 @@ data['lang']) }}{% end %} name="lang" form="user_form"> {% for lang in data['languages'] %} {% if not 'incomplete' in lang %} - + {% else %} {% end %} @@ -393,6 +393,7 @@ data['lang']) }}{% end %} } function replacer(key, value) { if (typeof value == "boolean" || key === "email" || key === "permissions" || key === "roles") { + console.log(key) return value } else { console.log(key, value) @@ -433,6 +434,7 @@ data['lang']) }}{% end %} let disabled_flag = false; let roles = null; if 
(superuser || userId != edit_id){ + console.log("ROLES") roles = $('.role_check').map(function() { if ($(this).attr("disabled")){ disabled_flag = true; @@ -457,9 +459,7 @@ data['lang']) }}{% end %} delete formDataObject.username } if (superuser || userId != edit_id){ - if (!disabled_flag){ formDataObject.roles = roles; - } if ($("#permissions").length){ formDataObject.permissions = permissions; } diff --git a/app/frontend/templates/panel/server_backup.html b/app/frontend/templates/panel/server_backup.html index 2a9263ba..86cd9415 100644 --- a/app/frontend/templates/panel/server_backup.html +++ b/app/frontend/templates/panel/server_backup.html @@ -39,208 +39,152 @@ {% include "parts/m_server_controls_list.html %} -
    -
    -
    -
    - {% if data['backing_up'] %} -
    -
    {{ - data['backup_stats']['percent'] }}%
    -
    -

    Backing up {{data['server_stats']['world_size']}}

    - {% end %} - -
    - {% if not data['backing_up'] %} -
    - -
    - {% end %} -
    -
    - {% if data['super_user'] %} - - +
    +
    +
    +

    {{ translate('serverBackups', 'backups', + data['lang']) }}

    + {% if data['user_data']['hints'] %} + {% end %} -
    - -
    - - -
    -
    - - {% if data['backup_config']['compress'] %} - {{ translate('serverBackups', 'compress', data['lang']) }} - {% else %} - {{ - translate('serverBackups', 'compress', data['lang']) }} - {% end %} -
    -
    - - {% if data['backup_config']['shutdown'] %} - {{ translate('serverBackups', 'shutdown', data['lang']) }} - {% else %} - {{ - translate('serverBackups', 'shutdown', data['lang']) }} - {% end %} -
    -
    - - {% if data['backup_config']['before'] %} - {{ - translate('serverBackups', 'before', data['lang']) }} -
    - - {% else %} - {{ - translate('serverBackups', 'before', data['lang']) }} -
    - - {% end %} -
    -
    - - {% if data['backup_config']['after'] %} - {{ - translate('serverBackups', 'after', data['lang']) }} -
    - - {% else %} - {{ - translate('serverBackups', 'after', data['lang']) }} -
    - - {% end %} -
    -
    - -
    - -
    - - -
    -
    - - -

    {{ translate('serverBackups', 'currentBackups', data['lang']) }}

    - - - - - - - - - {% for backup in data['backup_list'] %} - - - - - - {% end %} - - -
    {{ translate('serverBackups', 'options', data['lang']) }}{{ translate('serverBackups', 'path', data['lang']) }}{{ translate('serverBackups', 'size', data['lang']) }}
    - - - {{ translate('serverBackups', 'download', data['lang']) }} - -
    -
    - - -
    {{ backup['path'] }}{{ backup['size'] }}
    - +
    + {% if len(data['backups']) == 0 %} +
    + {{ translate('serverBackups', 'no-backup', data['lang']) }} {{ + translate('serverBackups', 'newBackup',data['lang']) }}. +
    + {% end %} + {% if len(data['backups']) > 0 %} +
    + + + + + + + + + + + + {% for backup in data['backups'] %} + + + + + + + + {% end %} + +
    {{ translate('serverBackups', 'name', + data['lang']) }} {{ translate('serverBackups', 'status', + data['lang']) }} {{ translate('serverBackups', + 'storageLocation', data['lang']) }}{{ translate('serverBackups', + 'maxBackups', data['lang']) }}{{ translate('serverBackups', 'actions', + data['lang']) }}
    +

    {{backup.backup_name}}

    +
    + {% if backup.default %} + {{ translate('serverBackups', 'default', + data['lang']) }} + {% end %} +
    +
    + +
    +
    +

    {{backup.backup_location}}

    +
    +

    {{backup.max_backups}}

    +
    + + {% if not backup.default %} + + {% end %} + +
    +
    +
    + + + + + + + + + {% for backup in data['backups'] %} + + + + + {% end %} + +
    Name + {{ translate('serverBackups', 'edit', data['lang']) + }}
    +

    {{backup.backup_name}}

    +
    +
    + +
    +
    + {% if backup.default %} + {{ translate('serverBackups', 'default', + data['lang']) }} + {% end %} +
    + + {% if not backup.default %} + + {% end %} + +
    +
    + {% end %} +
    -
    -
    -
    -
    -

    {{ translate('serverBackups', 'excludedBackups', - data['lang']) }}

    -
    -
    -
      - {% for item in data['exclusions'] %} -
    • {{item}}
    • -
      - {% end %} -
    -
    @@ -298,7 +242,7 @@ {% block js %} + +{% end %} \ No newline at end of file diff --git a/app/frontend/templates/panel/server_files.html b/app/frontend/templates/panel/server_files.html index 2d99c6bb..d116681b 100644 --- a/app/frontend/templates/panel/server_files.html +++ b/app/frontend/templates/panel/server_files.html @@ -67,7 +67,8 @@ translate('serverFiles', 'download', data['lang']) }} {{ translate('serverFiles', 'delete', data['lang']) }} - {{ + {{ translate('serverFiles', 'delete', data['lang']) }} {{ @@ -156,7 +157,8 @@ right: 35px; } } - .tree-file:hover{ + + .tree-file:hover { cursor: pointer; } @@ -721,105 +723,7 @@ } } - async function sendFile(file, path, serverId, left, i, onProgress) { - let xmlHttpRequest = new XMLHttpRequest(); - let token = getCookie("_xsrf") - let fileName = file.name - let target = '/upload?server_id=' + serverId - let mimeType = file.type - let size = file.size - - xmlHttpRequest.upload.addEventListener('progress', function (e) { - - if (e.loaded <= size) { - var percent = Math.round(e.loaded / size * 100); - $(`#upload-progress-bar-${i + 1}`).css('width', percent + '%'); - $(`#upload-progress-bar-${i + 1}`).html(percent + '%'); - } - }); - - xmlHttpRequest.open('POST', target, true); - xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType); - xmlHttpRequest.setRequestHeader('X-XSRFToken', token); - xmlHttpRequest.setRequestHeader('X-Content-Length', size); - xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"'); - xmlHttpRequest.setRequestHeader('X-Path', path); - xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', 'server_files') - xmlHttpRequest.setRequestHeader('X-Files-Left', left); - xmlHttpRequest.setRequestHeader('X-FileName', fileName); - xmlHttpRequest.setRequestHeader('X-ServerId', serverId); - xmlHttpRequest.upload.addEventListener('progress', (event) => - onProgress(Math.floor(event.loaded / event.total * 100)), false); - 
xmlHttpRequest.addEventListener('load', (event) => { - if (event.target.responseText == 'success') { - console.log('Upload for file', file.name, 'was successful!'); - let caught = false; - try { - if (document.getElementById(path).classList.contains("clicked")) { - var expanded = true; - } - } catch { - var expanded = false; - } - - try { - var par_el = document.getElementById(path + "ul"); - var items = par_el.children; - } catch (err) { - console.log(err) - caught = true; - var par_el = document.getElementById("files-tree"); - var items = par_el.children; - } - let name = file.name; - console.log(par_el) - let full_path = path + '/' + name - let flag = false; - for (var k = 0; k < items.length; ++k) { - if ($(items[k]).attr("data-name") == name) { - flag = true; - } - } - if (!flag) { - if (caught && expanded == false) { - $(par_el).append('
  • ' + name + '
  • '); - } else if (expanded == true) { - $(par_el).append('
  • ' + name + '
  • '); - } - setTreeViewContext(); - } - $(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped"); - $(`#upload-progress-bar-${i + 1}`).addClass("bg-success"); - $(`#upload-progress-bar-${i + 1}`).html('') - } - else { - let response_text = JSON.parse(event.target.responseText); - var x = document.querySelector('.bootbox'); - if (x) { - x.remove() - } - var x = document.querySelector('.modal-content'); - if (x) { - x.remove() - } - console.log(JSON.parse(event.target.responseText).info) - bootbox.alert({ - message: JSON.parse(event.target.responseText).info, - callback: function () { - window.location.reload(); - } - }); - doUpload = false; - } - }, false); - xmlHttpRequest.addEventListener('error', (e) => { - console.error('Error while uploading file', file.name + '.', 'Event:', e) - }, false); - xmlHttpRequest.send(file); - } - let uploadWaitDialog; - let doUpload = true; async function uploadFilesE(event) { path = event.target.parentElement.getAttribute('data-path'); @@ -842,6 +746,9 @@ label: "{{ translate('serverFiles', 'upload', data['lang']) }}", className: "btn-default", callback: async function () { + if ($("#files").get(0).files.length === 0) { + return hideUploadBox(); + } var height = files.files.length * 50; var waitMessage = '

    ' + @@ -858,54 +765,56 @@ }); let nFiles = files.files.length; - for (i = 0; i < nFiles; i++) { - if (!doUpload) { - doUpload = true; - hideUploadBox(); - break; - } + const uploadPromises = []; + for (let i = 0; i < nFiles; i++) { + const file = files.files[i]; const progressHtml = ` -

    - ${files.files[i].name}: -
    -

    - `; +
    + ${file.name}: +
    +

    + `; + $('#upload-progress-bar-parent').append(progressHtml); - await sendFile(files.files[i], path, serverId, nFiles - i - 1, i, (progress) => { + const uploadPromise = uploadFile("server_upload", file, path, i, (progress) => { $(`#upload-progress-bar-${i + 1}`).attr('aria-valuenow', progress) $(`#upload-progress-bar-${i + 1}`).css('width', progress + '%'); }); + uploadPromises.push(uploadPromise); } - hideUploadBox(); - //$('#upload_file').submit(); //.trigger('submit'); + + + await Promise.all(uploadPromises); + setTimeout(() => { + hideUploadBox(); + }, 2000); + } } } }); - - var fileList = document.getElementById("files"); - fileList.addEventListener("change", function (e) { - var list = ""; - let files = Array.from(this.files) - files.forEach(file => { - list += "
  • " + file.name + "
  • " - }) - - document.getElementById("fileList").innerHTML = list; - }, false); }); } + async function calculateFileHash(file) { + const arrayBuffer = await file.arrayBuffer(); + const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); + + return hashHex; + } + function getDirView(event) { let path = event.target.parentElement.getAttribute("data-path"); if (document.getElementById(path).classList.contains('clicked')) { @@ -1211,5 +1120,5 @@ - + {% end %} \ No newline at end of file diff --git a/app/frontend/templates/panel/server_schedule_edit.html b/app/frontend/templates/panel/server_schedule_edit.html index 7b116f7f..50b48b10 100644 --- a/app/frontend/templates/panel/server_schedule_edit.html +++ b/app/frontend/templates/panel/server_schedule_edit.html @@ -79,6 +79,24 @@ +
    @@ -232,7 +250,7 @@ } function replacer(key, value) { - if (key != "start_time" && key != "cron_string" && key != "interval_type") { + if (key != "start_time" && key != "cron_string" && key != "interval_type" && key != "action_id") { if (typeof value == "boolean") { return value } @@ -247,7 +265,7 @@ } } else if (value === "" && key == "start_time"){ return "00:00"; - }else{ + }else { return value; } } @@ -281,6 +299,11 @@ // Format the plain form data as JSON let formDataJsonString = JSON.stringify(formDataObject, replacer); + let data = JSON.parse(formDataJsonString) + if (data["action"] === "backup" && !data["action_id"]){ + return bootbox.alert("Validation Failed") + } + let res = await fetch(`/api/v2/servers/${serverId}/tasks/`, { method: 'POST', headers: { @@ -358,6 +381,14 @@ document.getElementById("ifYes").style.display = "none"; document.getElementById("command_input").required = false; } + if (document.getElementById('action').value == "backup"){ + document.getElementById("ifBackup").style.display = "block"; + document.getElementById("action_id").required = true; + } else { + document.getElementById("ifBackup").style.display = "none"; + document.getElementById("action_id").required = false; + $("#action_id").val(null); + } } function basicAdvanced() { if (document.getElementById('difficulty').value == "advanced") { diff --git a/app/frontend/templates/server/bedrock_wizard.html b/app/frontend/templates/server/bedrock_wizard.html index 86b069f8..7c4789f7 100644 --- a/app/frontend/templates/server/bedrock_wizard.html +++ b/app/frontend/templates/server/bedrock_wizard.html @@ -301,8 +301,8 @@ 'labelZipFile', data['lang']) }}
    -
    @@ -495,61 +495,8 @@ {% end %} {% block js%} + diff --git a/app/migrations/20211120221511_api_keys.py b/app/migrations/20211120221511_api_keys.py index bede2c92..f5dd1e46 100644 --- a/app/migrations/20211120221511_api_keys.py +++ b/app/migrations/20211120221511_api_keys.py @@ -1,10 +1,11 @@ import peewee import datetime +from app.classes.shared.helpers import Helpers def migrate(migrator, database, **kwargs): migrator.add_columns( - "users", valid_tokens_from=peewee.DateTimeField(default=datetime.datetime.now) + "users", valid_tokens_from=peewee.DateTimeField(default=Helpers.get_utc_now) ) migrator.drop_columns("users", ["api_token"]) diff --git a/app/migrations/20240217_rework_servers_uuid_part2.py b/app/migrations/20240217_rework_servers_uuid_part2.py index a4b01c45..010da2e6 100644 --- a/app/migrations/20240217_rework_servers_uuid_part2.py +++ b/app/migrations/20240217_rework_servers_uuid_part2.py @@ -5,13 +5,7 @@ import logging from app.classes.shared.console import Console from app.classes.shared.migration import Migrator, MigrateHistory -from app.classes.models.management import ( - Webhooks, - Schedules, - Backups, -) -from app.classes.models.server_permissions import RoleServers -from app.classes.models.base_model import BaseModel +from app.classes.models.roles import Roles logger = logging.getLogger(__name__) @@ -53,6 +47,78 @@ def migrate(migrator: Migrator, database, **kwargs): table_name = "servers" database = db + # ********************************************************************************** + # Role Servers Class + # ********************************************************************************** + class RoleServers(peewee.Model): + role_id = peewee.ForeignKeyField(Roles, backref="role_server") + server_id = peewee.ForeignKeyField(Servers, backref="role_server") + permissions = peewee.CharField(default="00000000") + + class Meta: + table_name = "role_servers" + primary_key = peewee.CompositeKey("role_id", "server_id") + database = db + + # 
********************************************************************************** + # Webhooks Class + # ********************************************************************************** + class Webhooks(peewee.Model): + id = peewee.AutoField() + server_id = peewee.ForeignKeyField(Servers, backref="webhook_server", null=True) + name = peewee.CharField(default="Custom Webhook", max_length=64) + url = peewee.CharField(default="") + webhook_type = peewee.CharField(default="Custom") + bot_name = peewee.CharField(default="Crafty Controller") + trigger = peewee.CharField(default="server_start,server_stop") + body = peewee.CharField(default="") + color = peewee.CharField(default="#005cd1") + enabled = peewee.BooleanField(default=True) + + class Meta: + table_name = "webhooks" + database = db + + # ********************************************************************************** + # Schedules Class + # ********************************************************************************** + class Schedules(peewee.Model): + schedule_id = peewee.IntegerField(unique=True, primary_key=True) + server_id = peewee.ForeignKeyField(Servers, backref="schedule_server") + enabled = peewee.BooleanField() + action = peewee.CharField() + interval = peewee.IntegerField() + interval_type = peewee.CharField() + start_time = peewee.CharField(null=True) + command = peewee.CharField(null=True) + name = peewee.CharField() + one_time = peewee.BooleanField(default=False) + cron_string = peewee.CharField(default="") + parent = peewee.IntegerField(null=True) + delay = peewee.IntegerField(default=0) + next_run = peewee.CharField(default="") + + class Meta: + table_name = "schedules" + database = db + + # ********************************************************************************** + # Backups Class + # ********************************************************************************** + class Backups(peewee.Model): + excluded_dirs = peewee.CharField(null=True) + max_backups = peewee.IntegerField() 
+ max_backups = peewee.IntegerField() + server_id = peewee.ForeignKeyField(Servers, backref="backups_server") + compress = peewee.BooleanField(default=False) + shutdown = peewee.BooleanField(default=False) + before = peewee.CharField(default="") + after = peewee.CharField(default="") + + class Meta: + table_name = "backups" + database = db + this_migration = MigrateHistory.get_or_none( MigrateHistory.name == "20240217_rework_servers_uuid_part2" ) @@ -70,8 +136,8 @@ def migrate(migrator: Migrator, database, **kwargs): return try: - logger.info("Migrating Data from Int to UUID (Foreign Keys)") - Console.info("Migrating Data from Int to UUID (Foreign Keys)") + logger.debug("Migrating Data from Int to UUID (Foreign Keys)") + Console.debug("Migrating Data from Int to UUID (Foreign Keys)") # Changes on Webhooks Log Table for webhook in Webhooks.select(): @@ -122,8 +188,8 @@ def migrate(migrator: Migrator, database, **kwargs): and RoleServers.server_id == old_server_id ).execute() - logger.info("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS") - Console.info("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS") + logger.debug("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS") + Console.debug("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS") except Exception as ex: logger.error("Error while migrating Data from Int to UUID (Foreign Keys)") @@ -135,16 +201,16 @@ def migrate(migrator: Migrator, database, **kwargs): return try: - logger.info("Migrating Data from Int to UUID (Primary Keys)") - Console.info("Migrating Data from Int to UUID (Primary Keys)") + logger.debug("Migrating Data from Int to UUID (Primary Keys)") + Console.debug("Migrating Data from Int to UUID (Primary Keys)") # Migrating servers from the old id type to the new one for server in Servers.select(): Servers.update(server_id=server.server_uuid).where( Servers.server_id == server.server_id ).execute() - logger.info("Migrating Data from Int to UUID (Primary Keys) : SUCCESS") 
- Console.info("Migrating Data from Int to UUID (Primary Keys) : SUCCESS") + logger.debug("Migrating Data from Int to UUID (Primary Keys) : SUCCESS") + Console.debug("Migrating Data from Int to UUID (Primary Keys) : SUCCESS") except Exception as ex: logger.error("Error while migrating Data from Int to UUID (Primary Keys)") @@ -203,9 +269,81 @@ def rollback(migrator: Migrator, database, **kwargs): table_name = "servers" database = db + # ********************************************************************************** + # Role Servers Class + # ********************************************************************************** + class RoleServers(peewee.Model): + role_id = peewee.ForeignKeyField(Roles, backref="role_server") + server_id = peewee.ForeignKeyField(Servers, backref="role_server") + permissions = peewee.CharField(default="00000000") + + class Meta: + table_name = "role_servers" + primary_key = peewee.CompositeKey("role_id", "server_id") + database = db + + # ********************************************************************************** + # Webhooks Class + # ********************************************************************************** + class Webhooks(peewee.Model): + id = peewee.AutoField() + server_id = peewee.ForeignKeyField(Servers, backref="webhook_server", null=True) + name = peewee.CharField(default="Custom Webhook", max_length=64) + url = peewee.CharField(default="") + webhook_type = peewee.CharField(default="Custom") + bot_name = peewee.CharField(default="Crafty Controller") + trigger = peewee.CharField(default="server_start,server_stop") + body = peewee.CharField(default="") + color = peewee.CharField(default="#005cd1") + enabled = peewee.BooleanField(default=True) + + class Meta: + table_name = "webhooks" + database = db + + # ********************************************************************************** + # Schedules Class + # ********************************************************************************** + class 
Schedules(peewee.Model): + schedule_id = peewee.IntegerField(unique=True, primary_key=True) + server_id = peewee.ForeignKeyField(Servers, backref="schedule_server") + enabled = peewee.BooleanField() + action = peewee.CharField() + interval = peewee.IntegerField() + interval_type = peewee.CharField() + start_time = peewee.CharField(null=True) + command = peewee.CharField(null=True) + name = peewee.CharField() + one_time = peewee.BooleanField(default=False) + cron_string = peewee.CharField(default="") + parent = peewee.IntegerField(null=True) + delay = peewee.IntegerField(default=0) + next_run = peewee.CharField(default="") + + class Meta: + table_name = "schedules" + database = db + + # ********************************************************************************** + # Backups Class + # ********************************************************************************** + class Backups(peewee.Model): + excluded_dirs = peewee.CharField(null=True) + max_backups = peewee.IntegerField() + max_backups = peewee.IntegerField() + server_id = peewee.ForeignKeyField(Servers, backref="backups_server") + compress = peewee.BooleanField(default=False) + shutdown = peewee.BooleanField(default=False) + before = peewee.CharField(default="") + after = peewee.CharField(default="") + + class Meta: + table_name = "backups" + database = db + try: - logger.info("Migrating Data from UUID to Int (Primary Keys)") - Console.info("Migrating Data from UUID to Int (Primary Keys)") + logger.debug("Migrating Data from UUID to Int (Primary Keys)") + Console.debug("Migrating Data from UUID to Int (Primary Keys)") # Migrating servers from the old id type to the new one new_id = 0 for server in Servers.select(): @@ -217,8 +355,8 @@ def rollback(migrator: Migrator, database, **kwargs): Servers.server_id == server.server_id ).execute() - logger.info("Migrating Data from UUID to Int (Primary Keys) : SUCCESS") - Console.info("Migrating Data from UUID to Int (Primary Keys) : SUCCESS") + 
logger.debug("Migrating Data from UUID to Int (Primary Keys) : SUCCESS") + Console.debug("Migrating Data from UUID to Int (Primary Keys) : SUCCESS") except Exception as ex: logger.error("Error while migrating Data from UUID to Int (Primary Keys)") @@ -230,8 +368,8 @@ def rollback(migrator: Migrator, database, **kwargs): return try: - logger.info("Migrating Data from UUID to Int (Foreign Keys)") - Console.info("Migrating Data from UUID to Int (Foreign Keys)") + logger.debug("Migrating Data from UUID to Int (Foreign Keys)") + Console.debug("Migrating Data from UUID to Int (Foreign Keys)") # Changes on Webhooks Log Table for webhook in Webhooks.select(): old_server_id = webhook.server_id_id @@ -281,8 +419,8 @@ def rollback(migrator: Migrator, database, **kwargs): and RoleServers.server_id == old_server_id ).execute() - logger.info("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS") - Console.info("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS") + logger.debug("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS") + Console.debug("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS") except Exception as ex: logger.error("Error while migrating Data from UUID to Int (Foreign Keys)") diff --git a/app/migrations/20240308_multi-backup.py b/app/migrations/20240308_multi-backup.py new file mode 100644 index 00000000..db154ec8 --- /dev/null +++ b/app/migrations/20240308_multi-backup.py @@ -0,0 +1,238 @@ +import os +import datetime +import uuid +import peewee +import logging + + +from app.classes.shared.helpers import Helpers +from app.classes.shared.console import Console +from app.classes.shared.migration import Migrator +from app.classes.shared.file_helpers import FileHelpers + +logger = logging.getLogger(__name__) + + +def migrate(migrator: Migrator, database, **kwargs): + """ + Write your migrations here. 
+ """ + db = database + Console.info("Starting Backups migrations") + Console.info( + "Migrations: Adding columns [backup_id, " + "backup_name, backup_location, enabled, default, action_id, backup_status]" + ) + migrator.add_columns( + "backups", + backup_id=peewee.CharField(default=Helpers.create_uuid), + ) + migrator.add_columns("backups", backup_name=peewee.CharField(default="Default")) + migrator.add_columns("backups", backup_location=peewee.CharField(default="")) + migrator.add_columns("backups", enabled=peewee.BooleanField(default=True)) + migrator.add_columns("backups", default=peewee.BooleanField(default=False)) + migrator.add_columns( + "backups", + status=peewee.CharField(default='{"status": "Standby", "message": ""}'), + ) + migrator.add_columns( + "schedules", action_id=peewee.CharField(null=True, default=None) + ) + + class Servers(peewee.Model): + server_id = peewee.CharField(primary_key=True, default=str(uuid.uuid4())) + created = peewee.DateTimeField(default=datetime.datetime.now) + server_name = peewee.CharField(default="Server", index=True) + path = peewee.CharField(default="") + backup_path = peewee.CharField(default="") + executable = peewee.CharField(default="") + log_path = peewee.CharField(default="") + execution_command = peewee.CharField(default="") + auto_start = peewee.BooleanField(default=0) + auto_start_delay = peewee.IntegerField(default=10) + crash_detection = peewee.BooleanField(default=0) + stop_command = peewee.CharField(default="stop") + executable_update_url = peewee.CharField(default="") + server_ip = peewee.CharField(default="127.0.0.1") + server_port = peewee.IntegerField(default=25565) + logs_delete_after = peewee.IntegerField(default=0) + type = peewee.CharField(default="minecraft-java") + show_status = peewee.BooleanField(default=1) + created_by = peewee.IntegerField(default=-100) + shutdown_timeout = peewee.IntegerField(default=60) + ignored_exits = peewee.CharField(default="0") + + class Meta: + table_name = "servers" + 
database = db + + class Backups(peewee.Model): + backup_id = peewee.CharField(primary_key=True, default=Helpers.create_uuid) + backup_name = peewee.CharField(default="New Backup") + backup_location = peewee.CharField(default="") + excluded_dirs = peewee.CharField(null=True) + max_backups = peewee.IntegerField() + server_id = peewee.ForeignKeyField(Servers, backref="backups_server") + compress = peewee.BooleanField(default=False) + shutdown = peewee.BooleanField(default=False) + before = peewee.CharField(default="") + after = peewee.CharField(default="") + default = peewee.BooleanField(default=False) + status = peewee.CharField(default='{"status": "Standby", "message": ""}') + enabled = peewee.BooleanField(default=True) + + class Meta: + table_name = "backups" + database = db + + class NewBackups(peewee.Model): + backup_id = peewee.CharField(primary_key=True, default=Helpers.create_uuid) + backup_name = peewee.CharField(default="New Backup") + backup_location = peewee.CharField(default="") + excluded_dirs = peewee.CharField(null=True) + max_backups = peewee.IntegerField() + server_id = peewee.ForeignKeyField(Servers, backref="backups_server") + compress = peewee.BooleanField(default=False) + shutdown = peewee.BooleanField(default=False) + before = peewee.CharField(default="") + after = peewee.CharField(default="") + default = peewee.BooleanField(default=False) + status = peewee.CharField(default='{"status": "Standby", "message": ""}') + enabled = peewee.BooleanField(default=True) + + class Meta: + table_name = "new_backups" + database = db + + class Schedules(peewee.Model): + schedule_id = peewee.IntegerField(unique=True, primary_key=True) + server_id = peewee.ForeignKeyField(Servers, backref="schedule_server") + enabled = peewee.BooleanField() + action = peewee.CharField() + interval = peewee.IntegerField() + interval_type = peewee.CharField() + start_time = peewee.CharField(null=True) + command = peewee.CharField(null=True) + action_id = 
peewee.CharField(null=True) + name = peewee.CharField() + one_time = peewee.BooleanField(default=False) + cron_string = peewee.CharField(default="") + parent = peewee.IntegerField(null=True) + delay = peewee.IntegerField(default=0) + next_run = peewee.CharField(default="") + + class Meta: + table_name = "schedules" + database = db + + class NewSchedules(peewee.Model): + schedule_id = peewee.IntegerField(unique=True, primary_key=True) + server_id = peewee.ForeignKeyField(Servers, backref="schedule_server") + enabled = peewee.BooleanField() + action = peewee.CharField() + interval = peewee.IntegerField() + interval_type = peewee.CharField() + start_time = peewee.CharField(null=True) + command = peewee.CharField(null=True) + action_id = peewee.CharField(null=True) + name = peewee.CharField() + one_time = peewee.BooleanField(default=False) + cron_string = peewee.CharField(default="") + parent = peewee.IntegerField(null=True) + delay = peewee.IntegerField(default=0) + next_run = peewee.CharField(default="") + + class Meta: + table_name = "new_schedules" + database = db + + migrator.create_table(NewBackups) + migrator.create_table(NewSchedules) + + migrator.run() + + # Copy data from the existing backups table to the new one + for backup in Backups.select(): + # Fetch the related server entry from the Servers table + server = Servers.get(Servers.server_id == backup.server_id) + Console.info(f"Migrations: Migrating backup for server {server.server_name}") + # Create a new backup entry with data from the + # old backup entry and related server + new_backup = NewBackups.create( + backup_name=f"{server.server_name} Backup", + # Set backup_location equal to backup_path + backup_location=server.backup_path, + excluded_dirs=backup.excluded_dirs, + max_backups=backup.max_backups, + server_id=server.server_id, + compress=backup.compress, + shutdown=backup.shutdown, + before=backup.before, + after=backup.after, + default=True, + enabled=True, + ) + Helpers.ensure_dir_exists( + 
os.path.join(server.backup_path, new_backup.backup_id) + ) + for file in os.listdir(server.backup_path): + if not os.path.isdir(os.path.join(os.path.join(server.backup_path, file))): + FileHelpers.move_file( + os.path.join(server.backup_path, file), + os.path.join(server.backup_path, new_backup.backup_id, file), + ) + + Console.debug("Migrations: Dropping old backup table") + # Drop the existing backups table + migrator.drop_table("backups") + + Console.debug("Migrations: Renaming new_backups to backups") + # Rename the new table to backups + migrator.rename_table("new_backups", "backups") + + Console.debug("Migrations: Dropping backup_path from servers table") + migrator.drop_columns("servers", ["backup_path"]) + + for schedule in Schedules.select(): + action_id = None + if schedule.command == "backup_server": + Console.info( + f"Migrations: Adding backup ID to task with name {schedule.name}" + ) + backup = NewBackups.get(NewBackups.server_id == schedule.server_id) + action_id = backup.backup_id + NewSchedules.create( + schedule_id=schedule.schedule_id, + server_id=schedule.server_id, + enabled=schedule.enabled, + action=schedule.action, + interval=schedule.interval, + interval_type=schedule.interval_type, + start_time=schedule.start_time, + command=schedule.command, + action_id=action_id, + name=schedule.name, + one_time=schedule.one_time, + cron_string=schedule.cron_string, + parent=schedule.parent, + delay=schedule.delay, + next_run=schedule.next_run, + ) + + Console.debug("Migrations: dropping old schedules table") + # Drop the existing backups table + migrator.drop_table("schedules") + + Console.debug("Migrations: renaming new_schedules to schedules") + # Rename the new table to backups + migrator.rename_table("new_schedules", "schedules") + + +def rollback(migrator: Migrator, database, **kwargs): + """ + Write your rollback migrations here. 
+ """ + db = database + + migrator.drop_columns("backups", ["name", "backup_id", "backup_location"]) + migrator.add_columns("servers", backup_path=peewee.CharField(default="")) diff --git a/app/translations/cs_CS.json b/app/translations/cs_CS.json index 839eabb9..0ce6687d 100644 --- a/app/translations/cs_CS.json +++ b/app/translations/cs_CS.json @@ -321,10 +321,12 @@ "serversDesc": "servery, ke kterým má tato role přístup" }, "serverBackups": { + "actions": "Akce", "after": "Spustit příkaz po záloze", "backupAtMidnight": "Automatické zálohování o půlnoci?", "backupNow": "Zálohovat nyní!", "backupTask": "Bylo spuštěno zálohování.", + "backups": "Zálohy serverů", "before": "Spustit příkaz před zálohou", "cancel": "Zrušit", "clickExclude": "Kliknutím vyberete výjimku", @@ -333,21 +335,34 @@ "confirmDelete": "Chcete tuto zálohu odstranit? Tuto akci nelze vrátit zpět.", "confirmRestore": "Jste si jisti, že chcete provést obnovu z této zálohy. Všechny aktuální soubory serveru se změní na stav zálohy a nebude možné je obnovit.", "currentBackups": "Aktuální zálohy", + "default": "Defaultní záloha", + "defaultExplain": "Tuto zálohu Crafty používalo před aktualizací. Nemůžete ji změnit nebo smazat", "delete": "Smazat", "destroyBackup": "Zničit zálohu \" + file_to_del + \"?", "download": "Stáhnout", + "edit": "upravit", + "enabled": "Povoleno", "excludedBackups": "Vyloučené cesty: ", "excludedChoose": "Vyberte cesty, které chcete ze zálohování vyloučit.", "exclusionsTitle": "Vyloučení ze zálohování", + "failed": "Selhalo", "maxBackups": "Maximální počet záloh", "maxBackupsDesc": "Crafty neuloží více než N záloh a odstraní nejstarší (zadejte 0 pro zachování všech).", + "myBackup": "Moje nová záloha", + "name": "Jméno", + "newBackup": "Vytvořit novou zálohu", + "no-backup": "Žádné zálohy. Pro vytvoření nové zálohy zmáčkněte prosím. Vytvořit novou zálohu", "options": "Nastavení", "path": "Cesta", "restore": "Obnovit", "restoring": "Obnovení zálohy. To může chvíli trvat. 
Buďte prosím trpěliví.", + "run": "Nastartovat zálohu", "save": "Uložit", "shutdown": "Vypnout server po dobu zálohování", "size": "Velikost", + "standby": "V pohotovosti", + "status": "Stav", + "storage": "Lokace uložiště", "storageLocation": "Umístění úložiště", "storageLocationDesc": "Kam chcete ukládat zálohy?" }, @@ -512,6 +527,7 @@ }, "serverSchedules": { "action": "Akce", + "actionId": "Vyberte zálohu na které se to má potvrdit!", "areYouSure": "Odstranění naplánované úlohy?", "cancel": "Zrušit", "cannotSee": "Nevidíte všechno?", @@ -672,6 +688,9 @@ "userTheme": "Motiv UI", "uses": "Počet povolených použití (-1==bez omezení)" }, + "validators": { + "passLength": "Heslo je příliš krátké. Minimální délka je 8 znaků" + }, "webhooks": { "areYouSureDel": "Seš si jistý že chceš smazat tento webhook?", "areYouSureRun": "Seš si jistý že chceš otestovat tento webhook?", diff --git a/app/translations/de_DE.json b/app/translations/de_DE.json index 937db853..88c8002e 100644 --- a/app/translations/de_DE.json +++ b/app/translations/de_DE.json @@ -301,10 +301,12 @@ "serversDesc": "Server, auf die Nutzer mit dieser Rolle zugreifen darf" }, "serverBackups": { + "actions": "Aktionen", "after": "Befehl nach dem Backup ausführen", "backupAtMidnight": "Automatisches Backup um 24:00 Uhr?", "backupNow": "Jetzt sichern!", "backupTask": "Ein Backup-Auftrag wurde gestartet.", + "backups": "Server-Backups", "before": "Befehl vor dem Backup ausführen", "cancel": "Abbrechen", "clickExclude": "Auswählen, um Ausnahmen zu markieren", @@ -313,21 +315,34 @@ "confirmDelete": "Möchten Sie diese Backup-Datei löschen? Dies kann nicht rückgängig gemacht werden.", "confirmRestore": "Sicher, dass dieses Backup wiederherstellgestellt werden soll? 
Alle aktuellen Serverdateien werden in den Zustand von diesem Backup versetzt und können nicht wiederhergestellt werden.", "currentBackups": "Aktuelle Backups", + "default": "Standard-Backup", + "defaultExplain": "Das Backup, welches Crafty vor Updates verwendet. Dies kann nicht geändert oder gelöscht werden.", "delete": "Löschen", "destroyBackup": "Backup löschen \" + file_to_del + \"?", "download": "Herunterladen", + "edit": "Bearbeiten", + "enabled": "Aktiviert", "excludedBackups": "Ausgeschlossene Verzeichnisse: ", "excludedChoose": "Verzeichnisse auswählen, die nicht gesichert werden sollen", "exclusionsTitle": "Backup Ausnahmen", + "failed": "Fehlgeschlagen", "maxBackups": "Maximale Backups", "maxBackupsDesc": "Crafty speichert nicht mehr als N Backups, wodurch das älteste gelöscht wird (geben Sie 0 ein, um alle zu behalten)", + "myBackup": "Mein Neues Backup", + "name": "Name", + "newBackup": "Neues Backup erstellen", + "no-backup": "Keine Backups. Um eine neue Backup-Konfiguration zu erstellen, bitte auf 'Neues Backup erstellen' klicken.", "options": "Optionen", "path": "Pfad", "restore": "Wiederherstellen", "restoring": "Backup wiederherstellen. Dies kann eine Weile dauern.", + "run": "Backup erstellen", "save": "Speichern", "shutdown": "Server für die Dauer des Backups stoppen", "size": "Größe", + "standby": "Bereitschaft", + "status": "Status", + "storage": "Speicherort", "storageLocation": "Speicherort", "storageLocationDesc": "Wo wollen Sie die Backups speichern?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Aktion", + "actionId": "Aktion auswählen", "areYouSure": "Geplante Aufgabe löschen?", "cancel": "Abbrechen", "cannotSee": "Nicht alles sichtbar?", @@ -653,6 +669,9 @@ "userTheme": "Design für die Benutzeroberfläche", "uses": "Anzahl der erlaubten Verwendungen (-1==Keine Begrenzung)" }, + "validators": { + "passLength": "Passwort zu kurz. 
Mindestlänge: 8" + }, "webhooks": { "areYouSureDel": "Sind Sie sicher, dass Sie diesen Webhook löschen möchten?", "areYouSureRun": "Sind Sie sicher, dass Sie diesen Webhook testen möchten?", diff --git a/app/translations/en_EN.json b/app/translations/en_EN.json index a9cf127e..7c568c1e 100644 --- a/app/translations/en_EN.json +++ b/app/translations/en_EN.json @@ -298,10 +298,12 @@ "serversDesc": "servers this role is allowed to access" }, "serverBackups": { + "actions": "Actions", "after": "Run command after backup", "backupAtMidnight": "Auto-backup at midnight?", "backupNow": "Backup Now!", "backupTask": "A backup task has been started.", + "backups": "Server Backups", "before": "Run command before backup", "cancel": "Cancel", "clickExclude": "Click to select Exclusions", @@ -310,21 +312,34 @@ "confirmDelete": "Do you want to delete this backup? This cannot be undone.", "confirmRestore": "Are you sure you want to restore from this backup. All current server files will changed to backup state and will be unrecoverable.", "currentBackups": "Current Backups", + "default": "Default Backup", + "defaultExplain": "The backup that Crafty will use before updates. This cannot be changed or deleted.", "delete": "Delete", "destroyBackup": "Destroy backup \" + file_to_del + \"?", "download": "Download", + "edit": "Edit", + "enabled": "Enabled", "excludedBackups": "Excluded Paths: ", "excludedChoose": "Choose the paths you wish to exclude from your backups", "exclusionsTitle": "Backup Exclusions", + "failed": "Failed", "maxBackups": "Max Backups", "maxBackupsDesc": "Crafty will not store more than N backups, deleting the oldest (enter 0 to keep all)", + "myBackup": "My New Backup", + "name": "Name", + "newBackup": "Create New Backup", + "no-backup": "No Backups. To make a new backup configuration please press. New Backup", "options": "Options", "path": "Path", "restore": "Restore", "restoring": "Restoring Backup. This may take a while. 
Please be patient.", + "run": "Run Backup", "save": "Save", "shutdown": "Shutdown server for duration of backup", "size": "Size", + "standby": "Standby", + "status": "Status", + "storage": "Storage Location", "storageLocation": "Storage Location", "storageLocationDesc": "Where do you want to store backups?" }, @@ -489,6 +504,7 @@ }, "serverSchedules": { "action": "Action", + "actionId": "Select Action Child", "areYouSure": "Delete Scheduled Task?", "cancel": "Cancel", "cannotSee": "Not seeing everything?", @@ -651,6 +667,9 @@ "userTheme": "UI Theme", "uses": "Number of uses allowed (-1==No Limit)" }, + "validators": { + "passLength": "Password Too Short. Minimum Length: 8" + }, "webhooks": { "areYouSureDel": "Are you sure you want to delete this webhook?", "areYouSureRun": "Are you sure you want to test this webhook?", diff --git a/app/translations/es_ES.json b/app/translations/es_ES.json index a0a079c5..e0f57869 100644 --- a/app/translations/es_ES.json +++ b/app/translations/es_ES.json @@ -228,7 +228,7 @@ "login": "Iniciar Sesión", "password": "Contraseña", "username": "Usuario", - "viewStatus": "View Public Status Page" + "viewStatus": "Ver página de estado público" }, "notify": { "activityLog": "Registros de actividad", @@ -301,10 +301,12 @@ "serversDesc": "Servidores a los que este grupo puede acceder" }, "serverBackups": { + "actions": "Acciones", "after": "Comando ejecutado después del respaldo", "backupAtMidnight": "¿Copia de seguridad automática a medianoche?", "backupNow": "¡Respalde ahora!", "backupTask": "Se ha iniciado una tarea de copia de seguridad.", + "backups": "Copias de seguridad del servidor", "before": "Comando ejecutado antes del respaldo", "cancel": "Cancelar", "clickExclude": "Click para seleccionar las Exclusiones", @@ -313,21 +315,34 @@ "confirmDelete": "¿Quieres eliminar esta copia de seguridad? Esto no se puede deshacer.", "confirmRestore": "¿Seguro que quiere restaurar desde este respaldo?. 
Todos los archivos del servidor actuales serán cambiados al estado del respaldo y serán irrecuperables.", "currentBackups": "Copias de seguridad actuales", + "default": "Copia de seguridad predeterminada", + "defaultExplain": "La copia de seguridad que Crafty usará antes de actualizar. No se puede cambiar ni eliminar.", "delete": "Eliminar", "destroyBackup": "¿Destruir copia de seguridad \" + file_to_del + \"?", "download": "Descargar", + "edit": "Editar", + "enabled": "Habilitado", "excludedBackups": "Rutas Excluidas: ", "excludedChoose": "Elige las rutas que desea excluir de los respaldos", "exclusionsTitle": "Exclusiones en respaldos.", + "failed": "Fallido", "maxBackups": "Cantidad máxima de respaldos", "maxBackupsDesc": "Crafty no almacenará más de N copias de seguridad, eliminando la más antigua. (Sin límite: 0)", + "myBackup": "Mi Nueva Copia", + "name": "Nombre", + "newBackup": "Crear Nueva Copia de Seguridad", + "no-backup": "No hay copias de seguridad. Para crear una nueva configuración de copias de seguridad, presiona Crear nueva copia", "options": "Opciones", "path": "Ruta", "restore": "Restaurar", "restoring": "Restaurando copia de seguridad. Esto puede tomar un tiempo. Sea paciente.", + "run": "Ejecutar Copia de seguridad", "save": "Guardar", "shutdown": "Apagar el servidor durante la duración de la copia del respaldo.", "size": "Tamaño", + "standby": "En espera", + "status": "Estado", + "storage": "Ubicación del almacenamiento", "storageLocation": "Ubicación de almacenamiento", "storageLocationDesc": "¿Dónde quieres almacenar las copias de seguridad?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Acción", + "actionId": "Seleccionar acción secundaria", "areYouSure": "¿Borrar tarea programada?", "cancel": "Cancelar", "cannotSee": "¿No puede ver todo?", @@ -653,6 +669,9 @@ "userTheme": "Tema de Interfaz", "uses": "Número de usos permitidos. (Sin límite: -1)" }, + "validators": { + "passLength": "Contraseña demasiado corta. 
Longitud mínima: 8" + }, "webhooks": { "areYouSureDel": "¿Estás seguro de que quieres eliminar este webhook?", "areYouSureRun": "¿Estás seguro de que quieres probar este webhook?", diff --git a/app/translations/fr_FR.json b/app/translations/fr_FR.json index 092536ee..db838d2d 100644 --- a/app/translations/fr_FR.json +++ b/app/translations/fr_FR.json @@ -301,10 +301,12 @@ "serversDesc": "Les serveurs auquels ce rôle a accès" }, "serverBackups": { + "actions": "Actions", "after": "Exécuter une commande après la sauvegarde", "backupAtMidnight": "Sauvegarde Automatique à minuit ?", "backupNow": "Sauvegarder Maintenant !", "backupTask": "Une sauvegarde vient de démarrer.", + "backups": "Sauvegarde de Serveur", "before": "Exécuter une commande avant la sauvegarde", "cancel": "Annuler", "clickExclude": "Cliquer pour sélectionner les Exclusions", @@ -313,21 +315,34 @@ "confirmDelete": "Es-tu sûr de vouloir supprimer cette sauvegarde ? Tu ne pourras pas revenir en arrière.", "confirmRestore": "Êtes-vous sûr de vouloir restaurer à partir de cette sauvegarde. Tous les fichiers du serveur actuel passeront à l'état de sauvegarde et seront irrécupérables.", "currentBackups": "Sauvegardes Actuelles", + "default": "Sauvegarde par Défaut", + "defaultExplain": "La sauvegarde que Crafty utilisera avant la mise à jour. 
Cela ne peut être changé ou supprimé.", "delete": "Supprimer", "destroyBackup": "Supprimer la sauvegarde \" + file_to_del + \" ?", "download": "Télécharger", + "edit": "Modifier", + "enabled": "Activé", "excludedBackups": "Dossiers Exclus : ", "excludedChoose": "Choisir les dossiers à exclure de la sauvegarde", "exclusionsTitle": "Exclusions de Sauvegarde", + "failed": "Echec", "maxBackups": "Sauvergardes Max", "maxBackupsDesc": "Crafty ne fera pas plus de N sauvegardes, supprimant les plus anciennes (entrer 0 pour toutes les garder)", + "myBackup": "Ma Nouvelle Sauvegarde", + "name": "Nom", + "newBackup": "Créer une Nouvelle Sauvegarde", + "no-backup": "Aucune Sauvegarde. Pour ajouter une nouvelle configuration de sauvegarde, il faut cliquer sur Créer une Nouvelle Sauvegarde", "options": "Options", "path": "Chemin", "restore": "Restaurer", "restoring": "Restauration de la sauvegarde. Cela peut prendre un peu de temps. S'il vous plaît soyez patient.", + "run": "Lancer la Sauvegarde", "save": "Sauvegarder", "shutdown": "Extinction du serveur pendant la durée de la sauvegarde", "size": "Taille", + "standby": "Attente", + "status": "Statut", + "storage": "Emplacement de la Sauvegarde", "storageLocation": "Emplacement de Sauvegarde", "storageLocationDesc": "Où veux-tu enregister tes sauvegardes ?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Action", + "actionId": "Sélectionner une configuration de sauvegarde", "areYouSure": "Supprimer la Tâche Planifiée ?", "cancel": "Annuler", "cannotSee": "Tu ne peux pas tout voir ?", @@ -653,6 +669,9 @@ "userTheme": "Theme d'Interface Utilisateur", "uses": "Nombre d'utilisation Authorisé (-1 == Illimité)" }, + "validators": { + "passLength": "Mot de passe trop court. 
Longueur minimum : 8" + }, "webhooks": { "areYouSureDel": "Es-tu sûr de vouloir supprimer ce webhook ?", "areYouSureRun": "Es-tu sûr de vouloir tester ce webhook ?", diff --git a/app/translations/he_IL.json b/app/translations/he_IL_incomplete.json similarity index 97% rename from app/translations/he_IL.json rename to app/translations/he_IL_incomplete.json index f3c45e40..ec70de3c 100644 --- a/app/translations/he_IL.json +++ b/app/translations/he_IL_incomplete.json @@ -301,10 +301,12 @@ "serversDesc": "לשרתים מותר לגשת לתפקיד זה" }, "serverBackups": { + "actions": "פעולות", "after": "הרץ פקודה לאחר הגיבוי", "backupAtMidnight": "גיבוי אוטומטי בחצות?", "backupNow": "!גיבוי עכשיו", "backupTask": "החלה משימת גיבוי.", + "backups": "גיבויי שרת", "before": "הרץ פקודה לפני הגיבוי", "cancel": "לבטל", "clickExclude": "לחצו כדי לבחור מה לא יהיה בגיבוי", @@ -313,21 +315,34 @@ "confirmDelete": "האם ברצונכם למחוק את הגיבוי הזה? אי אפשר לבטל את זה.", "confirmRestore": "האם אתם בטוחים שברצונכם לשחזר מגיבוי זה. כל קבצי השרת הנוכחיים ישתנו למצב גיבוי ולא יהיה אפשר לשחזר.", "currentBackups": "גיבויים נוכחיים", + "default": "גיבוי ברירת מחדל", + "defaultExplain": "הגיבוי ש-Crafty ישתמש בו לפני עדכונים. לא ניתן לשנות או למחוק.", "delete": "למחוק", "destroyBackup": "?\" + file_to_del + \" להרוס גיבוי", "download": "הורדה", + "edit": "ערוך", + "enabled": "מופעל", "excludedBackups": "נתיבים שלא נכללו: ", "excludedChoose": "בחרו את הנתיבים שברצונכם לא לכלול בגיבויים", "exclusionsTitle": "אי הכללות גיבוי", + "failed": "נכשל", "maxBackups": "מקסימום גיבויים", "maxBackupsDesc": "גיבויים, ימחק את הישן ביותר (הזן 0 כדי לשמור את כולם) N-קראפטי לא יאחסן יותר מ", + "myBackup": "הגיבוי החדש שלי", + "name": "שם", + "newBackup": "צור גיבוי חדש", + "no-backup": "אין גיבויים. כדי ליצור תצורת גיבוי חדשה אנא לחץ על גיבוי חדש", "options": "אפשרויות", "path": "נתיב", "restore": "לשחזר", "restoring": "שחזור גיבוי. זה עשוי לקחת זמן. 
אנא חכו בסבלנות.", + "run": "הפעל גיבוי", "save": "שמירה", "shutdown": "כיבוי שרת למשך הגיבוי", "size": "גודל", + "standby": "בהמתנה", + "status": "סטטוס", + "storage": "מיקום אחסון", "storageLocation": "מקום איחסון", "storageLocationDesc": "איפו אתם רוצים לאחסן גיבויים?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "פעולה", + "actionId": "בחר פעולה משנית", "areYouSure": "למחוק משימה מתוזמנת?", "cancel": "לבטל", "cannotSee": "לא רואים הכל?", diff --git a/app/translations/humanized_index.json b/app/translations/humanized_index.json new file mode 100644 index 00000000..6f2ea8c7 --- /dev/null +++ b/app/translations/humanized_index.json @@ -0,0 +1,19 @@ +{ + "language": { + "cs_CS": "Čeština", + "de_DE": "Deutsch", + "en_EN": "English (US)", + "es_ES": "Español", + "fr_FR": "Français (France)", + "he_IL": "he_IL", + "it_IT": "Italiano", + "lol_EN": "Lolcatz", + "lv_LV": "Latviešu", + "nl_BE": "nl_BE", + "pl_PL": "Polski", + "th_TH": "ไทย", + "tr_TR": "Türkçe", + "uk_UA": "Українська", + "zh_CN": "中文(中国)" + } +} diff --git a/app/translations/it_IT.json b/app/translations/it_IT.json index 7aec26b3..e2540de0 100644 --- a/app/translations/it_IT.json +++ b/app/translations/it_IT.json @@ -301,10 +301,12 @@ "serversDesc": "Server a cui questo ruolo è consentito l'accesso" }, "serverBackups": { + "actions": "Azioni", "after": "Esegui il comando prima del backup", "backupAtMidnight": "Auto-backup a mezzanotte?", "backupNow": "Effettua il Backup Ora!", "backupTask": "Un'azione di backup è cominciata.", + "backups": "Backup del server", "before": "Esegui il comando dopo il backup", "cancel": "Cancella", "clickExclude": "Clicca per selezionare le esclusioni", @@ -313,21 +315,34 @@ "confirmDelete": "Vuoi eliminare questo backup? Non puoi tornare indietro.", "confirmRestore": "Sei sicuro di voler ripristinare qeusto backup? 
Tutti i file correnti verranno sovrascritti allo stato di backup e saranno irrecuperabili.", "currentBackups": "Backup attuali", + "default": "Backup predefinito", + "defaultExplain": "Il backup che Crafty utilizzerà prima degli aggiornamenti. Non può essere cambiato o eliminato.", "delete": "Elimina", "destroyBackup": "Distruggere il backup \" + file_to_del + \"?", "download": "Scarica", + "edit": "Modifica", + "enabled": "Abilitato", "excludedBackups": "Percorsi esclusi: ", "excludedChoose": "Scegli i percorsi che desideri escludere dai tuoi backups", "exclusionsTitle": "Fai un backup delle esclusioni", + "failed": "Fallito", "maxBackups": "Backup massimi", "maxBackupsDesc": "Crafty non memorizzerà più di N backup, cancellando quelli più vecchi (inserisci 0 per mantenerli tutti)", + "myBackup": "Il mio nuovo backup", + "name": "Nome", + "newBackup": "Crea nuovo backup", + "no-backup": "Nessun backup. Per configurare un nuovo backup clicca Nuovo backup", "options": "Opzioni", "path": "Percorso", "restore": "Ripristina", "restoring": "Ripristinando il backup. Potrebber volerci un momento. Per favore sii paziente.", + "run": "Esegui backup", "save": "Salva", "shutdown": "Arresto del server per la durata del backup", "size": "Dimensioni", + "standby": "Sospeso", + "status": "Stato", + "storage": "Percorso archiviazione", "storageLocation": "Percorso di memorizzazione", "storageLocationDesc": "Dove vuoi memorizzare i backup?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Azione", + "actionId": "Seleziona azione da eseguire", "areYouSure": "Eliminare l'azione programmata?", "cancel": "Cancella", "cannotSee": "Non vedi tutto?", @@ -653,6 +669,9 @@ "userTheme": "Tema IU", "uses": "Numero di usi permessi (-1==Nessun limite)" }, + "validators": { + "passLength": "La password è troppo corta. 
Lunghezza minima: 8" + }, "webhooks": { "areYouSureDel": "Sei sicuro di voler eliminare questo webhook?", "areYouSureRun": "Sei sicuro di voler testare questo webhook?", diff --git a/app/translations/lol_EN.json b/app/translations/lol_EN.json index 282ffb36..862c1c68 100644 --- a/app/translations/lol_EN.json +++ b/app/translations/lol_EN.json @@ -301,10 +301,12 @@ "serversDesc": "SERVRS DIS ROLE IZ ALLOWD 2 ACCES" }, "serverBackups": { + "actions": "DO-STUFFZ", "after": "RUNZ COMMANDZ AFTUR BAKUP", "backupAtMidnight": "AUTO-BAKUP AT MIDDLENIGHTZ?", "backupNow": "BAKUP NOWZ!", "backupTask": "OKAI I GETZ FISH, BAK SOONZ", + "backups": "SERVER BACKUPS", "before": "RUNZ COMMANDZ BEFOUR BAKUP", "cancel": "STAHP", "clickExclude": "CLICK 2 MARK EXCLUSHUNS", @@ -313,21 +315,34 @@ "confirmDelete": "R U SURE U WANTZ ME TO EATZ DIS BAKUP? WIAL BEH LOZT FOREVR (LONGIR THAN KITTEHZ NAPZ)", "confirmRestore": "R U SURE U WANTZ 2 RESTORE FRUM DIS BAKUP. ALL CURRENT SERVR FISHZ WILL BE EATZ AN WILL BE UNRECOVERABLE.", "currentBackups": "CURRENT STASH OV BAKUPS", + "default": "USUAL BACKUP", + "defaultExplain": "DA BACKUP THAT CRAFTY USE BEFORE UPDATES. DIS NO CAN CHANGE OR GO AWAY.", "delete": "MAK GONE", "destroyBackup": "EAT BAKUP \" + file_to_del + \"?", "download": "DOWNLOADZ", + "edit": "MAKE BETTERS", + "enabled": "TURNED ON", "excludedBackups": "EXCLUSHUNS: ", "excludedChoose": "CHOOSE TEH PATHS U WANTS 2 EXCLUDE FRUM UR BAKUPS", "exclusionsTitle": "BAKUP EXCLUSHUNS", + "failed": "NOPE'D", "maxBackups": "MAX BAKUPS", "maxBackupsDesc": "CWAFTY WILL NOT KEEPZ MOAR THAN N BCKUPS, DELETIN TEH MOST OLDZ FURST (ENTR 0 TO BE BIG GREEDY)", + "myBackup": "MAH NEW BACKUP", + "name": "NAMZ", + "newBackup": "MAKEZ NEW BACKUP", + "no-backup": "NO BACKUPS. TO MAKE A NEW BACKUP THINGY PLEASE BOOP. NEW BACKUP", "options": "OPSHUNS", "path": "PETH", "restore": "RESTOR", "restoring": "RESTORIN BAKUP. DIS CUD TAEK WHILE. 
PLZ BE PATIENT.", + "run": "DO BACKUP NOWZ", "save": "DUN", "shutdown": "SLEEPY SERVR WEN MAK BAKAUPZ?", "size": "HOW BIGZ", + "standby": "WAITIN'", + "status": "WHAT'S UP", + "storage": "HIDING SPOT", "storageLocation": "SHINY STASH OV HINGZ", "storageLocationDesc": "WER DO U WANTS 2 STASH BAKUPS?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "ACTSHUN", + "actionId": "PICK ACTION KITTY", "areYouSure": "FORGET 2 DO DIS ????", "cancel": "STAHP", "cannotSee": "CANNY SEE?", @@ -653,6 +669,9 @@ "userTheme": "THEMEZ", "uses": "NUMBER OV USES ALLOWED (-1==NO LIMIT)" }, + "validators": { + "passLength": "PASSWRD TOO SMOL. NEEDZ 8 CATZ PLZ" + }, "webhooks": { "areYouSureDel": "U SURE U WANTZ TO EATZ DIS WEBHOOK?", "areYouSureRun": "U SURE U WANTZ TO TESTZ DIS WEBHOOK?", diff --git a/app/translations/lv_LV.json b/app/translations/lv_LV.json index 6e1f1397..0d454b8e 100644 --- a/app/translations/lv_LV.json +++ b/app/translations/lv_LV.json @@ -302,10 +302,12 @@ "serversDesc": "serveri, kuriem šai lomai ir atļauta piekļuve" }, "serverBackups": { + "actions": "Darbības", "after": "Palaist komandu pēc dublējuma", "backupAtMidnight": "Automātiski dublēt pusnaktī?", "backupNow": "Dublēt Tagad!", "backupTask": "Dublējuma uzdevums ticis startēts.", + "backups": "Servera Dublējumi", "before": "Palaist komandu pirms dublējuma", "cancel": "Atcelt", "clickExclude": "Nospied lai izvēlētos Izņēmumus", @@ -314,21 +316,34 @@ "confirmDelete": "Vai vēlaties izdzēst šo dublējumu? Šo nevar atdarīt.", "confirmRestore": "Vai tiešām vēlieties atjaunot no šī dublējuma. Visas esošās datnes tiks atgrieztas uz dublējuma stāvokli un būs neatgriežamas.", "currentBackups": "Pašreizējie Dublējumi", + "default": "Noklusētais Dublējums", + "defaultExplain": "Dublējums ko Crafty izmanto pirms atjaunināšanas. 
To nevar mainīt vai izdzēst.", "delete": "Dzēst", "destroyBackup": "Iznīcināt dublējumu \" + file_to_del + \"?", "download": "Lejupielādēt", + "edit": "Rediģēt", + "enabled": "Iespējots", "excludedBackups": "Izņēmuma Ceļi: ", "excludedChoose": "Izvēlies ceļus, kurus tu vēlies izņemt no saviem dublējumiem", "exclusionsTitle": "Dublējuma Izņēmumi", + "failed": "Neizdevās", "maxBackups": "Maks. Dublējumi", "maxBackupsDesc": "Crafty nesaglabās vairāk nekā N dublējumus, dzēšot vecākaos (ievadi 0 lai saglabātu visus)", + "myBackup": "Mans Jaunais Dublējums", + "name": "Nosaukums", + "newBackup": "Izveidot Jaunu Dublējumu", + "no-backup": "Nav Dublējumu. Lai izveidotu dublējuma konfigurāciju, nospied Izveidot Jaunu Dublējumu", "options": "Opcijas", "path": "Ceļš", "restore": "Atjaunot", "restoring": "Atjauno dublējumu. Tas var aizņemt kādi laiku. Esiet pacietīgs.", + "run": "Veikt Dublējumu", "save": "Saglabāt", "shutdown": "Apturēt serveri dublējumkopijas laikā", "size": "Lielums", + "standby": "Gaidstāve", + "status": "Statuss", + "storage": "Glabātavas Vieta", "storageLocation": "Krātuves Vieta", "storageLocationDesc": "Kur jūs vēlaties saglabāt dublējumus?" }, @@ -493,6 +508,7 @@ }, "serverSchedules": { "action": "Darbība", + "actionId": "Izvēlēties apakšdarbību", "areYouSure": "Dzēst Ieplānoto Uzdevumu?", "cancel": "Atcelt", "cannotSee": "Neredziet visu?", @@ -654,6 +670,9 @@ "userTheme": "UI Tēma", "uses": "Dauzums, cik reizes lietot (-1==Bez Limita)" }, + "validators": { + "passLength": "Parole pārāk īsa. 
Minimālais Garums: 8" + }, "webhooks": { "areYouSureDel": "Vai tiešām vēlies noņemt šo webhook?", "areYouSureRun": "Vai tiešām vēlies testēt šo webhook?", diff --git a/app/translations/nl_BE.json b/app/translations/nl_BE.json index 0abef0b4..ff531cfa 100644 --- a/app/translations/nl_BE.json +++ b/app/translations/nl_BE.json @@ -301,10 +301,12 @@ "serversDesc": "servers waar deze rol toegang toe heeft" }, "serverBackups": { + "actions": "Acties", "after": "Voer opdracht uit na back-up", "backupAtMidnight": "Automatische back-up maken om middernacht?", "backupNow": "Nu een back-up maken!", "backupTask": "Er is een back-uptaak ​​gestart.", + "backups": "Serverbackups", "before": "Voer opdracht uit vóór back-up", "cancel": "Annuleren", "clickExclude": "Klik om Uitsluitingen te selecteren", @@ -313,21 +315,34 @@ "confirmDelete": "Wil je deze back-up verwijderen? Dit kan niet ongedaan gemaakt worden.", "confirmRestore": "Bent u zeker dat u wilt herstellen vanaf deze backup. Alle huidige server bestanden zullen worden veranderd naar de backup status en zullen niet meer hersteld kunnen worden.", "currentBackups": "Huidige back-ups", + "default": "Standaardbackup", + "defaultExplain": "De backup die Crafty gebruikt vóór updates. Deze kan niet worden gewijzigd of verwijderd.", "delete": "Verwijderen", "destroyBackup": "Back-up vernietigen \" + file_to_del + \"?", "download": "Downloaden", + "edit": "Bewerken", + "enabled": "Ingeschakeld", "excludedBackups": "Uitgesloten paden: ", "excludedChoose": "Kies de paden die u wilt uitsluiten van uw back-ups", "exclusionsTitle": "Uitsluitingen voor back-ups", + "failed": "Mislukt", "maxBackups": "Max Back-ups", "maxBackupsDesc": "Crafty zal niet meer dan N back-ups opslaan, waarbij de oudste wordt verwijderd (voer 0 in om ze allemaal te bewaren)", + "myBackup": "Nieuwe backup", + "name": "Naam", + "newBackup": "Nieuwe backup maken", + "no-backup": "Geen backups. 
Druk op 'Nieuwe backup' om een nieuwe backupconfiguratie te maken.", "options": "Opties", "path": "Pad", "restore": "Herstellen", "restoring": "Back-up herstellen. Dit kan een tijdje duren. Even geduld alstublieft.", + "run": "Backup uitvoeren", "save": "Opslaan", "shutdown": "Sluit de server af voor de duur van de backup", "size": "Grootte", + "standby": "Standby", + "status": "Status", + "storage": "Opslaglocatie", "storageLocation": "Opslaglocatie", "storageLocationDesc": "Waar wil je back-ups opslaan?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Actie", + "actionId": "Selecteer onderliggende actie", "areYouSure": "Verwijder Geplande Taak?", "cancel": "Annuleren", "cannotSee": "Ziet u niet alles?", @@ -653,6 +669,9 @@ "userTheme": "UI-thema", "uses": "Aantal toegestane gebruiken (-1==Geen Limiet)" }, + "validators": { + "passLength": "Wachtwoord te kort. Minimumlengte: 8 tekens" + }, "webhooks": { "areYouSureDel": "Weet u zeker dat u deze webhook wilt verwijderen?", "areYouSureRun": "Weet u zeker dat u deze webhook wilt testen?", diff --git a/app/translations/pl_PL.json b/app/translations/pl_PL.json index 7385db3f..0e44c87f 100644 --- a/app/translations/pl_PL.json +++ b/app/translations/pl_PL.json @@ -301,10 +301,12 @@ "serversDesc": "Serwery które mają tą role mają dostęp" }, "serverBackups": { + "actions": "Akcje", "after": "Wykonaj tę komendę po backupie", "backupAtMidnight": "Auto-backup o północy?", "backupNow": "Backup Teraz!", "backupTask": "Backup został rozpoczęty.", + "backups": "Kopie zapasowe serwera", "before": "Wykonaj tę komendę przed backupem", "cancel": "Anuluj", "clickExclude": "Kliknij aby zaznaczyć wyjątki", @@ -313,21 +315,34 @@ "confirmDelete": "Czy chcesz usunąć ten backup? Nie można tego cofnąć.", "confirmRestore": "Czy jesteś pewien że chcesz przywrócić z tego backupu. 
Wszystkie pliki powrócą do stanu z backupu.", "currentBackups": "Backupy Teraz", + "default": "Podstawowa kopia zapasowa", + "defaultExplain": "Kopia zapasowa przed jakimikolwiek zmianami. Nie można jej usunąć ani edytować.", "delete": "Usuń", "destroyBackup": "Zniszcz Backup \" + file_to_del + \"?", "download": "Pobierz", + "edit": "Edytuj", + "enabled": "Włączony", "excludedBackups": "Wykluczone ścieżki: ", "excludedChoose": "Wybierz ścieżki do wykluczenia z backupu", "exclusionsTitle": "Wykluczenia backupu", + "failed": "Nieudany!", "maxBackups": "Maks. Backupów", "maxBackupsDesc": "Crafty nie będzie zbierał więcej niż X backupów, zacznie usuwać od nadstarszych (wpisz 0, aby zatrzymać nieskończoną ilość)", + "myBackup": "Nowa kopia zapasowa", + "name": "Nazwa", + "newBackup": "Nowa kopia zapasowa", + "no-backup": "Brak kopii zapasowych. Aby skonfigurować kopię zapasową kliknij na", "options": "Opcje", "path": "Nazwa pliku", "restore": "Przywróć", "restoring": "Przywracanie backupu. To trochę zajmie. Bądź cierpliwy.", + "run": "Wykonaj kopię zapasową", "save": "Zapisz", "shutdown": "Wyłącz serwer na czas backupu", "size": "Rozmiar", + "standby": "Gotowy", + "status": "Status", + "storage": "Lokalizacja kopii zapasowych", "storageLocation": "Ścieżka zapisywania", "storageLocationDesc": "Gdzie chcesz trzymać backupy?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Akcja", + "actionId": "Zaznacz zadanie podwładne", "areYouSure": "Usuń zaplanowane (zadanie)?", "cancel": "Anuluj", "cannotSee": "Nie widzisz wszystkiego?", @@ -652,6 +668,9 @@ "userTheme": "Wygląd interfejsu", "uses": "Ilość użyć (-1==Bez limitu)" }, + "validators": { + "passLength": "Hasło jest zbyt krótkie. Hasło musi posiadać minimum 8 znaków." 
+ }, "webhooks": { "areYouSureDel": "Usunąć ten webhook?", "areYouSureRun": "Przetestować ten webhook?", diff --git a/app/translations/th_TH.json b/app/translations/th_TH.json index 12c24fa7..47fa3c01 100644 --- a/app/translations/th_TH.json +++ b/app/translations/th_TH.json @@ -301,10 +301,12 @@ "serversDesc": "เซิร์ฟเวอร์ที่บทบาทนี้ได้รับอนุญาตให้เข้าถึง" }, "serverBackups": { + "actions": "คำสั่งด่วน", "after": "ส่งคำสั่งหลังการสำรองข้อมูล", "backupAtMidnight": "คุณต้องการสำรองข้อมูลอัตโนมัติตอนเที่ยงคืนหรือไม่?", "backupNow": "สำรองข้อมูลตอนนี้!", "backupTask": "เริ่มการสำรองข้อมูลแล้ว", + "backups": "ข้อมูลสำรองเซิร์ฟเวอร์", "before": "ส่งคำสั่งก่อนการสำรองข้อมูล", "cancel": "ยกเลิก", "clickExclude": "คลิกเพื่อเลือกการยกเว้น", @@ -313,21 +315,34 @@ "confirmDelete": "คุณต้องการลบข้อมูลสำรองนี้หรือไม่ สิ่งนี้ไม่สามารถยกเลิกได้", "confirmRestore": "คุณแน่ใจหรือไม่ว่าต้องการกู้คืนจากข้อมูลสำรองนี้ ไฟล์เซิร์ฟเวอร์ปัจจุบันทั้งหมดจะเปลี่ยนเป็นแบบสำรองและจะไม่สามารถกู้คืนได้", "currentBackups": "ไฟล์สำรองข้อมูลปัจจุบัน", + "default": "ข้อมูลสำรองเริ่มต้น", + "defaultExplain": "ข้อมูลสำรองที่ Crafty จะใช้ก่อนการอัพเดต สิ่งนี้ไม่สามารถเปลี่ยนแปลงหรือลบได้", "delete": "ลบ", "destroyBackup": "คุณต้องการทำลายข้อมูลสำรอง \" + file_to_del + \"หรือไม่", "download": "ดาวน์โหลด", + "edit": "แก้ไข", + "enabled": "เปิดใช้งาน", "excludedBackups": "เส้นทางที่ยกเว้น: ", "excludedChoose": "เลือกเส้นทางที่คุณต้องการยกเว้นจากการสำรองข้อมูลของคุณ", "exclusionsTitle": "ข้อยกเว้นการสำรองข้อมูล", + "failed": "ล้มเหลว", "maxBackups": "ต้องการเก็บข้อมูลสำรองกี่ครั้ง?", "maxBackupsDesc": "Crafty จะไม่เก็บข้อมูลสำรองมากกว่า N รายการ โดยจะลบข้อมูลสำรองที่เก่าที่สุด (ป้อน 0 เพื่อเก็บทั้งหมด)", + "myBackup": "ข้อมูลสำรองใหม่ของฉัน", + "name": "ชื่อ", + "newBackup": "สร้างข้อมูลสำรองใหม่", + "no-backup": "ไม่มีการสำรองข้อมูล หากต้องการตั้งค่าการสำรองข้อมูลใหม่ กรุณากด สร้างข้อมูลสำรองใหม่", "options": "ตัวเลือก", "path": "เส้นทาง", "restore": "คืนค่า", "restoring": "กำลังกู้คืนข้อมูลสำรอง 
การดำเนินการนี้อาจใช้เวลาสักครู่ กรุณาอดทนรออย่างใจเย็น", + "run": "เริ่มทำงานไฟล์สำรอง", "save": "บันทึก", "shutdown": "ปิดเซิร์ฟเวอร์ตามระยะเวลาของการสำรองข้อมูล", "size": "ขนาด", + "standby": "พร้อมใช้งาน", + "status": "สถานะ", + "storage": "พื้นที่จัดเก็บข้อมูล", "storageLocation": "สถานที่จัดเก็บ", "storageLocationDesc": "คุณต้องการสำรองข้อมูลไว้ที่ไหน?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "การกระทำ", + "actionId": "เลือกลูกของการกระทำ", "areYouSure": "ลบงานที่กำหนดเวลาไว้?", "cancel": "ยกเลิก", "cannotSee": "ไม่เห็นอะไรเลยใช่ใหม?", @@ -652,6 +668,9 @@ "userTheme": "ธีม UI", "uses": "จำนวนการใช้งานที่อนุญาต (-1==ไม่มีขีดจำกัด)" }, + "validators": { + "passLength": "รหัสผ่านสั้นเกินไป จำนวนตัวอักขระขั้นต่ำ: 8" + }, "webhooks": { "areYouSureDel": "คุณแน่ใจหรือไม่ว่าต้องการลบ Webhook นี้?", "areYouSureRun": "คุณแน่ใจหรือไม่ว่าต้องการทดสอบ Webhook นี้?", diff --git a/app/translations/tr_TR.json b/app/translations/tr_TR.json index 709f9e43..9ad1ce93 100644 --- a/app/translations/tr_TR.json +++ b/app/translations/tr_TR.json @@ -301,10 +301,12 @@ "serversDesc": "bu rolün erişmesine izin verilen sunucular" }, "serverBackups": { + "actions": "Eylemler", "after": "Yedeklemeden sonra bir komut çalıştır", "backupAtMidnight": "Gece yarısında otomatik yedekleme yapılsın mı?", "backupNow": "Backup Now!", "backupTask": "Bir yedekleme görevi başlatıldı.", + "backups": "Sunucu Yedekleri", "before": "Yedeklemeden önce bir komut çalıştır", "cancel": "İptal", "clickExclude": "İstisnaları seçmek için tıklayın", @@ -313,21 +315,34 @@ "confirmDelete": "Bu yedeği silmek istediğine emin misin? Bu geri alınamaz.", "confirmRestore": "Bu yedeği geri yüklemek istediğinizden emin misiniz? Tüm mevcut sunucu dosyaları yedeklemedeki durumuna dönecek ve kurtarılamayacaktır.", "currentBackups": "Mevcut Yedekmeler", + "default": "Varsayılan Yedek", + "defaultExplain": "Crafty'nin güncellemelerden önce kullanacağı yedek. 
Bu değiştirilemez ya da silinemez.", "delete": "Sil", "destroyBackup": "\" + file_to_del + \" yedeklemesi yok edilsin mi?", "download": "İndir", + "edit": "Düzenle", + "enabled": "Etkin", "excludedBackups": "Hariç Tutulan Yollar: ", "excludedChoose": "Yedeklemelerinizden hariç tutmak istediğiniz yolları seçin", "exclusionsTitle": "Yedekleme İstisnaları", + "failed": "Başarısız", "maxBackups": "Maksimum Yedekleme Sayısı", "maxBackupsDesc": "Crafty N yedeklemeden fazlasını saklamayacak, en eskisini silecektir (tümünü saklamak için 0 girin)", + "myBackup": "Benim Yeni Yedeğim", + "name": "Ad", + "newBackup": "Yeni Yedek Oluştur", + "no-backup": "Mevcut yedek bulunmuyor. Yeni bir yedek oluşturmak için lütfen Yeni Yedek Oluştur tuşuna basınız.", "options": "Seçenekler", "path": "Dosya Yolu", "restore": "Geri Yükleme", "restoring": "Yedekleme geri yükleniyor. Bu biraz zaman alabilir. Lütfen sabırlı olun.", + "run": "Yedeği Çalıştır", "save": "Kaydet", "shutdown": "Yedekleme süresince sunucuyu kapat", "size": "Boyut", + "standby": "Beklemede", + "status": "Durum", + "storage": "Depolama Konumu", "storageLocation": "Depolama Konumu", "storageLocationDesc": "Yedekmeleri nerede saklamak istiyorsunuz?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Eylem", + "actionId": "Alt Eylem Seçiniz", "areYouSure": "Zamanlanmış Görev Silinsin mi?", "cancel": "İptal", "cannotSee": "Her şeyi göremiyor musun?", @@ -652,6 +668,9 @@ "userTheme": "UI Teması", "uses": "İzin verilen kullanım sayısı (-1==Sınır Yok)" }, + "validators": { + "passLength": "Şifre çok kısa. Şifre en az 8 karakter olmalı." 
+ }, "webhooks": { "areYouSureDel": "Bu webhooku silmek istediğinizden emin misiniz?", "areYouSureRun": "Bu webhooku test etmek istediğinizden emin misiniz?", diff --git a/app/translations/uk_UA.json b/app/translations/uk_UA.json index 792b6f56..924d1050 100644 --- a/app/translations/uk_UA.json +++ b/app/translations/uk_UA.json @@ -301,10 +301,12 @@ "serversDesc": "сервери які доступні для цієї ролі" }, "serverBackups": { + "actions": "Дії", "after": "Виконати команду після завершення бекапу", "backupAtMidnight": "Авто-бекап опівночі?", "backupNow": "Запустити бекап!", "backupTask": "Бекап запущено.", + "backups": "Бекапи сервера", "before": "Виконати команду перед початком бекапу", "cancel": "Відмінити", "clickExclude": "Додати винятки", @@ -313,21 +315,34 @@ "confirmDelete": "Ви дійсно бажаєте видати бекап? Ця дія незворотня.", "confirmRestore": "Ви впевненні що бажаєте відновити даний бекап? При відновленні сервер буде вимкнуто та відновлено за допомогою даного бекапу, минулі файли будуть втрачені!", "currentBackups": "Поточні бекапи", + "default": "Бекап за замовчуванням", + "defaultExplain": "Бекап, який Crafty використовуватиме перед оновленнями. Це не можна змінити чи видалити.", "delete": "Видалити", "destroyBackup": "Видалити бекап \" + file_to_del + \"?", "download": "Завантажити", + "edit": "Редагувати", + "enabled": "Увімкненно", "excludedBackups": "Винятки: ", "excludedChoose": "Виберіть папки які бажаєте додати у винятки", "exclusionsTitle": "Бекап винятки", + "failed": "Помилка", "maxBackups": "Максимум бекапів", "maxBackupsDesc": "Crafty не зможе зберігати більше ніж N бекапів, видалятиме старі (введіть 0 для зберігання усіх бекапів)", + "myBackup": "Мій новий бекап", + "name": "Назва", + "newBackup": "Створити новий бекап", + "no-backup": "Немає бекапів. Щоб створити бекап, натисніть кнопку Створити новий бекап", "options": "Налаштування", "path": "Шлях", "restore": "Відновити", "restoring": "Відновлення бекапу. Це може зайняти деякий час. 
Будь ласка будьте терплячі.", + "run": "Запустити бекап", "save": "Зберегти", "shutdown": "Вимикати сервер на час бекапу", "size": "Розмір", + "standby": "Очікування", + "status": "Статус", + "storage": "Місце збереження", "storageLocation": "Місце зберігання", "storageLocationDesc": "Де ви бажаєте зберігати бекапи?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "Дія", + "actionId": "Вибрати дочірню дію", "areYouSure": "Видалити заплановане завдання?", "cancel": "Відмінити", "cannotSee": "Нічого не бачите?", @@ -652,6 +668,9 @@ "userTheme": "Тема інтерфейсу", "uses": "Дозволена кількість використань(-1==Без ліміту)" }, + "validators": { + "passLength": "Пароль, надто короткий. Мінімальна довжина: 8 символів" + }, "webhooks": { "areYouSureDel": "Ви впевнені, що хочете видалити цей Вебхук?", "areYouSureRun": "Ви впевнені, що хочете перевірити цей Вебхук?", diff --git a/app/translations/zh_CN.json b/app/translations/zh_CN.json index 95e71d70..359e4e13 100644 --- a/app/translations/zh_CN.json +++ b/app/translations/zh_CN.json @@ -301,10 +301,12 @@ "serversDesc": "此角色允许访问的服务器" }, "serverBackups": { + "actions": "操作", "after": "备份后运行指令", "backupAtMidnight": "午夜自动备份?", "backupNow": "现在备份!", "backupTask": "一个备份任务已开始。", + "backups": "服务器备份", "before": "备份前运行指令", "cancel": "取消", "clickExclude": "点击来选择排除项", @@ -313,21 +315,34 @@ "confirmDelete": "您想要删除这个备份吗?此操作不能撤销。", "confirmRestore": "你确定要从此备份恢复吗?所有现存的服务器文件将更改到备份时的状态,并且无法撤销。", "currentBackups": "现有备份", + "default": "默认备份", + "defaultExplain": "Crafty 在更新前会使用的备份。此项目不能被更改或删除。", "delete": "删除", "destroyBackup": "删除备份 \" + file_to_del + \"?", "download": "下载", + "edit": "编辑", + "enabled": "已启用", "excludedBackups": "排除的路径:", "excludedChoose": "选择您希望从您的备份中排除的路径", "exclusionsTitle": "备份排除项", + "failed": "失败", "maxBackups": "最大备份数量", "maxBackupsDesc": "Crafty 不会存储多于 N 个备份,并且会删除最旧的备份(输入 0 以保留所有备份)", + "myBackup": "我的新备份", + "name": "名称", + "newBackup": "创建新备份", + "no-backup": "暂无备份。请点击“新备份”以创建一个新的备份配置。", "options": "选项", 
"path": "路径", "restore": "恢复", "restoring": "正在恢复备份。这需要一点时间。请耐心等待。", + "run": "运行备份", "save": "保存", "shutdown": "在备份期间停止服务器", "size": "大小", + "standby": "等候", + "status": "状态", + "storage": "存储位置", "storageLocation": "存储位置", "storageLocationDesc": "您想要在哪里存储备份?" }, @@ -492,6 +507,7 @@ }, "serverSchedules": { "action": "操作", + "actionId": "选择子操作", "areYouSure": "删除计划任务?", "cancel": "取消", "cannotSee": "什么都看不到?", @@ -653,6 +669,9 @@ "userTheme": "UI 主题", "uses": "使用次数限制(-1==无限制)" }, + "validators": { + "passLength": "密码过短。最短长度:8" + }, "webhooks": { "areYouSureDel": "您确定要删除此 webhook 吗?", "areYouSureRun": "您确定要测试此 webhook 吗?", diff --git a/requirements.txt b/requirements.txt index ed0f7698..2ca0ff8b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,9 +13,9 @@ psutil==5.9.5 pyOpenSSL==24.0.0 pyjwt==2.8.0 PyYAML==6.0.1 -requests==2.31.0 +requests==2.32.0 termcolor==1.1 -tornado==6.3.3 +tornado==6.4.1 tzlocal==5.1 jsonschema==4.19.1 orjson==3.9.15