diff --git a/.gitlab/scripts/lang_sort.sh b/.gitlab/scripts/lang_sort.sh
index 5710ce1b..9a1e1cf0 100644
--- a/.gitlab/scripts/lang_sort.sh
+++ b/.gitlab/scripts/lang_sort.sh
@@ -56,8 +56,8 @@ get_keys "${DIR}/en_EN.json" | sort > "${ref_keys}"
# Iterate over each .json file in the directory
for file in "${DIR}"/*.json; do
- # Check if file is a regular file and not en_EN.json, and does not contain "_incomplete" in its name
- if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && ! "${file}" =~ _incomplete ]]; then
+    # Check if file is a regular file, is not en_EN.json or humanized_index.json, and does not contain "_incomplete" in its name
+ if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && "${file}" != "${DIR}/humanized_index.json" && ! "${file}" =~ _incomplete ]]; then
# Get keys and subkeys from the current file
current_keys=$(mktemp)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 04f207ea..cc3695db 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,17 +2,34 @@
## --- [4.4.1] - 2024/TBD
### New features
TBD
+### Refactor
+- Backups | Allow multiple backup configurations ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/711))
+- UploadAPI | Use Crafty's JWT authentication for file uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Splice files on the frontend to allow chunked uploads as well as bulk uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Enhance upload progress feedback on all upload pages ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Consolidate and improve speed on uploads, supporting 100mb+ uploads through Cloudflare(Free) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
### Bug fixes
- Fix zip imports so the root dir selection is functional ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/764))
- Fix bug where full access gives minimal access ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/768))
+- Bump tornado & requests for sec advisories ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/774))
- Ensure audit.log exists or create it on Crafty startup ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/771))
+- Fix typing issue on ID comparison causing general users to not be able to delete their own API keys ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/775))
+- Fix user creation bug where it would fail when a role was selected ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on roles page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on user page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Use UTC for tokens_valid_from in user config, to resolve token invalidation on instance TZ change ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/765))
### Tweaks
- Add info note to default creds file ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/760))
- Remove navigation label from sidebar ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/766))
+- Do not allow slashes in server names ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/767))
- Add a thread dump to support logs ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/769))
- Remove text from status page and use symbols ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/770))
+- Add better feedback on when errors appear on user creation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Workaround cpu_freq call crashing on obscure cpu architectures ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/776))
### Lang
+- Show natural language name instead of country code in User Config Lang select list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/773))
- Add remaining `he_IL`, `th_TH` translations for 4.4.0 Release ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/761))
+- Mark `he_IL` incomplete ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
## --- [4.4.0] - 2024/05/11
diff --git a/app/classes/controllers/management_controller.py b/app/classes/controllers/management_controller.py
index cab755b6..fc17b8be 100644
--- a/app/classes/controllers/management_controller.py
+++ b/app/classes/controllers/management_controller.py
@@ -5,6 +5,7 @@ from prometheus_client import CollectorRegistry, Gauge
from app.classes.models.management import HelpersManagement, HelpersWebhooks
from app.classes.models.servers import HelperServers
+from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -75,7 +76,7 @@ class ManagementController:
# Commands Methods
# **********************************************************************************
- def send_command(self, user_id, server_id, remote_ip, command):
+ def send_command(self, user_id, server_id, remote_ip, command, action_id=None):
server_name = HelperServers.get_server_friendly_name(server_id)
# Example: Admin issued command start_server for server Survival
@@ -86,7 +87,12 @@ class ManagementController:
remote_ip,
)
self.queue_command(
- {"server_id": server_id, "user_id": user_id, "command": command}
+ {
+ "server_id": server_id,
+ "user_id": user_id,
+ "command": command,
+ "action_id": action_id,
+ }
)
def queue_command(self, command_data):
@@ -123,6 +129,7 @@ class ManagementController:
cron_string="* * * * *",
parent=None,
delay=0,
+ action_id=None,
):
return HelpersManagement.create_scheduled_task(
server_id,
@@ -137,6 +144,7 @@ class ManagementController:
cron_string,
parent,
delay,
+ action_id,
)
@staticmethod
@@ -175,34 +183,47 @@ class ManagementController:
# Backups Methods
# **********************************************************************************
@staticmethod
- def get_backup_config(server_id):
- return HelpersManagement.get_backup_config(server_id)
+ def get_backup_config(backup_id):
+ return HelpersManagement.get_backup_config(backup_id)
- def set_backup_config(
- self,
- server_id: int,
- backup_path: str = None,
- max_backups: int = None,
- excluded_dirs: list = None,
- compress: bool = False,
- shutdown: bool = False,
- before: str = "",
- after: str = "",
- ):
- return self.management_helper.set_backup_config(
- server_id,
- backup_path,
- max_backups,
- excluded_dirs,
- compress,
- shutdown,
- before,
- after,
+ @staticmethod
+ def get_backups_by_server(server_id, model=False):
+ return HelpersManagement.get_backups_by_server(server_id, model)
+
+ @staticmethod
+ def delete_backup_config(backup_id):
+ HelpersManagement.remove_backup_config(backup_id)
+
+ @staticmethod
+ def update_backup_config(backup_id, updates):
+ if "backup_location" in updates:
+ updates["backup_location"] = Helpers.wtol_path(updates["backup_location"])
+ return HelpersManagement.update_backup_config(backup_id, updates)
+
+ def add_backup_config(self, data) -> str:
+ if "backup_location" in data:
+ data["backup_location"] = Helpers.wtol_path(data["backup_location"])
+ return self.management_helper.add_backup_config(data)
+
+ def add_default_backup_config(self, server_id, backup_path):
+ return self.management_helper.add_backup_config(
+ {
+ "backup_name": "Default Backup",
+ "backup_location": Helpers.wtol_path(backup_path),
+ "max_backups": 0,
+ "before": "",
+ "after": "",
+ "compress": False,
+ "shutdown": False,
+ "server_id": server_id,
+ "excluded_dirs": [],
+ "default": True,
+ }
)
@staticmethod
- def get_excluded_backup_dirs(server_id: int):
- return HelpersManagement.get_excluded_backup_dirs(server_id)
+ def get_excluded_backup_dirs(backup_id: int):
+ return HelpersManagement.get_excluded_backup_dirs(backup_id)
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
self.management_helper.add_excluded_backup_dir(server_id, dir_to_add)
diff --git a/app/classes/controllers/servers_controller.py b/app/classes/controllers/servers_controller.py
index 6a5cce4e..7743ad84 100644
--- a/app/classes/controllers/servers_controller.py
+++ b/app/classes/controllers/servers_controller.py
@@ -48,7 +48,6 @@ class ServersController(metaclass=Singleton):
name: str,
server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -83,7 +82,6 @@ class ServersController(metaclass=Singleton):
server_uuid,
name,
server_dir,
- backup_path,
server_command,
server_file,
server_log_file,
@@ -148,8 +146,7 @@ class ServersController(metaclass=Singleton):
PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
# Remove roles from server
PermissionsServers.remove_roles_of_server(server_id)
- # Remove backup configs tied to server
- self.management_helper.remove_backup_config(server_id)
+ self.management_helper.remove_all_server_backups(server_id)
# Finally remove server
self.servers_helper.remove_server(server_id)
diff --git a/app/classes/controllers/users_controller.py b/app/classes/controllers/users_controller.py
index 5425fbf8..d45797bd 100644
--- a/app/classes/controllers/users_controller.py
+++ b/app/classes/controllers/users_controller.py
@@ -55,6 +55,7 @@ class UsersController:
"minLength": self.helper.minimum_password_length,
"examples": ["crafty"],
"title": "Password",
+ "error": "passLength",
},
"email": {
"type": "string",
diff --git a/app/classes/minecraft/stats.py b/app/classes/minecraft/stats.py
index a3f85c05..c3474d0d 100644
--- a/app/classes/minecraft/stats.py
+++ b/app/classes/minecraft/stats.py
@@ -86,7 +86,7 @@ class Stats:
def get_node_stats(self) -> NodeStatsReturnDict:
try:
cpu_freq = psutil.cpu_freq()
- except (NotImplementedError, FileNotFoundError):
+ except (NotImplementedError, AttributeError, FileNotFoundError):
cpu_freq = None
if cpu_freq is None:
cpu_freq = psutil._common.scpufreq(current=-1, min=-1, max=-1)
diff --git a/app/classes/models/management.py b/app/classes/models/management.py
index e59dd39c..1a3e4a11 100644
--- a/app/classes/models/management.py
+++ b/app/classes/models/management.py
@@ -16,6 +16,7 @@ from app.classes.models.base_model import BaseModel
from app.classes.models.users import HelperUsers
from app.classes.models.servers import Servers
from app.classes.models.server_permissions import PermissionsServers
+from app.classes.shared.helpers import Helpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
@@ -87,6 +88,7 @@ class Schedules(BaseModel):
interval_type = CharField()
start_time = CharField(null=True)
command = CharField(null=True)
+ action_id = CharField(null=True)
name = CharField()
one_time = BooleanField(default=False)
cron_string = CharField(default="")
@@ -102,13 +104,19 @@ class Schedules(BaseModel):
# Backups Class
# **********************************************************************************
class Backups(BaseModel):
+ backup_id = CharField(primary_key=True, default=Helpers.create_uuid)
+ backup_name = CharField(default="New Backup")
+ backup_location = CharField(default="")
excluded_dirs = CharField(null=True)
- max_backups = IntegerField()
+ max_backups = IntegerField(default=0)
server_id = ForeignKeyField(Servers, backref="backups_server")
compress = BooleanField(default=False)
shutdown = BooleanField(default=False)
before = CharField(default="")
after = CharField(default="")
+ default = BooleanField(default=False)
+ status = CharField(default='{"status": "Standby", "message": ""}')
+ enabled = BooleanField(default=True)
class Meta:
table_name = "backups"
@@ -263,6 +271,7 @@ class HelpersManagement:
cron_string="* * * * *",
parent=None,
delay=0,
+ action_id=None,
):
sch_id = Schedules.insert(
{
@@ -273,6 +282,7 @@ class HelpersManagement:
Schedules.interval_type: interval_type,
Schedules.start_time: start_time,
Schedules.command: command,
+ Schedules.action_id: action_id,
Schedules.name: name,
Schedules.one_time: one_time,
Schedules.cron_string: cron_string,
@@ -335,133 +345,81 @@ class HelpersManagement:
# Backups Methods
# **********************************************************************************
@staticmethod
- def get_backup_config(server_id):
- try:
- row = (
- Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
- )
- conf = {
- "backup_path": row.server_id.backup_path,
- "excluded_dirs": row.excluded_dirs,
- "max_backups": row.max_backups,
- "server_id": row.server_id_id,
- "compress": row.compress,
- "shutdown": row.shutdown,
- "before": row.before,
- "after": row.after,
- }
- except IndexError:
- conf = {
- "backup_path": None,
- "excluded_dirs": None,
- "max_backups": 0,
- "server_id": server_id,
- "compress": False,
- "shutdown": False,
- "before": "",
- "after": "",
- }
- return conf
+ def get_backup_config(backup_id):
+ return model_to_dict(Backups.get(Backups.backup_id == backup_id))
@staticmethod
- def remove_backup_config(server_id):
+ def get_backups_by_server(server_id, model=False):
+ if not model:
+ data = {}
+ for backup in (
+ Backups.select().where(Backups.server_id == server_id).execute()
+ ):
+ data[str(backup.backup_id)] = {
+ "backup_id": backup.backup_id,
+ "backup_name": backup.backup_name,
+ "backup_location": backup.backup_location,
+ "excluded_dirs": backup.excluded_dirs,
+ "max_backups": backup.max_backups,
+ "server_id": backup.server_id_id,
+ "compress": backup.compress,
+ "shutdown": backup.shutdown,
+ "before": backup.before,
+ "after": backup.after,
+ "default": backup.default,
+ "enabled": backup.enabled,
+ }
+ else:
+ data = Backups.select().where(Backups.server_id == server_id).execute()
+ return data
+
+ @staticmethod
+ def get_default_server_backup(server_id: str) -> dict:
+        # Find the backup config flagged as default for this server
+ bu_query = Backups.select().where(
+ Backups.server_id == server_id,
+ Backups.default == True, # pylint: disable=singleton-comparison
+ )
+        # A server is expected to have at most one default backup;
+        # take the first matching row.
+ backup_model = bu_query.first()
+
+ if backup_model:
+ return model_to_dict(backup_model)
+ raise IndexError
+
+ @staticmethod
+ def remove_all_server_backups(server_id):
Backups.delete().where(Backups.server_id == server_id).execute()
- def set_backup_config(
- self,
- server_id: int,
- backup_path: str = None,
- max_backups: int = None,
- excluded_dirs: list = None,
- compress: bool = False,
- shutdown: bool = False,
- before: str = "",
- after: str = "",
- ):
- logger.debug(f"Updating server {server_id} backup config with {locals()}")
- if Backups.select().where(Backups.server_id == server_id).exists():
- new_row = False
- conf = {}
- else:
- conf = {
- "excluded_dirs": None,
- "max_backups": 0,
- "server_id": server_id,
- "compress": False,
- "shutdown": False,
- "before": "",
- "after": "",
- }
- new_row = True
- if max_backups is not None:
- conf["max_backups"] = max_backups
- if excluded_dirs is not None:
- dirs_to_exclude = ",".join(excluded_dirs)
+ @staticmethod
+ def remove_backup_config(backup_id):
+ Backups.delete().where(Backups.backup_id == backup_id).execute()
+
+ def add_backup_config(self, conf) -> str:
+ if "excluded_dirs" in conf:
+ dirs_to_exclude = ",".join(conf["excluded_dirs"])
conf["excluded_dirs"] = dirs_to_exclude
- conf["compress"] = compress
- conf["shutdown"] = shutdown
- conf["before"] = before
- conf["after"] = after
- if not new_row:
- with self.database.atomic():
- if backup_path is not None:
- server_rows = (
- Servers.update(backup_path=backup_path)
- .where(Servers.server_id == server_id)
- .execute()
- )
- else:
- server_rows = 0
- backup_rows = (
- Backups.update(conf).where(Backups.server_id == server_id).execute()
- )
- logger.debug(
- f"Updating existing backup record. "
- f"{server_rows}+{backup_rows} rows affected"
- )
- else:
- with self.database.atomic():
- conf["server_id"] = server_id
- if backup_path is not None:
- Servers.update(backup_path=backup_path).where(
- Servers.server_id == server_id
- )
- Backups.create(**conf)
- logger.debug("Creating new backup record.")
+ backup = Backups.create(**conf)
+ logger.debug("Creating new backup record.")
+ return backup.backup_id
@staticmethod
- def get_excluded_backup_dirs(server_id: int):
- excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"]
+ def update_backup_config(backup_id, data):
+ if "excluded_dirs" in data:
+ dirs_to_exclude = ",".join(data["excluded_dirs"])
+ data["excluded_dirs"] = dirs_to_exclude
+ Backups.update(**data).where(Backups.backup_id == backup_id).execute()
+
+ @staticmethod
+ def get_excluded_backup_dirs(backup_id: int):
+ excluded_dirs = HelpersManagement.get_backup_config(backup_id)["excluded_dirs"]
if excluded_dirs is not None and excluded_dirs != "":
dir_list = excluded_dirs.split(",")
else:
dir_list = []
return dir_list
- def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
- dir_list = self.get_excluded_backup_dirs(server_id)
- if dir_to_add not in dir_list:
- dir_list.append(dir_to_add)
- excluded_dirs = ",".join(dir_list)
- self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
- else:
- logger.debug(
- f"Not adding {dir_to_add} to excluded directories - "
- f"already in the excluded directory list for server ID {server_id}"
- )
-
- def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
- dir_list = self.get_excluded_backup_dirs(server_id)
- if dir_to_del in dir_list:
- dir_list.remove(dir_to_del)
- excluded_dirs = ",".join(dir_list)
- self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
- else:
- logger.debug(
- f"Not removing {dir_to_del} from excluded directories - "
- f"not in the excluded directory list for server ID {server_id}"
- )
-
# **********************************************************************************
# Webhooks Class
diff --git a/app/classes/models/servers.py b/app/classes/models/servers.py
index 13d9096a..e5d85c69 100644
--- a/app/classes/models/servers.py
+++ b/app/classes/models/servers.py
@@ -26,7 +26,6 @@ class Servers(BaseModel):
created = DateTimeField(default=datetime.datetime.now)
server_name = CharField(default="Server", index=True)
path = CharField(default="")
- backup_path = CharField(default="")
executable = CharField(default="")
log_path = CharField(default="")
execution_command = CharField(default="")
@@ -65,7 +64,6 @@ class HelperServers:
server_id: str,
name: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -81,7 +79,6 @@ class HelperServers:
name: The name of the server
server_uuid: This is the UUID of the server
server_dir: The directory where the server is located
- backup_path: The path to the backup folder
server_command: The command to start the server
server_file: The name of the server file
server_log_file: The path to the server log file
@@ -111,7 +108,6 @@ class HelperServers:
server_port=server_port,
server_ip=server_host,
stop_command=server_stop,
- backup_path=backup_path,
type=server_type,
created_by=created_by,
).server_id
diff --git a/app/classes/models/users.py b/app/classes/models/users.py
index 3f96e651..6f6a6bde 100644
--- a/app/classes/models/users.py
+++ b/app/classes/models/users.py
@@ -38,7 +38,7 @@ class Users(BaseModel):
superuser = BooleanField(default=False)
lang = CharField(default="en_EN")
support_logs = CharField(default="")
- valid_tokens_from = DateTimeField(default=datetime.datetime.now)
+ valid_tokens_from = DateTimeField(default=Helpers.get_utc_now)
server_order = CharField(default="")
preparing = BooleanField(default=False)
hints = BooleanField(default=True)
@@ -119,7 +119,6 @@ class HelperUsers:
@staticmethod
def get_user_total():
count = Users.select().where(Users.username != "system").count()
- print(count)
return count
@staticmethod
diff --git a/app/classes/shared/authentication.py b/app/classes/shared/authentication.py
index fad8b730..94db5532 100644
--- a/app/classes/shared/authentication.py
+++ b/app/classes/shared/authentication.py
@@ -1,5 +1,6 @@
import logging
import time
+from datetime import datetime
from typing import Optional, Dict, Any, Tuple
import jwt
from jwt import PyJWTError
@@ -62,7 +63,17 @@ class Authentication:
user = HelperUsers.get_user(user_id)
# TODO: Have a cache or something so we don't constantly
# have to query the database
- if int(user.get("valid_tokens_from").timestamp()) < iat:
+ valid_tokens_from_str = user.get("valid_tokens_from")
+ # It's possible this will be a string or a dt coming from the DB
+ # We need to account for that
+ try:
+ valid_tokens_from_dt = datetime.strptime(
+ valid_tokens_from_str, "%Y-%m-%d %H:%M:%S.%f%z"
+ )
+ except TypeError:
+ valid_tokens_from_dt = valid_tokens_from_str
+        # Accept only tokens issued after the last invalidation timestamp
+ if int(valid_tokens_from_dt.timestamp()) < iat:
# Success!
return key, data, user
return None
diff --git a/app/classes/shared/file_helpers.py b/app/classes/shared/file_helpers.py
index 90d8e65c..23bf01dd 100644
--- a/app/classes/shared/file_helpers.py
+++ b/app/classes/shared/file_helpers.py
@@ -4,7 +4,10 @@ import logging
import pathlib
import tempfile
import zipfile
-from zipfile import ZipFile, ZIP_DEFLATED
+import hashlib
+from typing import BinaryIO
+import mimetypes
+from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
import urllib.request
import ssl
import time
@@ -22,6 +25,7 @@ class FileHelpers:
def __init__(self, helper):
self.helper: Helpers = helper
+ self.mime_types = mimetypes.MimeTypes()
@staticmethod
def ssl_get_file(
@@ -142,6 +146,32 @@ class FileHelpers:
logger.error(f"Path specified is not a file or does not exist. {path}")
return e
+ def check_mime_types(self, file_path):
+ m_type, _value = self.mime_types.guess_type(file_path)
+ return m_type
+
+ @staticmethod
+ def calculate_file_hash(file_path: str) -> str:
+ """
+ Takes one parameter of file path.
+ It will generate a SHA256 hash for the path and return it.
+ """
+ sha256_hash = hashlib.sha256()
+ with open(file_path, "rb") as f:
+ for byte_block in iter(lambda: f.read(4096), b""):
+ sha256_hash.update(byte_block)
+ return sha256_hash.hexdigest()
+
+ @staticmethod
+ def calculate_buffer_hash(buffer: BinaryIO) -> str:
+ """
+        Takes a bytes-like buffer (hashlib requires bytes, despite the
+        BinaryIO hint - TODO confirm callers). Returns its sha256 hex digest.
+ """
+ sha256_hash = hashlib.sha256()
+ sha256_hash.update(buffer)
+ return sha256_hash.hexdigest()
+
@staticmethod
def copy_dir(src_path, dest_path, dirs_exist_ok=False):
# pylint: disable=unexpected-keyword-arg
@@ -229,74 +259,15 @@ class FileHelpers:
return True
- def make_compressed_backup(
- self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
- ):
- # create a ZipFile object
- path_to_destination += ".zip"
- ex_replace = [p.replace("\\", "/") for p in excluded_dirs]
- total_bytes = 0
- dir_bytes = Helpers.get_dir_size(path_to_zip)
- results = {
- "percent": 0,
- "total_files": self.helper.human_readable_file_size(dir_bytes),
- }
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(server_id)},
- "backup_status",
- results,
- )
- with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
- zip_file.comment = bytes(
- comment, "utf-8"
- ) # comments over 65535 bytes will be truncated
- for root, dirs, files in os.walk(path_to_zip, topdown=True):
- for l_dir in dirs:
- if str(os.path.join(root, l_dir)).replace("\\", "/") in ex_replace:
- dirs.remove(l_dir)
- ziproot = path_to_zip
- for file in files:
- if (
- str(os.path.join(root, file)).replace("\\", "/")
- not in ex_replace
- and file != "crafty.sqlite"
- ):
- try:
- logger.info(f"backing up: {os.path.join(root, file)}")
- if os.name == "nt":
- zip_file.write(
- os.path.join(root, file),
- os.path.join(root.replace(ziproot, ""), file),
- )
- else:
- zip_file.write(
- os.path.join(root, file),
- os.path.join(root.replace(ziproot, "/"), file),
- )
-
- except Exception as e:
- logger.warning(
- f"Error backing up: {os.path.join(root, file)}!"
- f" - Error was: {e}"
- )
- total_bytes += os.path.getsize(os.path.join(root, file))
- percent = round((total_bytes / dir_bytes) * 100, 2)
- results = {
- "percent": percent,
- "total_files": self.helper.human_readable_file_size(dir_bytes),
- }
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(server_id)},
- "backup_status",
- results,
- )
-
- return True
-
def make_backup(
- self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
+ self,
+ path_to_destination,
+ path_to_zip,
+ excluded_dirs,
+ server_id,
+ backup_id,
+ comment="",
+ compressed=None,
):
# create a ZipFile object
path_to_destination += ".zip"
@@ -313,7 +284,15 @@ class FileHelpers:
"backup_status",
results,
)
- with ZipFile(path_to_destination, "w") as zip_file:
+ WebSocketManager().broadcast_page_params(
+ "/panel/edit_backup",
+ {"id": str(server_id)},
+ "backup_status",
+ results,
+ )
+ # Set the compression mode based on the `compressed` parameter
+ compression_mode = ZIP_DEFLATED if compressed else ZIP_STORED
+ with ZipFile(path_to_destination, "w", compression_mode) as zip_file:
zip_file.comment = bytes(
comment, "utf-8"
) # comments over 65535 bytes will be truncated
@@ -364,6 +343,7 @@ class FileHelpers:
results = {
"percent": percent,
"total_files": self.helper.human_readable_file_size(dir_bytes),
+ "backup_id": backup_id,
}
# send status results to page.
WebSocketManager().broadcast_page_params(
@@ -372,6 +352,12 @@ class FileHelpers:
"backup_status",
results,
)
+ WebSocketManager().broadcast_page_params(
+ "/panel/edit_backup",
+ {"id": str(server_id)},
+ "backup_status",
+ results,
+ )
return True
@staticmethod
diff --git a/app/classes/shared/helpers.py b/app/classes/shared/helpers.py
index 55a588fc..e827d5b2 100644
--- a/app/classes/shared/helpers.py
+++ b/app/classes/shared/helpers.py
@@ -19,7 +19,7 @@ import shutil
import shlex
import subprocess
import itertools
-from datetime import datetime
+from datetime import datetime, timezone
from socket import gethostname
from contextlib import redirect_stderr, suppress
import libgravatar
@@ -508,7 +508,6 @@ class Helpers:
"max_log_lines": 700,
"max_audit_entries": 300,
"disabled_language_files": [],
- "stream_size_GB": 1,
"keywords": ["help", "chunk"],
"allow_nsfw_profile_pictures": False,
"enable_user_self_delete": False,
@@ -640,6 +639,10 @@ class Helpers:
version = f"{major}.{minor}.{sub}"
return str(version)
+ @staticmethod
+ def get_utc_now() -> datetime:
+ return datetime.fromtimestamp(time.time(), tz=timezone.utc)
+
def encode_pass(self, password):
return self.passhasher.hash(password)
@@ -1006,6 +1009,11 @@ class Helpers:
except PermissionError as e:
logger.critical(f"Check generated exception due to permssion error: {e}")
return False
+ except FileNotFoundError as e:
+ logger.critical(
+ f"Check generated exception due to file does not exist error: {e}"
+ )
+ return False
def create_self_signed_cert(self, cert_dir=None):
if cert_dir is None:
diff --git a/app/classes/shared/main_controller.py b/app/classes/shared/main_controller.py
index e31486ea..0d90a967 100644
--- a/app/classes/shared/main_controller.py
+++ b/app/classes/shared/main_controller.py
@@ -566,7 +566,6 @@ class Controller:
name=data["name"],
server_uuid=server_fs_uuid,
server_dir=new_server_path,
- backup_path=backup_path,
server_command=server_command,
server_file=server_file,
server_log_file=log_location,
@@ -576,7 +575,7 @@ class Controller:
server_host=monitoring_host,
server_type=monitoring_type,
)
- self.management.set_backup_config(
+ self.management.add_default_backup_config(
new_server_id,
backup_path,
)
@@ -722,7 +721,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_jar,
server_log_file,
@@ -776,7 +774,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -821,7 +818,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -869,7 +865,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -893,16 +888,13 @@ class Controller:
# **********************************************************************************
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
- server_data = self.servers.get_server_data_by_id(old_server_id)
server_obj = self.servers.get_server_obj(new_server_id)
- old_bu_path = server_data["backup_path"]
ServerPermsController.backup_role_swap(old_server_id, new_server_id)
- backup_path = old_bu_path
+ backup_path = os.path.join(self.helper.backup_path, old_server_id)
backup_path = Path(backup_path)
backup_path_components = list(backup_path.parts)
backup_path_components[-1] = new_uuid
new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components))
- server_obj.backup_path = new_bu_path
default_backup_dir = os.path.join(self.helper.backup_path, new_uuid)
try:
os.rmdir(default_backup_dir)
@@ -916,7 +908,6 @@ class Controller:
name: str,
server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -931,7 +922,6 @@ class Controller:
name,
server_uuid,
server_dir,
- backup_path,
server_command,
server_file,
server_log_file,
@@ -996,16 +986,16 @@ class Controller:
f"Unable to delete server files for server with ID: "
f"{server_id} with error logged: {e}"
)
- if Helpers.check_path_exists(
- self.servers.get_server_data_by_id(server_id)["backup_path"]
- ):
- FileHelpers.del_dirs(
- Helpers.get_os_understandable_path(
- self.servers.get_server_data_by_id(server_id)[
- "backup_path"
- ]
+ backup_configs = HelpersManagement.get_backups_by_server(
+ server_id, True
+ )
+ for config in backup_configs:
+ if Helpers.check_path_exists(config.backup_location):
+ FileHelpers.del_dirs(
+ Helpers.get_os_understandable_path(
+ config.backup_location
+ )
)
- )
# Cleanup scheduled tasks
try:
diff --git a/app/classes/shared/server.py b/app/classes/shared/server.py
index a6c98b89..ab8ca54a 100644
--- a/app/classes/shared/server.py
+++ b/app/classes/shared/server.py
@@ -207,9 +207,6 @@ class ServerInstance:
self.server_scheduler.start()
self.dir_scheduler.start()
self.start_dir_calc_task()
- self.backup_thread = threading.Thread(
- target=self.backup_server, daemon=True, name=f"backup_{self.name}"
- )
self.is_backingup = False
# Reset crash and update at initialization
self.stats_helper.server_crash_reset()
@@ -940,8 +937,7 @@ class ServerInstance:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
def restart_threaded_server(self, user_id):
- bu_conf = HelpersManagement.get_backup_config(self.server_id)
- if self.is_backingup and bu_conf["shutdown"]:
+ if self.is_backingup:
logger.info(
"Restart command detected. Supressing - server has"
" backup shutdown enabled and server is currently backing up."
@@ -1111,12 +1107,16 @@ class ServerInstance:
f.write("eula=true")
self.run_threaded_server(user_id)
- def a_backup_server(self):
- if self.settings["backup_path"] == "":
- logger.critical("Backup path is None. Canceling Backup!")
- return
+ def server_backup_threader(self, backup_id, update=False):
+ # Check to see if we're already backing up
+ if self.check_backup_by_id(backup_id):
+ return False
+
backup_thread = threading.Thread(
- target=self.backup_server, daemon=True, name=f"backup_{self.name}"
+ target=self.backup_server,
+ daemon=True,
+ name=f"backup_{backup_id}",
+ args=[backup_id, update],
)
logger.info(
f"Starting Backup Thread for server {self.settings['server_name']}."
@@ -1127,27 +1127,20 @@ class ServerInstance:
"Backup Thread - Local server path not defined. "
"Setting local server path variable."
)
- # checks if the backup thread is currently alive for this server
- if not self.is_backingup:
- try:
- backup_thread.start()
- self.is_backingup = True
- except Exception as ex:
- logger.error(f"Failed to start backup: {ex}")
- return False
- else:
- logger.error(
- f"Backup is already being processed for server "
- f"{self.settings['server_name']}. Canceling backup request"
- )
+
+ try:
+ backup_thread.start()
+ except Exception as ex:
+ logger.error(f"Failed to start backup: {ex}")
return False
logger.info(f"Backup Thread started for server {self.settings['server_name']}.")
@callback
- def backup_server(self):
+ def backup_server(self, backup_id, update):
was_server_running = None
logger.info(f"Starting server {self.name} (ID {self.server_id}) backup")
server_users = PermissionsServers.get_server_user_list(self.server_id)
+ # Alert the start of the backup to the authorized users.
for user in server_users:
WebSocketManager().broadcast_user(
user,
@@ -1157,30 +1150,40 @@ class ServerInstance:
).format(self.name),
)
time.sleep(3)
- conf = HelpersManagement.get_backup_config(self.server_id)
+
+ # Get the backup config
+ conf = HelpersManagement.get_backup_config(backup_id)
+ # Adjust the location to include the backup ID for destination.
+ backup_location = os.path.join(conf["backup_location"], conf["backup_id"])
+
+ # Check if the backup location even exists.
+        if not conf["backup_location"]:
+ Console.critical("No backup path found. Canceling")
+ return None
if conf["before"]:
- if self.check_running():
- logger.debug(
- "Found running server and send command option. Sending command"
- )
- self.send_command(conf["before"])
+ logger.debug(
+ "Found running server and send command option. Sending command"
+ )
+ self.send_command(conf["before"])
+ # Pause to let command run
+ time.sleep(5)
if conf["shutdown"]:
- if conf["before"]:
- # pause to let people read message.
- time.sleep(5)
logger.info(
"Found shutdown preference. Delaying"
+                + " backup start. Shutting down server."
)
- if self.check_running():
- self.stop_server()
- was_server_running = True
+ if not update:
+ was_server_running = False
+ if self.check_running():
+ self.stop_server()
+ was_server_running = True
+
+ self.helper.ensure_dir_exists(backup_location)
- self.helper.ensure_dir_exists(self.settings["backup_path"])
try:
backup_filename = (
- f"{self.settings['backup_path']}/"
+ f"{backup_location}/"
f"{datetime.datetime.now().astimezone(self.tz).strftime('%Y-%m-%d_%H-%M-%S')}" # pylint: disable=line-too-long
)
logger.info(
@@ -1188,42 +1191,36 @@ class ServerInstance:
f" (ID#{self.server_id}, path={self.server_path}) "
f"at '{backup_filename}'"
)
- excluded_dirs = HelpersManagement.get_excluded_backup_dirs(self.server_id)
+ excluded_dirs = HelpersManagement.get_excluded_backup_dirs(backup_id)
server_dir = Helpers.get_os_understandable_path(self.settings["path"])
- if conf["compress"]:
- logger.debug(
- "Found compress backup to be true. Calling compressed archive"
- )
- self.file_helper.make_compressed_backup(
- Helpers.get_os_understandable_path(backup_filename),
- server_dir,
- excluded_dirs,
- self.server_id,
- )
- else:
- logger.debug(
- "Found compress backup to be false. Calling NON-compressed archive"
- )
- self.file_helper.make_backup(
- Helpers.get_os_understandable_path(backup_filename),
- server_dir,
- excluded_dirs,
- self.server_id,
- )
+
+ self.file_helper.make_backup(
+ Helpers.get_os_understandable_path(backup_filename),
+ server_dir,
+ excluded_dirs,
+ self.server_id,
+ backup_id,
+ conf["backup_name"],
+ conf["compress"],
+ )
while (
- len(self.list_backups()) > conf["max_backups"]
+ len(self.list_backups(conf)) > conf["max_backups"]
and conf["max_backups"] > 0
):
- backup_list = self.list_backups()
+ backup_list = self.list_backups(conf)
oldfile = backup_list[0]
- oldfile_path = f"{conf['backup_path']}/{oldfile['path']}"
+ oldfile_path = f"{backup_location}/{oldfile['path']}"
logger.info(f"Removing old backup '{oldfile['path']}'")
os.remove(Helpers.get_os_understandable_path(oldfile_path))
- self.is_backingup = False
logger.info(f"Backup of server: {self.name} completed")
- results = {"percent": 100, "total_files": 0, "current_file": 0}
+ results = {
+ "percent": 100,
+ "total_files": 0,
+ "current_file": 0,
+ "backup_id": backup_id,
+ }
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@@ -1248,7 +1245,6 @@ class ServerInstance:
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
time.sleep(3)
- self.last_backup_failed = False
if conf["after"]:
if self.check_running():
logger.debug(
@@ -1256,12 +1252,21 @@ class ServerInstance:
)
self.send_command(conf["after"])
# pause to let people read message.
+ HelpersManagement.update_backup_config(
+ backup_id,
+ {"status": json.dumps({"status": "Standby", "message": ""})},
+ )
time.sleep(5)
- except:
+ except Exception as e:
logger.exception(
f"Failed to create backup of server {self.name} (ID {self.server_id})"
)
- results = {"percent": 100, "total_files": 0, "current_file": 0}
+ results = {
+ "percent": 100,
+ "total_files": 0,
+ "current_file": 0,
+ "backup_id": backup_id,
+ }
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@@ -1269,56 +1274,51 @@ class ServerInstance:
"backup_status",
results,
)
- self.is_backingup = False
if was_server_running:
logger.info(
"Backup complete. User had shutdown preference. Starting server."
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
- self.last_backup_failed = True
-
- def backup_status(self, source_path, dest_path):
- results = Helpers.calc_percent(source_path, dest_path)
- self.backup_stats = results
- if len(WebSocketManager().clients) > 0:
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(self.server_id)},
- "backup_status",
- results,
+ HelpersManagement.update_backup_config(
+ backup_id,
+ {"status": json.dumps({"status": "Failed", "message": f"{e}"})},
)
+ self.set_backup_status()
def last_backup_status(self):
return self.last_backup_failed
- def send_backup_status(self):
- try:
- return self.backup_stats
- except:
- return {"percent": 0, "total_files": 0}
+ def set_backup_status(self):
+ backups = HelpersManagement.get_backups_by_server(self.server_id, True)
+ alert = False
+ for backup in backups:
+ if json.loads(backup.status)["status"] == "Failed":
+ alert = True
+ self.last_backup_failed = alert
- def list_backups(self):
- if not self.settings["backup_path"]:
+ def list_backups(self, backup_config: dict) -> list:
+ if not backup_config:
logger.info(
f"Error putting backup file list for server with ID: {self.server_id}"
)
return []
+ backup_location = os.path.join(
+ backup_config["backup_location"], backup_config["backup_id"]
+ )
if not Helpers.check_path_exists(
- Helpers.get_os_understandable_path(self.settings["backup_path"])
+ Helpers.get_os_understandable_path(backup_location)
):
return []
files = Helpers.get_human_readable_files_sizes(
Helpers.list_dir_by_date(
- Helpers.get_os_understandable_path(self.settings["backup_path"])
+ Helpers.get_os_understandable_path(backup_location)
)
)
return [
{
"path": os.path.relpath(
f["path"],
- start=Helpers.get_os_understandable_path(
- self.settings["backup_path"]
- ),
+ start=Helpers.get_os_understandable_path(backup_location),
),
"size": f["size"],
}
@@ -1330,7 +1330,7 @@ class ServerInstance:
def jar_update(self):
self.stats_helper.set_update(True)
update_thread = threading.Thread(
- target=self.a_jar_update, daemon=True, name=f"exe_update_{self.name}"
+ target=self.threaded_jar_update, daemon=True, name=f"exe_update_{self.name}"
)
update_thread.start()
@@ -1371,10 +1371,13 @@ class ServerInstance:
def check_update(self):
return self.stats_helper.get_server_stats()["updating"]
- def a_jar_update(self):
+ def threaded_jar_update(self):
server_users = PermissionsServers.get_server_user_list(self.server_id)
was_started = "-1"
- self.a_backup_server()
+ # Get default backup configuration
+ backup_config = HelpersManagement.get_default_server_backup(self.server_id)
+ # start threaded backup
+ self.server_backup_threader(backup_config["backup_id"], True)
# checks if server is running. Calls shutdown if it is running.
if self.check_running():
was_started = True
@@ -1403,47 +1406,22 @@ class ServerInstance:
"string": message,
},
)
- backup_dir = os.path.join(
- Helpers.get_os_understandable_path(self.settings["path"]),
- "crafty_executable_backups",
- )
- # checks if backup directory already exists
- if os.path.isdir(backup_dir):
- backup_executable = os.path.join(backup_dir, self.settings["executable"])
- else:
- logger.info(
- f"Executable backup directory not found for Server: {self.name}."
- f" Creating one."
- )
- os.mkdir(backup_dir)
- backup_executable = os.path.join(backup_dir, self.settings["executable"])
-
- if len(os.listdir(backup_dir)) > 0:
- # removes old backup
- logger.info(f"Old backups found for server: {self.name}. Removing...")
- for item in os.listdir(backup_dir):
- os.remove(os.path.join(backup_dir, item))
- logger.info(f"Old backups removed for server: {self.name}.")
- else:
- logger.info(f"No old backups found for server: {self.name}")
-
current_executable = os.path.join(
Helpers.get_os_understandable_path(self.settings["path"]),
self.settings["executable"],
)
-
- try:
- # copies to backup dir
- FileHelpers.copy_file(current_executable, backup_executable)
- except FileNotFoundError:
- logger.error("Could not create backup of jarfile. File not found.")
-
+ backing_up = True
# wait for backup
- while self.is_backingup:
- time.sleep(10)
+ while backing_up:
+ # Check to see if we're already backing up
+ backing_up = self.check_backup_by_id(backup_config["backup_id"])
+ time.sleep(2)
# check if backup was successful
- if self.last_backup_failed:
+ backup_status = json.loads(
+ HelpersManagement.get_backup_config(backup_config["backup_id"])["status"]
+ )["status"]
+ if backup_status == "Failed":
for user in server_users:
WebSocketManager().broadcast_user(
user,
@@ -1528,12 +1506,6 @@ class ServerInstance:
WebSocketManager().broadcast_user_page(
user, "/panel/dashboard", "send_start_reload", {}
)
- WebSocketManager().broadcast_user(
- user,
- "notification",
- "Executable update finished for " + self.name,
- )
-
self.management_helper.add_to_audit_log_raw(
"Alert",
"-1",
@@ -1656,6 +1628,14 @@ class ServerInstance:
except:
Console.critical("Can't broadcast server status to websocket")
+ def check_backup_by_id(self, backup_id: str) -> bool:
+ # Check to see if we're already backing up
+ for thread in threading.enumerate():
+            if thread.name == f"backup_{backup_id}":
+ Console.debug(f"Backup with id {backup_id} already running!")
+ return True
+ return False
+
def get_servers_stats(self):
server_stats = {}
diff --git a/app/classes/shared/tasks.py b/app/classes/shared/tasks.py
index b9513441..da9735a9 100644
--- a/app/classes/shared/tasks.py
+++ b/app/classes/shared/tasks.py
@@ -140,7 +140,7 @@ class TasksManager:
)
elif command == "backup_server":
- svr.a_backup_server()
+ svr.server_backup_threader(cmd["action_id"])
elif command == "update_executable":
svr.jar_update()
@@ -240,6 +240,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -268,6 +269,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -284,6 +286,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -303,6 +306,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -337,6 +341,7 @@ class TasksManager:
job_data["cron_string"],
job_data["parent"],
job_data["delay"],
+ job_data["action_id"],
)
# Checks to make sure some doofus didn't actually make the newly
@@ -367,6 +372,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -393,6 +399,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -409,6 +416,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -428,6 +436,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -520,6 +529,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -543,6 +553,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -559,6 +570,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -578,6 +590,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -653,6 +666,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -785,6 +799,18 @@ class TasksManager:
self.helper.ensure_dir_exists(
os.path.join(self.controller.project_root, "import", "upload")
)
+ self.helper.ensure_dir_exists(
+ os.path.join(self.controller.project_root, "temp")
+ )
+ for file in os.listdir(os.path.join(self.controller.project_root, "temp")):
+ if self.helper.is_file_older_than_x_days(
+ os.path.join(self.controller.project_root, "temp", file)
+ ):
+ try:
+                    os.remove(os.path.join(self.controller.project_root, "temp", file))
+ except FileNotFoundError:
+ logger.debug("Could not clear out file from temp directory")
+
for file in os.listdir(
os.path.join(self.controller.project_root, "import", "upload")
):
@@ -793,7 +819,7 @@ class TasksManager:
):
try:
os.remove(os.path.join(file))
- except:
+ except FileNotFoundError:
logger.debug("Could not clear out file from import directory")
def log_watcher(self):
diff --git a/app/classes/shared/translation.py b/app/classes/shared/translation.py
index 0e441808..538856a8 100644
--- a/app/classes/shared/translation.py
+++ b/app/classes/shared/translation.py
@@ -20,7 +20,7 @@ class Translation:
def get_language_file(self, language: str):
return os.path.join(self.translations_path, str(language) + ".json")
- def translate(self, page, word, language):
+ def translate(self, page, word, language, error=True):
fallback_language = "en_EN"
translated_word = self.translate_inner(page, word, language)
@@ -37,7 +37,9 @@ class Translation:
if hasattr(translated_word, "__iter__"):
# Multiline strings
return "\n".join(translated_word)
- return "Error while getting translation"
+ if error:
+ return "Error while getting translation"
+ return word
def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
language_file = self.get_language_file(language)
diff --git a/app/classes/web/panel_handler.py b/app/classes/web/panel_handler.py
index bbbc9d9e..8df48431 100644
--- a/app/classes/web/panel_handler.py
+++ b/app/classes/web/panel_handler.py
@@ -41,6 +41,8 @@ SUBPAGE_PERMS = {
"webhooks": EnumPermissionsServer.CONFIG,
}
+SCHEDULE_AUTH_ERROR_URL = "/panel/error?error=Unauthorized access To Schedules"
+
class PanelHandler(BaseHandler):
def get_user_roles(self) -> t.Dict[str, list]:
@@ -677,36 +679,18 @@ class PanelHandler(BaseHandler):
page_data["java_versions"] = page_java
if subpage == "backup":
server_info = self.controller.servers.get_server_data_by_id(server_id)
- page_data["backup_config"] = (
- self.controller.management.get_backup_config(server_id)
- )
- exclusions = []
- page_data["exclusions"] = (
- self.controller.management.get_excluded_backup_dirs(server_id)
+
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, model=True
)
page_data["backing_up"] = (
self.controller.servers.get_server_instance_by_id(
server_id
).is_backingup
)
- page_data["backup_stats"] = (
- self.controller.servers.get_server_instance_by_id(
- server_id
- ).send_backup_status()
- )
# makes it so relative path is the only thing shown
- for file in page_data["exclusions"]:
- if Helpers.is_os_windows():
- exclusions.append(file.replace(server_info["path"] + "\\", ""))
- else:
- exclusions.append(file.replace(server_info["path"] + "/", ""))
- page_data["exclusions"] = exclusions
+
self.controller.servers.refresh_server_settings(server_id)
- try:
- page_data["backup_list"] = server.list_backups()
- except:
- page_data["backup_list"] = []
- page_data["backup_path"] = Helpers.wtol_path(server_info["backup_path"])
if subpage == "metrics":
try:
@@ -780,20 +764,23 @@ class PanelHandler(BaseHandler):
elif page == "download_backup":
file = self.get_argument("file", "")
+ backup_id = self.get_argument("backup_id", "")
server_id = self.check_server_id()
if server_id is None:
return
-
+ backup_config = self.controller.management.get_backup_config(backup_id)
server_info = self.controller.servers.get_server_data_by_id(server_id)
+ backup_location = os.path.join(backup_config["backup_location"], backup_id)
backup_file = os.path.abspath(
os.path.join(
- Helpers.get_os_understandable_path(server_info["backup_path"]), file
+ Helpers.get_os_understandable_path(backup_location),
+ file,
)
)
if not self.helper.is_subdir(
backup_file,
- Helpers.get_os_understandable_path(server_info["backup_path"]),
+ Helpers.get_os_understandable_path(backup_location),
) or not os.path.isfile(backup_file):
self.redirect("/panel/error?error=Invalid path detected")
return
@@ -892,6 +879,8 @@ class PanelHandler(BaseHandler):
os.path.join(self.helper.root_dir, "app", "translations")
)
):
+ if file == "humanized_index.json":
+ continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
@@ -1130,6 +1119,9 @@ class PanelHandler(BaseHandler):
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, True
+ )
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
server_id
)
@@ -1150,6 +1142,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["delay"] = 0
page_data["schedule"]["time"] = ""
page_data["schedule"]["interval"] = 1
+ page_data["schedule"]["action_id"] = ""
# we don't need to check difficulty here.
# We'll just default to basic for new schedules
page_data["schedule"]["difficulty"] = "basic"
@@ -1158,7 +1151,7 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
- self.redirect("/panel/error?error=Unauthorized access To Schedules")
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
@@ -1195,6 +1188,9 @@ class PanelHandler(BaseHandler):
exec_user["user_id"], server_id
)
)
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, True
+ )
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
@@ -1209,6 +1205,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["server_id"] = server_id
page_data["schedule"]["schedule_id"] = schedule.schedule_id
page_data["schedule"]["action"] = schedule.action
+ page_data["schedule"]["action_id"] = schedule.action_id
if schedule.name:
page_data["schedule"]["name"] = schedule.name
else:
@@ -1252,11 +1249,141 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
- self.redirect("/panel/error?error=Unauthorized access To Schedules")
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
+ elif page == "edit_backup":
+ server_id = self.get_argument("id", None)
+ backup_id = self.get_argument("backup_id", None)
+ page_data["active_link"] = "backups"
+ page_data["permissions"] = {
+ "Commands": EnumPermissionsServer.COMMANDS,
+ "Terminal": EnumPermissionsServer.TERMINAL,
+ "Logs": EnumPermissionsServer.LOGS,
+ "Schedule": EnumPermissionsServer.SCHEDULE,
+ "Backup": EnumPermissionsServer.BACKUP,
+ "Files": EnumPermissionsServer.FILES,
+ "Config": EnumPermissionsServer.CONFIG,
+ "Players": EnumPermissionsServer.PLAYERS,
+ }
+ if not self.failed_server:
+ server_obj = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ page_data["backup_failed"] = server_obj.last_backup_status()
+ page_data["user_permissions"] = (
+ self.controller.server_perms.get_user_id_permissions_list(
+ exec_user["user_id"], server_id
+ )
+ )
+ server_info = self.controller.servers.get_server_data_by_id(server_id)
+ page_data["backup_config"] = self.controller.management.get_backup_config(
+ backup_id
+ )
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, model=True
+ )
+ exclusions = []
+ page_data["backing_up"] = self.controller.servers.get_server_instance_by_id(
+ server_id
+ ).is_backingup
+ self.controller.servers.refresh_server_settings(server_id)
+ try:
+ page_data["backup_list"] = server.list_backups(
+ page_data["backup_config"]
+ )
+            except Exception:
+ page_data["backup_list"] = []
+ page_data["backup_path"] = Helpers.wtol_path(
+ page_data["backup_config"]["backup_location"]
+ )
+ page_data["server_data"] = self.controller.servers.get_server_data_by_id(
+ server_id
+ )
+ page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
+ server_id
+ )
+ page_data["server_stats"]["server_type"] = (
+ self.controller.servers.get_server_type_by_id(server_id)
+ )
+ page_data["exclusions"] = (
+ self.controller.management.get_excluded_backup_dirs(backup_id)
+ )
+ # Make exclusion paths relative for page
+ for file in page_data["exclusions"]:
+ if Helpers.is_os_windows():
+ exclusions.append(file.replace(server_info["path"] + "\\", ""))
+ else:
+ exclusions.append(file.replace(server_info["path"] + "/", ""))
+ page_data["exclusions"] = exclusions
+
+ if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
+ if not superuser:
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
+ return
+ template = "panel/server_backup_edit.html"
+
+ elif page == "add_backup":
+ server_id = self.get_argument("id", None)
+ backup_id = self.get_argument("backup_id", None)
+ page_data["active_link"] = "backups"
+ page_data["permissions"] = {
+ "Commands": EnumPermissionsServer.COMMANDS,
+ "Terminal": EnumPermissionsServer.TERMINAL,
+ "Logs": EnumPermissionsServer.LOGS,
+ "Schedule": EnumPermissionsServer.SCHEDULE,
+ "Backup": EnumPermissionsServer.BACKUP,
+ "Files": EnumPermissionsServer.FILES,
+ "Config": EnumPermissionsServer.CONFIG,
+ "Players": EnumPermissionsServer.PLAYERS,
+ }
+ if not self.failed_server:
+ server_obj = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ page_data["backup_failed"] = server_obj.last_backup_status()
+ page_data["user_permissions"] = (
+ self.controller.server_perms.get_user_id_permissions_list(
+ exec_user["user_id"], server_id
+ )
+ )
+ server_info = self.controller.servers.get_server_data_by_id(server_id)
+ page_data["backup_config"] = {
+ "excluded_dirs": [],
+ "max_backups": 0,
+ "server_id": server_id,
+ "backup_location": os.path.join(self.helper.backup_path, server_id),
+ "compress": False,
+ "shutdown": False,
+ "before": "",
+ "after": "",
+ }
+ page_data["backing_up"] = False
+ self.controller.servers.refresh_server_settings(server_id)
+
+ page_data["backup_list"] = []
+ page_data["backup_path"] = Helpers.wtol_path(
+ page_data["backup_config"]["backup_location"]
+ )
+ page_data["server_data"] = self.controller.servers.get_server_data_by_id(
+ server_id
+ )
+ page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
+ server_id
+ )
+ page_data["server_stats"]["server_type"] = (
+ self.controller.servers.get_server_type_by_id(server_id)
+ )
+ page_data["exclusions"] = []
+
+ if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
+ if not superuser:
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
+ return
+ template = "panel/server_backup_edit.html"
+
elif page == "edit_user":
user_id = self.get_argument("id", None)
role_servers = self.controller.servers.get_authorized_servers(user_id)
@@ -1307,6 +1434,8 @@ class PanelHandler(BaseHandler):
for file in sorted(
os.listdir(os.path.join(self.helper.root_dir, "app", "translations"))
):
+ if file == "humanized_index.json":
+ continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
diff --git a/app/classes/web/routes/api/api_handlers.py b/app/classes/web/routes/api/api_handlers.py
index a30350a5..78223efe 100644
--- a/app/classes/web/routes/api/api_handlers.py
+++ b/app/classes/web/routes/api/api_handlers.py
@@ -38,12 +38,14 @@ from app.classes.web.routes.api.servers.server.backups.index import (
)
from app.classes.web.routes.api.servers.server.backups.backup.index import (
ApiServersServerBackupsBackupIndexHandler,
+ ApiServersServerBackupsBackupFilesIndexHandler,
)
from app.classes.web.routes.api.servers.server.files import (
ApiServersServerFilesIndexHandler,
ApiServersServerFilesCreateHandler,
ApiServersServerFilesZipHandler,
)
+from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler
from app.classes.web.routes.api.servers.server.tasks.task.children import (
ApiServersServerTasksTaskChildrenHandler,
)
@@ -218,13 +220,13 @@ def api_handlers(handler_args):
handler_args,
),
(
- r"/api/v2/servers/([a-z0-9-]+)/backups/backup/?",
+ r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/?",
ApiServersServerBackupsBackupIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([a-z0-9-]+)/files/?",
- ApiServersServerFilesIndexHandler,
+ r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/files/?",
+ ApiServersServerBackupsBackupFilesIndexHandler,
handler_args,
),
(
@@ -237,6 +239,26 @@ def api_handlers(handler_args):
ApiServersServerFilesZipHandler,
handler_args,
),
+ (
+ r"/api/v2/crafty/admin/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/import/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/files/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/files(?:/([a-zA-Z0-9-]+))?/?",
+ ApiServersServerFilesIndexHandler,
+ handler_args,
+ ),
(
r"/api/v2/servers/([a-z0-9-]+)/tasks/?",
ApiServersServerTasksIndexHandler,
@@ -273,7 +295,8 @@ def api_handlers(handler_args):
handler_args,
),
(
- r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)/?",
+ # optional third argument when we need a action ID
+ r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?",
ApiServersServerActionHandler,
handler_args,
),
diff --git a/app/classes/web/routes/api/auth/invalidate_tokens.py b/app/classes/web/routes/api/auth/invalidate_tokens.py
index f15bf60d..9e38670a 100644
--- a/app/classes/web/routes/api/auth/invalidate_tokens.py
+++ b/app/classes/web/routes/api/auth/invalidate_tokens.py
@@ -1,6 +1,6 @@
-import datetime
import logging
from app.classes.web.base_api_handler import BaseApiHandler
+from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -13,7 +13,7 @@ class ApiAuthInvalidateTokensHandler(BaseApiHandler):
logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}")
self.controller.users.raw_update_user(
- auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()}
+ auth_data[4]["user_id"], {"valid_tokens_from": Helpers.get_utc_now()}
)
self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/crafty/upload/index.py b/app/classes/web/routes/api/crafty/upload/index.py
new file mode 100644
index 00000000..b37ef796
--- /dev/null
+++ b/app/classes/web/routes/api/crafty/upload/index.py
@@ -0,0 +1,308 @@
+import os
+import logging
+import shutil
+from app.classes.models.server_permissions import EnumPermissionsServer
+from app.classes.shared.helpers import Helpers
+from app.classes.web.base_api_handler import BaseApiHandler
+
+logger = logging.getLogger(__name__)
+IMAGE_MIME_TYPES = [
+ "image/bmp",
+ "image/cis-cod",
+ "image/gif",
+ "image/ief",
+ "image/jpeg",
+ "image/pipeg",
+ "image/svg+xml",
+ "image/tiff",
+ "image/x-cmu-raster",
+ "image/x-cmx",
+ "image/x-icon",
+ "image/x-portable-anymap",
+ "image/x-portable-bitmap",
+ "image/x-portable-graymap",
+ "image/x-portable-pixmap",
+ "image/x-rgb",
+ "image/x-xbitmap",
+ "image/x-xpixmap",
+ "image/x-xwindowdump",
+ "image/png",
+ "image/webp",
+]
+
+ARCHIVE_MIME_TYPES = ["application/zip"]
+
+
+class ApiFilesUploadHandler(BaseApiHandler):
+ async def post(self, server_id=None):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+
+ upload_type = self.request.headers.get("type")
+ accepted_types = []
+
+ if server_id:
+ # Check to make sure user is authorized for the server
+ if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+ # if the user doesn't have access to the server, return an error
+ return self.finish_json(
+ 400, {"status": "error", "error": "NOT_AUTHORIZED"}
+ )
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ # Make sure user has file access for the server
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
+ # if the user doesn't have Files permission, return an error
+ return self.finish_json(
+ 400, {"status": "error", "error": "NOT_AUTHORIZED"}
+ )
+
+ u_type = "server_upload"
+ # Make sure user is a super user if they're changing panel settings
+ elif auth_data[4]["superuser"] and upload_type == "background":
+ u_type = "admin_config"
+ self.upload_dir = os.path.join(
+ self.controller.project_root,
+ "app/frontend/static/assets/images/auth/custom",
+ )
+ accepted_types = IMAGE_MIME_TYPES
+ elif upload_type == "import":
+ # Check that user can make servers
+ if (
+ not self.controller.crafty_perms.can_create_server(
+ auth_data[4]["user_id"]
+ )
+ and not auth_data[4]["superuser"]
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "data": {"message": ""},
+ },
+ )
+ # Set directory to upload import dir
+ self.upload_dir = os.path.join(
+ self.controller.project_root, "import", "upload"
+ )
+ u_type = "server_import"
+ accepted_types = ARCHIVE_MIME_TYPES
+ else:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "data": {"message": ""},
+ },
+ )
+ # Get the headers from the request
+ self.chunk_hash = self.request.headers.get("chunkHash", 0)
+ self.file_id = self.request.headers.get("fileId")
+ self.chunked = self.request.headers.get("chunked", False)
+ self.filename = self.request.headers.get("fileName", None)
+ try:
+ file_size = int(self.request.headers.get("fileSize", None))
+ total_chunks = int(self.request.headers.get("totalChunks", 0))
+ except TypeError:
+ return self.finish_json(
+ 400, {"status": "error", "error": "TYPE ERROR", "data": {}}
+ )
+ self.chunk_index = self.request.headers.get("chunkId")
+ if u_type == "server_upload":
+ self.upload_dir = self.request.headers.get("location", None)
+ self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)
+
+ if u_type == "server_upload":
+ # If this is an upload from a server the path will be what
+ # is requested
+ full_path = os.path.join(self.upload_dir, self.filename)
+
+ # Check to make sure the requested path is inside the server's directory
+ if not self.helper.is_subdir(
+ full_path,
+ Helpers.get_os_understandable_path(
+ self.controller.servers.get_server_data_by_id(server_id)["path"]
+ ),
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT AUTHORIZED",
+ "data": {"message": "Traversal detected"},
+ },
+ )
+ # Check to make sure the file type we're being sent is what we're expecting
+ if (
+ self.file_helper.check_mime_types(self.filename) not in accepted_types
+ and u_type != "server_upload"
+ ):
+ return self.finish_json(
+ 422,
+ {
+ "status": "error",
+ "error": "INVALID FILE TYPE",
+ "data": {
+ "message": f"Invalid File Type only accepts {accepted_types}"
+ },
+ },
+ )
+ _total, _used, free = shutil.disk_usage(self.upload_dir)
+
+ # Check to see if we have enough space
+ if free <= file_size:
+ return self.finish_json(
+ 507,
+ {
+ "status": "error",
+ "error": "NO STORAGE SPACE",
+ "data": {"message": "Out Of Space!"},
+ },
+ )
+
+ # If this has no chunk index we know it's the initial request
+ if self.chunked and not self.chunk_index:
+ return self.finish_json(
+ 200, {"status": "ok", "data": {"file-id": self.file_id}}
+ )
+ # Create the upload and temp directories if they don't exist
+ os.makedirs(self.upload_dir, exist_ok=True)
+
+ # Check for chunked header. We will handle this request differently
+ # if it doesn't exist
+ if not self.chunked:
+ # Write the file directly to the upload dir
+ with open(os.path.join(self.upload_dir, self.filename), "wb") as file:
+ chunk = self.request.body
+ if chunk:
+ file.write(chunk)
+ # We'll check the file hash against the sent hash once the file is
+ # written. We cannot check this buffer.
+ calculated_hash = self.file_helper.calculate_file_hash(
+ os.path.join(self.upload_dir, self.filename)
+ )
+ logger.info(
+ f"File upload completed. Filename: {self.filename} Type: {u_type}"
+ )
+ return self.finish_json(
+ 200,
+ {
+ "status": "completed",
+ "data": {"message": "File uploaded successfully"},
+ },
+ )
+ # Since this is a chunked upload we'll create the temp dir for parts.
+ os.makedirs(self.temp_dir, exist_ok=True)
+
+ # Read headers and query parameters
+ content_length = int(self.request.headers.get("Content-Length"))
+ if content_length <= 0:
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: INVALID CONTENT LENGTH"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID CONTENT LENGTH",
+ "data": {"message": "Invalid content length"},
+ },
+ )
+
+ # At this point filename, chunk index and total chunks are required
+ # in the request
+ if not self.filename or self.chunk_index is None:
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: CHUNK INDEX NOT FOUND"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INDEX ERROR",
+ "data": {
+ "message": "Filename, chunk_index,"
+ " and total_chunks are required"
+ },
+ },
+ )
+
+ # Calculate the hash of the buffer and compare it against the expected hash
+ calculated_hash = self.file_helper.calculate_buffer_hash(self.request.body)
+ if str(self.chunk_hash) != str(calculated_hash):
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: INVALID HASH"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_HASH",
+ "data": {
+ "message": "Hash recieved does not match reported sent hash.",
+ "chunk_id": self.chunk_index,
+ },
+ },
+ )
+
+ # File paths
+ file_path = os.path.join(self.upload_dir, self.filename)
+ chunk_path = os.path.join(
+ self.temp_dir, f"{self.filename}.part{self.chunk_index}"
+ )
+
+ # Save the chunk
+ with open(chunk_path, "wb") as f:
+ f.write(self.request.body)
+
+ # Check if all chunks are received
+ received_chunks = [
+ f
+ for f in os.listdir(self.temp_dir)
+ if f.startswith(f"{self.filename}.part")
+ ]
+ # When we've reached the total chunks we'll
+ # Compare the hash and write the file
+ if len(received_chunks) == total_chunks:
+ with open(file_path, "wb") as outfile:
+ for i in range(total_chunks):
+ chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
+ with open(chunk_file, "rb") as infile:
+ outfile.write(infile.read())
+ os.remove(chunk_file)
+ logger.info(
+ f"File upload completed. Filename: {self.filename}"
+ f" Path: {file_path} Type: {u_type}"
+ )
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Uploaded file {self.filename}",
+ server_id,
+ self.request.remote_ip,
+ )
+ self.finish_json(
+ 200,
+ {
+ "status": "completed",
+ "data": {"message": "File uploaded successfully"},
+ },
+ )
+ else:
+ self.finish_json(
+ 200,
+ {
+ "status": "partial",
+ "data": {"message": f"Chunk {self.chunk_index} received"},
+ },
+ )
diff --git a/app/classes/web/routes/api/roles/index.py b/app/classes/web/routes/api/roles/index.py
index b0ddd834..45a00bf0 100644
--- a/app/classes/web/routes/api/roles/index.py
+++ b/app/classes/web/routes/api/roles/index.py
@@ -2,6 +2,7 @@ import typing as t
from jsonschema import ValidationError, validate
import orjson
from playhouse.shortcuts import model_to_dict
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
create_role_schema = {
@@ -72,7 +73,7 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
_,
@@ -82,7 +83,10 @@ class ApiRolesIndexHandler(BaseApiHandler):
# GET /api/v2/roles?ids=true
get_only_ids = self.get_query_argument("ids", None) == "true"
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(
@@ -105,14 +109,17 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
user,
_,
) = auth_data
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -139,6 +146,8 @@ class ApiRolesIndexHandler(BaseApiHandler):
role_name = data["name"]
manager = data.get("manager", None)
+ if not superuser and not manager:
+ manager = auth_data[4]["user_id"]
if manager == self.controller.users.get_id_by_name("SYSTEM") or manager == 0:
manager = None
diff --git a/app/classes/web/routes/api/roles/role/index.py b/app/classes/web/routes/api/roles/role/index.py
index 54b20aaa..1eab6183 100644
--- a/app/classes/web/routes/api/roles/role/index.py
+++ b/app/classes/web/routes/api/roles/role/index.py
@@ -1,6 +1,7 @@
from jsonschema import ValidationError, validate
import orjson
-from peewee import DoesNotExist
+from peewee import DoesNotExist, IntegrityError
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
modify_role_schema = {
@@ -71,14 +72,17 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
_,
_,
) = auth_data
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -101,8 +105,11 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
user,
_,
) = auth_data
-
- if not superuser:
+ role = self.controller.roles.get_role(role_id)
+ if (
+ str(role.get("manager", "no manager found")) != str(auth_data[4]["user_id"])
+ and not superuser
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.controller.roles.remove_role(role_id)
@@ -125,7 +132,7 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
user,
@@ -133,7 +140,10 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
) = auth_data
role = self.controller.roles.get_role(role_id)
- if not superuser and user["user_id"] != role["manager"]:
+ if not superuser and (
+ user["user_id"] != role["manager"]
+ or EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(
400,
{
@@ -180,7 +190,10 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
)
except DoesNotExist:
return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
-
+ except IntegrityError:
+ return self.finish_json(
+ 404, {"status": "error", "error": "ROLE_NAME_EXISTS"}
+ )
self.controller.management.add_to_audit_log(
user["user_id"],
f"modified role with ID {role_id}",
diff --git a/app/classes/web/routes/api/servers/index.py b/app/classes/web/routes/api/servers/index.py
index 43cf01e2..ca551326 100644
--- a/app/classes/web/routes/api/servers/index.py
+++ b/app/classes/web/routes/api/servers/index.py
@@ -23,6 +23,7 @@ new_server_schema = {
"type": "string",
"examples": ["My Server"],
"minLength": 2,
+ "pattern": "^[^/\\\\]*$",
},
"roles": {"title": "Roles to add", "type": "array", "examples": [1, 2, 3]},
"stop_command": {
diff --git a/app/classes/web/routes/api/servers/server/action.py b/app/classes/web/routes/api/servers/server/action.py
index aba06da3..d8e58b2f 100644
--- a/app/classes/web/routes/api/servers/server/action.py
+++ b/app/classes/web/routes/api/servers/server/action.py
@@ -1,5 +1,6 @@
import logging
import os
+import json
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.servers import Servers
from app.classes.shared.file_helpers import FileHelpers
@@ -10,7 +11,7 @@ logger = logging.getLogger(__name__)
class ApiServersServerActionHandler(BaseApiHandler):
- def post(self, server_id: str, action: str):
+ def post(self, server_id: str, action: str, action_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -54,7 +55,7 @@ class ApiServersServerActionHandler(BaseApiHandler):
return self._agree_eula(server_id, auth_data[4]["user_id"])
self.controller.management.send_command(
- auth_data[4]["user_id"], server_id, self.get_remote_ip(), action
+ auth_data[4]["user_id"], server_id, self.get_remote_ip(), action, action_id
)
self.finish_json(
@@ -82,6 +83,20 @@ class ApiServersServerActionHandler(BaseApiHandler):
new_server_id = self.helper.create_uuid()
new_server_path = os.path.join(self.helper.servers_dir, new_server_id)
new_backup_path = os.path.join(self.helper.backup_path, new_server_id)
+ backup_data = {
+ "backup_name": f"{new_server_name} Backup",
+ "backup_location": new_backup_path,
+ "excluded_dirs": "",
+ "max_backups": 0,
+ "server_id": new_server_id,
+ "compress": False,
+ "shutdown": False,
+ "before": "",
+ "after": "",
+ "default": True,
+ "status": json.dumps({"status": "Standby", "message": ""}),
+ "enabled": True,
+ }
new_server_command = str(server_data.get("execution_command")).replace(
server_id, new_server_id
)
@@ -93,7 +108,6 @@ class ApiServersServerActionHandler(BaseApiHandler):
new_server_name,
new_server_id,
new_server_path,
- new_backup_path,
new_server_command,
server_data.get("executable"),
new_server_log_path,
@@ -103,6 +117,8 @@ class ApiServersServerActionHandler(BaseApiHandler):
server_data.get("type"),
)
+ self.controller.management.add_backup_config(backup_data)
+
self.controller.management.add_to_audit_log(
user_id,
f"is cloning server {server_id} named {server_data.get('server_name')}",
diff --git a/app/classes/web/routes/api/servers/server/backups/backup/index.py b/app/classes/web/routes/api/servers/server/backups/backup/index.py
index 1b9ff915..5d8fd2b5 100644
--- a/app/classes/web/routes/api/servers/server/backups/backup/index.py
+++ b/app/classes/web/routes/api/servers/server/backups/backup/index.py
@@ -11,7 +11,7 @@ from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
-backup_schema = {
+BACKUP_SCHEMA = {
"type": "object",
"properties": {
"filename": {"type": "string", "minLength": 5},
@@ -19,11 +19,44 @@ backup_schema = {
"additionalProperties": False,
"minProperties": 1,
}
+BACKUP_PATCH_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "backup_name": {"type": "string", "minLength": 3},
+ "backup_location": {"type": "string", "minLength": 1},
+ "max_backups": {"type": "integer"},
+ "compress": {"type": "boolean"},
+ "shutdown": {"type": "boolean"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
+ },
+ "additionalProperties": False,
+ "minProperties": 1,
+}
+
+BASIC_BACKUP_PATCH_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "backup_name": {"type": "string", "minLength": 3},
+ "max_backups": {"type": "integer"},
+ "compress": {"type": "boolean"},
+ "shutdown": {"type": "boolean"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
+ },
+ "additionalProperties": False,
+ "minProperties": 1,
+}
+ID_MISMATCH = "Server ID backup server ID different"
+GENERAL_AUTH_ERROR = "Authorization Error"
class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
- def get(self, server_id: str):
+ def get(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
+ backup_conf = self.controller.management.get_backup_config(backup_id)
if not auth_data:
return
mask = self.controller.server_perms.get_lowest_api_perm_mask(
@@ -32,15 +65,40 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
),
auth_data[5],
)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.finish_json(200, self.controller.management.get_backup_config(server_id))
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ self.finish_json(200, backup_conf)
- def delete(self, server_id: str):
+ def delete(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
- backup_conf = self.controller.management.get_backup_config(server_id)
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
if not auth_data:
return
mask = self.controller.server_perms.get_lowest_api_perm_mask(
@@ -52,7 +110,66 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Edited server {server_id}: removed backup config"
+ f" {backup_conf['backup_name']}",
+ server_id,
+ self.get_remote_ip(),
+ )
+ if backup_conf["default"]:
+ return self.finish_json(
+ 405,
+ {
+ "status": "error",
+ "error": "NOT_ALLOWED",
+ "error_data": "Cannot delete default backup",
+ },
+ )
+ self.controller.management.delete_backup_config(backup_id)
+
+ return self.finish_json(200, {"status": "ok"})
+
+ def post(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+ # if the user doesn't have Schedule permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ backup_config = self.controller.management.get_backup_config(backup_id)
+ if backup_config["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
try:
data = json.loads(self.request.body)
@@ -61,7 +178,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
- validate(data, backup_schema)
+ validate(data, BACKUP_SCHEMA)
except ValidationError as e:
return self.finish_json(
400,
@@ -72,9 +189,246 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
},
)
+ svr_obj = self.controller.servers.get_server_obj(server_id)
+ server_data = self.controller.servers.get_server_data_by_id(server_id)
+ zip_name = data["filename"]
+ # import the server again based on zipfile
+ backup_config = self.controller.management.get_backup_config(backup_id)
+ backup_location = os.path.join(
+ backup_config["backup_location"], backup_config["backup_id"]
+ )
+ if Helpers.validate_traversal(backup_location, zip_name):
+ try:
+ temp_dir = Helpers.unzip_backup_archive(backup_location, zip_name)
+ except (FileNotFoundError, NotADirectoryError) as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
+ )
+ if server_data["type"] == "minecraft-java":
+ new_server = self.controller.restore_java_zip_server(
+ svr_obj.server_name,
+ temp_dir,
+ server_data["executable"],
+ "1",
+ "2",
+ server_data["server_port"],
+ server_data["created_by"],
+ )
+ elif server_data["type"] == "minecraft-bedrock":
+ new_server = self.controller.restore_bedrock_zip_server(
+ svr_obj.server_name,
+ temp_dir,
+ server_data["executable"],
+ server_data["server_port"],
+ server_data["created_by"],
+ )
+ new_server_id = new_server
+ new_server = self.controller.servers.get_server_data(new_server)
+ self.controller.rename_backup_dir(
+ server_id,
+ new_server_id,
+ new_server["server_id"],
+ )
+ # preserve current schedules
+ for schedule in self.controller.management.get_schedules_by_server(
+ server_id
+ ):
+ job_data = self.controller.management.get_scheduled_task(
+ schedule.schedule_id
+ )
+ job_data["server_id"] = new_server_id
+ del job_data["schedule_id"]
+ self.tasks_manager.update_job(schedule.schedule_id, job_data)
+ # preserve execution command
+ new_server_obj = self.controller.servers.get_server_obj(new_server_id)
+ new_server_obj.execution_command = server_data["execution_command"]
+ # reset executable path
+ if svr_obj.path in svr_obj.executable:
+ new_server_obj.executable = str(svr_obj.executable).replace(
+ svr_obj.path, new_server_obj.path
+ )
+ # reset run command path
+ if svr_obj.path in svr_obj.execution_command:
+ new_server_obj.execution_command = str(
+ svr_obj.execution_command
+ ).replace(svr_obj.path, new_server_obj.path)
+ # reset log path
+ if svr_obj.path in svr_obj.log_path:
+ new_server_obj.log_path = str(svr_obj.log_path).replace(
+ svr_obj.path, new_server_obj.path
+ )
+ self.controller.servers.update_server(new_server_obj)
+
+ # preserve backup config
+ server_backups = self.controller.management.get_backups_by_server(server_id)
+ for backup in server_backups:
+ old_backup_id = server_backups[backup]["backup_id"]
+ del server_backups[backup]["backup_id"]
+ server_backups[backup]["server_id"] = new_server_id
+ if str(server_id) in (server_backups[backup]["backup_location"]):
+ server_backups[backup]["backup_location"] = str(
+ server_backups[backup]["backup_location"]
+ ).replace(str(server_id), str(new_server_id))
+ new_backup_id = self.controller.management.add_backup_config(
+ server_backups[backup]
+ )
+ os.listdir(server_backups[backup]["backup_location"])
+ FileHelpers.move_dir(
+ os.path.join(
+ server_backups[backup]["backup_location"], old_backup_id
+ ),
+ os.path.join(
+ server_backups[backup]["backup_location"], new_backup_id
+ ),
+ )
+ # remove old server's tasks
+ try:
+ self.tasks_manager.remove_all_server_tasks(server_id)
+ except JobLookupError as e:
+ logger.info("No active tasks found for server: {e}")
+ self.controller.remove_server(server_id, True)
+
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Restored server {server_id} backup {data['filename']}",
+ server_id,
+ self.get_remote_ip(),
+ )
+
+ return self.finish_json(200, {"status": "ok"})
+
+ def patch(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+
+ try:
+ if auth_data[4]["superuser"]:
+ validate(data, BACKUP_PATCH_SCHEMA)
+ else:
+ validate(data, BASIC_BACKUP_PATCH_SCHEMA)
+ except ValidationError as e:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_JSON_SCHEMA",
+ "error_data": str(e),
+ },
+ )
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+ # if the user doesn't have access to the server, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+ # if the user doesn't have Schedule permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ self.controller.management.update_backup_config(backup_id, data)
+ return self.finish_json(200, {"status": "ok"})
+
+
+class ApiServersServerBackupsBackupFilesIndexHandler(BaseApiHandler):
+ def delete(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
+ if not auth_data:
+ return
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+ # if the user doesn't have Schedule permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+ try:
+ validate(data, BACKUP_SCHEMA)
+ except ValidationError as e:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_JSON_SCHEMA",
+ "error_data": str(e),
+ },
+ )
+ self.helper.validate_traversal(
+ os.path.join(backup_conf["backup_location"], backup_conf["backup_id"]),
+ os.path.join(
+ backup_conf["backup_location"],
+ backup_conf["backup_id"],
+ data["filename"],
+ ),
+ )
try:
FileHelpers.del_file(
- os.path.join(backup_conf["backup_path"], data["filename"])
+ os.path.join(
+ backup_conf["backup_location"],
+ backup_conf["backup_id"],
+ data["filename"],
+ )
)
except Exception as e:
return self.finish_json(
@@ -88,136 +442,3 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
)
return self.finish_json(200, {"status": "ok"})
-
- def post(self, server_id: str):
- auth_data = self.authenticate_user()
- if not auth_data:
- return
- mask = self.controller.server_perms.get_lowest_api_perm_mask(
- self.controller.server_perms.get_user_permissions_mask(
- auth_data[4]["user_id"], server_id
- ),
- auth_data[5],
- )
- server_permissions = self.controller.server_perms.get_permissions(mask)
- if EnumPermissionsServer.BACKUP not in server_permissions:
- # if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- try:
- data = json.loads(self.request.body)
- except json.decoder.JSONDecodeError as e:
- return self.finish_json(
- 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
- )
- try:
- validate(data, backup_schema)
- except ValidationError as e:
- return self.finish_json(
- 400,
- {
- "status": "error",
- "error": "INVALID_JSON_SCHEMA",
- "error_data": str(e),
- },
- )
-
- try:
- svr_obj = self.controller.servers.get_server_obj(server_id)
- server_data = self.controller.servers.get_server_data_by_id(server_id)
- zip_name = data["filename"]
- # import the server again based on zipfile
- backup_path = svr_obj.backup_path
- if Helpers.validate_traversal(backup_path, zip_name):
- temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
- if server_data["type"] == "minecraft-java":
- new_server = self.controller.restore_java_zip_server(
- svr_obj.server_name,
- temp_dir,
- server_data["executable"],
- "1",
- "2",
- server_data["server_port"],
- server_data["created_by"],
- )
- elif server_data["type"] == "minecraft-bedrock":
- new_server = self.controller.restore_bedrock_zip_server(
- svr_obj.server_name,
- temp_dir,
- server_data["executable"],
- server_data["server_port"],
- server_data["created_by"],
- )
- new_server_id = new_server
- new_server = self.controller.servers.get_server_data(new_server)
- self.controller.rename_backup_dir(
- server_id, new_server_id, new_server["server_id"]
- )
- # preserve current schedules
- for schedule in self.controller.management.get_schedules_by_server(
- server_id
- ):
- job_data = self.controller.management.get_scheduled_task(
- schedule.schedule_id
- )
- job_data["server_id"] = new_server_id
- del job_data["schedule_id"]
- self.tasks_manager.update_job(schedule.schedule_id, job_data)
- # preserve execution command
- new_server_obj = self.controller.servers.get_server_obj(new_server_id)
- new_server_obj.execution_command = server_data["execution_command"]
- # reset executable path
- if svr_obj.path in svr_obj.executable:
- new_server_obj.executable = str(svr_obj.executable).replace(
- svr_obj.path, new_server_obj.path
- )
- # reset run command path
- if svr_obj.path in svr_obj.execution_command:
- new_server_obj.execution_command = str(
- svr_obj.execution_command
- ).replace(svr_obj.path, new_server_obj.path)
- # reset log path
- if svr_obj.path in svr_obj.log_path:
- new_server_obj.log_path = str(svr_obj.log_path).replace(
- svr_obj.path, new_server_obj.path
- )
- self.controller.servers.update_server(new_server_obj)
-
- # preserve backup config
- backup_config = self.controller.management.get_backup_config(server_id)
- excluded_dirs = []
- server_obj = self.controller.servers.get_server_obj(server_id)
- loop_backup_path = self.helper.wtol_path(server_obj.path)
- for item in self.controller.management.get_excluded_backup_dirs(
- server_id
- ):
- item_path = self.helper.wtol_path(item)
- bu_path = os.path.relpath(item_path, loop_backup_path)
- bu_path = os.path.join(new_server_obj.path, bu_path)
- excluded_dirs.append(bu_path)
- self.controller.management.set_backup_config(
- new_server_id,
- new_server_obj.backup_path,
- backup_config["max_backups"],
- excluded_dirs,
- backup_config["compress"],
- backup_config["shutdown"],
- )
- # remove old server's tasks
- try:
- self.tasks_manager.remove_all_server_tasks(server_id)
- except JobLookupError as e:
- logger.info("No active tasks found for server: {e}")
- self.controller.remove_server(server_id, True)
- except (FileNotFoundError, NotADirectoryError) as e:
- return self.finish_json(
- 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
- )
- self.controller.management.add_to_audit_log(
- auth_data[4]["user_id"],
- f"Restored server {server_id} backup {data['filename']}",
- server_id,
- self.get_remote_ip(),
- )
-
- return self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/servers/server/backups/index.py b/app/classes/web/routes/api/servers/server/backups/index.py
index 865fe25a..a155f943 100644
--- a/app/classes/web/routes/api/servers/server/backups/index.py
+++ b/app/classes/web/routes/api/servers/server/backups/index.py
@@ -1,3 +1,4 @@
+import os
import logging
import json
from jsonschema import validate
@@ -10,13 +11,14 @@ logger = logging.getLogger(__name__)
backup_patch_schema = {
"type": "object",
"properties": {
- "backup_path": {"type": "string", "minLength": 1},
+ "backup_name": {"type": "string", "minLength": 3},
+ "backup_location": {"type": "string", "minLength": 1},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
- "backup_before": {"type": "string"},
- "backup_after": {"type": "string"},
- "exclusions": {"type": "array"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -25,12 +27,13 @@ backup_patch_schema = {
basic_backup_patch_schema = {
"type": "object",
"properties": {
+ "backup_name": {"type": "string", "minLength": 3},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
- "backup_before": {"type": "string"},
- "backup_after": {"type": "string"},
- "exclusions": {"type": "array"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -52,9 +55,11 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.finish_json(200, self.controller.management.get_backup_config(server_id))
+ self.finish_json(
+ 200, self.controller.management.get_backups_by_server(server_id)
+ )
- def patch(self, server_id: str):
+ def post(self, server_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -80,7 +85,6 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
-
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -94,33 +98,12 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- self.controller.management.set_backup_config(
- server_id,
- data.get(
- "backup_path",
- self.controller.management.get_backup_config(server_id)["backup_path"],
- ),
- data.get(
- "max_backups",
- self.controller.management.get_backup_config(server_id)["max_backups"],
- ),
- data.get("exclusions"),
- data.get(
- "compress",
- self.controller.management.get_backup_config(server_id)["compress"],
- ),
- data.get(
- "shutdown",
- self.controller.management.get_backup_config(server_id)["shutdown"],
- ),
- data.get(
- "backup_before",
- self.controller.management.get_backup_config(server_id)["before"],
- ),
- data.get(
- "backup_after",
- self.controller.management.get_backup_config(server_id)["after"],
- ),
- )
+ # Set the backup location automatically for non-super users. We should probably
+ # make the default location configurable for SU eventually
+ if not auth_data[4]["superuser"]:
+ data["backup_location"] = os.path.join(self.helper.backup_path, server_id)
+ data["server_id"] = server_id
+ if not data.get("excluded_dirs", None):
+ data["excluded_dirs"] = []
+ self.controller.management.add_backup_config(data)
return self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/servers/server/files.py b/app/classes/web/routes/api/servers/server/files.py
index 2951ff25..2699ae0c 100644
--- a/app/classes/web/routes/api/servers/server/files.py
+++ b/app/classes/web/routes/api/servers/server/files.py
@@ -72,7 +72,7 @@ file_delete_schema = {
class ApiServersServerFilesIndexHandler(BaseApiHandler):
- def post(self, server_id: str):
+ def post(self, server_id: str, backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -149,21 +149,35 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
filename = html.escape(raw_filename)
rel = os.path.join(folder, raw_filename)
dpath = os.path.join(folder, filename)
- if str(dpath) in self.controller.management.get_excluded_backup_dirs(
- server_id
- ):
- if os.path.isdir(rel):
- return_json[filename] = {
- "path": dpath,
- "dir": True,
- "excluded": True,
- }
+ if backup_id:
+ if str(
+ dpath
+ ) in self.controller.management.get_excluded_backup_dirs(backup_id):
+ if os.path.isdir(rel):
+ return_json[filename] = {
+ "path": dpath,
+ "dir": True,
+ "excluded": True,
+ }
+ else:
+ return_json[filename] = {
+ "path": dpath,
+ "dir": False,
+ "excluded": True,
+ }
else:
- return_json[filename] = {
- "path": dpath,
- "dir": False,
- "excluded": True,
- }
+ if os.path.isdir(rel):
+ return_json[filename] = {
+ "path": dpath,
+ "dir": True,
+ "excluded": False,
+ }
+ else:
+ return_json[filename] = {
+ "path": dpath,
+ "dir": False,
+ "excluded": False,
+ }
else:
if os.path.isdir(rel):
return_json[filename] = {
@@ -189,7 +203,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
)
self.finish_json(200, {"status": "ok", "data": file_contents})
- def delete(self, server_id: str):
+ def delete(self, server_id: str, _backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -247,7 +261,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
return self.finish_json(200, {"status": "ok"})
return self.finish_json(500, {"status": "error", "error": str(proc)})
- def patch(self, server_id: str):
+ def patch(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -301,7 +315,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
file_object.write(file_contents)
return self.finish_json(200, {"status": "ok"})
- def put(self, server_id: str):
+ def put(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
diff --git a/app/classes/web/routes/api/servers/server/index.py b/app/classes/web/routes/api/servers/server/index.py
index 9bfc3a9a..3562334c 100644
--- a/app/classes/web/routes/api/servers/server/index.py
+++ b/app/classes/web/routes/api/servers/server/index.py
@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
server_patch_schema = {
"type": "object",
"properties": {
- "server_name": {"type": "string", "minLength": 1},
+ "server_name": {"type": "string", "minLength": 2, "pattern": "^[^/\\\\]*$"},
"backup_path": {"type": "string"},
"executable": {"type": "string"},
"log_path": {"type": "string", "minLength": 1},
diff --git a/app/classes/web/routes/api/servers/server/tasks/index.py b/app/classes/web/routes/api/servers/server/tasks/index.py
index 0c03319c..ed8b9df9 100644
--- a/app/classes/web/routes/api/servers/server/tasks/index.py
+++ b/app/classes/web/routes/api/servers/server/tasks/index.py
@@ -21,6 +21,9 @@ new_task_schema = {
"action": {
"type": "string",
},
+ "action_id": {
+ "type": "string",
+ },
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
@@ -110,6 +113,18 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
)
if "parent" not in data:
data["parent"] = None
+ if data.get("action_id"):
+ backup_config = self.controller.management.get_backup_config(
+ data["action_id"]
+ )
+ if backup_config["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 405,
+ {
+ "status": "error",
+ "error": "Server ID Mismatch",
+ },
+ )
task_id = self.tasks_manager.schedule_job(data)
self.controller.management.add_to_audit_log(
diff --git a/app/classes/web/routes/api/servers/server/tasks/task/index.py b/app/classes/web/routes/api/servers/server/tasks/task/index.py
index dac60762..05c8cee9 100644
--- a/app/classes/web/routes/api/servers/server/tasks/task/index.py
+++ b/app/classes/web/routes/api/servers/server/tasks/task/index.py
@@ -22,6 +22,9 @@ task_patch_schema = {
"action": {
"type": "string",
},
+ "action_id": {
+ "type": "string",
+ },
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
diff --git a/app/classes/web/routes/api/users/index.py b/app/classes/web/routes/api/users/index.py
index dbdb1ac0..32ebd283 100644
--- a/app/classes/web/routes/api/users/index.py
+++ b/app/classes/web/routes/api/users/index.py
@@ -2,6 +2,7 @@ import logging
import json
from jsonschema import validate
from jsonschema.exceptions import ValidationError
+from app.classes.shared.translation import Translation
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.models.roles import Roles, HelperRoles
from app.classes.models.users import PUBLIC_USER_ATTRS
@@ -54,6 +55,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
)
def post(self):
+ self.translator = Translation(self.helper)
new_user_schema = {
"type": "object",
"properties": {
@@ -87,12 +89,17 @@ class ApiUsersIndexHandler(BaseApiHandler):
try:
validate(data, new_user_schema)
except ValidationError as e:
+ err = self.translator.translate(
+ "validators",
+ e.schema["error"],
+ self.controller.users.get_user_lang_by_id(auth_data[4]["user_id"]),
+ )
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
- "error_data": str(e),
+ "error_data": f"{str(err)}",
},
)
username = data["username"]
@@ -153,7 +160,11 @@ class ApiUsersIndexHandler(BaseApiHandler):
for role in roles:
role = self.controller.roles.get_role(role)
- if int(role["manager"]) != int(auth_data[4]["user_id"]) and not superuser:
+ if (
+ str(role.get("manager", "no manager found"))
+ != str(auth_data[4]["user_id"])
+ and not superuser
+ ):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
)
diff --git a/app/classes/web/routes/api/users/user/api.py b/app/classes/web/routes/api/users/user/api.py
index 3891ef83..4baac898 100644
--- a/app/classes/web/routes/api/users/user/api.py
+++ b/app/classes/web/routes/api/users/user/api.py
@@ -217,7 +217,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
)
if (
- target_key.user_id != auth_data[4]["user_id"]
+ str(target_key.user_id) != str(auth_data[4]["user_id"])
and not auth_data[4]["superuser"]
):
return self.finish_json(
diff --git a/app/classes/web/routes/api/users/user/index.py b/app/classes/web/routes/api/users/user/index.py
index 9fa46200..b05e4ac3 100644
--- a/app/classes/web/routes/api/users/user/index.py
+++ b/app/classes/web/routes/api/users/user/index.py
@@ -132,7 +132,6 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
-
try:
validate(data, user_patch_schema)
except ValidationError as e:
@@ -144,10 +143,8 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
-
if user_id == "@me":
user_id = user["user_id"]
-
if (
EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions
and str(user["user_id"]) != str(user_id)
@@ -215,6 +212,25 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
)
+ user_modify = self.controller.users.get_user_roles_id(user_id)
+
+ for role in data["roles"]:
+ # Allow each requested role only if the requester is a superuser, is the
+ # role's manager, or the role is already assigned to the target user
+ if not superuser and (
+ str(
+ self.controller.roles.get_role(role).get(
+ "manager", "no manager found"
+ )
+ )
+ != str(auth_data[4]["user_id"])
+ and role not in user_modify
+ ):
+ for item in user_modify:
+ print(type(role), type(item))
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
+ )
user_obj = HelperUsers.get_user_model(user_id)
if "password" in data and str(user["user_id"]) != str(user_id):
diff --git a/app/classes/web/tornado_handler.py b/app/classes/web/tornado_handler.py
index 6285edfc..0b8140e3 100644
--- a/app/classes/web/tornado_handler.py
+++ b/app/classes/web/tornado_handler.py
@@ -24,7 +24,6 @@ from app.classes.web.routes.metrics.metrics_handlers import metrics_handlers
from app.classes.web.server_handler import ServerHandler
from app.classes.web.websocket_handler import WebSocketHandler
from app.classes.web.static_handler import CustomStaticHandler
-from app.classes.web.upload_handler import UploadHandler
from app.classes.web.status_handler import StatusHandler
@@ -142,7 +141,6 @@ class Webserver:
(r"/panel/(.*)", PanelHandler, handler_args),
(r"/server/(.*)", ServerHandler, handler_args),
(r"/ws", WebSocketHandler, handler_args),
- (r"/upload", UploadHandler, handler_args),
(r"/status", StatusHandler, handler_args),
# API Routes V2
*api_handlers(handler_args),
diff --git a/app/classes/web/upload_handler.py b/app/classes/web/upload_handler.py
deleted file mode 100644
index 747fa63b..00000000
--- a/app/classes/web/upload_handler.py
+++ /dev/null
@@ -1,331 +0,0 @@
-import logging
-import os
-import time
-import urllib.parse
-import tornado.web
-import tornado.options
-import tornado.httpserver
-from app.classes.models.crafty_permissions import EnumPermissionsCrafty
-
-from app.classes.models.server_permissions import EnumPermissionsServer
-from app.classes.shared.console import Console
-from app.classes.shared.helpers import Helpers
-from app.classes.shared.main_controller import Controller
-from app.classes.web.base_handler import BaseHandler
-from app.classes.shared.websocket_manager import WebSocketManager
-
-logger = logging.getLogger(__name__)
-
-
-@tornado.web.stream_request_body
-class UploadHandler(BaseHandler):
- # noinspection PyAttributeOutsideInit
- def initialize(
- self,
- helper: Helpers = None,
- controller: Controller = None,
- tasks_manager=None,
- translator=None,
- file_helper=None,
- ):
- self.helper = helper
- self.controller = controller
- self.tasks_manager = tasks_manager
- self.translator = translator
- self.file_helper = file_helper
-
- def prepare(self):
- # Class & Function Defination
- api_key, _token_data, exec_user = self.current_user
- self.upload_type = str(self.request.headers.get("X-Content-Upload-Type"))
-
- if self.upload_type == "server_import":
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.full_access
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if superuser:
- exec_user_server_permissions = (
- self.controller.server_perms.list_defined_permissions()
- )
- elif api_key is not None:
- exec_user_server_permissions = (
- self.controller.crafty_perms.get_api_key_permissions_list(api_key)
- )
- else:
- exec_user_server_permissions = (
- self.controller.crafty_perms.get_crafty_permissions_list(
- exec_user["user_id"]
- )
- )
-
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- if (
- EnumPermissionsCrafty.SERVER_CREATION
- not in exec_user_server_permissions
- and not exec_user["superuser"]
- ):
- logger.warning(
- f"User {user_id} tried to upload a server" " without permissions!"
- )
- Console.warning(
- f"User {user_id} tried to upload a server" " without permissions!"
- )
- self.do_upload = False
-
- path = os.path.join(self.controller.project_root, "import", "upload")
- self.helper.ensure_dir_exists(path)
- # Delete existing files
- if len(os.listdir(path)) > 0:
- for item in os.listdir():
- try:
- os.remove(os.path.join(path, item))
- except:
- logger.debug("Could not delete file on user server upload")
-
- self.helper.ensure_dir_exists(path)
- filename = urllib.parse.unquote(
- self.request.headers.get("X-FileName", None)
- )
- if not str(filename).endswith(".zip"):
- WebSocketManager().broadcast("close_upload_box", "error")
- self.finish("error")
- full_path = os.path.join(path, filename)
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
-
- elif self.upload_type == "background":
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.full_access
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if not superuser:
- return self.finish_json(
- 401,
- {
- "status": "error",
- "error": "UNAUTHORIZED ACCESS",
- "info": self.helper.translation.translate(
- "error",
- "superError",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- if not self.request.headers.get("X-Content-Type", None).startswith(
- "image/"
- ):
- return self.finish_json(
- 415,
- {
- "status": "error",
- "error": "TYPE ERROR",
- "info": self.helper.translation.translate(
- "error",
- "fileError",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- path = os.path.join(
- self.controller.project_root,
- "app/frontend/static/assets/images/auth/custom",
- )
- filename = self.request.headers.get("X-FileName", None)
- full_path = os.path.join(path, filename)
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
- else:
- server_id = self.get_argument("server_id", None)
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.full_access
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if superuser:
- exec_user_server_permissions = (
- self.controller.server_perms.list_defined_permissions()
- )
- elif api_key is not None:
- exec_user_server_permissions = (
- self.controller.server_perms.get_api_key_permissions_list(
- api_key, server_id
- )
- )
- else:
- exec_user_server_permissions = (
- self.controller.server_perms.get_user_id_permissions_list(
- exec_user["user_id"], server_id
- )
- )
-
- server_id = self.request.headers.get("X-ServerId", None)
- if server_id is None:
- logger.warning("Server ID not found in upload handler call")
- Console.warning("Server ID not found in upload handler call")
- self.do_upload = False
-
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- if EnumPermissionsServer.FILES not in exec_user_server_permissions:
- logger.warning(
- f"User {user_id} tried to upload a file to "
- f"{server_id} without permissions!"
- )
- Console.warning(
- f"User {user_id} tried to upload a file to "
- f"{server_id} without permissions!"
- )
- self.do_upload = False
-
- path = self.request.headers.get("X-Path", None)
- filename = self.request.headers.get("X-FileName", None)
- full_path = os.path.join(path, filename)
-
- if not self.helper.is_subdir(
- full_path,
- Helpers.get_os_understandable_path(
- self.controller.servers.get_server_data_by_id(server_id)["path"]
- ),
- ):
- logger.warning(
- f"User {user_id} tried to upload a file to {server_id} "
- f"but the path is not inside of the server!"
- )
- Console.warning(
- f"User {user_id} tried to upload a file to {server_id} "
- f"but the path is not inside of the server!"
- )
- self.do_upload = False
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
-
- def post(self):
- logger.info("Upload completed")
- if self.upload_type == "server_files":
- files_left = int(self.request.headers.get("X-Files-Left", None))
- else:
- files_left = 0
-
- if self.do_upload:
- time.sleep(5)
- if files_left == 0:
- WebSocketManager().broadcast("close_upload_box", "success")
- self.finish("success") # Nope, I'm sending "success"
- self.f.close()
- else:
- time.sleep(5)
- if files_left == 0:
- WebSocketManager().broadcast("close_upload_box", "error")
- self.finish("error")
-
- def data_received(self, chunk):
- if self.do_upload:
- self.f.write(chunk)
diff --git a/app/frontend/static/assets/css/crafty.css b/app/frontend/static/assets/css/crafty.css
index caf87654..be688657 100644
--- a/app/frontend/static/assets/css/crafty.css
+++ b/app/frontend/static/assets/css/crafty.css
@@ -12,6 +12,16 @@ nav.sidebar {
position: fixed;
}
+td {
+ -ms-overflow-style: none;
+ /* IE and Edge */
+ scrollbar-width: none;
+ /* Firefox */
+}
+
+td::-webkit-scrollbar {
+ display: none;
+}
@media (min-width: 992px) {
nav.sidebar {
@@ -270,4 +280,8 @@ div.warnings div.wssError a:hover {
/**************************************************************/
.dropdown-menu {
max-width: 100%;
+}
+
+.hidden-input {
+ margin-left: -40px;
}
\ No newline at end of file
diff --git a/app/frontend/static/assets/js/shared/upload.js b/app/frontend/static/assets/js/shared/upload.js
new file mode 100644
index 00000000..d31d4e23
--- /dev/null
+++ b/app/frontend/static/assets/js/shared/upload.js
@@ -0,0 +1,208 @@
+async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
+ if (file == null) {
+ try {
+ file = $("#file")[0].files[0];
+ } catch {
+ bootbox.alert("Please select a file first.")
+ return;
+ }
+
+ }
+ const fileId = uuidv4();
+ const token = getCookie("_xsrf");
+ if (type !== "server_upload") {
+ document.getElementById("upload_input").innerHTML = '
Backing up {{data['server_stats']['world_size']}}
- {% end %} - -