Mirror of https://gitlab.com/crafty-controller/crafty-4.git (synced 2025-01-18 09:05:15 +01:00)
Merge branch 'dev' into 'master'

v4.4.4

See merge request crafty-controller/crafty-4!804
Commit 3a86b7e3ec

.gitignore (vendored)
@@ -38,3 +38,4 @@ docker/*
 !docker/docker-compose.yml
 lang_sort_log.txt
 lang_sort.txt
+app/migrations/status
@@ -1,4 +1,13 @@
 # Changelog
+## --- [4.4.4] - 2024/10/03
+### Bug fixes
+- Migrations | Fix orphan schedule configurations crashing migration operation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/796))
+- Fix logic issue causing bedrock wizard's root files buttons to not respond to user click events ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/797))
+- Reset crash detection counter after crash detection process detects successful start ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/798))
+- Update new bedrock DL url and correctly bubble up exception on DL fail - Thanks @sarcastron ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/802))
+- Bump cryptography for GHSA-h4gh-qq45-vh27 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/803))
+<br><br>
+
 ## --- [4.4.3] - 2024/08/08
 ### Bug fixes
 - Fix schedules creation fail due to missing action ID ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/791))
@@ -1,5 +1,5 @@
 [![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
-# Crafty Controller 4.4.3
+# Crafty Controller 4.4.4
 > Python based Control Panel for your Minecraft Server
 
 ## What is Crafty Controller?
@@ -58,6 +58,7 @@ class Helpers:
 
     def __init__(self):
         self.root_dir = os.path.abspath(os.path.curdir)
+        self.read_annc = False
        self.config_dir = os.path.join(self.root_dir, "app", "config")
        self.webroot = os.path.join(self.root_dir, "app", "frontend")
        self.servers_dir = os.path.join(self.root_dir, "servers")
@@ -79,6 +80,7 @@ class Helpers:
 
         self.translation = Translation(self)
         self.update_available = False
+        self.migration_notifications = []
        self.ignored_names = ["crafty_managed.txt", "db_stats"]
        self.crafty_starting = False
        self.minimum_password_length = 8
@@ -128,24 +130,33 @@ class Helpers:
                 "Chrome/104.0.0.0 Safari/537.36"
             ),
         }
-        target_win = 'https://minecraft.azureedge.net/bin-win/[^"]*'
-        target_linux = 'https://minecraft.azureedge.net/bin-linux/[^"]*'
+        target_win = 'https://www.minecraft.net/bedrockdedicatedserver/bin-win/[^"]*'
+        target_linux = (
+            'https://www.minecraft.net/bedrockdedicatedserver/bin-linux/[^"]*'
+        )
         try:
             # Get minecraft server download page
             # (hopefully the don't change the structure)
             download_page = get(url, headers=headers, timeout=1)
+            download_page.raise_for_status()
             # Search for our string targets
-            win_download_url = re.search(target_win, download_page.text).group(0)
-            linux_download_url = re.search(target_linux, download_page.text).group(0)
+            win_search_result = re.search(target_win, download_page.text)
+            linux_search_result = re.search(target_linux, download_page.text)
+            if win_search_result is None or linux_search_result is None:
+                raise RuntimeError(
+                    "Could not determine download URL from minecraft.net."
+                )
+
+            win_download_url = win_search_result.group(0)
+            linux_download_url = linux_search_result.group(0)
+            print(win_download_url, linux_download_url)
             if os.name == "nt":
                 return win_download_url
 
             return linux_download_url
         except Exception as e:
             logger.error(f"Unable to resolve remote bedrock download url! \n{e}")
+            raise e
         return False
 
     def get_execution_java(self, value, execution_command):
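The hunk above hardens Helpers.get_latest_bedrock_url() in three ways: it points at the new minecraft.net download host, surfaces HTTP errors via raise_for_status(), and checks the re.search() results for None before calling .group(0), so failures raise instead of dying on an AttributeError. A minimal standalone sketch of that pattern (the helper name find_download_url and the example values are illustrative, not part of Crafty's API):

import re

import requests


def find_download_url(page_url: str, pattern: str, timeout: int = 5) -> str:
    """Fetch a page and return the first URL matching `pattern`, failing loudly."""
    response = requests.get(page_url, timeout=timeout)
    response.raise_for_status()  # surface 4xx/5xx instead of scraping an error page
    match = re.search(pattern, response.text)
    if match is None:
        # re.search() returns None when nothing matches; calling .group(0) on it
        # would raise a confusing AttributeError, so fail with a clear message.
        raise RuntimeError(f"No URL matching {pattern!r} found at {page_url}")
    return match.group(0)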
@@ -614,11 +625,49 @@ class Helpers:
 
         return version_data
 
-    def get_announcements(self):
+    def check_migrations(self) -> None:
+        if self.read_annc is False:
+            self.read_annc = True
+            for file in os.listdir(
+                os.path.join(self.root_dir, "app", "migrations", "status")
+            ):
+                with open(
+                    os.path.join(self.root_dir, "app", "migrations", "status", file),
+                    "r",
+                    encoding="utf-8",
+                ) as notif_file:
+                    file_json = json.load(notif_file)
+                    for notif in file_json:
+                        if not file_json[notif].get("status"):
+                            self.migration_notifications.append(file_json[notif])
+
+    def get_announcements(self, lang=None):
         try:
             data = []
             response = requests.get("https://craftycontrol.com/notify", timeout=2)
             data = json.loads(response.content)
+            if not lang:
+                lang = self.get_setting("language")
+            self.check_migrations()
+            for migration_warning in self.migration_notifications:
+                if not migration_warning.get("status"):
+                    data.append(
+                        {
+                            "id": migration_warning.get("pid"),
+                            "title": self.translation.translate(
+                                "notify",
+                                f"{migration_warning.get('type')}_title",
+                                lang,
+                            ),
+                            "date": "",
+                            "desc": self.translation.translate(
+                                "notify",
+                                f"{migration_warning.get('type')}_desc",
+                                lang,
+                            ),
+                            "link": "",
+                        }
+                    )
             if self.update_available:
                 data.append(self.update_available)
             return data
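For context, the warnings appended above are rendered with the new notify.<type>_title / notify.<type>_desc locale keys added further down in this merge. A hedged sketch of that lookup (translate() here is a stand-in for Crafty's Translation.translate(); the record values are made up):

NOTIFY_EN = {
    "backup_title": "Backup Migration Warning",
    "schedule_title": "Schedules Migration Warning",
}


def translate(section: str, key: str, lang: str) -> str:
    # The real implementation resolves `key` under `section` in the per-language
    # JSON files (en_EN, lv_LV, nl_BE, uk_UA, ...); this stub only knows English.
    return NOTIFY_EN.get(key, key)


warning = {"type": "schedule", "status": False, "pid": "00000000-made-up"}
print(translate("notify", f"{warning['type']}_title", "en_EN"))
# -> Schedules Migration Warning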
@@ -217,15 +217,16 @@ class ImportHelpers:
         FileHelpers.del_dirs(temp_dir)
 
     def download_bedrock_server(self, path, new_id):
+        bedrock_url = Helpers.get_latest_bedrock_url()
         download_thread = threading.Thread(
             target=self.download_threaded_bedrock_server,
             daemon=True,
-            args=(path, new_id),
+            args=(path, new_id, bedrock_url),
             name=f"{new_id}_download",
         )
         download_thread.start()
 
-    def download_threaded_bedrock_server(self, path, new_id):
+    def download_threaded_bedrock_server(self, path, new_id, bedrock_url):
         """
         Downloads the latest Bedrock server, unzips it, sets necessary permissions.
 
@@ -236,10 +237,8 @@ class ImportHelpers:
         This method handles exceptions and logs errors for each step of the process.
         """
         try:
-            bedrock_url = Helpers.get_latest_bedrock_url()
             if bedrock_url:
                 file_path = os.path.join(path, "bedrock_server.zip")
 
                 success = FileHelpers.ssl_get_file(
                     bedrock_url, path, "bedrock_server.zip"
                 )
@@ -263,6 +262,7 @@ class ImportHelpers:
                 logger.critical(
                     f"Failed to download bedrock executable during server creation! \n{e}"
                 )
+            raise e
 
         ServersController.finish_import(new_id)
         server_users = PermissionsServers.get_server_user_list(new_id)
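The change above resolves the download URL before the worker thread is spawned, so a failed lookup raises in the calling code path instead of vanishing inside a daemon thread. A small sketch of that ordering under assumed stand-in names (resolve_url and download_worker are illustrative, not Crafty functions):

import threading
import time


def resolve_url() -> str:
    # Stand-in for Helpers.get_latest_bedrock_url(); may raise on failure.
    return "https://example.com/bedrock_server.zip"


def download_worker(path: str, new_id: str, url: str) -> None:
    # Stand-in for download_threaded_bedrock_server().
    print(f"[{new_id}] downloading {url} into {path}")
    time.sleep(0.1)


def start_download(path: str, new_id: str) -> None:
    url = resolve_url()  # raising here surfaces the error to the caller
    worker = threading.Thread(
        target=download_worker,
        daemon=True,
        args=(path, new_id, url),
        name=f"{new_id}_download",
    )
    worker.start()
    worker.join()  # joined only so this example runs to completion


start_download("/tmp/bedrock", "server-1")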
@@ -1053,8 +1053,10 @@ class ServerInstance:
 
         running = self.check_running()
 
-        # if all is okay, we just exit out
+        # if all is okay, we set the restart count to 0 and just exit out
         if running:
+            Console.debug("Successfully found process. Resetting crash counter to 0")
+            self.restart_count = 0
             return
         # check the exit code -- This could be a fix for /stop
         if str(self.process.returncode) in self.settings["ignored_exits"].split(","):
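This is the crash-counter reset described in the changelog: a successful liveness check now zeroes restart_count, so short, recoverable outages no longer accumulate toward the restart limit. A toy illustration of the idea (the class and limit are hypothetical; only restart_count mirrors ServerInstance):

class CrashWatcher:
    def __init__(self, max_restarts: int = 3):
        self.restart_count = 0
        self.max_restarts = max_restarts

    def on_check(self, running: bool) -> None:
        if running:
            # Healthy check: reset the counter instead of letting old crashes linger.
            self.restart_count = 0
            return
        self.restart_count += 1
        if self.restart_count > self.max_restarts:
            raise RuntimeError("Too many crashes; giving up")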
@@ -29,7 +29,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
             _,
         ) = auth_data
 
-        data = self.helper.get_announcements()
+        data = self.helper.get_announcements(auth_data[4]["lang"])
         if not data:
             return self.finish_json(
                 424,
@@ -725,7 +725,19 @@ class ApiServersIndexHandler(BaseApiHandler):
                 405, {"status": "error", "error": "DATA CONSTRAINT FAILED"}
             )
             return
-        new_server_id = self.controller.create_api_server(data, user["user_id"])
+        try:
+            new_server_id = self.controller.create_api_server(data, user["user_id"])
+        except Exception as e:
+            self.controller.servers.stats.record_stats()
+
+            self.finish_json(
+                503,
+                {
+                    "status": "error",
+                    "error": "Could not create server",
+                    "error_data": str(e),
+                },
+            )
+
         self.controller.servers.stats.record_stats()
 
@@ -1,5 +1,5 @@
 {
   "major": 4,
   "minor": 4,
-  "sub": 3
+  "sub": 4
 }
@@ -498,24 +498,19 @@
 <script src="../../static/assets/js/shared/upload.js"></script>
 <script>
   document.getElementById("root_upload_button").addEventListener("click", function (event) {
-    if (file) {
-      upload = true;
-      if (document.getElementById('root_upload_button').classList.contains('clicked')) {
-        document.getElementById('main-tree-div-upload').innerHTML = '<input type="radio" class="root-input" id="main-tree-input-upload" name="root_path" value="" checked><span id="main-tree-upload" class="files-tree-title tree-caret-down root-dir"><i class="far fa-folder"></i><i class="far fa-folder-open"></i>{{ translate("serverFiles", "files", data["lang"]) }}</span></input>'
-      } else {
-        document.getElementById('root_upload_button').classList.add('clicked')
-      }
-      const token = getCookie("_xsrf");
-      var dialog = bootbox.dialog({
-        message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
-        closeButton: false
-      });
-      setTimeout(function () {
-        getDirView();
-      }, 2000);
+    if (document.getElementById('root_upload_button').classList.contains('clicked')) {
+      show_file_tree();
+      return;
     } else {
-      bootbox.alert("You must input a path before selecting this button");
+      document.getElementById('root_upload_button').classList.add('clicked')
     }
+    bootbox.dialog({
+      message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
+      closeButton: false
+    });
+    setTimeout(function () {
+      getDirView();
+    }, 2000);
   });
 
   function eula_confirm() {
@@ -624,7 +619,9 @@
     if (responseData.status === "ok") {
       window.location.href = '/panel/dashboard';
     } else {
+      // Close the "be patient..." dialogue box
+      $('.bootbox-close-button').click();
+      // Alert the user that there was an issue.
       bootbox.alert({
         title: responseData.error,
         message: responseData.error_data
@@ -1,4 +1,5 @@
 import os
+import json
 import datetime
 import uuid
 import peewee
@@ -13,9 +14,9 @@ from app.classes.shared.file_helpers import FileHelpers
 logger = logging.getLogger(__name__)
 
 
-def is_valid_backup(backup, all_servers):
+def is_valid_entry(entry, all_servers):
     try:
-        return str(backup.server_id) in all_servers
+        return str(entry.server_id) in all_servers
     except (TypeError, peewee.DoesNotExist):
         return False
 
@@ -24,6 +25,8 @@ def migrate(migrator: Migrator, database, **kwargs):
     """
     Write your migrations here.
     """
+    backup_migration_status = True
+    schedule_migration_status = True
     db = database
     Console.info("Starting Backups migrations")
     Console.info(
@@ -161,10 +164,20 @@ def migrate(migrator: Migrator, database, **kwargs):
         row.server_id for row in Servers.select(Servers.server_id).distinct()
     ]
     all_backups = Backups.select()
+    all_schedules = Schedules.select()
     Console.info("Cleaning up orphan backups for all servers")
     valid_backups = [
-        backup for backup in all_backups if is_valid_backup(backup, all_servers)
+        backup for backup in all_backups if is_valid_entry(backup, all_servers)
     ]
+    if len(valid_backups) < len(all_backups):
+        backup_migration_status = False
+        print("Orphan backup found")
+    Console.info("Cleaning up orphan schedules for all servers")
+    valid_schedules = [
+        schedule for schedule in all_schedules if is_valid_entry(schedule, all_servers)
+    ]
+    if len(valid_schedules) < len(all_schedules):
+        schedule_migration_status = False
     # Copy data from the existing backups table to the new one
     for backup in valid_backups:
         Console.info(f"Trying to get server for backup migration {backup.server_id}")
@@ -221,13 +234,20 @@ def migrate(migrator: Migrator, database, **kwargs):
     Console.debug("Migrations: Dropping backup_path from servers table")
     migrator.drop_columns("servers", ["backup_path"])
 
-    for schedule in Schedules.select():
+    for schedule in valid_schedules:
         action_id = None
         if schedule.command == "backup_server":
             Console.info(
                 f"Migrations: Adding backup ID to task with name {schedule.name}"
             )
-            backup = NewBackups.get(NewBackups.server_id == schedule.server_id)
+            try:
+                backup = NewBackups.get(NewBackups.server_id == schedule.server_id)
+            except:
+                schedule_migration_status = False
+                Console.error(
+                    "Could not find backup with selected server ID. Omitting from register."
+                )
+                continue
             action_id = backup.backup_id
         NewSchedules.create(
             schedule_id=schedule.schedule_id,
@@ -255,6 +275,34 @@ def migrate(migrator: Migrator, database, **kwargs):
     # Rename the new table to backups
     migrator.rename_table("new_schedules", "schedules")
 
+    with open(
+        os.path.join(
+            os.path.abspath(os.path.curdir),
+            "app",
+            "migrations",
+            "status",
+            "20240308_multi-backup.json",
+        ),
+        "w",
+        encoding="utf-8",
+    ) as file:
+        file.write(
+            json.dumps(
+                {
+                    "backup_migration": {
+                        "type": "backup",
+                        "status": backup_migration_status,
+                        "pid": str(uuid.uuid4()),
+                    },
+                    "schedule_migration": {
+                        "type": "schedule",
+                        "status": schedule_migration_status,
+                        "pid": str(uuid.uuid4()),
+                    },
+                }
+            )
+        )
+
+
 def rollback(migrator: Migrator, database, **kwargs):
     """
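The migration now records its own outcome to app/migrations/status/20240308_multi-backup.json, which check_migrations() reads at startup. An illustrative (not actual) example of that file's shape and of how failed entries are selected, with made-up pid values:

import json

example_status = {
    "backup_migration": {"type": "backup", "status": True, "pid": "1111-made-up"},
    "schedule_migration": {"type": "schedule", "status": False, "pid": "2222-made-up"},
}

# Entries whose "status" is falsy become panel warnings via get_announcements().
warnings = [entry for entry in example_status.values() if not entry.get("status")]
print(json.dumps(warnings, indent=2))  # only the schedule_migration entry prints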
@@ -231,10 +231,14 @@
     "activityLog": "Activity Logs",
     "backupComplete": "Backup completed successfully for server {}",
     "backupStarted": "Backup started for server {}",
+    "backup_desc": "We detected the backup migration may have partially or fully failed. Please confirm your backups records on the backups tab.",
+    "backup_title": "Backup Migration Warning",
     "downloadLogs": "Download Support Logs?",
     "finishedPreparing": "We've finished preparing your support logs. Please click download to download",
     "logout": "Logout",
     "preparingLogs": " Please wait while we prepare your logs... We`ll send a notification when they`re ready. This may take a while for large deployments.",
+    "schedule_desc": "We detected some or all of your scheduled tasks were not successfully transfered during the upgrade. Please confirm your schedules in the schedules tab.",
+    "schedule_title": "Schedules Migration Warning",
     "supportLogs": "Support Logs"
   },
   "offline": {
@@ -235,10 +235,14 @@
     "activityLog": "Aktivitātes Logi",
     "backupComplete": "Dublējums veiksmīgi pabeigts priekš servera ",
     "backupStarted": "Dublējums startēts priekš servera ",
+    "backup_desc": "Mēs noteicām ka dublējuma migrācija daļēji vai pilnīgi neizdevās. Lūdzu pārskatiet savus dublējumus dublējumu cilnē.",
+    "backup_title": "Dublējuma Migrācijas Brīdinājums",
     "downloadLogs": "Lejupielādēt Atbalsta Log Failus?",
     "finishedPreparing": "Mēs esam pabeiguši sagatavot jūsu atbalsta log datnes. Lūdzu nospiediet lejupielādet lai lejupielādētu",
     "logout": "Iziet",
     "preparingLogs": " Lūdzu uzgaidiet kamēr mēs sagatavojam jūsu log datnes... Mēs jums nosūtīsim paziņojumu kad tās būs gatavas. Tas var aizņemt kādu laiku priekš lielām instalācijām.",
+    "schedule_desc": "Mēs noteicām ka daži vai visi no jūsu darbību grafikiem nebija veiksmīgi pārnesti atjauninājuma laikā. Lūdzu pārskatiet savus grafikus grafiku cilnē.",
+    "schedule_title": "Grafiku Migrācijas Brīdinājums",
     "supportLogs": "Atbalsta Logi"
   },
   "offline": {
@@ -234,10 +234,14 @@
     "activityLog": "Activiteitslogboeken",
     "backupComplete": "Back-up succesvol voltooid voor server {}",
     "backupStarted": "Backup gestart voor server {}",
+    "backup_desc": "We hebben gedetecteerd dat de back-upmigratie mogelijk gedeeltelijk of volledig is mislukt. Controleer uw back-uprecords op het tabblad Backups.",
+    "backup_title": "Waarschuwing voor back-upmigratie",
     "downloadLogs": "Ondersteuningslogboeken downloaden?",
     "finishedPreparing": "We zijn klaar met het voorbereiden van uw ondersteuningslogboeken. Klik op download om te downloaden",
     "logout": "Uitloggen",
     "preparingLogs": " Een ogenblik geduld alstublieft terwijl wij uw logboeken voorbereiden... We sturen een bericht als ze klaar zijn. Dit kan een tijdje duren voor grote implementaties.",
+    "schedule_desc": "We hebben gedetecteerd dat sommige of alle geplande taken niet succesvol zijn overgedragen tijdens de upgrade. Controleer uw schema's op het tabblad Schema's.",
+    "schedule_title": "Waarschuwing voor schemamigratie",
     "supportLogs": "Ondersteuningslogboeken"
   },
   "offline": {
@@ -234,10 +234,14 @@
     "activityLog": "Логи активностей",
     "backupComplete": "Бекап успішно завершено для сервера {}",
     "backupStarted": "Бекап успішно розпочато для сервера {}",
+    "backup_desc": "Ми зафіксували, що міграція бекапів можливо частково або повністю провалилась. Будь ласка перевірте ваші записи бекапів у відповідній вкладці.",
+    "backup_title": "Увага міграція бекапів",
     "downloadLogs": "Завантажити логи для підтримки?",
     "finishedPreparing": "Ми підготували логи. Будь ласка натисніть завантажити",
     "logout": "Вихід",
     "preparingLogs": "Будь ласка зачекайте поки ми підготуємо для вас логи... Ми надішлемо вам сповіщення коли усе буде готово. Це може зайняти трішки часу для великих проєктів.",
+    "schedule_desc": "Ми зафіксували, що деякі або всі ваші заплановані завдання не вдалось успішно перенести поки робиться оновлення. Будь ласка перевірте ваші заплановані завдання у відповідній вкладці.",
+    "schedule_title": "Увага міграція запланованих завдань",
     "supportLogs": "Логи для підтримки"
   },
   "offline": {
22
main.py
22
main.py
@ -115,6 +115,23 @@ def controller_setup():
|
|||||||
controller.clear_support_status()
|
controller.clear_support_status()
|
||||||
|
|
||||||
|
|
||||||
|
def get_migration_notifications():
|
||||||
|
migration_notifications = []
|
||||||
|
for file in os.listdir(
|
||||||
|
os.path.join(APPLICATION_PATH, "app", "migrations", "status")
|
||||||
|
):
|
||||||
|
if os.path.isfile(file):
|
||||||
|
with open(
|
||||||
|
os.path.join(APPLICATION_PATH, "app", "migrations", "status", file),
|
||||||
|
encoding="utf-8",
|
||||||
|
) as status_file:
|
||||||
|
status_json = json.load(status_file)
|
||||||
|
for item in status_json:
|
||||||
|
if not status_json[item].get("status"):
|
||||||
|
migration_notifications.append(item)
|
||||||
|
return migration_notifications
|
||||||
|
|
||||||
|
|
||||||
def tasks_starter():
|
def tasks_starter():
|
||||||
"""
|
"""
|
||||||
Method starts stats recording, app scheduler, and
|
Method starts stats recording, app scheduler, and
|
||||||
@ -350,6 +367,9 @@ if __name__ == "__main__":
|
|||||||
helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
|
helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
|
||||||
)
|
)
|
||||||
database_proxy.initialize(database)
|
database_proxy.initialize(database)
|
||||||
|
Helpers.ensure_dir_exists(
|
||||||
|
os.path.join(APPLICATION_PATH, "app", "migrations", "status")
|
||||||
|
)
|
||||||
migration_manager = MigrationManager(database, helper)
|
migration_manager = MigrationManager(database, helper)
|
||||||
migration_manager.up() # Automatically runs migrations
|
migration_manager.up() # Automatically runs migrations
|
||||||
|
|
||||||
@ -408,7 +428,7 @@ if __name__ == "__main__":
|
|||||||
controller.set_project_root(APPLICATION_PATH)
|
controller.set_project_root(APPLICATION_PATH)
|
||||||
tasks_manager = TasksManager(helper, controller, file_helper)
|
tasks_manager = TasksManager(helper, controller, file_helper)
|
||||||
import3 = Import3(helper, controller)
|
import3 = Import3(helper, controller)
|
||||||
|
helper.migration_notifications = get_migration_notifications()
|
||||||
# Check to see if client config.json version is different than the
|
# Check to see if client config.json version is different than the
|
||||||
# Master config.json in helpers.py
|
# Master config.json in helpers.py
|
||||||
Console.info("Checking for remote changes to config.json")
|
Console.info("Checking for remote changes to config.json")
|
||||||
|
@ -4,13 +4,13 @@ argon2-cffi==23.1.0
|
|||||||
cached_property==1.5.2
|
cached_property==1.5.2
|
||||||
colorama==0.4.6
|
colorama==0.4.6
|
||||||
croniter==1.4.1
|
croniter==1.4.1
|
||||||
cryptography==42.0.4
|
cryptography==43.0.1
|
||||||
libgravatar==1.0.4
|
libgravatar==1.0.4
|
||||||
nh3==0.2.14
|
nh3==0.2.14
|
||||||
packaging==23.2
|
packaging==23.2
|
||||||
peewee==3.13
|
peewee==3.13
|
||||||
psutil==5.9.5
|
psutil==5.9.5
|
||||||
pyOpenSSL==24.0.0
|
pyOpenSSL==24.2.1
|
||||||
pyjwt==2.8.0
|
pyjwt==2.8.0
|
||||||
PyYAML==6.0.1
|
PyYAML==6.0.1
|
||||||
requests==2.32.0
|
requests==2.32.0
|
||||||
|
@@ -3,7 +3,7 @@ sonar.organization=crafty-controller
 
 # This is the name and version displayed in the SonarCloud UI.
 sonar.projectName=Crafty 4
-sonar.projectVersion=4.4.3
+sonar.projectVersion=4.4.4
 sonar.python.version=3.9, 3.10, 3.11
 sonar.exclusions=app/migrations/**, app/frontend/static/assets/vendors/**