Mirror of https://github.com/Ultimaker/Cura.git
Work in progress on pulling plugins out of the backups.

It's now in a state where it can actually upload ... something (that should work). Haven't tested the restore yet. I did run into trouble with the max concurrent requests, which I had to raise [above 4, now at 8] to get it to work -- I'm not sure if I'm just working around a bug here, or if that's expected behaviour. Part of CURA-12156.
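For context on that concurrency workaround, here is a minimal sketch of how a hard cap on in-flight requests can stall a batch of queued transfers. This is a toy illustration, not UM's actual HttpRequestManager, and MAX_CONCURRENT_REQUESTS is an assumed name:

    # Toy model: a fixed pool of request slots guarded by a semaphore.
    import threading

    MAX_CONCURRENT_REQUESTS = 8  # assumed knob; the commit message mentions raising it from 4

    _slots = threading.Semaphore(MAX_CONCURRENT_REQUESTS)

    def fetch(url: str, do_request) -> None:
        # Each request must first claim a slot; when every slot is held by a
        # long-running transfer (e.g. the backup upload plus several plugin
        # downloads), any further request blocks here and can look like a hang.
        with _slots:
            do_request(url)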
This commit is contained in:
parent 74420ee57b
commit d167e3f28e
5 changed files with 132 additions and 30 deletions
@@ -30,14 +30,14 @@ class Backups:
         return self.manager.createBackup(available_remote_plugins)
 
-    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, Any]) -> None:
+    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, Any], auto_close: bool = True) -> None:
         """Restore a back-up using the BackupsManager.
 
         :param zip_file: A ZIP file containing the actual back-up data.
         :param meta_data: Some metadata needed for restoring a back-up, like the Cura version number.
         """
 
-        return self.manager.restoreBackup(zip_file, meta_data)
+        return self.manager.restoreBackup(zip_file, meta_data, auto_close=auto_close)
 
     def shouldReinstallDownloadablePlugins(self) -> bool:
         return self.manager.shouldReinstallDownloadablePlugins()
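To make the new auto_close parameter concrete, a minimal usage sketch (hypothetical caller, not part of this commit) of restoring a backup while keeping Cura open, mirroring what RestoreBackupJob does further down:

    from cura.CuraApplication import CuraApplication

    def restore_keeping_cura_open(zip_bytes: bytes, meta_data: dict) -> None:
        cura_api = CuraApplication.getInstance().getCuraAPI()
        # auto_close=False skips the usual "close Cura after restore" behaviour,
        # so follow-up work (like re-downloading plugins) can still run.
        cura_api.backups.restoreBackup(zip_bytes, meta_data, auto_close=False)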
@@ -1,5 +1,7 @@
 # Copyright (c) 2025 UltiMaker
 # Cura is released under the terms of the LGPLv3 or higher.
+import tempfile
+
 import json
 
 import io
@@ -8,7 +10,7 @@ import re
 import shutil
 from copy import deepcopy
 from zipfile import ZipFile, ZIP_DEFLATED, BadZipfile
-from typing import Dict, Optional, TYPE_CHECKING, List
+from typing import Callable, Dict, Optional, TYPE_CHECKING, List
 
 from UM import i18nCatalog
 from UM.Logger import Logger
@@ -37,9 +39,6 @@ class Backup:
     SECRETS_SETTINGS = ["general/ultimaker_auth_data"]
     """Secret preferences that need to be obfuscated when making a backup of Cura"""
 
-    TO_INSTALL_FILE = "packages.json"
-    """File that contains the 'to_install' dictionary, that manages plugins to be installed on next startup."""
-
     catalog = i18nCatalog("cura")
     """Re-use translation catalog"""
@@ -74,7 +73,7 @@ class Backup:
 
         # Create an empty buffer and write the archive to it.
         buffer = io.BytesIO()
-        archive = self._makeArchive(buffer, version_data_dir)
+        archive = self._makeArchive(buffer, version_data_dir, available_remote_plugins)
         if archive is None:
             return
         files = archive.namelist()
@@ -83,9 +82,7 @@ class Backup:
         machine_count = max(len([s for s in files if "machine_instances/" in s]) - 1, 0)  # If people delete their profiles but not their preferences, it can still make a backup, and report -1 profiles. Server crashes on this.
         material_count = max(len([s for s in files if "materials/" in s]) - 1, 0)
         profile_count = max(len([s for s in files if "quality_changes/" in s]) - 1, 0)
-        # We don't store plugins anymore, since if you can make backups, you have an account (and the plugins are
-        # on the marketplace anyway)
-        plugin_count = 0
+        plugin_count = len([s for s in files if "plugin.json" in s])
         # Store the archive and metadata so the BackupManager can fetch them when needed.
         self.zip_file = buffer.getvalue()
         self.meta_data = {
@@ -98,19 +95,43 @@ class Backup:
         # Restore the obfuscated settings
         self._illuminate(**secrets)
 
-    def _fillToInstallsJson(self, file_path: str, reinstall_on_restore: dict[str, str], archive: ZipFile) -> None:
-        pass  # TODO!
+    def _fillToInstallsJson(self, file_path: str, reinstall_on_restore: frozenset[str], add_to_archive: Callable[[str], None]) -> Optional[str]:
+        """ Moves all plugin-data (in a config-file) for plugins that could be (re)installed from the Marketplace from
+        'installed' to 'to_install' before adding that file to the archive.
+
+        Note that the 'filename'-entry in the package-data (of the plugins) might not be valid anymore on restore.
+        We'll replace it on restore instead, as that's the time when the new package is downloaded.
+
+        :param file_path: Absolute path to the packages-file.
+        :param reinstall_on_restore: A set of plugins that _can_ be reinstalled from the Marketplace.
+        :param add_to_archive: A function/lambda that takes a filename and adds it to the archive.
+        """
+        with open(file_path, "r") as file:
+            data = json.load(file)
+            reinstall, keep_in = {}, {}
+            for install_id, install_info in data["installed"].items():
+                (reinstall if install_id in reinstall_on_restore else keep_in)[install_id] = install_info
+            data["installed"] = keep_in
+            data["to_install"].update(reinstall)
+            if data is not None:
+                tmpfile = tempfile.NamedTemporaryFile(delete=False)
+                with open(tmpfile.name, "w") as outfile:
+                    json.dump(data, outfile)
+                add_to_archive(tmpfile.name)
+                return tmpfile.name
+        return None
 
-    def _findRedownloadablePlugins(self, available_remote_plugins: frozenset) -> dict[str, str]:
+    def _findRedownloadablePlugins(self, available_remote_plugins: frozenset) -> (frozenset[str], frozenset[str]):
         """ Find all plugins that should be able to be reinstalled from the Marketplace.
 
-        :param plugins_path: Path to all plugins in the user-space.
-        :return: Set of all package-id's of plugins that can be reinstalled from the Marketplace.
+        :return: Tuple of a set of plugin-ids and a set of plugin-paths.
         """
         plugin_reg = PluginRegistry.getInstance()
         id = "id"
-        return {v["location"]: v[id] for v in plugin_reg.getAllMetaData()
-                if v[id] in available_remote_plugins and not plugin_reg.isBundledPlugin(v[id])}
+        plugins = [v for v in plugin_reg.getAllMetaData()
+                   if v[id] in available_remote_plugins and not plugin_reg.isBundledPlugin(v[id])]
+        return frozenset([v[id] for v in plugins]), frozenset([v["location"] for v in plugins])
 
-    def _makeArchive(self, buffer: "io.BytesIO", root_path: str) -> Optional[ZipFile]:
+    def _makeArchive(self, buffer: "io.BytesIO", root_path: str, available_remote_plugins: frozenset) -> Optional[ZipFile]:
         """Make a full archive from the given root path with the given name.
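To see what _fillToInstallsJson does to the packages file, a standalone toy run (package names invented for illustration) of the 'installed' to 'to_install' move:

    # Standalone sketch of the move; assume only OctoPrintPlugin is on the Marketplace.
    import json

    data = {
        "installed": {
            "OctoPrintPlugin": {"package_info": {"package_id": "OctoPrintPlugin"}},
            "MyLocalPlugin": {"package_info": {"package_id": "MyLocalPlugin"}},
        },
        "to_install": {},
    }
    reinstall_on_restore = frozenset({"OctoPrintPlugin"})

    reinstall, keep_in = {}, {}
    for install_id, install_info in data["installed"].items():
        (reinstall if install_id in reinstall_on_restore else keep_in)[install_id] = install_info
    data["installed"] = keep_in           # only the plugin that can't be re-downloaded stays archived
    data["to_install"].update(reinstall)  # Marketplace plugins get re-installed on next startup

    print(json.dumps(data, indent=2))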
@@ -119,20 +140,28 @@ class Backup:
         :return: The archive as bytes.
         """
         ignore_string = re.compile("|".join(self.IGNORED_FILES + self.IGNORED_FOLDERS))
-        reinstall_instead_plugins = self._findRedownloadablePlugins(available_remote_plugins)
+        reinstall_instead_ids, reinstall_instead_paths = self._findRedownloadablePlugins(available_remote_plugins)
+        tmpfiles = []
         try:
             archive = ZipFile(buffer, "w", ZIP_DEFLATED)
+            add_path_to_archive = lambda path: archive.write(path, path[len(root_path) + len(os.sep):])
             for root, folders, files in os.walk(root_path, topdown=True):
-                folders[:] = [f for f in folders if f not in reinstall_instead_plugins]
+                folders[:] = [f for f in folders if f not in reinstall_instead_paths]
                 for item_name in folders + files:
                     absolute_path = os.path.join(root, item_name)
                     if ignore_string.search(absolute_path):
                         continue
-                    if item_name == self.TO_INSTALL_FILE:
-                        self._fillToInstallsJson(absolute_path, reinstall_instead_plugins, archive)
+                    if item_name == "packages.json":
+                        tmpfiles.append(
+                            self._fillToInstallsJson(absolute_path, reinstall_instead_ids, add_path_to_archive))
                     else:
-                        archive.write(absolute_path, absolute_path[len(root_path) + len(os.sep):])
+                        add_path_to_archive(absolute_path)
             archive.close()
+            for tmpfile_path in tmpfiles:
+                try:
+                    os.remove(tmpfile_path)
+                except IOError as ex:
+                    Logger.warning(f"Couldn't remove temporary file '{tmpfile_path}' because '{ex}'.")
             return archive
         except (IOError, OSError, BadZipfile) as error:
             Logger.log("e", "Could not create archive from user data directory: %s", error)
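The new add_path_to_archive lambda derives each entry's archive name by stripping the root path plus one separator; a quick standalone sketch (paths invented):

    # How the arcname is derived: drop the root path and one os.sep.
    import os

    root_path = os.path.join("home", "user", ".config", "cura", "5.11")
    absolute_path = os.path.join(root_path, "plugins", "packages.json")

    arcname = absolute_path[len(root_path) + len(os.sep):]
    print(arcname)  # plugins/packages.json (plugins\packages.json on Windows)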
@@ -39,12 +39,13 @@ class BackupsManager:
         # We don't return a Backup here because we want plugins only to interact with our API and not full objects.
         return backup.zip_file, backup.meta_data
 
-    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, str]) -> None:
+    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, str], auto_close: bool = True) -> None:
         """
         Restore a back-up from a given ZipFile.
 
         :param zip_file: A bytes object containing the actual back-up.
         :param meta_data: A dict containing some metadata that is needed to restore the back-up correctly.
+        :param auto_close: Normally, Cura will need to close immediately after restoring the back-up.
         """
 
         if not meta_data.get("cura_release", None):
@@ -57,7 +58,7 @@ class BackupsManager:
         backup = Backup(self._application, zip_file = zip_file, meta_data = meta_data)
         restored = backup.restore()
 
-        if restored:
+        if restored and auto_close:
             # At this point, Cura will need to restart for the changes to take effect.
             # We don't want to store the data at this point as that would override the just-restored backup.
             self._application.windowClosed(save_data = False)
@@ -118,6 +118,8 @@ class CreateBackupJob(Job):
                 }
             }).encode()
 
+        CuraApplication.getInstance().processEvents()  # Needed??
+
         HttpRequestManager.getInstance().put(
             self._api_backup_url,
             data = payload,
@@ -125,6 +127,8 @@ class CreateBackupJob(Job):
             error_callback = self._onUploadSlotCompleted,
             scope = self._json_cloud_scope)
 
+        CuraApplication.getInstance().processEvents()  # Needed??
+
     def _onUploadSlotCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
         if HttpRequestManager.safeHttpStatus(reply) >= 300:
             replyText = HttpRequestManager.readText(reply)
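On the "Needed??" question: processEvents() pumps the Qt event loop so queued signals (such as network callbacks) get delivered while this thread is busy. A minimal illustration of the general pattern, outside Cura:

    # Illustrative only, not Cura code: a blocking wait that still lets
    # queued Qt events (e.g. network callbacks) run on this thread.
    from PyQt6.QtCore import QCoreApplication, QEventLoop

    def wait_for(condition, app: QCoreApplication) -> None:
        while not condition():
            # Without this, signals queued on this thread would never be
            # delivered while we busy-wait, and a request could appear to hang.
            app.processEvents(QEventLoop.ProcessEventsFlag.AllEvents, 50)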
@@ -1,8 +1,12 @@
 # Copyright (c) 2021 Ultimaker B.V.
 # Cura is released under the terms of the LGPLv3 or higher.
+import tempfile
+
+import json
+
 import base64
 import hashlib
 import os
 import threading
 from tempfile import NamedTemporaryFile
 from typing import Optional, Any, Dict
@@ -12,9 +16,16 @@ from PyQt6.QtNetwork import QNetworkReply, QNetworkRequest
 from UM.Job import Job
 from UM.Logger import Logger
+from UM.PackageManager import catalog
+from UM.Resources import Resources
 from UM.TaskManagement.HttpRequestManager import HttpRequestManager
-from cura.CuraApplication import CuraApplication
+from UM.Version import Version
+
 from cura.ApplicationMetadata import CuraSDKVersion
+from cura.CuraApplication import CuraApplication
 from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
+import cura.UltimakerCloud.UltimakerCloudConstants as UltimakerCloudConstants
+
+PACKAGES_URL = f"{UltimakerCloudConstants.CuraCloudAPIRoot}/cura-packages/v{UltimakerCloudConstants.CuraCloudAPIVersion}/cura/v{CuraSDKVersion}/packages"
 
 class RestoreBackupJob(Job):
     """Downloads a backup and overwrites local configuration with the backup.
@@ -60,8 +71,8 @@ class RestoreBackupJob(Job):
 
         # We store the file in a temporary path first to ensure integrity.
         try:
-            temporary_backup_file = NamedTemporaryFile(delete = False)
-            with open(temporary_backup_file.name, "wb") as write_backup:
+            self._temporary_backup_file = NamedTemporaryFile(delete = False)
+            with open(self._temporary_backup_file.name, "wb") as write_backup:
                 app = CuraApplication.getInstance()
                 bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
                 while bytes_read:
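A side note on the NamedTemporaryFile pattern above (a general Python point, not specific to this commit): delete=False keeps the file on disk after the handle is closed, so it can be reopened by name later; promoting it to self._temporary_backup_file keeps that name reachable from the later hash-check and restore steps. A minimal sketch:

    # Why delete=False: the file must survive so it can be reopened by name.
    from tempfile import NamedTemporaryFile

    tmp = NamedTemporaryFile(delete=False)
    tmp.close()  # close the handle; delete=False means the file stays on disk
    with open(tmp.name, "wb") as f:
        f.write(b"backup bytes")
    with open(tmp.name, "rb") as f:  # reopen by name in a later step
        assert f.read() == b"backup bytes"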
@@ -74,18 +85,75 @@ class RestoreBackupJob(Job):
             self._job_done.set()
             return
 
-        if not self._verifyMd5Hash(temporary_backup_file.name, self._backup.get("md5_hash", "")):
+        if not self._verifyMd5Hash(self._temporary_backup_file.name, self._backup.get("md5_hash", "")):
             # Don't restore the backup if the MD5 hashes do not match.
             # This can happen if the download was interrupted.
             Logger.log("w", "Remote and local MD5 hashes do not match, not restoring backup.")
             self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
 
         # Tell Cura to place the backup back in the user data folder.
-        with open(temporary_backup_file.name, "rb") as read_backup:
+        metadata = self._backup.get("metadata", {})
+        with open(self._temporary_backup_file.name, "rb") as read_backup:
             cura_api = CuraApplication.getInstance().getCuraAPI()
-            cura_api.backups.restoreBackup(read_backup.read(), self._backup.get("metadata", {}))
+            cura_api.backups.restoreBackup(read_backup.read(), metadata, auto_close=False)
+
+        # Read packages data-file, to get the 'to_install' plugin-ids.
+        version_to_restore = Version(metadata.get("cura_release", "dev"))
+        version_str = f"{version_to_restore.getMajor()}.{version_to_restore.getMinor()}"
+        packages_path = os.path.abspath(os.path.join(os.path.abspath(
+            Resources.getConfigStoragePath()), "..", version_str, "packages.json"))
+        if not os.path.exists(packages_path):
+            self._job_done.set()
+            return
+
+        to_install = set()
+        try:
+            with open(packages_path, "r") as packages_file:
+                packages_json = json.load(packages_file)
+                if "to_install" in packages_json and "package_id" in packages_json["to_install"]:
+                    to_install.add(packages_json["to_install"]["package_id"])
+        except IOError as ex:
+            pass  # TODO! (log + message)
+
+        if len(to_install) < 1:
+            self._job_done.set()
+            return
+
+        # Download all re-installable plugin packages, so they can be put back on start-up.
+        redownload_errors = []
+        def packageDownloadCallback(package_id: str, msg: "QNetworkReply", err: "QNetworkReply.NetworkError" = None) -> None:
+            if err is not None or HttpRequestManager.safeHttpStatus(msg) != 200:
+                redownload_errors.append(err)
+            to_install.remove(package_id)
+
+            try:
+                with tempfile.NamedTemporaryFile(mode="wb+", suffix=".curapackage") as temp_file:
+                    bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
+                    while bytes_read:
+                        temp_file.write(bytes_read)
+                        bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
+                        # self._app.processEvents()
+                    # self._progress[package_id]["file_written"] = temp_file.name
+                    if not CuraApplication.getInstance().getPackageManager().installPackage(temp_file.name):
+                        redownload_errors.append(f"Couldn't install package '{package_id}'.")
+            except IOError as ex:
+                redownload_errors.append(f"Couldn't read package '{package_id}' because '{ex}'.")
+
+            if len(to_install) < 1:
+                if len(redownload_errors) == 0:
+                    self._job_done.set()
+                else:
+                    print("|".join(redownload_errors))  # TODO: Message / Log instead.
+                    self._job_done.set()  # NOTE: Set job probably not the right call here... (depends on whether or not that in the end closes the app or not...)
+
+        self._package_download_scope = UltimakerCloudScope(CuraApplication.getInstance())
+        for package_id in to_install:
+            HttpRequestManager.getInstance().get(
+                f"{PACKAGES_URL}/{package_id}/download",
+                scope=self._package_download_scope,
+                callback=lambda msg: packageDownloadCallback(package_id, msg),
+                error_callback=lambda msg, err: packageDownloadCallback(package_id, msg, err)
+            )
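One Python pitfall worth flagging in the download loop above, given this is explicitly work in progress: lambdas capture package_id by reference, so by the time a callback fires, every lambda may see the last package_id from the loop (and the callback reads its body from reply, the backup's own download, rather than from msg). A sketch of the usual fix, binding the current value with functools.partial -- not part of this commit:

    # Sketch: bind the loop variable at definition time so each callback
    # keeps its own package_id.
    import functools

    def packageDownloadCallback(package_id, msg, err=None):
        print(package_id, msg, err)

    packages = ["PluginA", "PluginB"]  # invented ids for illustration
    callbacks = [functools.partial(packageDownloadCallback, package_id)
                 for package_id in packages]
    for cb, fake_reply in zip(callbacks, ["reply-a", "reply-b"]):
        cb(fake_reply)  # each partial remembers the package_id it was created with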
     @staticmethod
     def _verifyMd5Hash(file_path: str, known_hash: str) -> bool:
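The body of _verifyMd5Hash is cut off in this view. Based on the hashlib and base64 imports, a typical implementation would look like the following sketch; the exact Cura code may differ:

    # Assumed shape of the hash check; the actual implementation isn't shown above.
    import base64
    import hashlib

    def _verifyMd5Hash(file_path: str, known_hash: str) -> bool:
        with open(file_path, "rb") as read_backup:
            # Compare a base64-encoded MD5 digest of the local file against the
            # hash reported by the server for the download.
            local_md5 = base64.b64encode(hashlib.md5(read_backup.read()).digest(),
                                         altchars=b"_-").decode("utf-8")
            return known_hash == local_md5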