mirror of
https://github.com/Ultimaker/Cura.git
synced 2025-08-06 21:44:01 -06:00
Count backup items for meta data, small fixes
This commit is contained in:
parent 0e0492327c
commit 4429b5b5c1
1 changed file with 34 additions and 23 deletions
@@ -19,7 +19,7 @@ class Backup:
     """
 
     # These files should be ignored when making a backup.
-    IGNORED_FILES = {"cura.log"}
+    IGNORED_FILES = {"cura.log", "cache"}
 
     def __init__(self, zip_file: bytes = None, meta_data: dict = None):
         self.zip_file = zip_file  # type: Optional[bytes]
@@ -37,21 +37,28 @@ class Backup:
         # Ensure all current settings are saved.
         CuraApplication.getInstance().saveSettings()
 
-        # We're using an easy to parse filename for when we're restoring edge cases:
-        # TIMESTAMP.backup.VERSION.cura.zip
-        archive = self._makeArchive(version_data_dir)
-
-        self.zip_file = archive
+        # Create an empty buffer and write the archive to it.
+        buffer = io.BytesIO()
+        archive = self._makeArchive(buffer, version_data_dir)
+        files = archive.namelist()
+
+        # Count the metadata items. We do this in a rather naive way at the moment.
+        machine_count = len([s for s in files if "machine_instances/" in s]) - 1
+        material_count = len([s for s in files if "materials/" in s]) - 1
+        profile_count = len([s for s in files if "quality_changes/" in s]) - 1
+        plugin_count = len([s for s in files if "plugin.json" in s])
+
+        # Store the archive and metadata so the BackupManager can fetch them when needed.
+        self.zip_file = buffer.getvalue()
         self.meta_data = {
             "cura_release": cura_release,
-            "machine_count": 0,
-            "material_count": 0,
-            "profile_count": 0,
-            "plugin_count": 0
+            "machine_count": str(machine_count),
+            "material_count": str(material_count),
+            "profile_count": str(profile_count),
+            "plugin_count": str(plugin_count)
         }
-        # TODO: fill meta data with real machine/material/etc counts.
 
-    def _makeArchive(self, root_path: str) -> Optional[bytes]:
+    def _makeArchive(self, buffer: "io.BytesIO", root_path: str) -> Optional[ZipFile]:
         """
         Make a full archive from the given root path with the given name.
         :param root_path: The root directory to archive recursively.
@@ -59,52 +66,56 @@ class Backup:
         """
         contents = os.walk(root_path)
         try:
-            buffer = io.BytesIO()
             archive = ZipFile(buffer, "w", ZIP_DEFLATED)
             for root, folders, files in contents:
                 for folder_name in folders:
                     # Add all folders, even empty ones.
                     if folder_name in self.IGNORED_FILES:
                         continue
                     absolute_path = os.path.join(root, folder_name)
                     relative_path = absolute_path[len(root_path) + len(os.sep):]
                     archive.write(absolute_path, relative_path)
                 for file_name in files:
                     # Add all files except the ignored ones.
                     if file_name in self.IGNORED_FILES:
                         continue
                     absolute_path = os.path.join(root, file_name)
                     relative_path = absolute_path[len(root_path) + len(os.sep):]
                     archive.write(absolute_path, relative_path)
             archive.close()
-            return buffer.getvalue()
+            return archive
         except (IOError, OSError, BadZipfile) as error:
             Logger.log("e", "Could not create archive from user data directory: %s", error)
             # TODO: show message.
             return None
 
     def restore(self) -> bool:
         """
-        Restore this backup.
+        Restore this backups
         :return: A boolean whether we had success or not.
         """
         if not self.zip_file or not self.meta_data or not self.meta_data.get("cura_release", None):
             # We can restore without the minimum required information.
             Logger.log("w", "Tried to restore a Cura backup without having proper data or meta data.")
             # TODO: show message.
             return False
 
-        # global_data_dir = os.path.dirname(version_data_dir)
+        # TODO: handle restoring older data version.
+        # global_data_dir = os.path.dirname(version_data_dir)
 
         version_data_dir = Resources.getDataStoragePath()
         archive = ZipFile(io.BytesIO(self.zip_file), "r")
         extracted = self._extractArchive(archive, version_data_dir)
-        if not extracted:
-            return False
-        return True
+        return extracted
 
     @staticmethod
     def _extractArchive(archive: "ZipFile", target_path: str) -> bool:
         """
         Extract the whole archive to the given target path.
         :param archive: The archive as ZipFile.
         :param target_path: The target path.
         :return: A boolean whether we had success or not.
         """
         Logger.log("d", "Removing current data in location: %s", target_path)
         shutil.rmtree(target_path)
 
         Logger.log("d", "Extracting backup to location: %s", target_path)
         archive.extractall(target_path)
 
         return True
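The counting change is easier to follow outside the diff. Below is a small standalone sketch (the entry names are hypothetical, not the real Cura preference layout) of the same substring counting over ZipFile.namelist(). The "- 1" appears to discard the folder entry that _makeArchive writes for the directory itself ("Add all folders, even empty ones"), leaving only the files inside it.

import io
from zipfile import ZipFile, ZIP_DEFLATED

# Hypothetical layout: a folder entry plus the files inside it, the way
# _makeArchive writes the folder itself before its files.
buffer = io.BytesIO()
with ZipFile(buffer, "w", ZIP_DEFLATED) as archive:
    archive.writestr("machine_instances/", "")  # the folder entry itself
    archive.writestr("machine_instances/printer_1.global.cfg", "...")
    archive.writestr("machine_instances/printer_2.global.cfg", "...")
    archive.writestr("plugins/ExamplePlugin/plugin.json", "{}")
    files = archive.namelist()

# Same naive substring counting as the diff; the "- 1" drops the folder
# entry so only the files inside it are counted.
machine_count = len([s for s in files if "machine_instances/" in s]) - 1
plugin_count = len([s for s in files if "plugin.json" in s])
print(machine_count, plugin_count)  # -> 2 1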
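For the buffer handling the diff switches to, here is a minimal round-trip sketch, assuming a plain directory tree and skipping the IGNORED_FILES filtering and error handling of the real code: the archive is written into an in-memory buffer, its bytes are kept (as self.zip_file is in the diff), and restoring re-opens those bytes and extracts them.

import io
import os
import tempfile
from zipfile import ZipFile, ZIP_DEFLATED

def make_archive_bytes(root_path: str) -> bytes:
    """Zip a directory tree into memory, roughly like the patched _makeArchive."""
    buffer = io.BytesIO()
    with ZipFile(buffer, "w", ZIP_DEFLATED) as archive:
        for root, folders, files in os.walk(root_path):
            for name in folders + files:
                absolute_path = os.path.join(root, name)
                relative_path = absolute_path[len(root_path) + len(os.sep):]
                archive.write(absolute_path, relative_path)
    return buffer.getvalue()  # what the Backup stores in self.zip_file

def restore_bytes(zip_bytes: bytes, target_path: str) -> None:
    """Re-open the stored bytes and extract them, like restore()/_extractArchive()."""
    with ZipFile(io.BytesIO(zip_bytes), "r") as archive:
        archive.extractall(target_path)

# Round trip against throwaway directories.
with tempfile.TemporaryDirectory() as src, tempfile.TemporaryDirectory() as dst:
    os.makedirs(os.path.join(src, "machine_instances"))
    with open(os.path.join(src, "machine_instances", "example.cfg"), "w") as f:
        f.write("[general]\n")
    data = make_archive_bytes(src)
    restore_bytes(data, dst)
    assert os.path.isfile(os.path.join(dst, "machine_instances", "example.cfg"))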