Merge branch 'master' into ui_rework_4_0

Ghostkeeper 2018-11-15 15:55:08 +01:00
commit d732a2f953
No known key found for this signature in database
GPG key ID: 86BEF881AE2CF276
11 changed files with 323 additions and 66 deletions


@@ -16,7 +16,7 @@ from .QualityGroup import QualityGroup
 from .QualityNode import QualityNode

 if TYPE_CHECKING:
-    from UM.Settings.DefinitionContainer import DefinitionContainer
+    from UM.Settings.Interfaces import DefinitionContainerInterface
     from cura.Settings.GlobalStack import GlobalStack
     from .QualityChangesGroup import QualityChangesGroup
     from cura.CuraApplication import CuraApplication
@@ -538,7 +538,7 @@ class QualityManager(QObject):
 # Example: for an Ultimaker 3 Extended, it has "quality_definition = ultimaker3". This means Ultimaker 3 Extended
 # shares the same set of qualities profiles as Ultimaker 3.
 #
-def getMachineDefinitionIDForQualitySearch(machine_definition: "DefinitionContainer",
+def getMachineDefinitionIDForQualitySearch(machine_definition: "DefinitionContainerInterface",
                                            default_definition_id: str = "fdmprinter") -> str:
     machine_definition_id = default_definition_id
     if parseBool(machine_definition.getMetaDataEntry("has_machine_quality", False)):
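The comment in the hunk above describes the lookup contract: a machine that sets "quality_definition" shares another machine's quality profiles, and machines without their own qualities fall back to the generic fdmprinter set. A minimal, self-contained sketch of that contract follows; the helper class and function names are illustrative stand-ins, not the code from this hunk, and the boolean parsing only approximates UM's parseBool.

from typing import Any

class FakeDefinition:
    # Hypothetical stand-in for a DefinitionContainerInterface (illustration only).
    def __init__(self, definition_id: str, metadata: dict) -> None:
        self._id = definition_id
        self._metadata = metadata

    def getId(self) -> str:
        return self._id

    def getMetaDataEntry(self, key: str, default: Any = None) -> Any:
        return self._metadata.get(key, default)

def quality_search_definition_id(definition: FakeDefinition, default_definition_id: str = "fdmprinter") -> str:
    # Machines without machine-specific qualities fall back to the shared fdmprinter profiles.
    has_machine_quality = str(definition.getMetaDataEntry("has_machine_quality", False)).lower() in ("true", "yes", "1")
    if not has_machine_quality:
        return default_definition_id
    # "quality_definition" lets a machine reuse another machine's quality profiles.
    return definition.getMetaDataEntry("quality_definition", definition.getId())

um3_extended = FakeDefinition("ultimaker3_extended", {"has_machine_quality": "True", "quality_definition": "ultimaker3"})
assert quality_search_definition_id(um3_extended) == "ultimaker3"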


@@ -419,13 +419,13 @@ class ContainerManager(QObject):
             self._container_name_filters[name_filter] = entry

     ## Import single profile, file_url does not have to end with curaprofile
-    @pyqtSlot(QUrl, result="QVariantMap")
-    def importProfile(self, file_url: QUrl):
+    @pyqtSlot(QUrl, result = "QVariantMap")
+    def importProfile(self, file_url: QUrl) -> Dict[str, str]:
         if not file_url.isValid():
-            return
+            return {"status": "error", "message": catalog.i18nc("@info:status", "Invalid file URL:") + " " + str(file_url)}
         path = file_url.toLocalFile()
         if not path:
-            return
+            return {"status": "error", "message": catalog.i18nc("@info:status", "Invalid file URL:") + " " + str(file_url)}
         return self._container_registry.importProfile(path)

     @pyqtSlot(QObject, QUrl, str)
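With this change importProfile always returns a status dictionary instead of sometimes returning None, so QML (or any other caller) can treat every outcome the same way. A small self-contained sketch of consuming that shape; the handler name and messages here are illustrative, not part of the change:

from typing import Dict

def describe_import_result(result: Dict[str, str]) -> str:
    # Every path now yields {"status": "ok" | "error", "message": <text for the user>}.
    if result.get("status") == "error":
        return "Import failed: " + result.get("message", "Unknown error")
    return "Import succeeded."

print(describe_import_result({"status": "error", "message": "Invalid file URL: file://"}))
print(describe_import_result({"status": "ok", "message": ""}))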


@@ -5,12 +5,12 @@ import os
 import re
 import configparser
-from typing import cast, Optional
+from typing import cast, Dict, Optional
 from PyQt5.QtWidgets import QMessageBox
 from UM.Decorators import override
 from UM.Settings.ContainerFormatError import ContainerFormatError
+from UM.Settings.Interfaces import ContainerInterface
 from UM.Settings.ContainerRegistry import ContainerRegistry
 from UM.Settings.ContainerStack import ContainerStack
 from UM.Settings.InstanceContainer import InstanceContainer
@@ -28,7 +28,7 @@ from . import GlobalStack
 import cura.CuraApplication
 from cura.Machines.QualityManager import getMachineDefinitionIDForQualitySearch
-from cura.ReaderWriters.ProfileReader import NoProfileException
+from cura.ReaderWriters.ProfileReader import NoProfileException, ProfileReader

 from UM.i18n import i18nCatalog
 catalog = i18nCatalog("cura")
@@ -161,20 +161,20 @@ class CuraContainerRegistry(ContainerRegistry):
     ## Imports a profile from a file
     #
-    # \param file_name \type{str} the full path and filename of the profile to import
-    # \return \type{Dict} dict with a 'status' key containing the string 'ok' or 'error', and a 'message' key
-    #   containing a message for the user
-    def importProfile(self, file_name):
+    # \param file_name The full path and filename of the profile to import.
+    # \return Dict with a 'status' key containing the string 'ok' or 'error',
+    #   and a 'message' key containing a message for the user.
+    def importProfile(self, file_name: str) -> Dict[str, str]:
         Logger.log("d", "Attempting to import profile %s", file_name)
         if not file_name:
-            return { "status": "error", "message": catalog.i18nc("@info:status Don't translate the XML tags <filename> or <message>!", "Failed to import profile from <filename>{0}</filename>: <message>{1}</message>", file_name, "Invalid path")}
+            return { "status": "error", "message": catalog.i18nc("@info:status Don't translate the XML tags <filename>!", "Failed to import profile from <filename>{0}</filename>: {1}", file_name, "Invalid path")}

         plugin_registry = PluginRegistry.getInstance()
         extension = file_name.split(".")[-1]

         global_stack = Application.getInstance().getGlobalContainerStack()
         if not global_stack:
-            return
+            return {"status": "error", "message": catalog.i18nc("@info:status Don't translate the XML tags <filename>!", "Can't import profile from <filename>{0}</filename> before a printer is added.", file_name)}

         machine_extruders = []
         for position in sorted(global_stack.extruders):
@@ -183,7 +183,7 @@ class CuraContainerRegistry(ContainerRegistry):
         for plugin_id, meta_data in self._getIOPlugins("profile_reader"):
             if meta_data["profile_reader"][0]["extension"] != extension:
                 continue
-            profile_reader = plugin_registry.getPluginObject(plugin_id)
+            profile_reader = cast(ProfileReader, plugin_registry.getPluginObject(plugin_id))
             try:
                 profile_or_list = profile_reader.read(file_name) # Try to open the file with the profile reader.
             except NoProfileException:
@@ -221,13 +221,13 @@ class CuraContainerRegistry(ContainerRegistry):
                     # Make sure we have a profile_definition in the file:
                     if profile_definition is None:
                         break
-                    machine_definition = self.findDefinitionContainers(id = profile_definition)
-                    if not machine_definition:
+                    machine_definitions = self.findDefinitionContainers(id = profile_definition)
+                    if not machine_definitions:
                         Logger.log("e", "Incorrect profile [%s]. Unknown machine type [%s]", file_name, profile_definition)
                         return {"status": "error",
                                 "message": catalog.i18nc("@info:status Don't translate the XML tags <filename>!", "This profile <filename>{0}</filename> contains incorrect data, could not import it.", file_name)
                                 }
-                    machine_definition = machine_definition[0]
+                    machine_definition = machine_definitions[0]

                     # Get the expected machine definition.
                     # i.e.: We expect gcode for a UM2 Extended to be defined as normal UM2 gcode...
@@ -274,11 +274,12 @@ class CuraContainerRegistry(ContainerRegistry):
                         setting_value = global_profile.getProperty(qc_setting_key, "value")
                         setting_definition = global_stack.getSettingDefinition(qc_setting_key)
-                        new_instance = SettingInstance(setting_definition, profile)
-                        new_instance.setProperty("value", setting_value)
-                        new_instance.resetState() # Ensure that the state is not seen as a user state.
-                        profile.addInstance(new_instance)
-                        profile.setDirty(True)
+                        if setting_definition is not None:
+                            new_instance = SettingInstance(setting_definition, profile)
+                            new_instance.setProperty("value", setting_value)
+                            new_instance.resetState() # Ensure that the state is not seen as a user state.
+                            profile.addInstance(new_instance)
+                            profile.setDirty(True)

                         global_profile.removeInstance(qc_setting_key, postpone_emit=True)
                     extruder_profiles.append(profile)
@@ -290,7 +291,7 @@ class CuraContainerRegistry(ContainerRegistry):
             for profile_index, profile in enumerate(profile_or_list):
                 if profile_index == 0:
                     # This is assumed to be the global profile
-                    profile_id = (global_stack.getBottom().getId() + "_" + name_seed).lower().replace(" ", "_")
+                    profile_id = (cast(ContainerInterface, global_stack.getBottom()).getId() + "_" + name_seed).lower().replace(" ", "_")
                 elif profile_index < len(machine_extruders) + 1:
                     # This is assumed to be an extruder profile


@@ -1,4 +1,4 @@
-# Copyright (c) 2015 Ultimaker B.V.
+# Copyright (c) 2018 Ultimaker B.V.
 # Cura is released under the terms of the LGPLv3 or higher.

 from UM.i18n import i18nCatalog
@@ -29,6 +29,7 @@ class ChangeLog(Extension, QObject,):
         self._change_logs = None
         Application.getInstance().engineCreatedSignal.connect(self._onEngineCreated)
         Application.getInstance().getPreferences().addPreference("general/latest_version_changelog_shown", "2.0.0") #First version of CURA with uranium
+        self.setMenuName(catalog.i18nc("@item:inmenu", "Changelog"))
         self.addMenuItem(catalog.i18nc("@item:inmenu", "Show Changelog"), self.showChangelog)

     def getChangeLogs(self):


@@ -1,6 +1,6 @@
 {
     "source_version": "15.04",
-    "target_version": 3,
+    "target_version": "4.5",

     "translation": {
         "machine_nozzle_size": "nozzle_size",


@@ -1,4 +1,4 @@
-# Copyright (c) 2017 Ultimaker B.V.
+# Copyright (c) 2018 Ultimaker B.V.
 # Cura is released under the terms of the LGPLv3 or higher.

 import configparser # For reading the legacy profile INI files.
@@ -6,6 +6,7 @@ import io
 import json # For reading the Dictionary of Doom.
 import math # For mathematical operations included in the Dictionary of Doom.
 import os.path # For concatenating the path to the plugin and the relative path to the Dictionary of Doom.
+from typing import Dict

 from UM.Application import Application # To get the machine manager to create the new profile in.
 from UM.Logger import Logger # Logging errors.
@@ -33,10 +34,11 @@ class LegacyProfileReader(ProfileReader):
     # \param json The JSON file to load the default setting values from. This
     #   should not be a URL but a pre-loaded JSON handle.
     # \return A dictionary of the default values of the legacy Cura version.
-    def prepareDefaults(self, json):
+    def prepareDefaults(self, json: Dict[str, Dict[str, str]]) -> Dict[str, str]:
         defaults = {}
-        for key in json["defaults"]: # We have to copy over all defaults from the JSON handle to a normal dict.
-            defaults[key] = json["defaults"][key]
+        if "defaults" in json:
+            for key in json["defaults"]: # We have to copy over all defaults from the JSON handle to a normal dict.
+                defaults[key] = json["defaults"][key]
         return defaults

     ## Prepares the local variables that can be used in evaluation of computing
@@ -80,11 +82,10 @@ class LegacyProfileReader(ProfileReader):
         Logger.log("i", "Importing legacy profile from file " + file_name + ".")
         container_registry = ContainerRegistry.getInstance()
         profile_id = container_registry.uniqueName("Imported Legacy Profile")
-        profile = InstanceContainer(profile_id) # Create an empty profile.

-        parser = configparser.ConfigParser(interpolation = None)
+        input_parser = configparser.ConfigParser(interpolation = None)
         try:
-            parser.read([file_name]) # Parse the INI file.
+            input_parser.read([file_name]) # Parse the INI file.
         except Exception as e:
             Logger.log("e", "Unable to open legacy profile %s: %s", file_name, str(e))
             return None
@@ -92,7 +93,7 @@ class LegacyProfileReader(ProfileReader):
         # Legacy Cura saved the profile under the section "profile_N" where N is the ID of a machine, except when you export in which case it saves it in the section "profile".
         # Since importing multiple machine profiles is out of scope, just import the first section we find.
         section = ""
-        for found_section in parser.sections():
+        for found_section in input_parser.sections():
             if found_section.startswith("profile"):
                 section = found_section
                 break
@@ -110,15 +111,13 @@ class LegacyProfileReader(ProfileReader):
             return None
         defaults = self.prepareDefaults(dict_of_doom)

-        legacy_settings = self.prepareLocals(parser, section, defaults) #Gets the settings from the legacy profile.
+        legacy_settings = self.prepareLocals(input_parser, section, defaults) #Gets the settings from the legacy profile.

-        #Check the target version in the Dictionary of Doom with this application version.
-        if "target_version" not in dict_of_doom:
-            Logger.log("e", "Dictionary of Doom has no target version. Is it the correct JSON file?")
-            return None
-        if InstanceContainer.Version != dict_of_doom["target_version"]:
-            Logger.log("e", "Dictionary of Doom of legacy profile reader (version %s) is not in sync with the current instance container version (version %s)!", dict_of_doom["target_version"], str(InstanceContainer.Version))
-            return None
+        # Serialised format into version 4.5. Do NOT upgrade this, let the version upgrader handle it.
+        output_parser = configparser.ConfigParser(interpolation = None)
+        output_parser.add_section("general")
+        output_parser.add_section("metadata")
+        output_parser.add_section("values")

         if "translation" not in dict_of_doom:
             Logger.log("e", "Dictionary of Doom has no translation. Is it the correct JSON file?")
@@ -127,7 +126,7 @@ class LegacyProfileReader(ProfileReader):
         quality_definition = current_printer_definition.getMetaDataEntry("quality_definition")
         if not quality_definition:
             quality_definition = current_printer_definition.getId()
-        profile.setDefinition(quality_definition)
+        output_parser["general"]["definition"] = quality_definition
         for new_setting in dict_of_doom["translation"]: # Evaluate all new settings that would get a value from the translations.
             old_setting_expression = dict_of_doom["translation"][new_setting]
             compiled = compile(old_setting_expression, new_setting, "eval")
@@ -140,37 +139,34 @@ class LegacyProfileReader(ProfileReader):
             definitions = current_printer_definition.findDefinitions(key = new_setting)
             if definitions:
                 if new_value != value_using_defaults and definitions[0].default_value != new_value: # Not equal to the default in the new Cura OR the default in the legacy Cura.
-                    profile.setProperty(new_setting, "value", new_value) # Store the setting in the profile!
+                    output_parser["values"][new_setting] = str(new_value) # Store the setting in the profile!

-        if len(profile.getAllKeys()) == 0:
+        if len(output_parser["values"]) == 0:
             Logger.log("i", "A legacy profile was imported but everything evaluates to the defaults, creating an empty profile.")

-        profile.setMetaDataEntry("type", "profile")
-        # don't know what quality_type it is based on, so use "normal" by default
-        profile.setMetaDataEntry("quality_type", "normal")
-        profile.setName(profile_id)
-        profile.setDirty(True)
-
-        #Serialise and deserialise in order to perform the version upgrade.
-        parser = configparser.ConfigParser(interpolation = None)
-        data = profile.serialize()
-        parser.read_string(data)
-        parser["general"]["version"] = "1"
-        if parser.has_section("values"):
-            parser["settings"] = parser["values"]
-            del parser["values"]
+        output_parser["general"]["version"] = "4"
+        output_parser["general"]["name"] = profile_id
+        output_parser["metadata"]["type"] = "quality_changes"
+        output_parser["metadata"]["quality_type"] = "normal" # Don't know what quality_type it is based on, so use "normal" by default.
+        output_parser["metadata"]["position"] = "0" # We only support single extrusion.
+        output_parser["metadata"]["setting_version"] = "5" # What the dictionary of doom is made for.
+
+        # Serialise in order to perform the version upgrade.
         stream = io.StringIO()
-        parser.write(stream)
+        output_parser.write(stream)
         data = stream.getvalue()
-        profile.deserialize(data)
-
-        # The definition can get reset to fdmprinter during the deserialization's upgrade. Here we set the definition
-        # again.
-        profile.setDefinition(quality_definition)
+
+        profile = InstanceContainer(profile_id)
+        profile.deserialize(data) # Also performs the version upgrade.
+        profile.setDirty(True)

         #We need to return one extruder stack and one global stack.
         global_container_id = container_registry.uniqueName("Global Imported Legacy Profile")
+
+        # We duplicate the extruder profile into the global stack.
+        # This may introduce some settings that are global in the extruder stack and some settings that are per-extruder in the global stack.
+        # We don't care about that. The engine will ignore them anyway.
         global_profile = profile.duplicate(new_id = global_container_id, new_name = profile_id) #Needs to have the same name as the extruder profile.
+        del global_profile.getMetaData()["position"] # Has no position because it's global.
         global_profile.setDirty(True)

         profile_definition = "fdmprinter"
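For reference, a small self-contained sketch of the intermediate serialisation that output_parser produces before it is handed to InstanceContainer.deserialize() and the version upgrader. The section and key names follow the code above and the test assertions further down; the definition and the translated setting value are illustrative placeholders.

import configparser
import io

output_parser = configparser.ConfigParser(interpolation = None)
output_parser.add_section("general")
output_parser.add_section("metadata")
output_parser.add_section("values")

output_parser["general"]["version"] = "4"
output_parser["general"]["name"] = "Imported Legacy Profile"
output_parser["general"]["definition"] = "ultimaker2"  # Illustrative quality_definition.
output_parser["metadata"]["type"] = "quality_changes"
output_parser["metadata"]["quality_type"] = "normal"
output_parser["metadata"]["position"] = "0"
output_parser["metadata"]["setting_version"] = "5"
output_parser["values"]["infill_sparse_density"] = "30"  # Illustrative translated legacy setting.

stream = io.StringIO()
output_parser.write(stream)
print(stream.getvalue())  # This string is what gets deserialised (and then upgraded).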


@@ -0,0 +1,190 @@
+# Copyright (c) 2018 Ultimaker B.V.
+# Cura is released under the terms of the LGPLv3 or higher.
+
+import configparser # An input for some functions we're testing.
+import os.path # To find the integration test .ini files.
+import pytest # To register tests with.
+import unittest.mock # To mock the application, plug-in and container registry out.
+
+import UM.Application # To mock the application out.
+import UM.PluginRegistry # To mock the plug-in registry out.
+import UM.Settings.ContainerRegistry # To mock the container registry out.
+import UM.Settings.InstanceContainer # To intercept the serialised data from the read() function.
+
+import LegacyProfileReader as LegacyProfileReaderModule # To get the directory of the module.
+from LegacyProfileReader import LegacyProfileReader # The module we're testing.
+
+@pytest.fixture
+def legacy_profile_reader():
+    return LegacyProfileReader()
+
+test_prepareDefaultsData = [
+    {
+        "defaults":
+        {
+            "foo": "bar"
+        },
+        "cheese": "delicious"
+    },
+    {
+        "cat": "fluffy",
+        "dog": "floofy"
+    }
+]
+
+@pytest.mark.parametrize("input", test_prepareDefaultsData)
+def test_prepareDefaults(legacy_profile_reader, input):
+    output = legacy_profile_reader.prepareDefaults(input)
+    if "defaults" in input:
+        assert input["defaults"] == output
+    else:
+        assert output == {}
+
+test_prepareLocalsData = [
+    ( # Ordinary case.
+        { # Parser data.
+            "profile":
+            {
+                "layer_height": "0.2",
+                "infill_density": "30"
+            }
+        },
+        { # Defaults.
+            "layer_height": "0.1",
+            "infill_density": "20",
+            "line_width": "0.4"
+        }
+    ),
+    ( # Empty data.
+        { # Parser data.
+            "profile":
+            {
+            }
+        },
+        { # Defaults.
+        }
+    ),
+    ( # All defaults.
+        { # Parser data.
+            "profile":
+            {
+            }
+        },
+        { # Defaults.
+            "foo": "bar",
+            "boo": "far"
+        }
+    ),
+    ( # Multiple config sections.
+        { # Parser data.
+            "some_other_name":
+            {
+                "foo": "bar"
+            },
+            "profile":
+            {
+                "foo": "baz" #Not the same as in some_other_name
+            }
+        },
+        { # Defaults.
+            "foo": "bla"
+        }
+    )
+]
+
+@pytest.mark.parametrize("parser_data, defaults", test_prepareLocalsData)
+def test_prepareLocals(legacy_profile_reader, parser_data, defaults):
+    parser = configparser.ConfigParser()
+    parser.read_dict(parser_data)
+
+    output = legacy_profile_reader.prepareLocals(parser, "profile", defaults)
+
+    assert set(defaults.keys()) <= set(output.keys()) # All defaults must be in there.
+    assert set(parser_data["profile"]) <= set(output.keys()) # All overwritten values must be in there.
+    for key in output:
+        if key in parser_data["profile"]:
+            assert output[key] == parser_data["profile"][key] # If overwritten, must be the overwritten value.
+        else:
+            assert output[key] == defaults[key] # Otherwise must be equal to the default.
+
+test_prepareLocalsNoSectionErrorData = [
+    ( # Section does not exist.
+        { # Parser data.
+            "some_other_name":
+            {
+                "foo": "bar"
+            },
+        },
+        { # Defaults.
+            "foo": "baz"
+        }
+    )
+]
+
+## Test cases where a key error is expected.
+@pytest.mark.parametrize("parser_data, defaults", test_prepareLocalsNoSectionErrorData)
+def test_prepareLocalsNoSectionError(legacy_profile_reader, parser_data, defaults):
+    parser = configparser.ConfigParser()
+    parser.read_dict(parser_data)
+
+    with pytest.raises(configparser.NoSectionError):
+        legacy_profile_reader.prepareLocals(parser, "profile", defaults)
+
+intercepted_data = ""
+
+@pytest.mark.parametrize("file_name", ["normal_case.ini"])
+def test_read(legacy_profile_reader, file_name):
+    # Mock out all dependencies. Quite a lot!
+    global_stack = unittest.mock.MagicMock()
+    global_stack.getProperty = unittest.mock.MagicMock(return_value = 1) # For machine_extruder_count setting.
+    def getMetaDataEntry(key, default_value = ""):
+        if key == "quality_definition":
+            return "mocked_quality_definition"
+        if key == "has_machine_quality":
+            return "True"
+    global_stack.definition.getMetaDataEntry = getMetaDataEntry
+    global_stack.definition.getId = unittest.mock.MagicMock(return_value = "mocked_global_definition")
+    application = unittest.mock.MagicMock()
+    application.getGlobalContainerStack = unittest.mock.MagicMock(return_value = global_stack)
+    application_getInstance = unittest.mock.MagicMock(return_value = application)
+    container_registry = unittest.mock.MagicMock()
+    container_registry_getInstance = unittest.mock.MagicMock(return_value = container_registry)
+    container_registry.uniqueName = unittest.mock.MagicMock(return_value = "Imported Legacy Profile")
+    container_registry.findDefinitionContainers = unittest.mock.MagicMock(return_value = [global_stack.definition])
+    UM.Settings.InstanceContainer.setContainerRegistry(container_registry)
+    plugin_registry = unittest.mock.MagicMock()
+    plugin_registry_getInstance = unittest.mock.MagicMock(return_value = plugin_registry)
+    plugin_registry.getPluginPath = unittest.mock.MagicMock(return_value = os.path.dirname(LegacyProfileReaderModule.__file__))
+
+    # Mock out the resulting InstanceContainer so that we can intercept the data before it's passed through the version upgrader.
+    def deserialize(self, data): # Intercepts the serialised data that we'd perform the version upgrade from when deserializing.
+        global intercepted_data
+        intercepted_data = data
+
+        parser = configparser.ConfigParser()
+        parser.read_string(data)
+        self._metadata["position"] = parser["metadata"]["position"]
+    def duplicate(self, new_id, new_name):
+        self._metadata["id"] = new_id
+        self._metadata["name"] = new_name
+        return self
+
+    with unittest.mock.patch.object(UM.Application.Application, "getInstance", application_getInstance):
+        with unittest.mock.patch.object(UM.Settings.ContainerRegistry.ContainerRegistry, "getInstance", container_registry_getInstance):
+            with unittest.mock.patch.object(UM.PluginRegistry.PluginRegistry, "getInstance", plugin_registry_getInstance):
+                with unittest.mock.patch.object(UM.Settings.InstanceContainer.InstanceContainer, "deserialize", deserialize):
+                    with unittest.mock.patch.object(UM.Settings.InstanceContainer.InstanceContainer, "duplicate", duplicate):
+                        result = legacy_profile_reader.read(os.path.join(os.path.dirname(__file__), file_name))
+
+    assert len(result) == 1
+
+    # Let's see what's inside the actual output file that we generated.
+    parser = configparser.ConfigParser()
+    parser.read_string(intercepted_data)
+    assert parser["general"]["definition"] == "mocked_quality_definition"
+    assert parser["general"]["version"] == "4" # Yes, before we upgraded.
+    assert parser["general"]["name"] == "Imported Legacy Profile" # Because we overwrote uniqueName.
+    assert parser["metadata"]["type"] == "quality_changes"
+    assert parser["metadata"]["quality_type"] == "normal"
+    assert parser["metadata"]["position"] == "0"
+    assert parser["metadata"]["setting_version"] == "5" # Yes, before we upgraded.


@@ -0,0 +1,7 @@
+[profile]
+foo = bar
+boo = far
+fill_overlap = 3
+
+[alterations]
+some = values


@@ -32,7 +32,8 @@ class PostProcessingPlugin(QObject, Extension):
     def __init__(self, parent = None) -> None:
         QObject.__init__(self, parent)
         Extension.__init__(self)
-        self.addMenuItem(i18n_catalog.i18n("Modify G-Code"), self.showPopup)
+        self.setMenuName(i18n_catalog.i18nc("@item:inmenu", "Post Processing"))
+        self.addMenuItem(i18n_catalog.i18nc("@item:inmenu", "Modify G-Code"), self.showPopup)
         self._view = None

         # Loaded scripts are all scripts that can be used


@@ -511,7 +511,10 @@
         # version, we also need to check if the current one has a lower SDK version. If so, this package should also
         # be upgradable.
         elif remote_version == local_version:
-            can_upgrade = local_package.get("sdk_version", 0) < remote_package.get("sdk_version", 0)
+            # First read sdk_version_semver. If that doesn't exist, read just sdk_version (old version system).
+            remote_sdk_version = Version(remote_package.get("sdk_version_semver", remote_package.get("sdk_version", 0)))
+            local_sdk_version = Version(local_package.get("sdk_version_semver", local_package.get("sdk_version", 0)))
+            can_upgrade = local_sdk_version < remote_sdk_version

         return can_upgrade
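The comment in this hunk describes the fallback rule for packages that predate semantic SDK versions. A minimal sketch of that rule, using a plain tuple comparison in place of UM's Version class (which is assumed here to compare like a semantic version); the function names are illustrative.

from typing import Tuple

def as_version_tuple(value) -> Tuple[int, ...]:
    # Accepts either an int (old "sdk_version") or a dotted string (new "sdk_version_semver").
    return tuple(int(part) if part.isdigit() else 0 for part in str(value).split("."))

def sdk_version_of(package: dict) -> Tuple[int, ...]:
    # Prefer the semantic version; fall back to the old integer field; treat missing as 0.
    return as_version_tuple(package.get("sdk_version_semver", package.get("sdk_version", 0)))

def can_upgrade_when_versions_match(local_package: dict, remote_package: dict) -> bool:
    return sdk_version_of(local_package) < sdk_version_of(remote_package)

assert can_upgrade_when_versions_match({"sdk_version": 5}, {"sdk_version_semver": "6.0.0"})
assert not can_upgrade_when_versions_match({"sdk_version_semver": "6.0.0"}, {"sdk_version": 5})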


@@ -4139,6 +4139,20 @@
             "limit_to_extruder": "support_infill_extruder_nr",
             "settable_per_mesh": false
         },
+        "minimum_support_area":
+        {
+            "label": "Minimum Support Area",
+            "description": "Minimum area size for support polygons. Polygons which have an area smaller than this value will not be generated.",
+            "unit": "mm²",
+            "type": "float",
+            "default_value": 0.0,
+            "minimum_value": "0",
+            "enabled": "support_enable",
+            "limit_to_extruder": "support_infill_extruder_nr",
+            "settable_per_mesh": true,
+            "fabricate_enabled": true,
+            "intermediate_enabled": true
+        },
         "support_interface_enable":
         {
             "label": "Enable Support Interface",
@@ -4378,6 +4392,50 @@
                 }
             }
         },
+        "minimum_interface_area":
+        {
+            "label": "Minimum Support Interface Area",
+            "description": "Minimum area size for support interface polygons. Polygons which have an area smaller than this value will not be generated.",
+            "unit": "mm²",
+            "type": "float",
+            "default_value": 1.0,
+            "minimum_value": "0",
+            "minimum_value_warning": "minimum_support_area",
+            "limit_to_extruder": "support_interface_extruder_nr",
+            "enabled": "support_interface_enable and support_enable",
+            "settable_per_mesh": true,
+            "children":
+            {
+                "minimum_roof_area":
+                {
+                    "label": "Minimum Support Roof Area",
+                    "description": "Minimum area size for the roofs of the support. Polygons which have an area smaller than this value will not be generated.",
+                    "unit": "mm²",
+                    "type": "float",
+                    "default_value": 1.0,
+                    "value": "extruderValue(support_roof_extruder_nr, 'minimum_interface_area')",
+                    "minimum_value": "0",
+                    "minimum_value_warning": "minimum_support_area",
+                    "limit_to_extruder": "support_roof_extruder_nr",
+                    "enabled": "support_roof_enable and support_enable",
+                    "settable_per_mesh": true
+                },
+                "minimum_bottom_area":
+                {
+                    "label": "Minimum Support Floor Area",
+                    "description": "Minimum area size for the floors of the support. Polygons which have an area smaller than this value will not be generated.",
+                    "unit": "mm²",
+                    "type": "float",
+                    "default_value": 1.0,
+                    "value": "extruderValue(support_bottom_extruder_nr, 'minimum_interface_area')",
+                    "minimum_value": "0",
+                    "minimum_value_warning": "minimum_support_area",
+                    "limit_to_extruder": "support_bottom_extruder_nr",
+                    "enabled": "support_bottom_enable and support_enable",
+                    "settable_per_mesh": true
+                }
+            }
+        },
         "support_interface_offset":
         {
             "label": "Support Interface Horizontal Expansion",