Merge branch '3.1'

ChrisTerBeke 2017-11-24 10:31:22 +01:00
commit 7d1db1b165
26 changed files with 565 additions and 64 deletions


@ -152,7 +152,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
if not definitions:
definition_container = DefinitionContainer(container_id)
- definition_container.deserialize(archive.open(each_definition_container_file).read().decode("utf-8"))
+ definition_container.deserialize(archive.open(each_definition_container_file).read().decode("utf-8"),
+                                  file_name = each_definition_container_file)
else:
definition_container = definitions[0]
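
The same change — passing the originating archive member name into deserialize() — repeats below for every container type read from the project file. A minimal, self-contained sketch of the pattern, using a toy Container class rather than Uranium's real ContainerInterface:

import zipfile

class Container:
    """Toy stand-in for a settings container; only illustrates the new keyword argument."""
    def __init__(self, container_id):
        self._id = container_id
        self._source_file = None   # where the serialized data came from, if known

    def deserialize(self, serialized, file_name = None):
        # Remember the archive member the data was read from so later code can refer back to it.
        self._source_file = file_name
        self._serialized = serialized   # a real container would parse the payload here
        return serialized

def load_containers(project_path, member_names):
    containers = []
    with zipfile.ZipFile(project_path) as archive:
        for member in member_names:
            container = Container(member)
            container.deserialize(archive.open(member).read().decode("utf-8"),
                                  file_name = member)   # mirrors the change above
            containers.append(container)
    return containers
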
@ -208,7 +209,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
instance_container = InstanceContainer(container_id)
# Deserialize InstanceContainer by converting read data from bytes to string
- instance_container.deserialize(archive.open(each_instance_container_file).read().decode("utf-8"))
+ instance_container.deserialize(archive.open(each_instance_container_file).read().decode("utf-8"),
+                                file_name = each_instance_container_file)
instance_container_list.append(instance_container)
container_type = instance_container.getMetaDataEntry("type")
@ -378,7 +380,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
return WorkspaceReader.PreReadResult.accepted
## Overrides an ExtruderStack in the given GlobalStack and returns the new ExtruderStack.
- def _overrideExtruderStack(self, global_stack, extruder_file_content):
+ def _overrideExtruderStack(self, global_stack, extruder_file_content, extruder_stack_file):
# Get extruder position first
extruder_config = configparser.ConfigParser()
extruder_config.read_string(extruder_file_content)
@ -394,7 +396,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
return None
# Override the given extruder stack
- extruder_stack.deserialize(extruder_file_content)
+ extruder_stack.deserialize(extruder_file_content, file_name = extruder_stack_file)
# return the new ExtruderStack
return extruder_stack
@ -484,7 +486,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
definitions = self._container_registry.findDefinitionContainers(id = container_id)
if not definitions:
definition_container = DefinitionContainer(container_id)
- definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"))
+ definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"),
+                                  file_name = definition_container_file)
self._container_registry.addContainer(definition_container)
Job.yieldThread()
@ -502,18 +505,21 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
if not materials:
material_container = xml_material_profile(container_id)
- material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
+ material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"),
+                                file_name = material_container_file)
containers_to_add.append(material_container)
else:
material_container = materials[0]
if not material_container.isReadOnly(): # Only create new materials if they are not read only.
if self._resolve_strategies["material"] == "override":
- material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
+ material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"),
+                                file_name = material_container_file)
elif self._resolve_strategies["material"] == "new":
# Note that we *must* deserialize it with a new ID, as multiple containers will be
# auto created & added.
material_container = xml_material_profile(self.getNewId(container_id))
- material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
+ material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"),
+                                file_name = material_container_file)
containers_to_add.append(material_container)
material_containers.append(material_container)
@ -540,7 +546,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
instance_container = InstanceContainer(container_id)
# Deserialize InstanceContainer by converting read data from bytes to string
- instance_container.deserialize(serialized)
+ instance_container.deserialize(serialized, file_name = instance_container_file)
container_type = instance_container.getMetaDataEntry("type")
Job.yieldThread()
@ -562,7 +568,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
else:
if self._resolve_strategies["machine"] == "override" or self._resolve_strategies["machine"] is None:
instance_container = user_containers[0]
- instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"))
+ instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"),
+                                file_name = instance_container_file)
instance_container.setDirty(True)
elif self._resolve_strategies["machine"] == "new":
# The machine is going to get a spiffy new name, so ensure that the id's of user settings match.
@ -595,7 +602,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
# selected strategy.
if self._resolve_strategies[container_type] == "override":
instance_container = changes_containers[0]
- instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"))
+ instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"),
+                                file_name = instance_container_file)
instance_container.setDirty(True)
elif self._resolve_strategies[container_type] == "new":
@ -656,7 +664,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
# There is a machine, check if it has authentication data. If so, keep that data.
network_authentication_id = container_stacks[0].getMetaDataEntry("network_authentication_id")
network_authentication_key = container_stacks[0].getMetaDataEntry("network_authentication_key")
- container_stacks[0].deserialize(archive.open(global_stack_file).read().decode("utf-8"))
+ container_stacks[0].deserialize(archive.open(global_stack_file).read().decode("utf-8"),
+                                 file_name = global_stack_file)
if network_authentication_id:
container_stacks[0].addMetaDataEntry("network_authentication_id", network_authentication_id)
if network_authentication_key:
@ -666,7 +675,8 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
# create a new global stack
stack = GlobalStack(global_stack_id_new)
# Deserialize stack by converting read data from bytes to string
- stack.deserialize(archive.open(global_stack_file).read().decode("utf-8"))
+ stack.deserialize(archive.open(global_stack_file).read().decode("utf-8"),
+                   file_name = global_stack_file)
# Ensure a unique ID and name
stack._id = global_stack_id_new
@ -706,7 +716,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
if self._resolve_strategies["machine"] == "override":
if global_stack.getProperty("machine_extruder_count", "value") > 1:
# deserialize new extruder stack over the current ones (if any)
- stack = self._overrideExtruderStack(global_stack, extruder_file_content)
+ stack = self._overrideExtruderStack(global_stack, extruder_file_content, extruder_stack_file)
if stack is None:
continue
@ -726,7 +736,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
extruder_config.write(tmp_string_io)
extruder_file_content = tmp_string_io.getvalue()
- stack.deserialize(extruder_file_content)
+ stack.deserialize(extruder_file_content, file_name = extruder_stack_file)
# Ensure a unique ID and name
stack._id = new_id
@ -741,12 +751,15 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
if stack.definitionChanges == self._container_registry.getEmptyInstanceContainer():
stack.setDefinitionChanges(CuraStackBuilder.createDefinitionChangesContainer(stack, stack.getId() + "_settings"))
- extruder_stacks.append(stack)
+ if stack.getMetaDataEntry("type") == "extruder_train":
+     extruder_stacks.append(stack)
# If no extruder stacks were saved in the project file (pre 3.1), create one manually.
# We re-use the container registry's addExtruderStackForSingleExtrusionMachine method for this.
if not extruder_stacks:
-     self._container_registry.addExtruderStackForSingleExtrusionMachine(global_stack, "fdmextruder")
+     stack = self._container_registry.addExtruderStackForSingleExtrusionMachine(global_stack, "fdmextruder")
+     if stack:
+         extruder_stacks.append(stack)
except:
Logger.logException("w", "We failed to serialize the stack. Trying to clean up.")
@ -780,6 +793,46 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
for stack in [global_stack] + extruder_stacks:
stack.replaceContainer(_ContainerIndexes.Quality, empty_quality_container)
+ # Fix quality:
+ # The quality specified in an old project file can be wrong, for example, for UM2 it should be "um2_normal"
+ # but instead it was "normal". This should be fixed by setting it to the correct quality.
+ # Note that this only seems to happen on single-extrusion machines on the global stack, so we only apply the
+ # fix for that case.
+ quality = global_stack.quality
+ if quality.getId() not in ("empty", "empty_quality"):
+     quality_type = quality.getMetaDataEntry("quality_type")
+     quality_containers = self._container_registry.findInstanceContainers(definition = global_stack.definition.getId(),
+                                                                          type = "quality",
+                                                                          quality_type = quality_type)
+     quality_containers = [q for q in quality_containers if q.getMetaDataEntry("material", "") == ""]
+     if quality_containers:
+         global_stack.quality = quality_containers[0]
+     else:
+         # look for "fdmprinter" qualities if the machine-specific qualities cannot be found
+         quality_containers = self._container_registry.findInstanceContainers(definition = "fdmprinter",
+                                                                              type = "quality",
+                                                                              quality_type = quality_type)
+         quality_containers = [q for q in quality_containers if q.getMetaDataEntry("material", "") == ""]
+         if quality_containers:
+             global_stack.quality = quality_containers[0]
+         else:
+             # the quality_type of the quality profile cannot be found.
+             # this can happen if a quality_type has been removed in a newer version, for example:
+             # "extra_coarse" was removed between 2.7 and 3.0
+             # in this case, the quality will be reset to "normal"
+             quality_containers = self._container_registry.findInstanceContainers(
+                 definition = global_stack.definition.getId(),
+                 type = "quality",
+                 quality_type = "normal")
+             quality_containers = [q for q in quality_containers if q.getMetaDataEntry("material", "") == ""]
+             if quality_containers:
+                 global_stack.quality = quality_containers[0]
+             else:
+                 # This should not happen!
+                 Logger.log("e", "Cannot find quality normal for global stack [%s] [%s]",
+                            global_stack.getId(), global_stack.definition.getId())
+                 global_stack.quality = self._container_registry.findInstanceContainers(id = "empty_quality")[0]
# Replacing the old containers if resolve is "new".
# When resolve is "new", some containers will get renamed, so all the other containers that reference them
# MUST get updated too.
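
The quality-repair block added above boils down to three successive registry lookups. A condensed sketch of that fallback order, assuming a registry object exposing findInstanceContainers(**criteria) and returning None where Cura would fall back to the empty quality container:

def pick_quality(registry, machine_definition_id, quality_type):
    # Try, in order: 1) machine-specific profiles for the requested quality_type,
    # 2) generic "fdmprinter" profiles, 3) the machine's "normal" profile.
    searches = [
        {"definition": machine_definition_id, "type": "quality", "quality_type": quality_type},
        {"definition": "fdmprinter", "type": "quality", "quality_type": quality_type},
        {"definition": machine_definition_id, "type": "quality", "quality_type": "normal"},
    ]
    for criteria in searches:
        # Only material-independent profiles are considered, as in the code above.
        candidates = [q for q in registry.findInstanceContainers(**criteria)
                      if q.getMetaDataEntry("material", "") == ""]
        if candidates:
            return candidates[0]
    return None   # caller would log an error and use the empty quality container
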


@ -95,22 +95,22 @@ class ProcessSlicedLayersJob(Job):
# Find the minimum layer number
# When using a raft, the raft layers are sent as layers < 0. Instead of allowing layers < 0, we
- # instead simply offset all other layers so the lowest layer is always 0.
+ # instead simply offset all other layers so the lowest layer is always 0. It can happen that
+ # the first raft layer has value -8 but there are only 4 raft (negative) layers.
min_layer_number = 0
+ negative_layers = 0
for layer in self._layers:
if layer.id < min_layer_number:
min_layer_number = layer.id
+ if layer.id < 0:
+     negative_layers += 1
current_layer = 0
for layer in self._layers:
- abs_layer_number = layer.id + abs(min_layer_number)
- # Workaround when the last layer doesn't have paths, this layer is skipped because this was generating
- # some glitches when rendering.
- if layer.id == len(self._layers)-1 and layer.repeatedMessageCount("path_segment") == 0:
-     Logger.log("i", "No sliced data in the layer", layer.id)
-     continue
+ # Negative layers are offset by the minimum layer number, but the positive layers are just
+ # offset by the number of negative layers so there is no layer gap between raft and model
+ abs_layer_number = layer.id + abs(min_layer_number) if layer.id < 0 else layer.id + negative_layers
layer_data.addLayer(abs_layer_number)
this_layer = layer_data.getLayer(abs_layer_number)
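
The offsetting rule described in the comments above can be checked in isolation. A small sketch with a toy renumber() helper (not the actual ProcessSlicedLayersJob code):

def renumber(layer_ids):
    # Raft layers (id < 0) are shifted up by the most negative id; model layers (id >= 0)
    # are shifted up by the raft layer count, so the final numbering has no gap.
    min_layer_number = min(layer_ids + [0])
    negative_layers = sum(1 for i in layer_ids if i < 0)
    return [i + abs(min_layer_number) if i < 0 else i + negative_layers for i in layer_ids]

# The situation from the comment: the first raft layer is -8, but there are only 4 raft layers.
assert renumber([-8, -7, -6, -5, 0, 1, 2]) == [0, 1, 2, 3, 4, 5, 6]
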


@ -143,6 +143,11 @@ class GCodeReader(MeshReader):
this_layer.polygons.append(this_poly)
return True
+ def _createEmptyLayer(self, layer_number):
+     self._layer_data_builder.addLayer(layer_number)
+     self._layer_data_builder.setLayerHeight(layer_number, 0)
+     self._layer_data_builder.setLayerThickness(layer_number, 0)
def _calculateLineWidth(self, current_point, previous_point, current_extrusion, previous_extrusion, layer_thickness):
# Area of the filament
Af = (self._filament_diameter / 2) ** 2 * numpy.pi
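
The hunk above only shows the filament cross-section Af. A sketch of how a line width can be estimated from it by conserving volume (extruded filament volume equals the volume of the deposited line); the helper name and exact rounding are illustrative, not necessarily what GCodeReader ends up doing:

import math

def estimate_line_width(current_point, previous_point,
                        current_extrusion, previous_extrusion,
                        layer_thickness, filament_diameter = 2.85):
    Af = (filament_diameter / 2) ** 2 * math.pi            # filament cross-section area (mm^2)
    extruded_volume = (current_extrusion - previous_extrusion) * Af
    line_length = math.dist(current_point, previous_point)
    if line_length <= 0 or layer_thickness <= 0:
        return 0
    # volume = width * thickness * length  =>  width = volume / (thickness * length)
    return extruded_volume / (layer_thickness * line_length)

# 0.125 mm of 2.85 mm filament over a 10 mm move at 0.2 mm layer height ~= a 0.4 mm wide line
print(round(estimate_line_width((0, 0), (10, 0), 0.125, 0.0, 0.2), 2))
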
@ -322,6 +327,9 @@ class GCodeReader(MeshReader):
current_position = self._position(0, 0, 0, 0, [0])
current_path = []
min_layer_number = 0
+ negative_layers = 0
+ previous_layer = 0
for line in file:
if self._cancelled:
@ -359,7 +367,23 @@ class GCodeReader(MeshReader):
layer_number = int(line[len(self._layer_keyword):])
self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0]))
current_path.clear()
+ # When using a raft, the raft layers are stored as layers < 0; this mimics the same
+ # behavior as in ProcessSlicedLayersJob.
+ if layer_number < min_layer_number:
+     min_layer_number = layer_number
+ if layer_number < 0:
+     layer_number += abs(min_layer_number)
+     negative_layers += 1
+ else:
+     layer_number += negative_layers
+ # In case there is a gap in the layer count, empty layers are created
+ for empty_layer in range(previous_layer + 1, layer_number):
+     self._createEmptyLayer(empty_layer)
self._layer_number = layer_number
+ previous_layer = layer_number
except:
pass
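
The empty-layer creation above keeps the layer numbering contiguous when the comment-based layer numbers jump. A toy illustration with a plain dict standing in for the LayerDataBuilder:

def add_layer_with_gap_fill(builder, previous_layer, layer_number):
    # Create placeholder layers for any gap, mirroring _createEmptyLayer above.
    for empty_layer in range(previous_layer + 1, layer_number):
        builder[empty_layer] = {"height": 0, "thickness": 0, "paths": []}
    builder[layer_number] = {"paths": []}   # the real layer gets filled in afterwards
    return layer_number                      # becomes the new previous_layer

builder = {}
previous = add_layer_with_gap_fill(builder, 0, 3)
previous = add_layer_with_gap_fill(builder, previous, 7)   # jumps over layers 4-6
assert sorted(builder) == [1, 2, 3, 4, 5, 6, 7]
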


@ -34,6 +34,7 @@ class SimulationPass(RenderPass):
self._nozzle_shader = None
self._old_current_layer = 0
self._old_current_path = 0
+ self._switching_layers = True  # Tracks whether the user is moving the layer slider
self._gl = OpenGL.getInstance().getBindingsObject()
self._scene = Application.getInstance().getController().getScene()
self._extruder_manager = ExtruderManager.getInstance()
@ -91,7 +92,7 @@ class SimulationPass(RenderPass):
self.bind()
- tool_handle_batch = RenderBatch(self._tool_handle_shader, type = RenderBatch.RenderType.Solid)
+ tool_handle_batch = RenderBatch(self._tool_handle_shader, type = RenderBatch.RenderType.Overlay)
head_position = None # Indicates the current position of the print head
nozzle_node = None
@ -143,8 +144,10 @@ class SimulationPass(RenderPass):
# All the layers but the current selected layer are rendered first
if self._old_current_path != self._layer_view._current_path_num:
self._current_shader = self._layer_shadow_shader
+ self._switching_layers = False
if not self._layer_view.isSimulationRunning() and self._old_current_layer != self._layer_view._current_layer_num:
self._current_shader = self._layer_shader
+ self._switching_layers = True
layers_batch = RenderBatch(self._current_shader, type = RenderBatch.RenderType.Solid, mode = RenderBatch.RenderMode.Lines, range = (start, end))
layers_batch.addItem(node.getWorldTransformation(), layer_data)
@ -170,8 +173,9 @@ class SimulationPass(RenderPass):
if len(batch.items) > 0:
batch.render(self._scene.getActiveCamera())
- # The nozzle is drawn once we know the correct position
- if not self._compatibility_mode and self._layer_view.getActivity() and nozzle_node is not None:
+ # The nozzle is drawn once we know the correct position of the head, but only while the
+ # user is not moving the layer slider and compatibility mode is not enabled.
+ if not self._switching_layers and not self._compatibility_mode and self._layer_view.getActivity() and nozzle_node is not None:
if head_position is not None:
nozzle_node.setVisible(True)
nozzle_node.setPosition(head_position)


@ -376,7 +376,7 @@ class SimulationView(View):
if layer is None:
return
new_max_paths = layer.lineMeshElementCount()
- if new_max_paths > 0 and new_max_paths != self._max_paths:
+ if new_max_paths >= 0 and new_max_paths != self._max_paths:
self._max_paths = new_max_paths
self.maxPathsChanged.emit()


@ -138,10 +138,11 @@ Item
text: catalog.i18nc("@label:listbox", "Feedrate"),
type_id: 2
})
- layerViewTypes.append({
-     text: catalog.i18nc("@label:listbox", "Layer thickness"),
-     type_id: 3 // these ids match the switching in the shader
- })
+ // TODO DON'T DELETE!!!! This part must be enabled when adaptive layer height feature is available
+ // layerViewTypes.append({
+ //     text: catalog.i18nc("@label:listbox", "Layer thickness"),
+ //     type_id: 3 // these ids match the switching in the shader
+ // })
}
ComboBox
@ -619,7 +620,7 @@ Item
Timer
{
id: simulationTimer
- interval: 250
+ interval: 100
running: false
repeat: true
onTriggered: {


@ -11,7 +11,7 @@ catalog = i18nCatalog("cura")
def getMetaData():
return {
"view": {
-     "name": catalog.i18nc("@item:inlistbox", "Simulation view"),
+     "name": catalog.i18nc("@item:inlistbox", "Layer view"),
"view_panel": "SimulationView.qml",
"weight": 2
}


@ -116,7 +116,10 @@ class VersionUpgrade30to31(VersionUpgrade):
all_quality_changes = self._getSingleExtrusionMachineQualityChanges(parser)
# NOTE: DO NOT use the quality_changes returned from _getSingleExtrusionMachineQualityChanges().
# Those are loaded from the hard drive and are the original files that haven't been upgraded yet.
- if len(all_quality_changes) == 1 and not parser.has_option("metadata", "extruder"):
+ # NOTE 2: The number can be 0 or 1 depending on whether you are loading it from the qualities folder or
+ #         from a project file. When you load from a project file, the custom profile may not be in Cura
+ #         yet, so you will get 0.
+ if len(all_quality_changes) <= 1 and not parser.has_option("metadata", "extruder"):
self._createExtruderQualityChangesForSingleExtrusionMachine(filename, parser)
# Update version numbers
@ -199,7 +202,7 @@ class VersionUpgrade30to31(VersionUpgrade):
def _createExtruderQualityChangesForSingleExtrusionMachine(self, filename, global_quality_changes):
suffix = "_" + quote_plus(global_quality_changes["general"]["name"].lower())
- machine_name = filename.strip("." + os.sep).replace(suffix, "")
+ machine_name = os.path.os.path.basename(filename).replace(".inst.cfg", "").replace(suffix, "")
new_filename = machine_name + "_" + "fdmextruder" + suffix
extruder_quality_changes_parser = configparser.ConfigParser()
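
The filename handling above derives the machine name from the file's basename instead of a stripped path. A small sketch of that derivation; the path and profile name are made up for illustration:

import os
from urllib.parse import quote_plus

def extruder_quality_changes_filename(filename, profile_name):
    suffix = "_" + quote_plus(profile_name.lower())
    machine_name = os.path.basename(filename).replace(".inst.cfg", "").replace(suffix, "")
    return machine_name + "_" + "fdmextruder" + suffix

# e.g. a hypothetical profile "Draft" saved for machine "my_machine":
print(extruder_quality_changes_filename("/home/user/.config/cura/quality/my_machine_draft.inst.cfg", "Draft"))
# -> my_machine_fdmextruder_draft
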


@ -422,11 +422,11 @@ class XmlMaterialProfile(InstanceContainer):
return version * 1000000 + setting_version
## Overridden from InstanceContainer
- def deserialize(self, serialized):
+ def deserialize(self, serialized, file_name = None):
containers_to_add = []
# update the serialized data first
from UM.Settings.Interfaces import ContainerInterface
- serialized = ContainerInterface.deserialize(self, serialized)
+ serialized = ContainerInterface.deserialize(self, serialized, file_name)
try:
data = ET.fromstring(serialized)
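
Subclasses that override deserialize() have to accept and forward the new optional file_name argument, otherwise the callers earlier in this commit that pass it would raise a TypeError. A minimal sketch with toy classes (not Uranium's real ContainerInterface):

class BaseContainer:
    def deserialize(self, serialized, file_name = None):
        self._source_file = file_name   # base class records where the data came from
        return serialized

class ToyMaterialProfile(BaseContainer):
    def deserialize(self, serialized, file_name = None):
        serialized = super().deserialize(serialized, file_name)   # forward, don't drop it
        # stand-in for the XML parsing the real profile does
        self._fields = dict(item.split("=", 1) for item in serialized.split(";") if item)
        return serialized

profile = ToyMaterialProfile()
profile.deserialize("brand=Generic;material=PLA", file_name = "generic_pla.xml.fdm_material")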