Merge branch 'main' into 050922b

digitalfrost committed 2022-09-13 10:50:39 +02:00
commit fe483d4656
552 changed files with 3424 additions and 2631 deletions

View file

@@ -21,6 +21,7 @@ class ArrangeObjectsJob(Job):
self._min_offset = min_offset
def run(self):
found_solution_for_all = False
status_message = Message(i18n_catalog.i18nc("@info:status", "Finding new location for objects"),
lifetime = 0,
dismissable = False,
@@ -28,18 +29,19 @@ class ArrangeObjectsJob(Job):
title = i18n_catalog.i18nc("@info:title", "Finding Location"))
status_message.show()
found_solution_for_all = None
try:
found_solution_for_all = arrange(self._nodes, Application.getInstance().getBuildVolume(), self._fixed_nodes)
except: # If the thread crashes, the message should still close
Logger.logException("e", "Unable to arrange the objects on the buildplate. The arrange algorithm has crashed.")
status_message.hide()
if found_solution_for_all is not None and not found_solution_for_all:
if not found_solution_for_all:
no_full_solution_message = Message(
i18n_catalog.i18nc("@info:status",
"Unable to find a location within the build volume for all objects"),
title = i18n_catalog.i18nc("@info:title", "Can't Find Location"),
message_type = Message.MessageType.ERROR)
no_full_solution_message.show()
self.finished.emit(self)
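A minimal sketch of the pattern used in this hunk, with generic stand-ins for the arrange call, the progress message, and the reporting helpers (not Cura's actual signatures): the result flag is initialised before the try block so the final check is always defined, and the progress message is hidden even if arranging raises.

# Sketch only: try_arrange, progress_message, log_exception and show_error are stand-ins.
def run_arrange_job(nodes, try_arrange, progress_message, log_exception, show_error):
    found_solution_for_all = False   # defined up front, so the check below never hits an unbound name
    progress_message.show()
    try:
        found_solution_for_all = try_arrange(nodes)
    except Exception:
        # If arranging crashes, the progress message must still be closed.
        log_exception("Unable to arrange the objects on the buildplate.")
    progress_message.hide()
    if not found_solution_for_all:
        show_error("Unable to find a location within the build volume for all objects")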

View file

@@ -24,9 +24,12 @@ class LayerPolygon:
PrimeTowerType = 11
__number_of_types = 12
__jump_map = numpy.logical_or(numpy.logical_or(numpy.arange(__number_of_types) == NoneType, numpy.arange(__number_of_types) == MoveCombingType), numpy.arange(__number_of_types) == MoveRetractionType)
__jump_map = numpy.logical_or(numpy.logical_or(numpy.arange(__number_of_types) == NoneType,
numpy.arange(__number_of_types) == MoveCombingType),
numpy.arange(__number_of_types) == MoveRetractionType)
def __init__(self, extruder: int, line_types: numpy.ndarray, data: numpy.ndarray, line_widths: numpy.ndarray, line_thicknesses: numpy.ndarray, line_feedrates: numpy.ndarray) -> None:
def __init__(self, extruder: int, line_types: numpy.ndarray, data: numpy.ndarray,
line_widths: numpy.ndarray, line_thicknesses: numpy.ndarray, line_feedrates: numpy.ndarray) -> None:
"""LayerPolygon, used in ProcessSlicedLayersJob
:param extruder: The position of the extruder
@@ -39,10 +42,12 @@ class LayerPolygon:
self._extruder = extruder
self._types = line_types
for i in range(len(self._types)):
if self._types[i] >= self.__number_of_types: # Got faulty line data from the engine.
Logger.log("w", "Found an unknown line type: %s", i)
self._types[i] = self.NoneType
unknown_types = numpy.where(self._types >= self.__number_of_types, self._types, None)
if unknown_types.any():
# Got faulty line data from the engine.
for idx in unknown_types:
Logger.warning(f"Found an unknown line type at: {idx}")
self._types[idx] = self.NoneType
self._data = data
self._line_widths = line_widths
self._line_thicknesses = line_thicknesses
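A minimal sketch of the vectorized check introduced in this hunk, assuming numpy.where(condition)[0] is used to obtain the indices of the faulty entries; the constants are illustrative stand-ins for the class attributes above.

import numpy

NONE_TYPE = 0
NUMBER_OF_TYPES = 12   # illustrative values matching __number_of_types above

types = numpy.array([1, 3, 14, 6, 99], dtype=numpy.int32)
unknown_indices = numpy.where(types >= NUMBER_OF_TYPES)[0]   # -> array([2, 4])
for idx in unknown_indices:
    print(f"Found an unknown line type at: {idx}")
types[unknown_indices] = NONE_TYPE   # reset the faulty entries in one assignment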
@@ -58,14 +63,16 @@ class LayerPolygon:
self._mesh_line_count = len(self._types) - self._jump_count
self._vertex_count = self._mesh_line_count + numpy.sum(self._types[1:] == self._types[:-1])
# Buffering the colors shouldn't be necessary as it is not
# Buffering the colors shouldn't be necessary as it is not
# re-used and can save a lot of memory usage.
self._color_map = LayerPolygon.getColorMap()
self._colors = self._color_map[self._types] # type: numpy.ndarray
# When type is used as index returns true if type == LayerPolygon.InfillType or type == LayerPolygon.SkinType or type == LayerPolygon.SupportInfillType
# When type is used as index returns true if type == LayerPolygon.InfillType
# or type == LayerPolygon.SkinType
# or type == LayerPolygon.SupportInfillType
# Should be generated in better way, not hardcoded.
self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype = bool)
self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype=bool)
self._build_cache_line_mesh_mask = None # type: Optional[numpy.ndarray]
self._build_cache_needed_points = None # type: Optional[numpy.ndarray]
@@ -80,12 +87,14 @@ class LayerPolygon:
# Only if the type of line segment changes do we need to add an extra vertex to change colors
self._build_cache_needed_points[1:, 0][:, numpy.newaxis] = self._types[1:] != self._types[:-1]
# Mark points as unneeded if they are of types we don't want in the line mesh according to the calculated mask
numpy.logical_and(self._build_cache_needed_points, self._build_cache_line_mesh_mask, self._build_cache_needed_points )
numpy.logical_and(self._build_cache_needed_points, self._build_cache_line_mesh_mask, self._build_cache_needed_points)
self._vertex_begin = 0
self._vertex_end = cast(int, numpy.sum(self._build_cache_needed_points))
def build(self, vertex_offset: int, index_offset: int, vertices: numpy.ndarray, colors: numpy.ndarray, line_dimensions: numpy.ndarray, feedrates: numpy.ndarray, extruders: numpy.ndarray, line_types: numpy.ndarray, indices: numpy.ndarray) -> None:
def build(self, vertex_offset: int, index_offset: int, vertices: numpy.ndarray,
colors: numpy.ndarray, line_dimensions: numpy.ndarray, feedrates: numpy.ndarray,
extruders: numpy.ndarray, line_types: numpy.ndarray, indices: numpy.ndarray) -> None:
"""Set all the arrays provided by the function caller, representing the LayerPolygon
The arrays are either by vertex or by indices.
@@ -111,19 +120,20 @@ class LayerPolygon:
line_mesh_mask = self._build_cache_line_mesh_mask
needed_points_list = self._build_cache_needed_points
# Index to the points we need to represent the line mesh. This is constructed by generating simple
# start and end points for each line. For line segment n these are points n and n+1. Row n reads [n n+1]
# Then then the indices for the points we don't need are thrown away based on the pre-calculated list.
index_list = ( numpy.arange(len(self._types)).reshape((-1, 1)) + numpy.array([[0, 1]]) ).reshape((-1, 1))[needed_points_list.reshape((-1, 1))]
# Index to the points we need to represent the line mesh.
# This is constructed by generating simple start and end points for each line.
# For line segment n, these are points n and n+1. Row n reads [n n+1]
# Then the indices for the points we don't need are thrown away based on the pre-calculated list.
index_list = (numpy.arange(len(self._types)).reshape((-1, 1)) + numpy.array([[0, 1]])).reshape((-1, 1))[needed_points_list.reshape((-1, 1))]
# The relative values of begin and end indices have already been set in buildCache, so we only need to offset them to the parent's offset.
self._vertex_begin += vertex_offset
self._vertex_end += vertex_offset
# Points are picked based on the index list to get the vertices needed.
# Points are picked based on the index list to get the vertices needed.
vertices[self._vertex_begin:self._vertex_end, :] = self._data[index_list, :]
# Create an array with colors for each vertex and remove the color data for the points that have been thrown away.
# Create an array with colors for each vertex and remove the color data for the points that have been thrown away.
colors[self._vertex_begin:self._vertex_end, :] = numpy.tile(self._colors, (1, 2)).reshape((-1, 4))[needed_points_list.ravel()]
# Create an array with line widths and thicknesses for each vertex.
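A small worked example of the index construction described in this hunk: every line segment n contributes the point pair [n, n+1], and the pairs for points that are not needed are then dropped with the boolean mask. The arrays below are illustrative.

import numpy

n_lines = 4
pairs = numpy.arange(n_lines).reshape((-1, 1)) + numpy.array([[0, 1]])
# pairs == [[0 1] [1 2] [2 3] [3 4]]: start and end point index of each segment
needed_points_list = numpy.array([[True, True], [True, False], [False, True], [True, True]])
index_list = pairs.reshape((-1, 1))[needed_points_list.reshape((-1, 1))]
# index_list == [0 1 1 3 3 4]: only the point indices whose mask entry is True survive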
@@ -138,14 +148,15 @@ class LayerPolygon:
# Convert type per vertex to type per line
line_types[self._vertex_begin:self._vertex_end] = numpy.tile(self._types, (1, 2)).reshape((-1, 1))[needed_points_list.ravel()][:, 0]
# The relative values of begin and end indices have already been set in buildCache, so we only need to offset them to the parent's offset.
# The relative values of begin and end indices have already been set in buildCache,
# so we only need to offset them to the parent's offset.
self._index_begin += index_offset
self._index_end += index_offset
indices[self._index_begin:self._index_end, :] = numpy.arange(self._index_end-self._index_begin, dtype = numpy.int32).reshape((-1, 1))
indices[self._index_begin:self._index_end, :] = numpy.arange(self._index_end-self._index_begin, dtype=numpy.int32).reshape((-1, 1))
# When the line type changes the index needs to be increased by 2.
indices[self._index_begin:self._index_end, :] += numpy.cumsum(needed_points_list[line_mesh_mask.ravel(), 0], dtype = numpy.int32).reshape((-1, 1))
# Each line segment goes from its starting point p to p+1, offset by the vertex index.
# Each line segment goes from its starting point p to p+1, offset by the vertex index.
# The -1 is to compensate for the necessarily True value of needed_points_list[0,0] which causes an unwanted +1 in cumsum above.
indices[self._index_begin:self._index_end, :] += numpy.array([self._vertex_begin - 1, self._vertex_begin])
@@ -214,13 +225,12 @@ class LayerPolygon:
"""
normals = numpy.copy(self._data)
normals[:, 1] = 0.0 # We are only interested in 2D normals
normals[:, 1] = 0.0 # We are only interested in 2D normals
# Calculate the edges between points.
# The call to numpy.roll shifts the entire array by one so that
# we end up subtracting each next point from the current, wrapping
# around. This gives us the edges from the next point to the current
# point.
# The call to numpy.roll shifts the entire array by one
# so that we end up subtracting each next point from the current, wrapping around.
# This gives us the edges from the next point to the current point.
normals = numpy.diff(normals, 1, 0)
# Calculate the length of each edge using standard Pythagoras
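A minimal sketch of the edge computation described in this hunk, assuming the data holds [x, y, z] points with the Y column zeroed out as in the code; the sample points are illustrative.

import numpy

points = numpy.array([[0.0, 5.0, 0.0], [3.0, 5.0, 0.0], [3.0, 5.0, 4.0]])
points[:, 1] = 0.0                               # only the 2D (X/Z) normals are of interest
edges = numpy.diff(points, 1, 0)                 # vector from each point to the next one
lengths = numpy.hypot(edges[:, 0], edges[:, 2])  # Pythagoras per edge -> [3.0, 4.0]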
@@ -245,17 +255,17 @@ class LayerPolygon:
if cls.__color_map is None:
theme = cast(Theme, QtApplication.getInstance().getTheme())
cls.__color_map = numpy.array([
theme.getColor("layerview_none").getRgbF(), # NoneType
theme.getColor("layerview_inset_0").getRgbF(), # Inset0Type
theme.getColor("layerview_inset_x").getRgbF(), # InsetXType
theme.getColor("layerview_skin").getRgbF(), # SkinType
theme.getColor("layerview_support").getRgbF(), # SupportType
theme.getColor("layerview_skirt").getRgbF(), # SkirtType
theme.getColor("layerview_infill").getRgbF(), # InfillType
theme.getColor("layerview_support_infill").getRgbF(), # SupportInfillType
theme.getColor("layerview_move_combing").getRgbF(), # MoveCombingType
theme.getColor("layerview_move_retraction").getRgbF(), # MoveRetractionType
theme.getColor("layerview_support_interface").getRgbF(), # SupportInterfaceType
theme.getColor("layerview_none").getRgbF(), # NoneType
theme.getColor("layerview_inset_0").getRgbF(), # Inset0Type
theme.getColor("layerview_inset_x").getRgbF(), # InsetXType
theme.getColor("layerview_skin").getRgbF(), # SkinType
theme.getColor("layerview_support").getRgbF(), # SupportType
theme.getColor("layerview_skirt").getRgbF(), # SkirtType
theme.getColor("layerview_infill").getRgbF(), # InfillType
theme.getColor("layerview_support_infill").getRgbF(), # SupportInfillType
theme.getColor("layerview_move_combing").getRgbF(), # MoveCombingType
theme.getColor("layerview_move_retraction").getRgbF(), # MoveRetractionType
theme.getColor("layerview_support_interface").getRgbF(), # SupportInterfaceType
theme.getColor("layerview_prime_tower").getRgbF() # PrimeTowerType
])

View file

@@ -43,7 +43,7 @@ class MachineErrorChecker(QObject):
self._application = cura.CuraApplication.CuraApplication.getInstance()
self._machine_manager = self._application.getMachineManager()
self._start_time = 0. # measure checking time
self._check_start_time = time.time()
self._setCheckTimer()
@@ -160,7 +160,7 @@ class MachineErrorChecker(QObject):
self._stacks_and_keys_to_check.append((stack, key))
self._application.callLater(self._checkStack)
self._start_time = time.time()
self._check_start_time = time.time()
Logger.log("d", "New error check scheduled.")
def _checkStack(self) -> None:
@@ -212,12 +212,10 @@ class MachineErrorChecker(QObject):
self._has_errors = result
self.hasErrorUpdated.emit()
self._machine_manager.stacksValidationChanged.emit()
if keys_to_recheck is None:
self._keys_to_check = set()
else:
self._keys_to_check = keys_to_recheck
self._keys_to_check = keys_to_recheck if keys_to_recheck else set()
self._need_to_check = False
self._check_in_progress = False
self.needToWaitForResultChanged.emit()
self.errorCheckFinished.emit()
Logger.log("i", "Error check finished, result = %s, time = %0.1fs", result, time.time() - self._start_time)
execution_time = time.time() - self._check_start_time
Logger.info(f"Error check finished, result = {result}, time = {execution_time:.2f}s")

View file

@@ -44,6 +44,7 @@ class GlobalStacksModel(ListModel):
self._filter_connection_type = None # type: Optional[ConnectionType]
self._filter_online_only = False
self._filter_capabilities: List[str] = [] # Required capabilities that all listed printers must have.
self._filter_abstract_machines: Optional[bool] = None
# Listen to changes
CuraContainerRegistry.getInstance().containerAdded.connect(self._onContainerChanged)
@@ -54,6 +55,7 @@ class GlobalStacksModel(ListModel):
filterConnectionTypeChanged = pyqtSignal()
filterCapabilitiesChanged = pyqtSignal()
filterOnlineOnlyChanged = pyqtSignal()
filterAbstractMachinesChanged = pyqtSignal()
def setFilterConnectionType(self, new_filter: Optional[ConnectionType]) -> None:
if self._filter_connection_type != new_filter:
@@ -98,6 +100,22 @@ class GlobalStacksModel(ListModel):
"""
return self._filter_capabilities
def setFilterAbstractMachines(self, new_filter: Optional[bool]) -> None:
if self._filter_abstract_machines != new_filter:
self._filter_abstract_machines = new_filter
self.filterAbstractMachinesChanged.emit()
@pyqtProperty(bool, fset = setFilterAbstractMachines, notify = filterAbstractMachinesChanged)
def filterAbstractMachines(self) -> Optional[bool]:
"""
Whether we include abstract printers, non-abstract printers or both.
If this is set to None, both abstract and non-abstract printers will be included in the list;
set to True to include only abstract printers;
set to False to include only non-abstract printers.
"""
return self._filter_abstract_machines
def _onContainerChanged(self, container) -> None:
"""Handler for container added/removed events from registry"""
@@ -130,6 +148,10 @@ class GlobalStacksModel(ListModel):
if self._filter_online_only and not is_online:
continue
is_abstract_machine = parseBool(container_stack.getMetaDataEntry("is_abstract_machine", False))
if self._filter_abstract_machines is not None and self._filter_abstract_machines is not is_abstract_machine:
continue
capabilities = set(container_stack.getMetaDataEntry(META_CAPABILITIES, "").split(","))
if set(self._filter_capabilities) - capabilities: # Not all required capabilities are met.
continue
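A small sketch of the tri-state filter added in this file: None keeps every stack, True keeps only abstract machines, and False keeps only concrete ones. The dicts and names below are illustrative stand-ins for container stacks.

from typing import Optional

def passes_abstract_filter(is_abstract_machine: bool, filter_abstract_machines: Optional[bool]) -> bool:
    if filter_abstract_machines is None:    # None means: do not filter on this property
        return True
    return filter_abstract_machines is is_abstract_machine

stacks = [{"name": "abstract printer group", "is_abstract_machine": True},
          {"name": "networked printer", "is_abstract_machine": False}]
abstract_only = [s for s in stacks if passes_abstract_filter(s["is_abstract_machine"], True)]
concrete_only = [s for s in stacks if passes_abstract_filter(s["is_abstract_machine"], False)]
everything = [s for s in stacks if passes_abstract_filter(s["is_abstract_machine"], None)]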

View file

@@ -89,16 +89,19 @@ class MachineListModel(ListModel):
machines_manager = CuraApplication.getInstance().getMachineManager()
online_machine_stacks = machines_manager.getMachinesWithDefinition(definition_id, online_only = True)
# Create a list item for abstract machine
self.addItem(abstract_machine, len(online_machine_stacks))
online_machine_stacks = list(filter(lambda machine: machine.hasNetworkedConnection(), online_machine_stacks))
other_machine_stacks.remove(abstract_machine)
if abstract_machine in online_machine_stacks:
online_machine_stacks.remove(abstract_machine)
# Create a list item for abstract machine
self.addItem(abstract_machine, True, len(online_machine_stacks))
# Create list of machines that are children of the abstract machine
for stack in online_machine_stacks:
if self._show_cloud_printers:
self.addItem(stack)
self.addItem(stack, True)
# Remove this machine from the other stack list
if stack in other_machine_stacks:
other_machine_stacks.remove(stack)
@@ -118,25 +121,18 @@ class MachineListModel(ListModel):
})
for stack in other_machine_stacks:
self.addItem(stack)
self.addItem(stack, False)
def addItem(self, container_stack: ContainerStack, machine_count: int = 0) -> None:
def addItem(self, container_stack: ContainerStack, is_online: bool, machine_count: int = 0) -> None:
if parseBool(container_stack.getMetaDataEntry("hidden", False)):
return
# This is required because machines loaded from projects have the is_online="True" but no connection type.
# We want to display them the same way as unconnected printers in this case.
has_connection = False
has_connection |= parseBool(container_stack.getMetaDataEntry("is_abstract_machine", False))
for connection_type in [ConnectionType.NetworkConnection.value, ConnectionType.CloudConnection.value]:
has_connection |= connection_type in container_stack.configuredConnectionTypes
self.appendItem({
"componentType": "MACHINE",
"name": container_stack.getName(),
"id": container_stack.getId(),
"metadata": container_stack.getMetaData().copy(),
"isOnline": parseBool(container_stack.getMetaDataEntry("is_online", False)) and has_connection,
"isOnline": is_online,
"isAbstractMachine": parseBool(container_stack.getMetaDataEntry("is_abstract_machine", False)),
"machineCount": machine_count,
})

View file

@@ -49,7 +49,7 @@ class CuraContainerStack(ContainerStack):
self._empty_material = cura_empty_instance_containers.empty_material_container #type: InstanceContainer
self._empty_variant = cura_empty_instance_containers.empty_variant_container #type: InstanceContainer
self._containers = [self._empty_instance_container for i in range(len(_ContainerIndexes.IndexTypeMap))] #type: List[ContainerInterface]
self._containers: List[ContainerInterface] = [self._empty_instance_container for i in _ContainerIndexes.IndexTypeMap]
self._containers[_ContainerIndexes.QualityChanges] = self._empty_quality_changes
self._containers[_ContainerIndexes.Quality] = self._empty_quality
self._containers[_ContainerIndexes.Material] = self._empty_material
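The rewritten one-liner above relies on the fact that iterating a dict yields its keys, so the list still gets one empty container per entry in IndexTypeMap. A tiny illustration with a stand-in map:

index_type_map = {0: "user", 1: "quality_changes", 2: "quality", 3: "material"}  # stand-in for _ContainerIndexes.IndexTypeMap
empty_container = object()
containers = [empty_container for _ in index_type_map]   # iterating the dict yields its keys
assert len(containers) == len(index_type_map)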

View file

@@ -347,6 +347,12 @@ class GlobalStack(CuraContainerStack):
nameChanged = pyqtSignal()
name = pyqtProperty(str, fget=getName, fset=setName, notify=nameChanged)
def hasNetworkedConnection(self) -> bool:
has_connection = False
for connection_type in [ConnectionType.NetworkConnection.value, ConnectionType.CloudConnection.value]:
has_connection |= connection_type in self.configuredConnectionTypes
return has_connection
## private:
global_stack_mime = MimeType(
name = "application/x-cura-globalstack",

View file

@@ -186,7 +186,7 @@ class PrintInformation(QObject):
if time != time: # Check for NaN. Engine can sometimes give us weird values.
duration.setDuration(0)
Logger.log("w", "Received NaN for print duration message")
Logger.warning("Received NaN for print duration message")
continue
total_estimated_time += time
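The `time != time` comparison in this hunk works because NaN is the only float value that does not compare equal to itself; a tiny sketch, with math.isnan as the more explicit spelling of the same check:

import math

value = float("nan")
print(value != value)       # True: NaN never equals itself
print(math.isnan(value))    # True: explicit form of the same test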
@@ -368,7 +368,7 @@ class PrintInformation(QObject):
mime_type = MimeTypeDatabase.getMimeTypeForFile(name)
data = mime_type.stripExtension(name)
except MimeTypeNotFoundError:
Logger.log("w", "Unsupported Mime Type Database file extension %s", name)
Logger.warning(f"Unsupported Mime Type Database file extension {name}")
if data is not None and check_name is not None:
self._base_name = data

View file

@@ -62,15 +62,21 @@ class WhatsNewPagesModel(WelcomePagesModel):
def initialize(self) -> None:
self._pages = []
self._pages.append({"id": "whats_new",
"page_url": self._getBuiltinWelcomePagePath("WhatsNewContent.qml"),
"next_page_button_text": self._catalog.i18nc("@action:button", "Skip"),
"next_page_id": "changelog"
})
self._pages.append({"id": "changelog",
"page_url": self._getBuiltinWelcomePagePath("ChangelogContent.qml"),
"next_page_button_text": self._catalog.i18nc("@action:button", "Close"),
})
try:
self._pages.append({"id": "whats_new",
"page_url": self._getBuiltinWelcomePagePath("WhatsNewContent.qml"),
"next_page_button_text": self._catalog.i18nc("@action:button", "Skip"),
"next_page_id": "changelog"
})
except FileNotFoundError:
Logger.warning("Unable to find what's new page")
try:
self._pages.append({"id": "changelog",
"page_url": self._getBuiltinWelcomePagePath("ChangelogContent.qml"),
"next_page_button_text": self._catalog.i18nc("@action:button", "Close"),
})
except FileNotFoundError:
Logger.warning("Unable to find changelog page")
self.setItems(self._pages)
images, max_image = WhatsNewPagesModel._collectOrdinalFiles(Resources.Images, WhatsNewPagesModel.image_formats)
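A minimal sketch of the defensive pattern introduced in this file: each optional page is appended inside its own try block, so a missing QML file only logs a warning instead of aborting initialization. append_page and the path resolver are illustrative, not Cura's API.

import logging

def resolve_page_path(file_name: str) -> str:
    raise FileNotFoundError(file_name)    # stand-in for a resource lookup that may fail

def append_page(pages, page_id, **extra):
    try:
        pages.append({"id": page_id, "page_url": resolve_page_path(f"{page_id}.qml"), **extra})
    except FileNotFoundError:
        logging.warning("Unable to find %s page", page_id)

pages = []
append_page(pages, "whats_new", next_page_id="changelog")
append_page(pages, "changelog")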