Merge branch 'main' into AddCoolingProfile
plugins/3DConnexion/NavlibClient.py (new file, 273 lines)
@@ -0,0 +1,273 @@
# Copyright (c) 2025 3Dconnexion, UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

from typing import Optional
from UM.Math.Matrix import Matrix
from UM.Math.Vector import Vector
from UM.Math.AxisAlignedBox import AxisAlignedBox
from cura.PickingPass import PickingPass
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Scene.SceneNode import SceneNode
from UM.Scene.Scene import Scene
from UM.Resources import Resources
from UM.Tool import Tool
from UM.View.Renderer import Renderer
from .OverlayNode import OverlayNode
import pynavlib.pynavlib_interface as pynav


class NavlibClient(pynav.NavlibNavigationModel, Tool):

    def __init__(self, scene: Scene, renderer: Renderer) -> None:
        pynav.NavlibNavigationModel.__init__(self, False, pynav.NavlibOptions.RowMajorOrder)
        Tool.__init__(self)
        self._scene = scene
        self._renderer = renderer
        self._pointer_pick = None
        self._was_pick = False
        self._hit_selection_only = False
        self._picking_pass = None
        self._pivot_node = OverlayNode(node=SceneNode(), image_path=Resources.getPath(Resources.Images, "cor.png"), size=2.5)
        self.put_profile_hint("UltiMaker Cura")
        self.enable_navigation(True)

    def pick(self, x: float, y: float, check_selection: bool = False, radius: float = 0.) -> Optional[Vector]:

        if self._picking_pass is None or radius < 0.:
            return None

        step = 0.
        if radius == 0.:
            grid_resolution = 0
        else:
            grid_resolution = 5
            step = (2. * radius) / float(grid_resolution)

        min_depth = 99999.
        result_position = None

        for i in range(grid_resolution + 1):
            for j in range(grid_resolution + 1):

                coord_x = (x - radius) + i * step
                coord_y = (y - radius) + j * step

                picked_depth = self._picking_pass.getPickedDepth(coord_x, coord_y)
                max_depth = 16777.215

                if 0. < picked_depth < max_depth:

                    valid_hit = True
                    if check_selection:
                        selection_pass = self._renderer.getRenderPass("selection")
                        picked_object_id = selection_pass.getIdAtPosition(coord_x, coord_y)
                        picked_object = self._scene.findObject(picked_object_id)

                        from UM.Scene.Selection import Selection
                        valid_hit = Selection.isSelected(picked_object)

                    if not valid_hit and grid_resolution > 0.:
                        continue
                    elif not valid_hit and grid_resolution == 0.:
                        return None

                    if picked_depth < min_depth:
                        min_depth = picked_depth
                        result_position = self._picking_pass.getPickedPosition(coord_x, coord_y)

        return result_position

    def get_pointer_position(self) -> "pynav.NavlibVector":

        from UM.Qt.QtApplication import QtApplication
        main_window = QtApplication.getInstance().getMainWindow()

        x_n = 2. * main_window._mouse_x / main_window.width() - 1.
        y_n = 2. * main_window._mouse_y / main_window.height() - 1.

        if self.get_is_view_perspective():
            self._was_pick = True
            from cura.Utils.Threading import call_on_qt_thread
            wrapped_pick = call_on_qt_thread(self.pick)

            self._pointer_pick = wrapped_pick(x_n, y_n)

            return pynav.NavlibVector(0., 0., 0.)
        else:
            ray = self._scene.getActiveCamera().getRay(x_n, y_n)
            pointer_position = ray.origin + ray.direction

            return pynav.NavlibVector(pointer_position.x, pointer_position.y, pointer_position.z)

    def get_view_extents(self) -> "pynav.NavlibBox":

        view_width = self._scene.getActiveCamera().getViewportWidth()
        view_height = self._scene.getActiveCamera().getViewportHeight()
        horizontal_zoom = view_width * self._scene.getActiveCamera().getZoomFactor()
        vertical_zoom = view_height * self._scene.getActiveCamera().getZoomFactor()

        pt_min = pynav.NavlibVector(-view_width / 2 - horizontal_zoom, -view_height / 2 - vertical_zoom, -9001)
        pt_max = pynav.NavlibVector(view_width / 2 + horizontal_zoom, view_height / 2 + vertical_zoom, 9001)

        return pynav.NavlibBox(pt_min, pt_max)

    def get_view_frustum(self) -> "pynav.NavlibFrustum":

        projection_matrix = self._scene.getActiveCamera().getProjectionMatrix()
        half_height = 2. / projection_matrix.getData()[1,1]
        half_width = half_height * (projection_matrix.getData()[1,1] / projection_matrix.getData()[0,0])

        return pynav.NavlibFrustum(-half_width, half_width, -half_height, half_height, 1., 5000.)

    def get_is_view_perspective(self) -> bool:
        return self._scene.getActiveCamera().isPerspective()

    def get_selection_extents(self) -> "pynav.NavlibBox":

        from UM.Scene.Selection import Selection
        bounding_box = Selection.getBoundingBox()

        if bounding_box is not None:
            pt_min = pynav.NavlibVector(bounding_box.minimum.x, bounding_box.minimum.y, bounding_box.minimum.z)
            pt_max = pynav.NavlibVector(bounding_box.maximum.x, bounding_box.maximum.y, bounding_box.maximum.z)
            return pynav.NavlibBox(pt_min, pt_max)

    def get_selection_transform(self) -> "pynav.NavlibMatrix":
        return pynav.NavlibMatrix()

    def get_is_selection_empty(self) -> bool:
        from UM.Scene.Selection import Selection
        return not Selection.hasSelection()

    def get_pivot_visible(self) -> bool:
        return False

    def get_camera_matrix(self) -> "pynav.NavlibMatrix":

        transformation = self._scene.getActiveCamera().getLocalTransformation()

        return pynav.NavlibMatrix([[transformation.at(0, 0), transformation.at(0, 1), transformation.at(0, 2), transformation.at(0, 3)],
                                   [transformation.at(1, 0), transformation.at(1, 1), transformation.at(1, 2), transformation.at(1, 3)],
                                   [transformation.at(2, 0), transformation.at(2, 1), transformation.at(2, 2), transformation.at(2, 3)],
                                   [transformation.at(3, 0), transformation.at(3, 1), transformation.at(3, 2), transformation.at(3, 3)]])

    def get_coordinate_system(self) -> "pynav.NavlibMatrix":
        return pynav.NavlibMatrix()

    def get_front_view(self) -> "pynav.NavlibMatrix":
        return pynav.NavlibMatrix()

    def get_model_extents(self) -> "pynav.NavlibBox":

        result_bbox = AxisAlignedBox()
        build_volume_bbox = None

        for node in DepthFirstIterator(self._scene.getRoot()):
            node.setCalculateBoundingBox(True)
            if node.__class__.__qualname__ == "CuraSceneNode":
                result_bbox = result_bbox + node.getBoundingBox()
            elif node.__class__.__qualname__ == "BuildVolume":
                build_volume_bbox = node.getBoundingBox()

        if not result_bbox.isValid():
            result_bbox = build_volume_bbox

        if result_bbox is not None:
            pt_min = pynav.NavlibVector(result_bbox.minimum.x, result_bbox.minimum.y, result_bbox.minimum.z)
            pt_max = pynav.NavlibVector(result_bbox.maximum.x, result_bbox.maximum.y, result_bbox.maximum.z)
            self._scene_center = result_bbox.center
            self._scene_radius = (result_bbox.maximum - self._scene_center).length()
            return pynav.NavlibBox(pt_min, pt_max)

    def get_pivot_position(self) -> "pynav.NavlibVector":
        return pynav.NavlibVector()

    def get_hit_look_at(self) -> "pynav.NavlibVector":

        if self._was_pick and self._pointer_pick is not None:
            return pynav.NavlibVector(self._pointer_pick.x, self._pointer_pick.y, self._pointer_pick.z)
        elif self._was_pick and self._pointer_pick is None:
            return None

        from cura.Utils.Threading import call_on_qt_thread
        wrapped_pick = call_on_qt_thread(self.pick)
        picked_position = wrapped_pick(0, 0, self._hit_selection_only, 0.5)

        if picked_position is not None:
            return pynav.NavlibVector(picked_position.x, picked_position.y, picked_position.z)

    def get_units_to_meters(self) -> float:
        return 0.05

    def is_user_pivot(self) -> bool:
        return False

    def set_camera_matrix(self, matrix: "pynav.NavlibMatrix") -> None:

        # !!!!!!
        # Hit testing in Orthographic view is not reliable
        # Picking starts in camera position, not on near plane
        # which results in wrong depth values (visible geometry
        # cannot be picked properly) - Workaround needed (camera position offset)
        # !!!!!!
        if not self.get_is_view_perspective():
            affine = matrix._matrix
            direction = Vector(-affine[0][2], -affine[1][2], -affine[2][2])
            distance = self._scene_center - Vector(affine[0][3], affine[1][3], affine[2][3])

            cos_value = direction.dot(distance.normalized())

            offset = 0.

            if (distance.length() < self._scene_radius) and (cos_value > 0.):
                offset = self._scene_radius
            elif (distance.length() < self._scene_radius) and (cos_value < 0.):
                offset = 2. * self._scene_radius
            elif (distance.length() > self._scene_radius) and (cos_value < 0.):
                offset = 2. * distance.length()

            matrix._matrix[0][3] = matrix._matrix[0][3] - offset * direction.x
            matrix._matrix[1][3] = matrix._matrix[1][3] - offset * direction.y
            matrix._matrix[2][3] = matrix._matrix[2][3] - offset * direction.z

        transformation = Matrix(data = matrix._matrix)
        self._scene.getActiveCamera().setTransformation(transformation)

        active_camera = self._scene.getActiveCamera()
        if active_camera.isPerspective():
            camera_position = active_camera.getWorldPosition()
            dist = (camera_position - self._pivot_node.getWorldPosition()).length()
            scale = dist / 400.
        else:
            view_width = active_camera.getViewportWidth()
            current_size = view_width + (2. * active_camera.getZoomFactor() * view_width)
            scale = current_size / view_width * 5.

        self._pivot_node.scale(scale)

    def set_view_extents(self, extents: "pynav.NavlibBox") -> None:
        view_width = self._scene.getActiveCamera().getViewportWidth()
        new_zoom = (extents._min._x + view_width / 2.) / -view_width
        self._scene.getActiveCamera().setZoomFactor(new_zoom)

    def set_hit_selection_only(self, onlySelection: bool) -> None:
        self._hit_selection_only = onlySelection

    def set_motion_flag(self, motion: bool) -> None:
        if motion:
            width = self._scene.getActiveCamera().getViewportWidth()
            height = self._scene.getActiveCamera().getViewportHeight()
            self._picking_pass = PickingPass(width, height)
            self._renderer.addRenderPass(self._picking_pass)
        else:
            self._was_pick = False
            self._renderer.removeRenderPass(self._picking_pass)

    def set_pivot_position(self, position) -> None:
        self._pivot_node._target_node.setPosition(position=Vector(position._x, position._y, position._z), transform_space = SceneNode.TransformSpace.World)

    def set_pivot_visible(self, visible) -> None:
        if visible:
            self._scene.getRoot().addChild(self._pivot_node)
        else:
            self._scene.getRoot().removeChild(self._pivot_node)
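Reviewer note (not part of the change): the sampling that NavlibClient.pick() performs is easiest to see with the values used by get_hit_look_at(), i.e. a radius of 0.5 and grid_resolution of 5 centred on the pick coordinates (0, 0 in that call). A minimal sketch of the same grid:

# Illustration only, mirroring the loop in pick() above.
radius, grid_resolution = 0.5, 5
step = (2.0 * radius) / grid_resolution          # 0.2 in normalized device coordinates
samples = [(-radius + i * step, -radius + j * step)
           for i in range(grid_resolution + 1)
           for j in range(grid_resolution + 1)]  # 36 candidate points; the closest valid depth wins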
plugins/3DConnexion/OverlayNode.py (new file, 68 lines)
@@ -0,0 +1,68 @@
# Copyright (c) 2025 3Dconnexion, UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

from UM.Scene.SceneNode import SceneNode
from UM.View.GL.OpenGL import OpenGL
from UM.Mesh.MeshBuilder import MeshBuilder  # To create the overlay quad
from UM.Resources import Resources  # To find shader locations
from UM.Math.Matrix import Matrix
from UM.Application import Application

try:
    from PyQt6.QtGui import QImage
except:
    from PyQt5.QtGui import QImage

class OverlayNode(SceneNode):
    def __init__(self, node, image_path, size, parent=None):
        super().__init__(parent)
        self._target_node = node
        self.setCalculateBoundingBox(False)

        self._overlay_mesh = self._createOverlayQuad(size)
        self._drawed_mesh = self._overlay_mesh
        self._shader = None
        self._scene = Application.getInstance().getController().getScene()
        self._scale = 1.
        self._image_path = image_path

    def scale(self, factor):
        scale_matrix = Matrix()
        scale_matrix.setByScaleFactor(factor)
        self._drawed_mesh = self._overlay_mesh.getTransformed(scale_matrix)

    def _createOverlayQuad(self, size):
        mb = MeshBuilder()
        mb.addFaceByPoints(-size / 2, -size / 2, 0, -size / 2, size / 2, 0, size / 2, -size / 2, 0)
        mb.addFaceByPoints(size / 2, size / 2, 0, -size / 2, size / 2, 0, size / 2, -size / 2, 0)

        # Set UV coordinates so a texture can be created
        mb.setVertexUVCoordinates(0, 0, 1)
        mb.setVertexUVCoordinates(1, 0, 0)
        mb.setVertexUVCoordinates(4, 0, 0)
        mb.setVertexUVCoordinates(2, 1, 1)
        mb.setVertexUVCoordinates(5, 1, 1)
        mb.setVertexUVCoordinates(3, 1, 0)

        return mb.build()

    def render(self, renderer):

        if not self._shader:
            # We now misuse the platform shader, as it actually supports textures
            self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "platform.shader"))
            # Set the opacity to 0, so that the template is in full control.
            self._shader.setUniformValue("u_opacity", 0)
            self._texture = OpenGL.getInstance().createTexture()
            texture_image = QImage(self._image_path)
            self._texture.setImage(texture_image)
            self._shader.setTexture(0, self._texture)

        node_position = self._target_node.getWorldPosition()
        position_matrix = Matrix()
        position_matrix.setByTranslation(node_position)
        camera_orientation = self._scene.getActiveCamera().getOrientation().toMatrix()

        renderer.queueNode(self._scene.getRoot(), shader=self._shader, mesh=self._drawed_mesh.getTransformed(position_matrix.multiply(camera_orientation)), overlay=True)

        return True  # This node does its own rendering.
plugins/3DConnexion/__init__.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Copyright (c) 2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

from UM.Logger import Logger

from typing import TYPE_CHECKING, Dict, Any

if TYPE_CHECKING:
    from UM.Application import Application


def getMetaData() -> Dict[str, Any]:
    return {
        "tool": {
            "visible": False
        }
    }


def register(app: "Application") -> Dict[str, Any]:
    try:
        from .NavlibClient import NavlibClient
        return { "tool": NavlibClient(app.getController().getScene(), app.getRenderer()) }
    except BaseException as exception:
        Logger.warning(f"Unable to load 3Dconnexion library: {exception}")
        return { }
plugins/3DConnexion/plugin.json (new file, 8 lines)
@@ -0,0 +1,8 @@
{
    "name": "3DConnexion mouses",
    "author": "3DConnexion",
    "version": "1.0.0",
    "description": "Allows working with 3D mouses inside Cura.",
    "api": 8,
    "i18n-catalog": "cura"
}
@@ -17,6 +17,7 @@ from UM.MimeTypeDatabase import MimeTypeDatabase, MimeType
from UM.Scene.GroupDecorator import GroupDecorator
from UM.Scene.SceneNode import SceneNode  # For typing.
from UM.Scene.SceneNodeSettings import SceneNodeSettings
from UM.Util import parseBool
from cura.CuraApplication import CuraApplication
from cura.Machines.ContainerTree import ContainerTree
from cura.Scene.BuildPlateDecorator import BuildPlateDecorator

@@ -93,7 +94,7 @@ class ThreeMFReader(MeshReader):
        return temp_mat

    @staticmethod
    def _convertSavitarNodeToUMNode(savitar_node: Savitar.SceneNode, file_name: str = "") -> Optional[SceneNode]:
    def _convertSavitarNodeToUMNode(savitar_node: Savitar.SceneNode, file_name: str = "", archive: zipfile.ZipFile = None) -> Optional[SceneNode]:
        """Convenience function that converts a SceneNode object (as obtained from libSavitar) to a scene node.

        :returns: Scene node.

@@ -114,6 +115,10 @@ class ThreeMFReader(MeshReader):

        active_build_plate = CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate

        component_path = savitar_node.getComponentPath()
        if component_path != "" and archive is not None:
            savitar_node.parseComponentData(archive.open(component_path.lstrip("/")).read())

        um_node = CuraSceneNode()  # This adds a SettingOverrideDecorator
        um_node.addDecorator(BuildPlateDecorator(active_build_plate))
        try:

@@ -131,6 +136,7 @@ class ThreeMFReader(MeshReader):
            vertices = numpy.resize(data, (int(data.size / 3), 3))
            mesh_builder.setVertices(vertices)
            mesh_builder.calculateNormals(fast=True)
            mesh_builder.setMeshId(node_id)
            if file_name:
                # The filename is used to give the user the option to reload the file if it is changed on disk
                # It is only set for the root node of the 3mf file

@@ -141,7 +147,7 @@ class ThreeMFReader(MeshReader):
                um_node.setMeshData(mesh_data)

        for child in savitar_node.getChildren():
            child_node = ThreeMFReader._convertSavitarNodeToUMNode(child)
            child_node = ThreeMFReader._convertSavitarNodeToUMNode(child, archive=archive)
            if child_node:
                um_node.addChild(child_node)

@@ -182,7 +188,7 @@ class ThreeMFReader(MeshReader):
                    um_node.printOrder = int(setting_value)
                    continue
                if key == "drop_to_buildplate":
                    um_node.setSetting(SceneNodeSettings.AutoDropDown, eval(setting_value))
                    um_node.setSetting(SceneNodeSettings.AutoDropDown, parseBool(setting_value))
                    continue
                if key in known_setting_keys:
                    setting_container.setProperty(key, "value", setting_value)

@@ -230,7 +236,7 @@ class ThreeMFReader(MeshReader):
            CuraApplication.getInstance().getController().getScene().setMetaDataEntry(key, value)

        for node in scene_3mf.getSceneNodes():
            um_node = ThreeMFReader._convertSavitarNodeToUMNode(node, file_name)
            um_node = ThreeMFReader._convertSavitarNodeToUMNode(node, file_name, archive)
            if um_node is None:
                continue

@@ -1354,7 +1354,6 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
                    return
                machine_manager.setQualityChangesGroup(quality_changes_group, no_dialog = True)
            else:
                self._quality_type_to_apply = self._quality_type_to_apply.lower() if self._quality_type_to_apply else None
                quality_group_dict = container_tree.getCurrentQualityGroups()
                if self._quality_type_to_apply in quality_group_dict:
                    quality_group = quality_group_dict[self._quality_type_to_apply]

@@ -23,7 +23,7 @@ def getMetaData() -> Dict:
    if "3MFReader.ThreeMFReader" in sys.modules:
        metaData["mesh_reader"] = [
            {
                "extension": "3mf",
                "extension": workspace_extension,
                "description": catalog.i18nc("@item:inlistbox", "3MF File")
            }
        ]
plugins/3MFWriter/BambuLabVariant.py (new file, 176 lines)
@@ -0,0 +1,176 @@
# Copyright (c) 2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

import hashlib
import json
from io import StringIO
import xml.etree.ElementTree as ET
import zipfile

from PyQt6.QtCore import Qt, QBuffer
from PyQt6.QtGui import QImage

from UM.Application import Application
from UM.Logger import Logger
from UM.Mesh.MeshWriter import MeshWriter
from UM.PluginRegistry import PluginRegistry
from typing import cast

from cura.CuraApplication import CuraApplication

from .ThreeMFVariant import ThreeMFVariant
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")

# Path constants
METADATA_PATH = "Metadata"
THUMBNAIL_PATH_MULTIPLATE = f"{METADATA_PATH}/plate_1.png"
THUMBNAIL_PATH_MULTIPLATE_SMALL = f"{METADATA_PATH}/plate_1_small.png"
GCODE_PATH = f"{METADATA_PATH}/plate_1.gcode"
GCODE_MD5_PATH = f"{GCODE_PATH}.md5"
MODEL_SETTINGS_PATH = f"{METADATA_PATH}/model_settings.config"
PLATE_DESC_PATH = f"{METADATA_PATH}/plate_1.json"
SLICE_INFO_PATH = f"{METADATA_PATH}/slice_info.config"
PROJECT_SETTINGS_PATH = f"{METADATA_PATH}/project_settings.config"

class BambuLabVariant(ThreeMFVariant):
    """BambuLab specific implementation of the 3MF format."""

    @property
    def mime_type(self) -> str:
        return "application/vnd.bambulab-package.3dmanufacturing-3dmodel+xml"

    def process_thumbnail(self, snapshot: QImage, thumbnail_buffer: QBuffer,
                          archive: zipfile.ZipFile, relations_element: ET.Element) -> None:
        """Process the thumbnail for BambuLab variant."""
        # Write thumbnail
        archive.writestr(zipfile.ZipInfo(THUMBNAIL_PATH_MULTIPLATE), thumbnail_buffer.data())

        # Add relations elements for thumbnails
        ET.SubElement(relations_element, "Relationship",
                      Target="/" + THUMBNAIL_PATH_MULTIPLATE, Id="rel-2",
                      Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail")

        ET.SubElement(relations_element, "Relationship",
                      Target="/" + THUMBNAIL_PATH_MULTIPLATE, Id="rel-4",
                      Type="http://schemas.bambulab.com/package/2021/cover-thumbnail-middle")

        # Create and save small thumbnail
        small_snapshot = snapshot.scaled(128, 128, transformMode=Qt.TransformationMode.SmoothTransformation)
        small_thumbnail_buffer = QBuffer()
        small_thumbnail_buffer.open(QBuffer.OpenModeFlag.ReadWrite)
        small_snapshot.save(small_thumbnail_buffer, "PNG")

        # Write small thumbnail
        archive.writestr(zipfile.ZipInfo(THUMBNAIL_PATH_MULTIPLATE_SMALL), small_thumbnail_buffer.data())

        # Add relation for small thumbnail
        ET.SubElement(relations_element, "Relationship",
                      Target="/" + THUMBNAIL_PATH_MULTIPLATE_SMALL, Id="rel-5",
                      Type="http://schemas.bambulab.com/package/2021/cover-thumbnail-small")

    def add_extra_files(self, archive: zipfile.ZipFile, metadata_relations_element: ET.Element) -> None:
        """Add BambuLab specific files to the archive."""
        self._storeGCode(archive, metadata_relations_element)
        self._storeModelSettings(archive)
        self._storePlateDesc(archive)
        self._storeSliceInfo(archive)
        self._storeProjectSettings(archive)

    def _storeGCode(self, archive: zipfile.ZipFile, metadata_relations_element: ET.Element):
        """Store GCode data in the archive."""
        gcode_textio = StringIO()
        gcode_writer = cast(MeshWriter, PluginRegistry.getInstance().getPluginObject("GCodeWriter"))
        success = gcode_writer.write(gcode_textio, None)

        if not success:
            error_msg = catalog.i18nc("@info:error", "Can't write GCode to 3MF file")
            self._writer.setInformation(error_msg)
            Logger.error(error_msg)
            raise Exception(error_msg)

        gcode_data = gcode_textio.getvalue().encode("UTF-8")
        archive.writestr(zipfile.ZipInfo(GCODE_PATH), gcode_data)

        gcode_relation_element = ET.SubElement(metadata_relations_element, "Relationship",
                                               Target=f"/{GCODE_PATH}", Id="rel-1",
                                               Type="http://schemas.bambulab.com/package/2021/gcode")

        # Calculate and store the MD5 sum of the gcode data
        md5_hash = hashlib.md5(gcode_data).hexdigest()
        archive.writestr(zipfile.ZipInfo(GCODE_MD5_PATH), md5_hash.encode("UTF-8"))

    def _storeModelSettings(self, archive: zipfile.ZipFile):
        """Store model settings in the archive."""
        config = ET.Element("config")
        plate = ET.SubElement(config, "plate")
        ET.SubElement(plate, "metadata", key="plater_id", value="1")
        ET.SubElement(plate, "metadata", key="plater_name", value="")
        ET.SubElement(plate, "metadata", key="locked", value="false")
        ET.SubElement(plate, "metadata", key="filament_map_mode", value="Auto For Flush")
        extruders_count = len(CuraApplication.getInstance().getExtruderManager().extruderIds)
        ET.SubElement(plate, "metadata", key="filament_maps", value=" ".join("1" for _ in range(extruders_count)))
        ET.SubElement(plate, "metadata", key="gcode_file", value=GCODE_PATH)
        ET.SubElement(plate, "metadata", key="thumbnail_file", value=THUMBNAIL_PATH_MULTIPLATE)
        ET.SubElement(plate, "metadata", key="pattern_bbox_file", value=PLATE_DESC_PATH)

        self._writer._storeElementTree(archive, MODEL_SETTINGS_PATH, config)

    def _storePlateDesc(self, archive: zipfile.ZipFile):
        """Store plate description in the archive."""
        plate_desc = {}

        filament_ids = []
        filament_colors = []

        for extruder in CuraApplication.getInstance().getExtruderManager().getUsedExtruderStacks():
            filament_ids.append(extruder.getValue("extruder_nr"))
            filament_colors.append(self._writer._getMaterialColor(extruder))

        plate_desc["filament_ids"] = filament_ids
        plate_desc["filament_colors"] = filament_colors
        plate_desc["first_extruder"] = CuraApplication.getInstance().getExtruderManager().getInitialExtruderNr()
        plate_desc["is_seq_print"] = Application.getInstance().getGlobalContainerStack().getValue("print_sequence") == "one_at_a_time"
        plate_desc["nozzle_diameter"] = CuraApplication.getInstance().getExtruderManager().getActiveExtruderStack().getValue("machine_nozzle_size")
        plate_desc["version"] = 2

        file = zipfile.ZipInfo(PLATE_DESC_PATH)
        file.compress_type = zipfile.ZIP_DEFLATED
        archive.writestr(file, json.dumps(plate_desc).encode("UTF-8"))

    def _storeSliceInfo(self, archive: zipfile.ZipFile):
        """Store slice information in the archive."""
        config = ET.Element("config")

        header = ET.SubElement(config, "header")
        ET.SubElement(header, "header_item", key="X-BBL-Client-Type", value="slicer")
        ET.SubElement(header, "header_item", key="X-BBL-Client-Version", value="02.00.01.50")

        plate = ET.SubElement(config, "plate")
        ET.SubElement(plate, "metadata", key="index", value="1")
        ET.SubElement(plate,
                      "metadata",
                      key="nozzle_diameters",
                      value=str(CuraApplication.getInstance().getExtruderManager().getActiveExtruderStack().getValue("machine_nozzle_size")))

        print_information = CuraApplication.getInstance().getPrintInformation()
        for index, extruder in enumerate(Application.getInstance().getGlobalContainerStack().extruderList):
            used_m = print_information.materialLengths[index]
            used_g = print_information.materialWeights[index]
            if used_m > 0.0 and used_g > 0.0:
                ET.SubElement(plate,
                              "filament",
                              id=str(extruder.getValue("extruder_nr") + 1),
                              tray_info_idx="GFA00",
                              type=extruder.material.getMetaDataEntry("material", ""),
                              color=self._writer._getMaterialColor(extruder),
                              used_m=str(used_m),
                              used_g=str(used_g))

        self._writer._storeElementTree(archive, SLICE_INFO_PATH, config)

    def _storeProjectSettings(self, archive: zipfile.ZipFile):
        api = CuraApplication.getInstance().getCuraAPI()
        file = zipfile.ZipInfo(PROJECT_SETTINGS_PATH)
        json_string = json.dumps(api.interface.settings.getAllGlobalSettings(), separators=(", ", ": "), indent=4)
        archive.writestr(file, json_string.encode("UTF-8"))
plugins/3MFWriter/Cura3mfVariant.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# Copyright (c) 2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

import xml.etree.ElementTree as ET
import zipfile

from PyQt6.QtCore import QBuffer
from PyQt6.QtGui import QImage

from .ThreeMFVariant import ThreeMFVariant

# Standard 3MF paths
METADATA_PATH = "Metadata"
THUMBNAIL_PATH = f"{METADATA_PATH}/thumbnail.png"

class Cura3mfVariant(ThreeMFVariant):
    """Default implementation of the 3MF format."""

    @property
    def mime_type(self) -> str:
        return "application/vnd.ms-package.3dmanufacturing-3dmodel+xml"

    def process_thumbnail(self, snapshot: QImage, thumbnail_buffer: QBuffer,
                          archive: zipfile.ZipFile, relations_element: ET.Element) -> None:
        """Process the thumbnail for default 3MF variant."""
        thumbnail_file = zipfile.ZipInfo(THUMBNAIL_PATH)
        # Don't try to compress snapshot file, because the PNG is pretty much as compact as it will get
        archive.writestr(thumbnail_file, thumbnail_buffer.data())

        # Add thumbnail relation to _rels/.rels file
        ET.SubElement(relations_element, "Relationship",
                      Target="/" + THUMBNAIL_PATH, Id="rel1",
                      Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail")
plugins/3MFWriter/ThreeMFVariant.py (new file, 74 lines)
@@ -0,0 +1,74 @@
# Copyright (c) 2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

from abc import ABC, abstractmethod
from typing import TYPE_CHECKING
import xml.etree.ElementTree as ET
import zipfile

from PyQt6.QtGui import QImage
from PyQt6.QtCore import QBuffer

if TYPE_CHECKING:
    from .ThreeMFWriter import ThreeMFWriter

class ThreeMFVariant(ABC):
    """Base class for 3MF format variants.

    Different vendors may have their own extensions to the 3MF format,
    such as BambuLab's 3MF variant. This class provides an interface
    for implementing these variants.
    """

    def __init__(self, writer: 'ThreeMFWriter'):
        """
        :param writer: The ThreeMFWriter instance that will use this variant
        """
        self._writer = writer

    @property
    @abstractmethod
    def mime_type(self) -> str:
        """The MIME type for this 3MF variant."""
        pass

    def handles_mime_type(self, mime_type: str) -> bool:
        """Check if this variant handles the given MIME type.

        :param mime_type: The MIME type to check
        :return: True if this variant handles the MIME type, False otherwise
        """
        return mime_type == self.mime_type

    def prepare_content_types(self, content_types: ET.Element) -> None:
        """Prepare the content types XML element for this variant.

        :param content_types: The content types XML element
        """
        pass

    def prepare_relations(self, relations_element: ET.Element) -> None:
        """Prepare the relations XML element for this variant.

        :param relations_element: The relations XML element
        """
        pass

    def process_thumbnail(self, snapshot: QImage, thumbnail_buffer: QBuffer,
                          archive: zipfile.ZipFile, relations_element: ET.Element) -> None:
        """Process the thumbnail for this variant.

        :param snapshot: The snapshot image
        :param thumbnail_buffer: Buffer containing the thumbnail data
        :param archive: The zip archive to write to
        :param relations_element: The relations XML element
        """
        pass

    def add_extra_files(self, archive: zipfile.ZipFile, metadata_relations_element: ET.Element) -> None:
        """Add any extra files required by this variant to the archive.

        :param archive: The zip archive to write to
        :param metadata_relations_element: The metadata relations XML element
        """
        pass
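Reviewer note: a minimal sketch of what a further 3MF flavour would have to provide against the ThreeMFVariant interface above. The class name and MIME type are invented for illustration and are not part of the diff.

# Hypothetical example, not part of this change.
class ExampleVariant(ThreeMFVariant):
    @property
    def mime_type(self) -> str:
        return "application/x-example-3mf"
    # prepare_content_types, prepare_relations, process_thumbnail and add_extra_files
    # may be omitted: the base class supplies no-op defaults; only mime_type is abstract.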
@@ -8,9 +8,12 @@ from io import StringIO
from threading import Lock
import zipfile
from typing import Dict, Any
from pathlib import Path
from zipfile import ZipFile

from UM.Application import Application
from UM.Logger import Logger
from UM.PluginRegistry import PluginRegistry
from UM.Preferences import Preferences
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Workspace.WorkspaceWriter import WorkspaceWriter

@@ -33,7 +36,7 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
        if self._ucp_model != model:
            self._ucp_model = model

    def _write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
    def _write(self, stream, nodes, mode, include_log):
        application = Application.getInstance()
        machine_manager = application.getMachineManager()

@@ -79,6 +82,11 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
            if self._ucp_model is not None:
                user_settings_data = self._getUserSettings(self._ucp_model)
                ThreeMFWriter._storeMetadataJson(user_settings_data, archive, USER_SETTINGS_PATH)

            # Write log file
            if include_log:
                ThreeMFWorkspaceWriter._writeLogFile(archive)

        except PermissionError:
            self.setInformation(catalog.i18nc("@error:zip", "No permission to write the workspace here."))
            Logger.error("No permission to write workspace to this stream.")

@@ -125,8 +133,8 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):

        return True

    def write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
        success = self._write(stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode)
    def write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode, **kwargs):
        success = self._write(stream, nodes, WorkspaceWriter.OutputMode.BinaryMode, kwargs.get("include_log", False))
        self._ucp_model = None
        return success

@@ -191,6 +199,17 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
            Logger.error("File became inaccessible while writing to it: {archive_filename}".format(archive_filename = archive.fp.name))
            return

    @staticmethod
    def _writeLogFile(archive: ZipFile) -> None:
        """Helper function that writes the Cura log file to the archive.

        :param archive: The archive to write to.
        """
        file_logger = PluginRegistry.getInstance().getPluginObject("FileLogger")
        file_logger.flush()
        for file_path in file_logger.getFilesPaths():
            archive.write(file_path, arcname=f"log/{Path(file_path).name}")

    @staticmethod
    def _getUserSettings(model: SettingsExportModel) -> Dict[str, Dict[str, Any]]:
        user_settings = {}
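How the new include_log flag is expected to reach _write(): the call site is outside this diff, so treat the snippet below as an assumption.

# Sketch only; "stream" and "nodes" come from the surrounding save logic.
writer = ThreeMFWorkspaceWriter()
writer.write(stream, nodes, include_log=True)  # picked up via **kwargs; Cura log files land under log/ in the archive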
@@ -1,11 +1,13 @@
# Copyright (c) 2015-2022 Ultimaker B.V.
# Copyright (c) 2015-2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

import json
import re
import threading

from typing import Optional, cast, List, Dict, Pattern, Set
from typing import Optional, cast, List, Dict, Set

from UM.PluginRegistry import PluginRegistry
from UM.Mesh.MeshWriter import MeshWriter
from UM.Math.Vector import Vector
from UM.Logger import Logger

@@ -19,7 +21,9 @@ from UM.Settings.ContainerRegistry import ContainerRegistry

from cura.CuraApplication import CuraApplication
from cura.CuraPackageManager import CuraPackageManager
from cura.Machines.Models.ExtrudersModel import ExtrudersModel
from cura.Settings import CuraContainerStack
from cura.Settings.ExtruderStack import ExtruderStack
from cura.Utils.Threading import call_on_qt_thread
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.Snapshot import Snapshot

@@ -45,11 +49,13 @@ import UM.Application

from .SettingsExportModel import SettingsExportModel
from .SettingsExportGroup import SettingsExportGroup
from .ThreeMFVariant import ThreeMFVariant
from .Cura3mfVariant import Cura3mfVariant
from .BambuLabVariant import BambuLabVariant

from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")

THUMBNAIL_PATH = "Metadata/thumbnail.png"
MODEL_PATH = "3D/3dmodel.model"
PACKAGE_METADATA_PATH = "Cura/packages.json"

@@ -68,6 +74,12 @@ class ThreeMFWriter(MeshWriter):
        self._store_archive = False
        self._lock = threading.Lock()

        # Register available variants
        self._variants = {
            Cura3mfVariant(self).mime_type: Cura3mfVariant,
            BambuLabVariant(self).mime_type: BambuLabVariant
        }

    @staticmethod
    def _convertMatrixToString(matrix):
        result = ""

@@ -114,22 +126,24 @@ class ThreeMFWriter(MeshWriter):

        mesh_data = um_node.getMeshData()

        node_matrix = um_node.getLocalTransformation()
        node_matrix.preMultiply(transformation)

        if center_mesh:
            node_matrix = Matrix()
            center_matrix = Matrix()
            # compensate for original center position, if object(s) is/are not around its zero position
            if mesh_data is not None:
                extents = mesh_data.getExtents()
                if extents is not None:
                    # We use a different coordinate space while writing, so flip Z and Y
                    center_vector = Vector(extents.center.x, extents.center.y, extents.center.z)
                    node_matrix.setByTranslation(center_vector)
            node_matrix.multiply(um_node.getLocalTransformation())
        else:
            node_matrix = um_node.getLocalTransformation()
                    center_vector = Vector(-extents.center.x, -extents.center.y, -extents.center.z)
                    center_matrix.setByTranslation(center_vector)
            node_matrix.preMultiply(center_matrix)

        matrix_string = ThreeMFWriter._convertMatrixToString(node_matrix.preMultiply(transformation))
        matrix_string = ThreeMFWriter._convertMatrixToString(node_matrix)

        savitar_node.setTransformation(matrix_string)

        if mesh_data is not None:
            savitar_node.getMeshData().setVerticesFromBytes(mesh_data.getVerticesAsByteArray())
            indices_array = mesh_data.getIndicesAsByteArray()

@@ -199,26 +213,48 @@ class ThreeMFWriter(MeshWriter):

        painter.end()

    def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode, export_settings_model = None) -> bool:
    def _getVariant(self, mime_type: str) -> ThreeMFVariant:
        """Get the appropriate variant for the given MIME type.

        :param mime_type: The MIME type to get the variant for
        :return: An instance of the variant for the given MIME type
        """
        variant_class = self._variants.get(mime_type, Cura3mfVariant)
        return variant_class(self)

    def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode, export_settings_model = None, **kwargs) -> bool:
        self._archive = None  # Reset archive
        archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)

        # Determine which variant to use based on mime type in kwargs
        mime_type = kwargs.get("mime_type", Cura3mfVariant(self).mime_type)
        variant = self._getVariant(mime_type)

        try:
            model_file = zipfile.ZipInfo(MODEL_PATH)
            # Because zipfile is stupid and ignores archive-level compression settings when writing with ZipInfo.
            model_file.compress_type = zipfile.ZIP_DEFLATED

            # Create content types file
            content_types_file = zipfile.ZipInfo("[Content_Types].xml")
            content_types_file.compress_type = zipfile.ZIP_DEFLATED
            content_types = ET.Element("Types", xmlns = self._namespaces["content-types"])
            rels_type = ET.SubElement(content_types, "Default", Extension = "rels", ContentType = "application/vnd.openxmlformats-package.relationships+xml")
            model_type = ET.SubElement(content_types, "Default", Extension = "model", ContentType = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml")

            # Create _rels/.rels file
            relations_file = zipfile.ZipInfo("_rels/.rels")
            relations_file.compress_type = zipfile.ZIP_DEFLATED
            relations_element = ET.Element("Relationships", xmlns = self._namespaces["relationships"])
            model_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/" + MODEL_PATH, Id = "rel0", Type = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel")
            relations_element = self._makeRelationsTree()
            model_relation_element = ET.SubElement(relations_element, "Relationship", Target="/" + MODEL_PATH,
                                                   Id="rel0",
                                                   Type="http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel")

            # Create Metadata/_rels/model_settings.config.rels
            metadata_relations_element = self._makeRelationsTree()

            # Let the variant add its specific files
            variant.add_extra_files(archive, metadata_relations_element)

            # Let the variant prepare content types and relations
            variant.prepare_content_types(content_types)
            variant.prepare_relations(relations_element)

            # Attempt to add a thumbnail
            snapshot = self._createSnapshot()

@@ -231,16 +267,11 @@ class ThreeMFWriter(MeshWriter):
            thumbnail_buffer.open(QBuffer.OpenModeFlag.ReadWrite)
            snapshot.save(thumbnail_buffer, "PNG")

            thumbnail_file = zipfile.ZipInfo(THUMBNAIL_PATH)
            # Don't try to compress snapshot file, because the PNG is pretty much as compact as it will get
            archive.writestr(thumbnail_file, thumbnail_buffer.data())

            # Add PNG to content types file
            thumbnail_type = ET.SubElement(content_types, "Default", Extension="png", ContentType="image/png")
            # Add thumbnail relation to _rels/.rels file
            thumbnail_relation_element = ET.SubElement(relations_element, "Relationship",
                                                       Target="/" + THUMBNAIL_PATH, Id="rel1",
                                                       Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail")

            # Let the variant process the thumbnail
            variant.process_thumbnail(snapshot, thumbnail_buffer, archive, relations_element)

            # Write material metadata
            packages_metadata = self._getMaterialPackageMetadata() + self._getPluginPackageMetadata()

@@ -303,8 +334,10 @@ class ThreeMFWriter(MeshWriter):
            scene_string = parser.sceneToString(savitar_scene)

            archive.writestr(model_file, scene_string)
            archive.writestr(content_types_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(content_types))
            archive.writestr(relations_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(relations_element))
            self._storeElementTree(archive, "[Content_Types].xml", content_types)
            self._storeElementTree(archive, "_rels/.rels", relations_element)
            if len(metadata_relations_element) > 0:
                self._storeElementTree(archive, "Metadata/_rels/model_settings.config.rels", metadata_relations_element)
        except Exception as error:
            Logger.logException("e", "Error writing zip file")
            self.setInformation(str(error))

@@ -317,6 +350,25 @@ class ThreeMFWriter(MeshWriter):

        return True

    @staticmethod
    def _storeElementTree(archive: zipfile.ZipFile, file_path: str, root_element: ET.Element):
        file = zipfile.ZipInfo(file_path)
        file.compress_type = zipfile.ZIP_DEFLATED
        archive.writestr(file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(root_element))

    def _makeRelationsTree(self):
        return ET.Element("Relationships", xmlns=self._namespaces["relationships"])

    @staticmethod
    def _getMaterialColor(extruder: "ExtruderStack") -> str:
        position = int(extruder.getMetaDataEntry("position", default="0"))
        try:
            default_color = ExtrudersModel.defaultColors[position]
        except IndexError:
            default_color = "#e0e000"
        color_code = extruder.material.getMetaDataEntry("color_code", default=default_color)
        return color_code.upper()

    @staticmethod
    def _storeMetadataJson(metadata: Dict[str, List[Dict[str, str]]], archive: zipfile.ZipFile, path: str) -> None:
        """Stores metadata inside archive path as json file"""
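Variant selection in the writer is driven purely by the mime_type keyword passed to write(); a short sketch of the lookup behaviour shown above (the caller here is hypothetical):

writer = ThreeMFWriter()
variant = writer._getVariant("application/vnd.bambulab-package.3dmanufacturing-3dmodel+xml")
# -> a BambuLabVariant instance; any unknown MIME type falls back to Cura3mfVariant.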
@@ -28,11 +28,17 @@ def getMetaData():
    metaData["mesh_writer"] = {
        "output": [
            {
                "extension": "3mf",
                "extension": workspace_extension,
                "description": i18n_catalog.i18nc("@item:inlistbox", "3MF file"),
                "mime_type": "application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
                "mode": ThreeMFWriter.ThreeMFWriter.OutputMode.BinaryMode
            },
            {
                "extension": f"gcode.{workspace_extension}",
                "description": i18n_catalog.i18nc("@item:inlistbox", "BambuLab 3MF file"),
                "mime_type": "application/vnd.bambulab-package.3dmanufacturing-3dmodel+xml",
                "mode": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter.OutputMode.BinaryMode
            }
        ]
    }
    metaData["workspace_writer"] = {

@@ -44,7 +50,7 @@ def getMetaData():
            "mode": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter.OutputMode.BinaryMode
        },
        {
            "extension": "3mf",
            "extension": workspace_extension,
            "description": i18n_catalog.i18nc("@item:inlistbox", "Universal Cura Project"),
            "mime_type": "application/x-ucp",
            "mode": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter.OutputMode.BinaryMode
@@ -1,4 +1,4 @@
# Copyright (c) 2020 Ultimaker B.V.
# Copyright (c) 2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import json
import threading

@@ -13,11 +13,14 @@ from UM.Message import Message
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
from UM.i18n import i18nCatalog
from cura.ApplicationMetadata import CuraSDKVersion
from cura.CuraApplication import CuraApplication
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
import cura.UltimakerCloud.UltimakerCloudConstants as UltimakerCloudConstants

catalog = i18nCatalog("cura")

PACKAGES_URL = f"{UltimakerCloudConstants.CuraCloudAPIRoot}/cura-packages/v{UltimakerCloudConstants.CuraCloudAPIVersion}/cura/v{CuraSDKVersion}/packages"

class CreateBackupJob(Job):
    """Creates backup zip, requests upload url and uploads the backup file to cloud storage."""

@@ -40,23 +43,54 @@ class CreateBackupJob(Job):
        self._job_done = threading.Event()
        """Set when the job completes. Does not indicate success."""
        self.backup_upload_error_message = ""
        """After the job completes, an empty string indicates success. Othrerwise, the value is a translated message."""
        """After the job completes, an empty string indicates success. Otherwise, the value is a translated message."""

    def _setPluginFetchErrorMessage(self, error_msg: str) -> None:
        Logger.error(f"Fetching plugins for backup resulted in error: {error_msg}")
        self.backup_upload_error_message = "Couldn't update currently available plugins, backup stopped."
        self._upload_message.hide()
        self._job_done.set()

    def run(self) -> None:
        upload_message = Message(catalog.i18nc("@info:backup_status", "Creating your backup..."),
        self._upload_message = Message(catalog.i18nc("@info:backup_status", "Fetch re-downloadable package-ids..."),
                                       title = self.MESSAGE_TITLE,
                                       progress = -1)
        upload_message.show()
        self._upload_message.show()
        CuraApplication.getInstance().processEvents()

        if CuraApplication.getInstance().getCuraAPI().backups.shouldReinstallDownloadablePlugins():
            request_url = f"{PACKAGES_URL}?package_type=plugin"
            scope = JsonDecoratorScope(UltimakerCloudScope(CuraApplication.getInstance()))
            HttpRequestManager.getInstance().get(
                request_url,
                scope=scope,
                callback=self._continueRun,
                error_callback=lambda reply, error: self._setPluginFetchErrorMessage(str(error)),
            )
        else:
            self._continueRun()

    def _continueRun(self, reply: "QNetworkReply" = None) -> None:
        if reply is not None:
            response_data = HttpRequestManager.readJSON(reply)
            if "data" not in response_data:
                self._setPluginFetchErrorMessage(f"Missing 'data' from response. Keys in response: {response_data.keys()}")
                return
            available_remote_plugins = frozenset({v["package_id"] for v in response_data["data"]})
        else:
            available_remote_plugins = frozenset()

        self._upload_message.setText(catalog.i18nc("@info:backup_status", "Creating your backup..."))
        CuraApplication.getInstance().processEvents()
        cura_api = CuraApplication.getInstance().getCuraAPI()
        self._backup_zip, backup_meta_data = cura_api.backups.createBackup()
        self._backup_zip, backup_meta_data = cura_api.backups.createBackup(available_remote_plugins)

        if not self._backup_zip or not backup_meta_data:
            self.backup_upload_error_message = catalog.i18nc("@info:backup_status", "There was an error while creating your backup.")
            upload_message.hide()
            self._upload_message.hide()
            return

        upload_message.setText(catalog.i18nc("@info:backup_status", "Uploading your backup..."))
        self._upload_message.setText(catalog.i18nc("@info:backup_status", "Uploading your backup..."))
        CuraApplication.getInstance().processEvents()

        # Create an upload entry for the backup.

@@ -64,13 +98,18 @@ class CreateBackupJob(Job):
        backup_meta_data["description"] = "{}.backup.{}.cura.zip".format(timestamp, backup_meta_data["cura_release"])
        self._requestUploadSlot(backup_meta_data, len(self._backup_zip))

        self._job_done.wait()
        # Note: One 'process events' call wasn't enough with the changed situation somehow.
        for _ in range(5000):
            CuraApplication.getInstance().processEvents()
            if self._job_done.wait(0.02):
                break

        if self.backup_upload_error_message == "":
            upload_message.setText(catalog.i18nc("@info:backup_status", "Your backup has finished uploading."))
            upload_message.setProgress(None)  # Hide progress bar
            self._upload_message.setText(catalog.i18nc("@info:backup_status", "Your backup has finished uploading."))
            self._upload_message.setProgress(None)  # Hide progress bar
        else:
            # some error occurred. This error is presented to the user by DrivePluginExtension
            upload_message.hide()
            self._upload_message.hide()

    def _requestUploadSlot(self, backup_metadata: Dict[str, Any], backup_size: int) -> None:
        """Request a backup upload slot from the API.

@@ -83,7 +122,6 @@ class CreateBackupJob(Job):
                "metadata": backup_metadata
            }
        }).encode()

        HttpRequestManager.getInstance().put(
            self._api_backup_url,
            data = payload,
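The waiting pattern introduced in run() (pumping Qt events while polling the job event) could be factored out as below; this helper does not exist in the code and is shown only to make the loop's intent explicit.

# Hypothetical helper, equivalent to the inline loop above (caps out at roughly 5000 * 0.02 s = 100 s).
def _wait_with_event_pump(done_event: threading.Event, iterations: int = 5000, interval: float = 0.02) -> None:
    for _ in range(iterations):
        CuraApplication.getInstance().processEvents()
        if done_event.wait(interval):
            break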
@ -1,8 +1,9 @@
|
|||
# Copyright (c) 2021 Ultimaker B.V.
|
||||
# Copyright (c) 2025 UltiMaker
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import threading
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Optional, Any, Dict
|
||||
|
@ -12,9 +13,16 @@ from PyQt6.QtNetwork import QNetworkReply, QNetworkRequest
|
|||
from UM.Job import Job
|
||||
from UM.Logger import Logger
|
||||
from UM.PackageManager import catalog
|
||||
from UM.Resources import Resources
|
||||
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
|
||||
from cura.CuraApplication import CuraApplication
|
||||
from UM.Version import Version
|
||||
|
||||
from cura.ApplicationMetadata import CuraSDKVersion
|
||||
from cura.CuraApplication import CuraApplication
|
||||
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
|
||||
import cura.UltimakerCloud.UltimakerCloudConstants as UltimakerCloudConstants
|
||||
|
||||
PACKAGES_URL_TEMPLATE = f"{UltimakerCloudConstants.CuraCloudAPIRoot}/cura-packages/v{UltimakerCloudConstants.CuraCloudAPIVersion}/cura/v{{0}}/packages/{{1}}/download"
|
||||
|
||||
class RestoreBackupJob(Job):
|
||||
"""Downloads a backup and overwrites local configuration with the backup.
|
||||
|
@ -38,7 +46,6 @@ class RestoreBackupJob(Job):
|
|||
self.restore_backup_error_message = ""
|
||||
|
||||
def run(self) -> None:
|
||||
|
||||
url = self._backup.get("download_url")
|
||||
assert url is not None
|
||||
|
||||
|
@ -48,7 +55,11 @@ class RestoreBackupJob(Job):
|
|||
error_callback = self._onRestoreRequestCompleted
|
||||
)
|
||||
|
||||
self._job_done.wait() # A job is considered finished when the run function completes
|
||||
# Note: Just to be sure, use the same structure here as in CreateBackupJob.
|
||||
for _ in range(5000):
|
||||
CuraApplication.getInstance().processEvents()
|
||||
if self._job_done.wait(0.02):
|
||||
break
|
||||
|
||||
def _onRestoreRequestCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
|
||||
if not HttpRequestManager.replyIndicatesSuccess(reply, error):
|
||||
|
@ -60,8 +71,8 @@ class RestoreBackupJob(Job):
|
|||
|
||||
# We store the file in a temporary path fist to ensure integrity.
|
||||
try:
|
||||
temporary_backup_file = NamedTemporaryFile(delete = False)
|
||||
with open(temporary_backup_file.name, "wb") as write_backup:
|
||||
self._temporary_backup_file = NamedTemporaryFile(delete_on_close = False)
|
||||
with open(self._temporary_backup_file.name, "wb") as write_backup:
|
||||
app = CuraApplication.getInstance()
|
||||
bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
|
||||
while bytes_read:
|
||||
|
@ -69,23 +80,98 @@ class RestoreBackupJob(Job):
|
|||
bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
|
||||
app.processEvents()
|
||||
except EnvironmentError as e:
|
||||
Logger.log("e", f"Unable to save backed up files due to computer limitations: {str(e)}")
|
||||
Logger.error(f"Unable to save backed up files due to computer limitations: {str(e)}")
|
||||
self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
|
||||
self._job_done.set()
|
||||
return
|
||||
|
||||
if not self._verifyMd5Hash(temporary_backup_file.name, self._backup.get("md5_hash", "")):
|
||||
if not self._verifyMd5Hash(self._temporary_backup_file.name, self._backup.get("md5_hash", "")):
|
||||
# Don't restore the backup if the MD5 hashes do not match.
|
||||
# This can happen if the download was interrupted.
|
||||
Logger.log("w", "Remote and local MD5 hashes do not match, not restoring backup.")
|
||||
Logger.error("Remote and local MD5 hashes do not match, not restoring backup.")
|
||||
self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
|
||||
self._job_done.set()
|
||||
return
|
||||
|
||||
# Tell Cura to place the backup back in the user data folder.
|
||||
with open(temporary_backup_file.name, "rb") as read_backup:
|
||||
metadata = self._backup.get("metadata", {})
|
||||
with open(self._temporary_backup_file.name, "rb") as read_backup:
|
||||
cura_api = CuraApplication.getInstance().getCuraAPI()
|
||||
cura_api.backups.restoreBackup(read_backup.read(), self._backup.get("metadata", {}))
|
||||
cura_api.backups.restoreBackup(read_backup.read(), metadata, auto_close=False)
|
||||
|
||||
self._job_done.set()
|
||||
# Read packages data-file, to get the 'to_install' plugin-ids.
|
||||
version_to_restore = Version(metadata.get("cura_release", "dev"))
|
||||
version_str = f"{version_to_restore.getMajor()}.{version_to_restore.getMinor()}"
|
||||
packages_path = os.path.abspath(os.path.join(os.path.abspath(
|
||||
Resources.getConfigStoragePath()), "..", version_str, "packages.json"))
|
||||
if not os.path.exists(packages_path):
|
||||
Logger.error(f"Can't find path '{packages_path}' to tell what packages should be redownloaded.")
|
||||
self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
|
||||
self._job_done.set()
|
||||
return
|
||||
|
||||
to_install = {}
|
||||
try:
|
||||
with open(packages_path, "r") as packages_file:
|
||||
packages_json = json.load(packages_file)
|
||||
if "to_install" in packages_json:
|
||||
for package_data in packages_json["to_install"].values():
|
||||
if "package_info" not in package_data:
|
||||
continue
|
||||
package_info = package_data["package_info"]
|
||||
if "package_id" in package_info and "sdk_version_semver" in package_info:
|
||||
to_install[package_info["package_id"]] = package_info["sdk_version_semver"]
|
||||
except IOError as ex:
|
||||
Logger.error(f"Couldn't open '{packages_path}' because '{str(ex)}' to get packages to re-install.")
|
||||
self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
|
||||
self._job_done.set()
|
||||
return
|
||||
|
||||
if len(to_install) < 1:
|
||||
Logger.info("No packages to reinstall, early out.")
|
||||
self._job_done.set()
|
||||
return
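The block above only needs package_id and sdk_version_semver out of the "to_install" section of packages.json. The exact schema is not shown in this diff; a minimal, self-contained sketch under that assumption:

import json

packages_json = json.loads("""
{
  "to_install": {
    "SomePluginId": {
      "package_info": {"package_id": "SomePluginId", "sdk_version_semver": "8.6.0"}
    }
  }
}
""")

to_install = {}
for package_data in packages_json.get("to_install", {}).values():
    info = package_data.get("package_info", {})
    if "package_id" in info and "sdk_version_semver" in info:
        to_install[info["package_id"]] = info["sdk_version_semver"]
print(to_install)  # {'SomePluginId': '8.6.0'}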
|
||||
|
||||
# Download all re-installable plugin packages, so they can be put back on start-up.
|
||||
redownload_errors = []
|
||||
def packageDownloadCallback(package_id: str, msg: "QNetworkReply", err: "QNetworkReply.NetworkError" = None) -> None:
|
||||
if err is not None or HttpRequestManager.safeHttpStatus(msg) != 200:
|
||||
redownload_errors.append(err)
|
||||
del to_install[package_id]
|
||||
|
||||
try:
|
||||
with NamedTemporaryFile(mode="wb", suffix=".curapackage", delete=False) as temp_file:
|
||||
bytes_read = msg.read(self.DISK_WRITE_BUFFER_SIZE)
|
||||
while bytes_read:
|
||||
temp_file.write(bytes_read)
|
||||
bytes_read = msg.read(self.DISK_WRITE_BUFFER_SIZE)
|
||||
CuraApplication.getInstance().processEvents()
|
||||
temp_file.close()
|
||||
if not CuraApplication.getInstance().getPackageManager().installPackage(temp_file.name):
|
||||
redownload_errors.append(f"Couldn't install package '{package_id}'.")
|
||||
except IOError as ex:
|
||||
redownload_errors.append(f"Couldn't process package '{package_id}' because '{ex}'.")
|
||||
|
||||
if len(to_install) < 1:
|
||||
if len(redownload_errors) == 0:
|
||||
Logger.info("All packages redownloaded!")
|
||||
self._job_done.set()
|
||||
else:
|
||||
msgs = "\n - ".join(redownload_errors)
|
||||
Logger.error(f"Couldn't re-install at least one package(s) because: {msgs}")
|
||||
self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
|
||||
self._job_done.set()
|
||||
|
||||
self._package_download_scope = UltimakerCloudScope(CuraApplication.getInstance())
|
||||
for package_id, package_api_version in to_install.items():
|
||||
def handlePackageId(package_id: str = package_id):
|
||||
HttpRequestManager.getInstance().get(
|
||||
PACKAGES_URL_TEMPLATE.format(package_api_version, package_id),
|
||||
scope=self._package_download_scope,
|
||||
callback=lambda msg: packageDownloadCallback(package_id, msg),
|
||||
error_callback=lambda msg, err: packageDownloadCallback(package_id, msg, err)
|
||||
)
|
||||
handlePackageId(package_id)
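The handlePackageId(package_id=package_id) wrapper exists because Python closures bind late: lambdas created in a loop all see the final value of the loop variable. Defining a per-iteration function (or a default argument) gives each callback its own frozen package_id, as this small demo shows:

callbacks_late = [lambda: pkg for pkg in ("A", "B", "C")]
callbacks_bound = [lambda pkg=pkg: pkg for pkg in ("A", "B", "C")]

print([cb() for cb in callbacks_late])   # ['C', 'C', 'C'] - every closure sees the final value
print([cb() for cb in callbacks_bound])  # ['A', 'B', 'C'] - default argument captures each value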
|
||||
|
||||
@staticmethod
|
||||
def _verifyMd5Hash(file_path: str, known_hash: str) -> bool:
|
||||
@ -68,6 +68,9 @@ class CuraEngineBackend(QObject, Backend):
|
|||
"""
|
||||
|
||||
super().__init__()
|
||||
self._init_done = False
|
||||
self._immediate_slice_after_init = False
|
||||
|
||||
# Find out where the engine is located, and how it is called.
|
||||
# This depends on how Cura is packaged and which OS we are running on.
|
||||
executable_name = "CuraEngine"
|
||||
|
@ -197,7 +200,8 @@ class CuraEngineBackend(QObject, Backend):
|
|||
self._slicing_error_message.actionTriggered.connect(self._reportBackendError)
|
||||
|
||||
self._resetLastSliceTimeStats()
|
||||
self._snapshot: Optional[QImage] = None
|
||||
self._snapshot: Optional[QImage] = None
|
||||
self._last_socket_error: Optional[Arcus.Error] = None
|
||||
|
||||
application.initializationFinished.connect(self.initialize)
|
||||
|
||||
|
@ -267,6 +271,10 @@ class CuraEngineBackend(QObject, Backend):
|
|||
self._machine_error_checker = application.getMachineErrorChecker()
|
||||
self._machine_error_checker.errorCheckFinished.connect(self._onStackErrorCheckFinished)
|
||||
|
||||
self._init_done = True
|
||||
if self._immediate_slice_after_init:
|
||||
self.slice()
|
||||
|
||||
def close(self) -> None:
|
||||
"""Terminate the engine process.
|
||||
|
||||
|
@ -341,6 +349,11 @@ class CuraEngineBackend(QObject, Backend):
|
|||
def slice(self) -> None:
|
||||
"""Perform a slice of the scene."""
|
||||
|
||||
if not self._init_done:
|
||||
self._immediate_slice_after_init = True
|
||||
return
|
||||
self._immediate_slice_after_init = False
|
||||
|
||||
self._createSnapshot()
|
||||
|
||||
self.startPlugins()
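The _init_done / _immediate_slice_after_init pair above implements a small defer-until-initialized handshake: a slice requested before initialize() has finished is remembered and replayed once initialization completes. Stripped of the Cura specifics (the class and method names below are illustrative only):

class DeferredSlicer:
    def __init__(self):
        self._init_done = False
        self._pending_slice = False

    def initialize(self):
        self._init_done = True
        if self._pending_slice:
            self.slice()                 # replay the request that arrived too early

    def slice(self):
        if not self._init_done:
            self._pending_slice = True   # remember it; initialize() will call us back
            return
        self._pending_slice = False
        print("slicing")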
|
||||
|
@ -569,7 +582,20 @@ class CuraEngineBackend(QObject, Backend):
|
|||
return
|
||||
|
||||
# Preparation completed, send it to the backend.
|
||||
self._socket.sendMessage(job.getSliceMessage())
|
||||
immediate_success = self._socket.sendMessage(job.getSliceMessage())
|
||||
if (not CuraApplication.getInstance().getUseExternalBackend()) and (not immediate_success):
|
||||
if self._last_socket_error is not None and self._last_socket_error.getErrorCode() == Arcus.ErrorCode.MessageTooBigError:
|
||||
error_txt = catalog.i18nc("@info:status", "Unable to send the model data to the engine. Please try to use a less detailed model, or reduce the number of instances.")
|
||||
else:
|
||||
error_txt = catalog.i18nc("@info:status", "Unable to send the model data to the engine. Please try again, or contact support.")
|
||||
|
||||
self._error_message = Message(error_txt,
|
||||
title=catalog.i18nc("@info:title", "Unable to slice"),
|
||||
message_type=Message.MessageType.WARNING)
|
||||
self._error_message.show()
|
||||
self.setState(BackendState.Error)
|
||||
self.backendError.emit(job)
|
||||
return
|
||||
|
||||
# Notify the user that it's now up to the backend to do its job
|
||||
self.setState(BackendState.Processing)
|
||||
|
@ -691,6 +717,7 @@ class CuraEngineBackend(QObject, Backend):
|
|||
if error.getErrorCode() == Arcus.ErrorCode.Debug:
|
||||
return
|
||||
|
||||
self._last_socket_error = error
|
||||
self._terminate()
|
||||
self._createSocket()
|
||||
|
||||
|
|
|
@ -49,7 +49,20 @@ class StartJobResult(IntEnum):
|
|||
ObjectsWithDisabledExtruder = 8
|
||||
|
||||
|
||||
class GcodeStartEndFormatter(Formatter):
|
||||
class GcodeConditionState(IntEnum):
|
||||
OutsideCondition = 1
|
||||
ConditionFalse = 2
|
||||
ConditionTrue = 3
|
||||
ConditionDone = 4
|
||||
|
||||
|
||||
class GcodeInstruction(IntEnum):
|
||||
Skip = 1
|
||||
Evaluate = 2
|
||||
EvaluateAndWrite = 3
|
||||
|
||||
|
||||
class GcodeStartEndFormatter:
|
||||
# Formatter class that handles token expansion in start/end gcode
|
||||
# Example of a start/end gcode string:
|
||||
# ```
|
||||
|
@ -63,22 +76,50 @@ class GcodeStartEndFormatter(Formatter):
|
|||
# will be used. Alternatively, if the expression is formatted as "{[expression], [extruder_nr]}",
|
||||
# then the expression will be evaluated with the extruder stack of the specified extruder_nr.
|
||||
|
||||
_extruder_regex = re.compile(r"^\s*(?P<expression>.*)\s*,\s*(?P<extruder_nr_expr>.*)\s*$")
|
||||
_instruction_regex = re.compile(r"{(?P<condition>if|else|elif|endif)?\s*(?P<expression>[^{}]*?)\s*(?:,\s*(?P<extruder_nr_expr>[^{}]*))?\s*}(?P<end_of_line>\n?)")
|
||||
|
||||
def __init__(self, all_extruder_settings: Dict[str, Any], default_extruder_nr: int = -1) -> None:
|
||||
def __init__(self, all_extruder_settings: Dict[str, Dict[str, Any]], default_extruder_nr: int = -1) -> None:
|
||||
super().__init__()
|
||||
self._all_extruder_settings: Dict[str, Any] = all_extruder_settings
|
||||
self._all_extruder_settings: Dict[str, Dict[str, Any]] = all_extruder_settings
|
||||
self._default_extruder_nr: int = default_extruder_nr
|
||||
self._cura_application = CuraApplication.getInstance()
|
||||
self._extruder_manager = ExtruderManager.getInstance()
|
||||
|
||||
def get_field(self, field_name, args: [str], kwargs: dict) -> Tuple[str, str]:
|
||||
# The get_field method parses all fields in the format-string and passes them individually to the get_value method.
|
||||
# e.g. for a string "Hello {foo.bar}" would the complete field "foo.bar" would be passed to get_field, and then
|
||||
# the individual parts "foo" and "bar" would be passed to get_value. This poses a problem for us, because want
|
||||
# to parse the entire field as a single expression. To solve this, we override the get_field method and return
|
||||
# the entire field as the expression.
|
||||
return self.get_value(field_name, args, kwargs), field_name
|
||||
def format(self, text: str) -> str:
|
||||
remaining_text: str = text
|
||||
result: str = ""
|
||||
|
||||
def get_value(self, expression: str, args: [str], kwargs: dict) -> str:
|
||||
self._condition_state: GcodeConditionState = GcodeConditionState.OutsideCondition
|
||||
|
||||
while len(remaining_text) > 0:
|
||||
next_code_match = self._instruction_regex.search(remaining_text)
|
||||
if next_code_match is not None:
|
||||
expression_start, expression_end = next_code_match.span()
|
||||
|
||||
if expression_start > 0:
|
||||
result += self._process_statement(remaining_text[:expression_start])
|
||||
|
||||
result += self._process_code(next_code_match)
|
||||
|
||||
remaining_text = remaining_text[expression_end:]
|
||||
|
||||
else:
|
||||
result += self._process_statement(remaining_text)
|
||||
remaining_text = ""
|
||||
|
||||
return result
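To make the tokenizer above concrete, here is _instruction_regex (copied from this diff) applied to a small, made-up start-g-code snippet; each match yields either a condition keyword (if/elif/else/endif) or a plain expression, optionally followed by an extruder-number expression after a comma:

import re

instruction_regex = re.compile(
    r"{(?P<condition>if|else|elif|endif)?\s*(?P<expression>[^{}]*?)\s*"
    r"(?:,\s*(?P<extruder_nr_expr>[^{}]*))?\s*}(?P<end_of_line>\n?)")

sample = ("{if material_bed_temperature > 0}\n"
          "M190 S{material_bed_temperature_layer_0, initial_extruder_nr}\n"
          "{endif}\n")

for match in instruction_regex.finditer(sample):
    print(match.group("condition"), "|", match.group("expression"), "|", match.group("extruder_nr_expr"))
# if | material_bed_temperature > 0 | None
# None | material_bed_temperature_layer_0 | initial_extruder_nr
# endif |  | None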
|
||||
|
||||
def _process_statement(self, statement: str) -> str:
|
||||
if self._condition_state in [GcodeConditionState.OutsideCondition, GcodeConditionState.ConditionTrue]:
|
||||
return statement
|
||||
else:
|
||||
return ""
|
||||
|
||||
def _process_code(self, code: re.Match) -> str:
|
||||
condition: Optional[str] = code.group("condition")
|
||||
expression: Optional[str] = code.group("expression")
|
||||
extruder_nr_expr: Optional[str] = code.group("extruder_nr_expr")
|
||||
end_of_line: Optional[str] = code.group("end_of_line")
|
||||
|
||||
# The following variables are not settings, but only become available after slicing.
|
||||
# when these variables are encountered, we return them as-is. They are replaced later
|
||||
|
@ -87,53 +128,100 @@ class GcodeStartEndFormatter(Formatter):
|
|||
if expression in post_slice_data_variables:
|
||||
return f"{{{expression}}}"
|
||||
|
||||
extruder_nr = str(self._default_extruder_nr)
|
||||
extruder_nr: str = str(self._default_extruder_nr)
|
||||
instruction: GcodeInstruction = GcodeInstruction.Skip
|
||||
|
||||
# The settings may specify a specific extruder to use. This is done by
|
||||
# formatting the expression as "{expression}, {extruder_nr_expr}". If the
|
||||
# expression is formatted like this, we extract the extruder_nr and use
|
||||
# it to get the value from the correct extruder stack.
|
||||
match = self._extruder_regex.match(expression)
|
||||
if match:
|
||||
expression = match.group("expression")
|
||||
extruder_nr_expr = match.group("extruder_nr_expr")
|
||||
|
||||
if extruder_nr_expr.isdigit():
|
||||
extruder_nr = extruder_nr_expr
|
||||
if condition is None:
|
||||
# This is a classic statement
|
||||
if self._condition_state in [GcodeConditionState.OutsideCondition, GcodeConditionState.ConditionTrue]:
|
||||
# Evaluate the expression and write its result
|
||||
instruction = GcodeInstruction.EvaluateAndWrite
|
||||
else:
|
||||
# This is a condition statement, first check validity
|
||||
if condition == "if":
|
||||
if self._condition_state != GcodeConditionState.OutsideCondition:
|
||||
raise SyntaxError("Nested conditions are not supported")
|
||||
else:
|
||||
# We get the value of the extruder_nr_expr from `_all_extruder_settings` dictionary
|
||||
# rather than the global container stack. The `_all_extruder_settings["-1"]` is a
|
||||
# dict-representation of the global container stack, with additional properties such
|
||||
# as `initial_extruder_nr`. As users may enter such expressions we can't use the
|
||||
# global container stack.
|
||||
extruder_nr = str(self._all_extruder_settings["-1"].get(extruder_nr_expr, "-1"))
|
||||
if self._condition_state == GcodeConditionState.OutsideCondition:
|
||||
raise SyntaxError("Condition should start with an 'if' statement")
|
||||
|
||||
if extruder_nr in self._all_extruder_settings:
|
||||
additional_variables = self._all_extruder_settings[extruder_nr].copy()
|
||||
else:
|
||||
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
|
||||
additional_variables = self._all_extruder_settings["-1"].copy()
|
||||
if condition == "if":
|
||||
# First instruction, just evaluate it
|
||||
instruction = GcodeInstruction.Evaluate
|
||||
|
||||
# Add the arguments and keyword arguments to the additional settings. These
|
||||
# are currently _not_ used, but they are added for consistency with the
|
||||
# base Formatter class.
|
||||
for key, value in enumerate(args):
|
||||
additional_variables[key] = value
|
||||
for key, value in kwargs.items():
|
||||
additional_variables[key] = value
|
||||
else:
|
||||
if self._condition_state == GcodeConditionState.ConditionTrue:
|
||||
# We have reached the next condition after a valid one has been found, skip the rest
|
||||
self._condition_state = GcodeConditionState.ConditionDone
|
||||
|
||||
if extruder_nr == "-1":
|
||||
container_stack = CuraApplication.getInstance().getGlobalContainerStack()
|
||||
else:
|
||||
container_stack = ExtruderManager.getInstance().getExtruderStack(extruder_nr)
|
||||
if not container_stack:
|
||||
if condition == "elif":
|
||||
if self._condition_state == GcodeConditionState.ConditionFalse:
|
||||
# New instruction, and valid condition has not been reached so far => evaluate it
|
||||
instruction = GcodeInstruction.Evaluate
|
||||
else:
|
||||
# New instruction, but valid condition has already been reached => skip it
|
||||
instruction = GcodeInstruction.Skip
|
||||
|
||||
elif condition == "else":
|
||||
instruction = GcodeInstruction.Skip # Never evaluate, expression should be empty
|
||||
if self._condition_state == GcodeConditionState.ConditionFalse:
|
||||
# Fallback instruction, and a valid condition has not been reached so far => activate this branch
|
||||
self._condition_state = GcodeConditionState.ConditionTrue
|
||||
|
||||
elif condition == "endif":
|
||||
instruction = GcodeInstruction.Skip # Never evaluate, expression should be empty
|
||||
self._condition_state = GcodeConditionState.OutsideCondition
|
||||
|
||||
if instruction >= GcodeInstruction.Evaluate and extruder_nr_expr is not None:
|
||||
extruder_nr_function = SettingFunction(extruder_nr_expr)
|
||||
container_stack = self._cura_application.getGlobalContainerStack()
|
||||
|
||||
# We add the variables contained in `_all_extruder_settings["-1"]`, which is a dict-representation of the
|
||||
# global container stack, with additional properties such as `initial_extruder_nr`. As users may enter such
|
||||
# expressions we can't use the global container stack. The variables contained in the global container stack
|
||||
# will then be inserted twice, which is not optimal but works well.
|
||||
extruder_nr = str(extruder_nr_function(container_stack, additional_variables=self._all_extruder_settings["-1"]))
|
||||
|
||||
if instruction >= GcodeInstruction.Evaluate:
|
||||
if extruder_nr in self._all_extruder_settings:
|
||||
additional_variables = self._all_extruder_settings[extruder_nr].copy()
|
||||
else:
|
||||
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
|
||||
container_stack = CuraApplication.getInstance().getGlobalContainerStack()
|
||||
additional_variables = self._all_extruder_settings["-1"].copy()
|
||||
|
||||
setting_function = SettingFunction(expression)
|
||||
value = setting_function(container_stack, additional_variables=additional_variables)
|
||||
if extruder_nr == "-1":
|
||||
container_stack = self._cura_application.getGlobalContainerStack()
|
||||
else:
|
||||
container_stack = self._extruder_manager.getExtruderStack(extruder_nr)
|
||||
if not container_stack:
|
||||
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
|
||||
container_stack = self._cura_application.getGlobalContainerStack()
|
||||
|
||||
return value
|
||||
setting_function = SettingFunction(expression)
|
||||
value = setting_function(container_stack, additional_variables=additional_variables)
|
||||
|
||||
if instruction == GcodeInstruction.Evaluate:
|
||||
if value:
|
||||
self._condition_state = GcodeConditionState.ConditionTrue
|
||||
else:
|
||||
self._condition_state = GcodeConditionState.ConditionFalse
|
||||
|
||||
return ""
|
||||
else:
|
||||
value_str = str(value)
|
||||
|
||||
if end_of_line is not None:
|
||||
# If we are evaluating an expression that is not a condition, restore the end of line
|
||||
value_str += end_of_line
|
||||
|
||||
return value_str
|
||||
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
class StartSliceJob(Job):
|
||||
|
@ -366,7 +454,12 @@ class StartSliceJob(Job):
|
|||
for extruder_stack in global_stack.extruderList:
|
||||
self._buildExtruderMessage(extruder_stack)
|
||||
|
||||
for plugin in CuraApplication.getInstance().getBackendPlugins():
|
||||
backend_plugins = CuraApplication.getInstance().getBackendPlugins()
|
||||
|
||||
# Sort backend plugins by ID. Not a very good strategy, but at least it is repeatable. This will be improved later.
|
||||
backend_plugins = sorted(backend_plugins, key=lambda backend_plugin: backend_plugin.getId())
|
||||
|
||||
for plugin in backend_plugins:
|
||||
if not plugin.usePlugin():
|
||||
continue
|
||||
for slot in plugin.getSupportedSlots():
|
||||
|
@ -465,6 +558,9 @@ class StartSliceJob(Job):
|
|||
result["day"] = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"][int(time.strftime("%w"))]
|
||||
result["initial_extruder_nr"] = CuraApplication.getInstance().getExtruderManager().getInitialExtruderNr()
|
||||
|
||||
# If adding or changing a setting here, please update the associated wiki page
|
||||
# https://github.com/Ultimaker/Cura/wiki/Start-End-G%E2%80%90Code
|
||||
|
||||
return result
|
||||
|
||||
def _cacheAllExtruderSettings(self):
|
||||
|
@ -518,6 +614,7 @@ class StartSliceJob(Job):
|
|||
|
||||
# Replace the setting tokens in start and end g-code.
|
||||
extruder_nr = stack.getProperty("extruder_nr", "value")
|
||||
settings["machine_extruder_prestart_code"] = self._expandGcodeTokens(settings["machine_extruder_prestart_code"], extruder_nr)
|
||||
settings["machine_extruder_start_code"] = self._expandGcodeTokens(settings["machine_extruder_start_code"], extruder_nr)
|
||||
settings["machine_extruder_end_code"] = self._expandGcodeTokens(settings["machine_extruder_end_code"], extruder_nr)
|
||||
|
||||
|
@ -554,12 +651,16 @@ class StartSliceJob(Job):
|
|||
start_gcode = settings["machine_start_gcode"]
|
||||
# Remove all the comments from the start g-code
|
||||
start_gcode = re.sub(r";.+?(\n|$)", "\n", start_gcode)
|
||||
bed_temperature_settings = ["material_bed_temperature", "material_bed_temperature_layer_0"]
|
||||
pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(bed_temperature_settings) # match {setting} as well as {setting, extruder_nr}
|
||||
settings["material_bed_temp_prepend"] = re.search(pattern, start_gcode) == None
|
||||
print_temperature_settings = ["material_print_temperature", "material_print_temperature_layer_0", "default_material_print_temperature", "material_initial_print_temperature", "material_final_print_temperature", "material_standby_temperature", "print_temperature"]
|
||||
pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(print_temperature_settings) # match {setting} as well as {setting, extruder_nr}
|
||||
settings["material_print_temp_prepend"] = re.search(pattern, start_gcode) is None
|
||||
|
||||
if settings["material_bed_temp_prepend"]:
|
||||
bed_temperature_settings = ["material_bed_temperature", "material_bed_temperature_layer_0"]
|
||||
pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(bed_temperature_settings) # match {setting} as well as {setting, extruder_nr}
|
||||
settings["material_bed_temp_prepend"] = re.search(pattern, start_gcode) == None
|
||||
|
||||
if settings["material_print_temp_prepend"]:
|
||||
print_temperature_settings = ["material_print_temperature", "material_print_temperature_layer_0", "default_material_print_temperature", "material_initial_print_temperature", "material_final_print_temperature", "material_standby_temperature", "print_temperature"]
|
||||
pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(print_temperature_settings) # match {setting} as well as {setting, extruder_nr}
|
||||
settings["material_print_temp_prepend"] = re.search(pattern, start_gcode) is None
|
||||
|
||||
# Replace the setting tokens in start and end g-code.
|
||||
# Use values from the first used extruder by default so we get the expected temperatures
|
||||
|
|
|
@ -208,7 +208,14 @@ Item
|
|||
anchors.rightMargin: UM.Theme.getSize("thin_margin").height
|
||||
|
||||
enabled: UM.Backend.state == UM.Backend.Done
|
||||
currentIndex: UM.Backend.state == UM.Backend.Done ? dfFilenameTextfield.text.startsWith("MM")? 1 : 0 : 2
|
||||
|
||||
// Pre-select the correct index, depending on the situation (see the model-property below):
|
||||
// - Don't select any post-slice-file-format when the engine isn't done.
|
||||
// - Choose either the S-series or the Makerbot-series of printers' format otherwise, depending on the active printer.
|
||||
// This way, the user can just click 'save' without having to worry about whether or not the format is right.
|
||||
property int isMakerbotFormat: Cura.MachineManager.activeMachine.getOutputFileFormats.includes("application/x-makerbot") || Cura.MachineManager.activeMachine.getOutputFileFormats.includes("application/x-makerbot-sketch")
|
||||
property int isBackendDone: UM.Backend.state == UM.Backend.Done
|
||||
currentIndex: isBackendDone ? (isMakerbotFormat ? 1 : 0) : 2
|
||||
|
||||
textRole: "text"
|
||||
valueRole: "value"
|
||||
|
|
|
@ -196,7 +196,7 @@ class DigitalFactoryApiClient:
|
|||
url = "{}/projects/{}/files".format(self.CURA_API_ROOT, library_project_id)
|
||||
self._http.get(url,
|
||||
scope = self._scope,
|
||||
callback = self._parseCallback(on_finished, DigitalFactoryFileResponse, failed),
|
||||
callback = self._parseCallback(on_finished, DigitalFactoryFileResponse, failed, default_values = {'username': ''}),
|
||||
error_callback = failed,
|
||||
timeout = self.DEFAULT_REQUEST_TIMEOUT)
|
||||
|
||||
|
@ -205,7 +205,8 @@ class DigitalFactoryApiClient:
|
|||
Callable[[List[CloudApiClientModel]], Any]],
|
||||
model: Type[CloudApiClientModel],
|
||||
on_error: Optional[Callable] = None,
|
||||
pagination_manager: Optional[PaginationManager] = None) -> Callable[[QNetworkReply], None]:
|
||||
pagination_manager: Optional[PaginationManager] = None,
|
||||
default_values: Dict[str, str] = None) -> Callable[[QNetworkReply], None]:
|
||||
|
||||
"""
|
||||
Creates a callback function so that it includes the parsing of the response into the correct model.
|
||||
|
@ -234,7 +235,7 @@ class DigitalFactoryApiClient:
|
|||
if status_code >= 300 and on_error is not None:
|
||||
on_error()
|
||||
else:
|
||||
self._parseModels(response, on_finished, model, pagination_manager = pagination_manager)
|
||||
self._parseModels(response, on_finished, model, pagination_manager = pagination_manager, default_values = default_values)
|
||||
|
||||
self._anti_gc_callbacks.append(parse)
|
||||
return parse
|
||||
|
@ -262,7 +263,8 @@ class DigitalFactoryApiClient:
|
|||
on_finished: Union[Callable[[CloudApiClientModel], Any],
|
||||
Callable[[List[CloudApiClientModel]], Any]],
|
||||
model_class: Type[CloudApiClientModel],
|
||||
pagination_manager: Optional[PaginationManager] = None) -> None:
|
||||
pagination_manager: Optional[PaginationManager] = None,
|
||||
default_values: Dict[str, str] = None) -> None:
|
||||
"""Parses the given models and calls the correct callback depending on the result.
|
||||
|
||||
:param response: The response from the server, after being converted to a dict.
|
||||
|
@ -279,7 +281,10 @@ class DigitalFactoryApiClient:
|
|||
if "links" in response and pagination_manager:
|
||||
pagination_manager.setLinks(response["links"])
|
||||
if isinstance(data, list):
|
||||
results = [model_class(**c) for c in data] # type: List[CloudApiClientModel]
|
||||
results = [] # type: List[CloudApiClientModel]
|
||||
for model_data in data:
|
||||
complete_model_data = (default_values | model_data) if default_values is not None else model_data
|
||||
results.append(model_class(**complete_model_data))
|
||||
on_finished_list = cast(Callable[[List[CloudApiClientModel]], Any], on_finished)
|
||||
on_finished_list(results)
|
||||
else:
|
||||
|
|
|
@ -24,7 +24,7 @@ class GCodeGzWriter(MeshWriter):
|
|||
def __init__(self) -> None:
|
||||
super().__init__(add_to_recent_files = False)
|
||||
|
||||
def write(self, stream: BufferedIOBase, nodes: List[SceneNode], mode = MeshWriter.OutputMode.BinaryMode) -> bool:
|
||||
def write(self, stream: BufferedIOBase, nodes: List[SceneNode], mode = MeshWriter.OutputMode.BinaryMode, **kwargs) -> bool:
|
||||
"""Writes the gzipped g-code to a stream.
|
||||
|
||||
Note that even though the function accepts a collection of nodes, the
|
||||
|
|
|
@ -298,8 +298,14 @@ class FlavorParser:
|
|||
position.e.extend([0] * (self._extruder_number - len(position.e) + 1))
|
||||
return position
|
||||
|
||||
def processMCode(self, M: int, line: str, position: Position, path: List[List[Union[float, int]]]) -> Position:
|
||||
pass
|
||||
def processMCode(self, M: int, line: str, position: Position, path: List[List[Union[float, int]]]) -> None:
|
||||
# Set extrusion mode
|
||||
if M == 82:
|
||||
# Set absolute extrusion mode
|
||||
self._is_absolute_extrusion = True
|
||||
elif M == 83:
|
||||
# Set relative extrusion mode
|
||||
self._is_absolute_extrusion = False
|
||||
|
||||
_type_keyword = ";TYPE:"
|
||||
_layer_keyword = ";LAYER:"
|
||||
|
|
|
@ -11,14 +11,6 @@ class RepRapFlavorParser(FlavorParser.FlavorParser):
|
|||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def processMCode(self, M, line, position, path):
|
||||
if M == 82:
|
||||
# Set absolute extrusion mode
|
||||
self._is_absolute_extrusion = True
|
||||
elif M == 83:
|
||||
# Set relative extrusion mode
|
||||
self._is_absolute_extrusion = False
|
||||
|
||||
def _gCode90(self, position, params, path):
|
||||
"""Set the absolute positioning
|
||||
|
||||
|
|
|
@ -56,7 +56,7 @@ class GCodeWriter(MeshWriter):
|
|||
|
||||
self._application = Application.getInstance()
|
||||
|
||||
def write(self, stream, nodes, mode = MeshWriter.OutputMode.TextMode):
|
||||
def write(self, stream, nodes, mode = MeshWriter.OutputMode.TextMode, **kwargs):
|
||||
"""Writes the g-code for the entire scene to a stream.
|
||||
|
||||
Note that even though the function accepts a collection of nodes, the
|
||||
|
|
|
@ -54,7 +54,7 @@ Item
|
|||
{
|
||||
anchors.top: parent.top
|
||||
anchors.left: parent.left
|
||||
width: parent.width * 2 / 3
|
||||
width: parent.width / 2
|
||||
|
||||
spacing: base.columnSpacing
|
||||
|
||||
|
@ -139,6 +139,39 @@ Item
|
|||
decimals: 0
|
||||
forceUpdateOnChangeFunction: forceUpdateFunction
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// =======================================
|
||||
// Right-side column "Nozzle Settings"
|
||||
// =======================================
|
||||
Column
|
||||
{
|
||||
anchors.top: parent.top
|
||||
anchors.right: parent.right
|
||||
width: parent.width / 2
|
||||
|
||||
spacing: base.columnSpacing
|
||||
|
||||
UM.Label // Title Label
|
||||
{
|
||||
text: catalog.i18nc("@title:label", " ")
|
||||
font: UM.Theme.getFont("medium_bold")
|
||||
}
|
||||
|
||||
Cura.NumericTextFieldWithUnit
|
||||
{
|
||||
id: extruderChangeDurationFieldId
|
||||
containerStackId: base.extruderStackId
|
||||
settingKey: "machine_extruder_change_duration"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
labelText: catalog.i18nc("@label", "Extruder Change duration")
|
||||
labelFont: base.labelFont
|
||||
labelWidth: base.labelWidth
|
||||
controlWidth: base.controlWidth
|
||||
unitText: catalog.i18nc("@label", "s")
|
||||
forceUpdateOnChangeFunction: forceUpdateFunction
|
||||
}
|
||||
|
||||
Cura.NumericTextFieldWithUnit
|
||||
{
|
||||
|
@ -179,24 +212,48 @@ Item
|
|||
anchors.right: parent.right
|
||||
anchors.margins: UM.Theme.getSize("default_margin").width
|
||||
|
||||
Cura.GcodeTextArea // "Extruder Start G-code"
|
||||
Column
|
||||
{
|
||||
anchors.top: parent.top
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: UM.Theme.getSize("default_margin").height
|
||||
anchors.left: parent.left
|
||||
width: base.columnWidth - UM.Theme.getSize("default_margin").width
|
||||
anchors.bottom: buttonLearnMore.top
|
||||
anchors.bottomMargin: UM.Theme.getSize("default_margin").height
|
||||
|
||||
width: parent.width / 2
|
||||
|
||||
labelText: catalog.i18nc("@title:label", "Extruder Start G-code")
|
||||
containerStackId: base.extruderStackId
|
||||
settingKey: "machine_extruder_start_code"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
spacing: base.columnSpacing
|
||||
|
||||
Cura.GcodeTextArea // "Extruder Prestart G-code"
|
||||
{
|
||||
anchors.top: parent.top
|
||||
anchors.left: parent.left
|
||||
height: (parent.height / 2) - UM.Theme.getSize("default_margin").height
|
||||
width: base.columnWidth - UM.Theme.getSize("default_margin").width
|
||||
|
||||
labelText: catalog.i18nc("@title:label", "Extruder Prestart G-code")
|
||||
containerStackId: base.extruderStackId
|
||||
settingKey: "machine_extruder_prestart_code"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
}
|
||||
|
||||
Cura.GcodeTextArea // "Extruder Start G-code"
|
||||
{
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.left: parent.left
|
||||
height: (parent.height / 2) - UM.Theme.getSize("default_margin").height
|
||||
width: base.columnWidth - UM.Theme.getSize("default_margin").width
|
||||
|
||||
labelText: catalog.i18nc("@title:label", "Extruder Start G-code")
|
||||
containerStackId: base.extruderStackId
|
||||
settingKey: "machine_extruder_start_code"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
}
|
||||
}
|
||||
|
||||
Cura.GcodeTextArea // "Extruder End G-code"
|
||||
{
|
||||
anchors.top: parent.top
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottom: buttonLearnMore.top
|
||||
anchors.bottomMargin: UM.Theme.getSize("default_margin").height
|
||||
anchors.right: parent.right
|
||||
width: base.columnWidth - UM.Theme.getSize("default_margin").width
|
||||
|
@ -206,5 +263,17 @@ Item
|
|||
settingKey: "machine_extruder_end_code"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
}
|
||||
|
||||
Cura.TertiaryButton
|
||||
{
|
||||
id: buttonLearnMore
|
||||
|
||||
text: catalog.i18nc("@button", "Learn more")
|
||||
iconSource: UM.Theme.getIcon("LinkExternal")
|
||||
isIconOnRightSide: true
|
||||
onClicked: Qt.openUrlExternally("https://github.com/Ultimaker/Cura/wiki/Start-End-G%E2%80%90Code")
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.right: parent.right
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -214,7 +214,7 @@ Item
|
|||
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
|
||||
labelText: catalog.i18nc("@label", "Y min")
|
||||
labelText: catalog.i18nc("@label", "Y min ( '-' towards back)")
|
||||
labelFont: base.labelFont
|
||||
labelWidth: base.labelWidth
|
||||
controlWidth: base.controlWidth
|
||||
|
@ -254,7 +254,7 @@ Item
|
|||
settingKey: "machine_head_with_fans_polygon"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
|
||||
labelText: catalog.i18nc("@label", "Y max")
|
||||
labelText: catalog.i18nc("@label", "Y max ( '+' towards front)")
|
||||
labelFont: base.labelFont
|
||||
labelWidth: base.labelWidth
|
||||
controlWidth: base.controlWidth
|
||||
|
@ -344,6 +344,21 @@ Item
|
|||
labelWidth: base.labelWidth
|
||||
forceUpdateOnChangeFunction: forceUpdateFunction
|
||||
}
|
||||
|
||||
/*
|
||||
- Allows user to toggle if Start Gcode is the absolute first gcode.
|
||||
*/
|
||||
Cura.SimpleCheckBox // "Make sure Start Code is before all gcodes"
|
||||
{
|
||||
id: applyStartGcodeFirstCheckbox
|
||||
containerStackId: machineStackId
|
||||
settingKey: "machine_start_gcode_first"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
labelText: catalog.i18nc("@label", "Start GCode must be first")
|
||||
labelFont: base.labelFont
|
||||
labelWidth: base.labelWidth
|
||||
forceUpdateOnChangeFunction: forceUpdateFunction
|
||||
}
|
||||
|
||||
|
||||
/* The "Shared Heater" feature is temporarily disabled because its
|
||||
|
@ -376,7 +391,7 @@ Item
|
|||
anchors
|
||||
{
|
||||
top: upperBlock.bottom
|
||||
bottom: parent.bottom
|
||||
bottom: buttonLearnMore.top
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
margins: UM.Theme.getSize("default_margin").width
|
||||
|
@ -403,5 +418,19 @@ Item
|
|||
settingKey: "machine_end_gcode"
|
||||
settingStoreIndex: propertyStoreIndex
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Cura.TertiaryButton
|
||||
{
|
||||
id: buttonLearnMore
|
||||
|
||||
text: catalog.i18nc("@button", "Learn more")
|
||||
iconSource: UM.Theme.getIcon("LinkExternal")
|
||||
isIconOnRightSide: true
|
||||
onClicked: Qt.openUrlExternally("https://github.com/Ultimaker/Cura/wiki/Start-End-G%E2%80%90Code")
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.right: parent.right
|
||||
anchors.margins: UM.Theme.getSize("default_margin").width
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright (c) 2023 UltiMaker
|
||||
# Copyright (c) 2024 UltiMaker
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
from io import StringIO, BufferedIOBase
|
||||
import json
|
||||
|
@ -18,6 +18,7 @@ from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
|
|||
from UM.i18n import i18nCatalog
|
||||
|
||||
from cura.CuraApplication import CuraApplication
|
||||
from cura.PrinterOutput.FormatMaps import FormatMaps
|
||||
from cura.Snapshot import Snapshot
|
||||
from cura.Utils.Threading import call_on_qt_thread
|
||||
from cura.CuraVersion import ConanInstalls
|
||||
|
@ -45,6 +46,13 @@ class MakerbotWriter(MeshWriter):
|
|||
suffixes=["makerbot"]
|
||||
)
|
||||
)
|
||||
MimeTypeDatabase.addMimeType(
|
||||
MimeType(
|
||||
name="application/x-makerbot-replicator_plus",
|
||||
comment="Makerbot Toolpath Package",
|
||||
suffixes=["makerbot"]
|
||||
)
|
||||
)
|
||||
|
||||
_PNG_FORMAT = [
|
||||
{"prefix": "isometric_thumbnail", "width": 120, "height": 120},
|
||||
|
@ -83,7 +91,7 @@ class MakerbotWriter(MeshWriter):
|
|||
|
||||
return None
|
||||
|
||||
def write(self, stream: BufferedIOBase, nodes: List[SceneNode], mode=MeshWriter.OutputMode.BinaryMode) -> bool:
|
||||
def write(self, stream: BufferedIOBase, nodes: List[SceneNode], mode=MeshWriter.OutputMode.BinaryMode, **kwargs) -> bool:
|
||||
metadata, file_format = self._getMeta(nodes)
|
||||
if mode != MeshWriter.OutputMode.BinaryMode:
|
||||
Logger.log("e", "MakerbotWriter does not support text mode.")
|
||||
|
@ -111,15 +119,15 @@ class MakerbotWriter(MeshWriter):
|
|||
match file_format:
|
||||
case "application/x-makerbot-sketch":
|
||||
filename, filedata = "print.gcode", gcode_text_io.getvalue()
|
||||
self._PNG_FORMATS = self._PNG_FORMAT
|
||||
case "application/x-makerbot":
|
||||
filename, filedata = "print.jsontoolpath", du.gcode_2_miracle_jtp(gcode_text_io.getvalue())
|
||||
self._PNG_FORMATS = self._PNG_FORMAT + self._PNG_FORMAT_METHOD
|
||||
case "application/x-makerbot-replicator_plus":
|
||||
filename, filedata = "print.jsontoolpath", du.gcode_2_miracle_jtp(gcode_text_io.getvalue(), nb_extruders=1)
|
||||
case _:
|
||||
raise Exception("Unsupported Mime type")
|
||||
|
||||
png_files = []
|
||||
for png_format in self._PNG_FORMATS:
|
||||
for png_format in (self._PNG_FORMAT + self._PNG_FORMAT_METHOD):
|
||||
width, height, prefix = png_format["width"], png_format["height"], png_format["prefix"]
|
||||
thumbnail_buffer = self._createThumbnail(width, height)
|
||||
if thumbnail_buffer is None:
|
||||
|
@ -137,6 +145,30 @@ class MakerbotWriter(MeshWriter):
|
|||
for png_file in png_files:
|
||||
file, data = png_file["file"], png_file["data"]
|
||||
zip_stream.writestr(file, data)
|
||||
api = CuraApplication.getInstance().getCuraAPI()
|
||||
metadata_json = api.interface.settings.getSliceMetadata()
|
||||
|
||||
# All the mapping stuff we have to do:
|
||||
product_to_id_map = FormatMaps.getProductIdMap()
|
||||
printer_name_map = FormatMaps.getInversePrinterNameMap()
|
||||
extruder_type_map = FormatMaps.getInverseExtruderTypeMap()
|
||||
material_map = FormatMaps.getInverseMaterialMap()
|
||||
for key, value in metadata_json.items():
|
||||
if "all_settings" in value:
|
||||
if "machine_name" in value["all_settings"]:
|
||||
machine_name = value["all_settings"]["machine_name"]
|
||||
if machine_name in product_to_id_map:
|
||||
machine_name = product_to_id_map[machine_name][0]
|
||||
value["all_settings"]["machine_name"] = printer_name_map.get(machine_name, machine_name)
|
||||
if "machine_nozzle_id" in value["all_settings"]:
|
||||
extruder_type = value["all_settings"]["machine_nozzle_id"]
|
||||
value["all_settings"]["machine_nozzle_id"] = extruder_type_map.get(extruder_type, extruder_type)
|
||||
if "material_type" in value["all_settings"]:
|
||||
material_type = value["all_settings"]["material_type"]
|
||||
value["all_settings"]["material_type"] = material_map.get(material_type, material_type)
|
||||
|
||||
slice_metadata = json.dumps(metadata_json, separators=(", ", ": "), indent=4)
|
||||
zip_stream.writestr("slicemetadata.json", slice_metadata)
|
||||
except (IOError, OSError, BadZipFile) as ex:
|
||||
Logger.log("e", f"Could not write to (.makerbot) file because: '{ex}'.")
|
||||
self.setInformation(catalog.i18nc("@error", "MakerbotWriter could not save to the designated path."))
|
||||
|
@ -226,12 +258,88 @@ class MakerbotWriter(MeshWriter):
|
|||
|
||||
meta["preferences"] = dict()
|
||||
bounds = application.getBuildVolume().getBoundingBox()
|
||||
intent = CuraApplication.getInstance().getIntentManager().currentIntentCategory
|
||||
meta["preferences"]["instance0"] = {
|
||||
"machineBounds": [bounds.right, bounds.back, bounds.left, bounds.front] if bounds is not None else None,
|
||||
"printMode": CuraApplication.getInstance().getIntentManager().currentIntentCategory,
|
||||
"machineBounds": [bounds.right, bounds.front, bounds.left, bounds.back] if bounds is not None else None,
|
||||
"printMode": intent
|
||||
}
|
||||
|
||||
meta["miracle_config"] = {"gaggles": {str(node.getName()): {} for node in nodes}}
|
||||
if file_format == "application/x-makerbot":
|
||||
accel_overrides = meta["accel_overrides"] = {}
|
||||
if intent in ['highspeed', 'highspeedsolid']:
|
||||
accel_overrides['do_input_shaping'] = True
|
||||
accel_overrides['do_corner_rounding'] = True
|
||||
bead_mode_overrides = accel_overrides["bead_mode"] = {}
|
||||
|
||||
accel_enabled = global_stack.getProperty('acceleration_enabled', 'value')
|
||||
|
||||
if accel_enabled:
|
||||
global_accel_setting = global_stack.getProperty('acceleration_print', 'value')
|
||||
accel_overrides["rate_mm_per_s_sq"] = {
|
||||
"x": global_accel_setting,
|
||||
"y": global_accel_setting
|
||||
}
|
||||
|
||||
if global_stack.getProperty('acceleration_travel_enabled', 'value'):
|
||||
travel_accel_setting = global_stack.getProperty('acceleration_travel', 'value')
|
||||
bead_mode_overrides['Travel Move'] = {
|
||||
"rate_mm_per_s_sq": {
|
||||
"x": travel_accel_setting,
|
||||
"y": travel_accel_setting
|
||||
}
|
||||
}
|
||||
|
||||
jerk_enabled = global_stack.getProperty('jerk_enabled', 'value')
|
||||
if jerk_enabled:
|
||||
global_jerk_setting = global_stack.getProperty('jerk_print', 'value')
|
||||
accel_overrides["max_speed_change_mm_per_s"] = {
|
||||
"x": global_jerk_setting,
|
||||
"y": global_jerk_setting
|
||||
}
|
||||
|
||||
if global_stack.getProperty('jerk_travel_enabled', 'value'):
|
||||
travel_jerk_setting = global_stack.getProperty('jerk_travel', 'value')
|
||||
if 'Travel Move' not in bead_mode_overrides:
|
||||
bead_mode_overrides['Travel Move'] = {}
|
||||
bead_mode_overrides['Travel Move'].update({
|
||||
"max_speed_change_mm_per_s": {
|
||||
"x": travel_jerk_setting,
|
||||
"y": travel_jerk_setting
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
# Get bead mode settings per extruder
|
||||
available_bead_modes = {
|
||||
"infill": "FILL",
|
||||
"prime_tower": "PRIME_TOWER",
|
||||
"roofing": "TOP_SURFACE",
|
||||
"support_infill": "SUPPORT",
|
||||
"support_interface": "SUPPORT_INTERFACE",
|
||||
"wall_0": "WALL_OUTER",
|
||||
"wall_x": "WALL_INNER",
|
||||
"skirt_brim": "SKIRT"
|
||||
}
|
||||
for idx, extruder in enumerate(extruders):
|
||||
for bead_mode_setting, bead_mode_tag in available_bead_modes.items():
|
||||
ext_specific_tag = "%s_%s" % (bead_mode_tag, idx)
|
||||
if accel_enabled or jerk_enabled:
|
||||
bead_mode_overrides[ext_specific_tag] = {}
|
||||
|
||||
if accel_enabled:
|
||||
accel_val = extruder.getProperty('acceleration_%s' % bead_mode_setting, 'value')
|
||||
bead_mode_overrides[ext_specific_tag]["rate_mm_per_s_sq"] = {
|
||||
"x": accel_val,
|
||||
"y": accel_val
|
||||
}
|
||||
if jerk_enabled:
|
||||
jerk_val = extruder.getProperty('jerk_%s' % bead_mode_setting, 'value')
|
||||
bead_mode_overrides[ext_specific_tag][ "max_speed_change_mm_per_s"] = {
|
||||
"x": jerk_val,
|
||||
"y": jerk_val
|
||||
}
|
||||
|
||||
meta["miracle_config"] = {"gaggles": {"instance0": {}}}
|
||||
|
||||
version_info = dict()
|
||||
cura_engine_info = ConanInstalls.get("curaengine", {"version": "unknown", "revision": "unknown"})
|
||||
|
|
|
@ -25,6 +25,12 @@ def getMetaData():
|
|||
"description": catalog.i18nc("@item:inlistbox", "Makerbot Sketch Printfile"),
|
||||
"mime_type": "application/x-makerbot-sketch",
|
||||
"mode": MakerbotWriter.MakerbotWriter.OutputMode.BinaryMode,
|
||||
},
|
||||
{
|
||||
"extension": file_extension,
|
||||
"description": catalog.i18nc("@item:inlistbox", "Makerbot Replicator+ Printfile"),
|
||||
"mime_type": "application/x-makerbot-replicator_plus",
|
||||
"mode": MakerbotWriter.MakerbotWriter.OutputMode.BinaryMode,
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
@ -122,7 +122,7 @@ class Script:
|
|||
if not key in line or (';' in line and line.find(key) > line.find(';')):
|
||||
return default
|
||||
sub_part = line[line.find(key) + 1:]
|
||||
m = re.search('^-?[0-9]+\.?[0-9]*', sub_part)
|
||||
m = re.search(r'^-?[0-9]+\.?[0-9]*', sub_part)
|
||||
if m is None:
|
||||
return default
|
||||
try:
|
||||
|
|
|
@ -92,7 +92,7 @@ class FilamentChange(Script):
|
|||
"type": "float",
|
||||
"default_value": 0,
|
||||
"minimum_value": 0,
|
||||
"enabled": "enabled"
|
||||
"enabled": "enabled and not firmware_config"
|
||||
},
|
||||
"retract_method":
|
||||
{
|
||||
|
|
|
@ -1,65 +1,217 @@
|
|||
# Copyright (c) 2020 Ultimaker B.V.
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
# Created by Wayne Porter
|
||||
# Re-write in April of 2024 by GregValiant (Greg Foresi)
|
||||
# Changes:
|
||||
# Added an 'Enable' setting
|
||||
# Added support for multi-line insertions (comma delimited)
|
||||
# Added insertions in a range of layers or a single insertion at a layer. Numbers are consistent with the Cura Preview (base1)
|
||||
# Added frequency of Insertion (once only, every layer, every 2nd, 3rd, 5th, 10th, 25th, 50th, 100th)
|
||||
# Added support for 'One at a Time' print sequence
|
||||
# Rafts are allowed and accounted for but no insertions are made in raft layers
|
||||
|
||||
from ..Script import Script
|
||||
import re
|
||||
from UM.Application import Application
|
||||
|
||||
class InsertAtLayerChange(Script):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def getSettingDataString(self):
|
||||
return """{
|
||||
"name": "Insert at layer change",
|
||||
"name": "Insert at Layer Change",
|
||||
"key": "InsertAtLayerChange",
|
||||
"metadata": {},
|
||||
"version": 2,
|
||||
"settings":
|
||||
{
|
||||
"insert_location":
|
||||
"enabled":
|
||||
{
|
||||
"label": "When to insert",
|
||||
"description": "Whether to insert code before or after layer change.",
|
||||
"label": "Enable this script",
|
||||
"description": "You must enable the script for it to run.",
|
||||
"type": "bool",
|
||||
"default_value": true,
|
||||
"enabled": true
|
||||
},
|
||||
"insert_frequency":
|
||||
{
|
||||
"label": "How often to insert",
|
||||
"description": "Every so many layers starting with the Start Layer OR as single insertion at a specific layer. If the print sequence is 'one_at_a_time' then the insertions will be made for every model. Insertions are made at the beginning of a layer.",
|
||||
"type": "enum",
|
||||
"options": {"before": "Before", "after": "After"},
|
||||
"default_value": "before"
|
||||
"options": {
|
||||
"once_only": "One insertion only",
|
||||
"every_layer": "Every Layer",
|
||||
"every_2nd": "Every 2nd",
|
||||
"every_3rd": "Every 3rd",
|
||||
"every_5th": "Every 5th",
|
||||
"every_10th": "Every 10th",
|
||||
"every_25th": "Every 25th",
|
||||
"every_50th": "Every 50th",
|
||||
"every_100th": "Every 100th"},
|
||||
"default_value": "every_layer",
|
||||
"enabled": "enabled"
|
||||
},
|
||||
"start_layer":
|
||||
{
|
||||
"label": "Starting Layer",
|
||||
"description": "The layer before which the first insertion will take place. If the Print_Sequence is 'All at Once' then use the layer numbers from the Cura Preview. Enter '1' to start at gcode LAYER:0. In 'One at a Time' mode use the layer numbers from the first model that prints AND all models will receive the same insertions. NOTE: There is never an insertion for raft layers.",
|
||||
"type": "int",
|
||||
"default_value": 1,
|
||||
"minimum_value": 1,
|
||||
"enabled": "insert_frequency != 'once_only' and enabled"
|
||||
},
|
||||
"end_layer":
|
||||
{
|
||||
"label": "Ending Layer",
|
||||
"description": "The layer before which the last insertion will take place. Enter '-1' to indicate the entire file. Use layer numbers from the Cura Preview.",
|
||||
"type": "int",
|
||||
"default_value": -1,
|
||||
"minimum_value": -1,
|
||||
"enabled": "insert_frequency != 'once_only' and enabled"
|
||||
},
|
||||
"single_end_layer":
|
||||
{
|
||||
"label": "Layer # for Single Insertion.",
|
||||
"description": "The layer before which the Gcode insertion will take place. Use the layer numbers from the Cura Preview.",
|
||||
"type": "str",
|
||||
"default_value": "",
|
||||
"enabled": "insert_frequency == 'once_only' and enabled"
|
||||
},
|
||||
"gcode_to_add":
|
||||
{
|
||||
"label": "G-code to insert",
|
||||
"description": "G-code to add before or after layer change.",
|
||||
"label": "G-code to insert.",
|
||||
"description": "G-code to add at start of the layer. Use a comma to delimit multi-line commands. EX: G28 X Y,M220 S100,M117 HELL0. NOTE: All inserted text will be converted to upper-case as some firmwares don't understand lower-case.",
|
||||
"type": "str",
|
||||
"default_value": ""
|
||||
},
|
||||
"skip_layers":
|
||||
{
|
||||
"label": "Skip layers",
|
||||
"description": "Number of layers to skip between insertions (0 for every layer).",
|
||||
"type": "int",
|
||||
"default_value": 0,
|
||||
"minimum_value": 0
|
||||
"default_value": "",
|
||||
"enabled": "enabled"
|
||||
}
|
||||
}
|
||||
}"""
|
||||
|
||||
def execute(self, data):
|
||||
gcode_to_add = self.getSettingValueByKey("gcode_to_add") + "\n"
|
||||
skip_layers = self.getSettingValueByKey("skip_layers")
|
||||
count = 0
|
||||
for layer in data:
|
||||
# Check that a layer is being printed
|
||||
lines = layer.split("\n")
|
||||
for line in lines:
|
||||
if ";LAYER:" in line:
|
||||
index = data.index(layer)
|
||||
if count == 0:
|
||||
if self.getSettingValueByKey("insert_location") == "before":
|
||||
layer = gcode_to_add + layer
|
||||
else:
|
||||
layer = layer + gcode_to_add
|
||||
|
||||
data[index] = layer
|
||||
|
||||
count = (count + 1) % (skip_layers + 1)
|
||||
break
|
||||
return data
|
||||
# Exit if the script is not enabled
|
||||
if not bool(self.getSettingValueByKey("enabled")):
|
||||
return data
|
||||
#Initialize variables
|
||||
mycode = self.getSettingValueByKey("gcode_to_add").upper()
|
||||
start_layer = int(self.getSettingValueByKey("start_layer"))
|
||||
end_layer = int(self.getSettingValueByKey("end_layer"))
|
||||
when_to_insert = self.getSettingValueByKey("insert_frequency")
|
||||
end_list = [0]
|
||||
print_sequence = Application.getInstance().getGlobalContainerStack().getProperty("print_sequence", "value")
|
||||
# Get the topmost layer number and adjust the end_list
|
||||
if end_layer == -1:
|
||||
if print_sequence == "all_at_once":
|
||||
for lnum in range(0, len(data) - 1):
|
||||
if ";LAYER:" in data[lnum]:
|
||||
the_top = int(data[lnum].split(";LAYER:")[1].split("\n")[0])
|
||||
end_list[0] = the_top
|
||||
# Get the topmost layer number for each model and append it to the end_list
|
||||
if print_sequence == "one_at_a_time":
|
||||
for lnum in range(0, 10):
|
||||
if ";LAYER:0" in data[lnum]:
|
||||
start_at = lnum + 1
|
||||
break
|
||||
for lnum in range(start_at, len(data)-1, 1):
|
||||
if ";LAYER:" in data[lnum] and not ";LAYER:0" in data[lnum] and not ";LAYER:-" in data[lnum]:
|
||||
end_list[len(end_list) - 1] = int(data[lnum].split(";LAYER:")[1].split("\n")[0])
|
||||
continue
|
||||
if ";LAYER:0" in data[lnum]:
|
||||
end_list.append(0)
|
||||
elif end_layer != -1:
|
||||
if print_sequence == "all_at_once":
|
||||
# Catch an error if the entered End_Layer > the top layer in the gcode
|
||||
for e_num, layer in enumerate(data):
|
||||
if ";LAYER:" in layer:
|
||||
top_layer = int(data[e_num].split(";LAYER:")[1].split("\n")[0])
|
||||
end_list[0] = end_layer - 1
|
||||
if top_layer < end_layer - 1:
|
||||
end_list[0] = top_layer
|
||||
elif print_sequence == "one_at_a_time":
|
||||
# Find the index of the first Layer:0
|
||||
for lnum in range(0, 10):
|
||||
if ";LAYER:0" in data[lnum]:
|
||||
start_at = lnum + 1
|
||||
break
|
||||
# Get the top layer number for each model
|
||||
for lnum in range(start_at, len(data)-1):
|
||||
if ";LAYER:" in data[lnum] and not ";LAYER:0" in data[lnum] and not ";LAYER:-" in data[lnum]:
|
||||
end_list[len(end_list) - 1] = int(data[lnum].split(";LAYER:")[1].split("\n")[0])
|
||||
if ";LAYER:0" in data[lnum]:
|
||||
end_list.append(0)
|
||||
# Adjust the end list if an end layer was named
|
||||
for index, num in enumerate(end_list):
|
||||
if num > end_layer - 1:
|
||||
end_list[index] = end_layer - 1
|
||||
#If the gcode_to_enter is multi-line then replace the commas with newline characters
|
||||
if mycode != "":
|
||||
if "," in mycode:
|
||||
mycode = re.sub(",", "\n",mycode)
|
||||
gcode_to_add = mycode
|
||||
#Get the insertion frequency
|
||||
match when_to_insert:
|
||||
case "every_layer":
|
||||
freq = 1
|
||||
case "every_2nd":
|
||||
freq = 2
|
||||
case "every_3rd":
|
||||
freq = 3
|
||||
case "every_5th":
|
||||
freq = 5
|
||||
case "every_10th":
|
||||
freq = 10
|
||||
case "every_25th":
|
||||
freq = 25
|
||||
case "every_50th":
|
||||
freq = 50
|
||||
case "every_100th":
|
||||
freq = 100
|
||||
case "once_only":
|
||||
the_insert_layer = int(self.getSettingValueByKey("single_end_layer"))-1
|
||||
case _:
|
||||
raise ValueError(f"Unexpected insertion frequency {when_to_insert}")
|
||||
#Single insertion
|
||||
if when_to_insert == "once_only":
|
||||
# For print sequence 'All at once'
|
||||
if print_sequence == "all_at_once":
|
||||
for index, layer in enumerate(data):
|
||||
if ";LAYER:" + str(the_insert_layer) + "\n" in layer:
|
||||
lines = layer.split("\n")
|
||||
lines.insert(1,gcode_to_add)
|
||||
data[index] = "\n".join(lines)
|
||||
return data
|
||||
# For print sequence 'One at a time'
|
||||
else:
|
||||
for index, layer in enumerate(data):
|
||||
if ";LAYER:" + str(the_insert_layer) + "\n" in layer:
|
||||
lines = layer.split("\n")
|
||||
lines.insert(1,gcode_to_add)
|
||||
data[index] = "\n".join(lines)
|
||||
return data
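Both branches above use the same idiom: each layer entry of data begins with its ;LAYER:n line (Cura's usual post-processing chunking), so splitting on newlines and inserting at index 1 drops the new g-code directly after that marker. On a made-up layer chunk:

layer_chunk = ";LAYER:10\nG1 X10 Y10 E1.0\nG1 X20 Y10 E2.0\n"
lines = layer_chunk.split("\n")
lines.insert(1, "M117 LAYER TEN")   # lands right after the ';LAYER:10' marker
print("\n".join(lines))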
|
||||
# For multiple insertions
|
||||
if when_to_insert != "once_only":
|
||||
# Search from the line after the first Layer:0 so we know when a model ends if in One at a Time mode.
|
||||
first_0 = True
|
||||
next_layer = start_layer - 1
|
||||
end_layer = end_list.pop(0)
|
||||
for index, layer in enumerate(data):
|
||||
lines = layer.split("\n")
|
||||
for l_index, line in enumerate(lines):
|
||||
if ";LAYER:" in line:
|
||||
layer_number = int(line.split(":")[1])
|
||||
if layer_number == next_layer and layer_number <= end_layer:
|
||||
lines.insert(l_index + 1,gcode_to_add)
|
||||
data[index] = "\n".join(lines)
|
||||
next_layer += freq
|
||||
# Reset the next_layer for one-at-a-time
|
||||
if next_layer > int(end_layer):
|
||||
next_layer = start_layer - 1
|
||||
# Index to the next end_layer when a Layer:0 is encountered
|
||||
try:
|
||||
if not first_0 and layer_number == 0:
|
||||
end_layer = end_list.pop(0)
|
||||
except:
|
||||
pass
|
||||
# Beyond the initial Layer:0, further Layer:0's indicate the top layer of a model.
|
||||
if layer_number == 0:
|
||||
first_0 = False
|
||||
break
|
||||
return data
|
|
@ -338,7 +338,7 @@ class PauseAtHeight(Script):
|
|||
nbr_negative_layers += 1
|
||||
|
||||
#Track the latest printing temperature in order to resume at the correct temperature.
|
||||
if re.match("T(\d*)", line):
|
||||
if re.match(r"T(\d*)", line):
|
||||
current_t = self.getValue(line, "T")
|
||||
m = self.getValue(line, "M")
|
||||
if m is not None and (m == 104 or m == 109) and self.getValue(line, "S") is not None:
|
||||
|
|
988
plugins/PostProcessingPlugin/scripts/PurgeLinesAndUnload.py
Normal file
|
@ -0,0 +1,988 @@
|
|||
# August 2024 - Designed by: GregValiant (Greg Foresi). Straightened out by: Hellaholic
|
||||
#
|
||||
# NOTE: You may have purge lines in your startup, or you may use this script, you should not do both. The script will attempt to comment out existing StartUp purge lines.
|
||||
# 'Add Purge Lines to StartUp' Allows the user to determine where the purge lines are on the build plate, or to not use purge lines if a print extends to the limits of the build surface.
|
||||
# This script will attempt to recognize and comment out purge lines in the StartUp Gcode but they should be removed if using this script.
|
||||
# The setting 'Purge Line Length' is only available for rectangular beds because I was too lazy to calculate the 45° arcs.
|
||||
# 'Move to Start' takes an orthogonal path around the periphery before moving in to the print start location. It eliminates strings across the print area.
|
||||
# 'Adjust Starting E' is a correction in the E location before the skirt/brim starts. The user can make an adjustment so that the skirt / brim / raft starts where it should.
|
||||
# 'Unload' adds code to the Ending Gcode that will unload the filament from the machine. The unload distance is broken into chunks to avoid overly long E distances.
|
||||
# Added extra moves to account for Cura adding a "Travel to Prime Tower" move that can cross the middle of the build surface.
|
||||
# Added ability to take 'disallowed areas' into account.
|
||||
|
||||
import math
|
||||
from ..Script import Script
|
||||
from UM.Application import Application
|
||||
from UM.Message import Message
|
||||
import re
|
||||
from UM.Logger import Logger
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class Location(str, Enum):
|
||||
LEFT = "left"
|
||||
RIGHT = "right"
|
||||
REAR = "rear"
|
||||
FRONT = "front"
|
||||
|
||||
|
||||
class Position(tuple, Enum):
|
||||
LEFT_FRONT = ("left", "front")
|
||||
RIGHT_FRONT = ("right", "front")
|
||||
LEFT_REAR = ("left", "rear")
|
||||
RIGHT_REAR = ("right", "rear")
|
||||
|
||||
|
||||
class PurgeLinesAndUnload(Script):

def __init__(self):
super().__init__()
self.global_stack = Application.getInstance().getGlobalContainerStack()
self.extruder = self.global_stack.extruderList
self.end_purge_location = None
self.speed_travel = None
# This will be True when there are more than 4 'machine_disallowed_areas'
self.show_warning = False
self.disallowed_areas = self.global_stack.getProperty("machine_disallowed_areas", "value")
self.extruder_count = self.global_stack.getProperty("machine_extruder_count", "value")
self.bed_shape = self.global_stack.getProperty("machine_shape", "value")
self.origin_at_center = self.global_stack.getProperty("machine_center_is_zero", "value")
self.machine_width = self.global_stack.getProperty("machine_width", "value")
self.machine_depth = self.global_stack.getProperty("machine_depth", "value")
self.machine_left = 1.0
self.machine_right = self.machine_width - 1.0
self.machine_front = 1.0
self.machine_back = self.machine_depth - 1.0
self.start_x = None
self.start_y = None

def initialize(self) -> None:
|
||||
super().initialize()
|
||||
# Get the StartUp Gcode from Cura and attempt to catch if it contains purge lines. Message the user if an extrusion is in the startup.
|
||||
startup_gcode = self.global_stack.getProperty("machine_start_gcode", "value")
|
||||
start_lines = startup_gcode.splitlines()
|
||||
for line in start_lines:
|
||||
if "G1" in line and " E" in line and (" X" in line or " Y" in line):
|
||||
Message(title="[Purge Lines and Unload]",
|
||||
text="It appears that there are 'purge lines' in the StartUp Gcode. Using the 'Add Purge Lines' function of this script will comment them out.").show()
|
||||
break
|
||||
# 'is_rectangular' is used to disable half-length purge lines for elliptic beds.
|
||||
self._instance.setProperty("is_rectangular", "value", True if self.global_stack.getProperty("machine_shape", "value") == "rectangular" else False)
|
||||
self._instance.setProperty("move_to_prime_tower", "value", True if self.global_stack.getProperty("machine_extruder_count", "value") > 1 else False)
|
||||
# Set the default E adjustment
|
||||
self._instance.setProperty("adjust_e_loc_to", "value", -abs(round(float(self.extruder[0].getProperty("retraction_amount", "value")), 1)))
|
||||
|
||||
def getSettingDataString(self):
|
||||
return """{
|
||||
"name": "Purge Lines and Unload Filament",
|
||||
"key": "PurgeLinesAndUnload",
|
||||
"metadata": {},
|
||||
"version": 2,
|
||||
"settings":
|
||||
{
|
||||
"add_purge_lines":
|
||||
{
|
||||
"label": "Add Purge Lines to StartUp",
|
||||
"description": "The purge lines can be left, right, front or back. If there are purge lines present in the StartUp Gcode remove them or comment them out before using this script. You don't want to double dip.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"value": false,
|
||||
"enabled": true
|
||||
},
|
||||
"purge_line_location":
|
||||
{
|
||||
"label": " Purge Line Location",
|
||||
"description": "What edge of the build plate should have the purge lines. If the printer is 'Elliptical' then it is assumed to be an 'Origin At Center' printer and the purge lines are 90° arcs.",
|
||||
"type": "enum",
|
||||
"options": {
|
||||
"left": "On left edge (Xmin)",
|
||||
"right": "On right edge (Xmax)",
|
||||
"front": "On front edge (Ymin)",
|
||||
"rear": "On back edge (Ymax)"},
|
||||
"default_value": "left",
|
||||
"enabled": "add_purge_lines"
|
||||
},
|
||||
"purge_line_length":
|
||||
{
|
||||
"label": " Purge Line Length",
|
||||
"description": "Select 'Full' for the entire Height or Width of the build plate. Select 'Half' for shorter purge lines. NOTE: This has no effect on elliptic beds.",
|
||||
"type": "enum",
|
||||
"options": {
|
||||
"purge_full": "Full",
|
||||
"purge_half": "Half"},
|
||||
"default_value": "purge_full",
|
||||
"enabled": "add_purge_lines and is_rectangular"
|
||||
},
|
||||
"border_distance":
|
||||
{
|
||||
"label": " Border Distance",
|
||||
"description": "This is the distance from the build plate edge to the first purge line. '0' works for most printers but you might want the lines further inboard. The allowable range is -12 to 12. ⚠️ Negative numbers are allowed for printers that have 'Disallowed Areas'. You must use due caution when using a negative value.",
|
||||
"type": "int",
|
||||
"unit": "mm ",
|
||||
"default_value": 0,
|
||||
"minimum_value": -12,
|
||||
"maximum_value": 12,
|
||||
"enabled": "add_purge_lines"
|
||||
},
|
||||
"prime_blob_enable":
|
||||
{
|
||||
"label": " Start with Prime Blob️",
|
||||
"description": "Enable a stationary purge before starting the purge lines. Available only when purge line location is 'left' or 'front'",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"enabled": "add_purge_lines and purge_line_location in ['front', 'left']"
|
||||
},
|
||||
"prime_blob_distance":
|
||||
{
|
||||
"label": " Blob Distance️",
|
||||
"description": "How many mm's of filament should be extruded for the blob.",
|
||||
"type": "int",
|
||||
"default_value": 0,
|
||||
"unit": "mm ",
|
||||
"enabled": "add_purge_lines and prime_blob_enable and purge_line_location in ['front', 'left']"
|
||||
},
|
||||
"prime_blob_loc_x":
|
||||
{
|
||||
"label": " Blob Location X",
|
||||
"description": "The 'X' position to put the prime blob. 'Origin at Center' printers might require a negative value here. Keep in mind that purge lines always start in the left front, or the right rear. Pay attention or the nozzle can sit down into the prime blob.",
|
||||
"type": "int",
|
||||
"default_value": 0,
|
||||
"unit": "mm ",
|
||||
"enabled": "add_purge_lines and prime_blob_enable and purge_line_location in ['front', 'left']"
|
||||
},
|
||||
"prime_blob_loc_y":
|
||||
{
|
||||
"label": " Blob location Y",
|
||||
"description": "The 'Y' position to put the prime blob. 'Origin at Center' printers might require a negative value here. Keep in mind that purge lines always start in the left front, or the right rear. Pay attention or the nozzle can sit down into the prime blob.",
|
||||
"type": "int",
|
||||
"default_value": 0,
|
||||
"unit": "mm ",
|
||||
"enabled": "add_purge_lines and prime_blob_enable and purge_line_location in ['front', 'left']"
|
||||
},
|
||||
"move_to_start":
|
||||
{
|
||||
"label": "Circle around to layer start ⚠️",
|
||||
"description": "Depending on where the 'Layer Start X' and 'Layer Start Y' are for the print, the opening travel move can pass across the print area and leave a string there. This option will generate an orthogonal path that moves the nozzle around the edges of the build plate and then comes in to the Start Point. || ⚠️ || The nozzle can drop to Z0.0 and touch the build plate at each stop in order to 'nail down the string'. The nozzle always raises after the touch-down. It will not drag on the bed.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"enabled": true
|
||||
},
|
||||
"move_to_start_min_z":
|
||||
{
|
||||
"label": " Minimum Z height ⚠️",
|
||||
"description": "When moving to the start position, the nozzle can touch down on the build plate at each stop (Z = 0.0). That will stick the string to the build plate at each direction change so it doesn't pull across the print area. Some printers may not respond well to Z=0.0. You may set a minimum Z height here (min is 0.0 and max is 0.50). The string must stick or it defeats the purpose of moving around the periphery.",
|
||||
"type": "float",
|
||||
"default_value": 0.0,
|
||||
"minimum_value": 0.0,
|
||||
"maximum_value": 0.5,
|
||||
"enabled": "move_to_start"
|
||||
},
|
||||
"adjust_starting_e":
|
||||
{
|
||||
"label": "Adjust Starting E location",
|
||||
"description": "If there is a retraction after the purge lines in the Startup Gcode (like the 'Add Purge Lines' script here does) then often the skirt does not start where the nozzle starts. It is because Cura always adds a retraction prior to the print starting which results in a double retraction. Enabling this will allow you to adjust the starting E location and tune it so the skirt/brim/model starts right where it should. To fix a blob enter a positive number. To fix a 'dry start' enter a negative number.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"value": false,
|
||||
"enabled": true
|
||||
},
|
||||
"adjust_e_loc_to":
|
||||
{
|
||||
"label": " Starting E location",
|
||||
"description": "This is usually a negative amount and often equal to the '-Retraction Distance'. This 'G92 E' adjustment changes where the printer 'thinks' the end of the filament is in relation to the nozzle. It replaces the retraction that Cura adds prior to the start of 'LAYER:0'. If retraction is not enabled then this setting has no effect.",
|
||||
"type": "float",
|
||||
"unit": "mm ",
|
||||
"default_value": -6.5,
|
||||
"enabled": "adjust_starting_e"
|
||||
},
|
||||
"enable_unload":
|
||||
{
|
||||
"label": "Unload filament at print end",
|
||||
"description": "Adds an unload script to the Ending Gcode section. It goes in just ahead of the M104 S0. This scripts always unloads the active extruder. If the unload distance is greater than 150mm it will be broken into chunks to avoid tripping the excessive extrusion warning in some firmware.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"enabled": true
|
||||
},
|
||||
"unload_distance":
|
||||
{
|
||||
"label": " Unload Distance",
|
||||
"description": "The amount of filament to unload. Bowden printers usually require a significant amount and direct drives not as much.",
|
||||
"type": "int",
|
||||
"default_value": 440,
|
||||
"unit": "mm ",
|
||||
"enabled": "enable_unload"
|
||||
},
|
||||
"unload_quick_purge":
|
||||
{
|
||||
"label": " Quick purge before unload",
|
||||
"description": "When printing something fine that has a lot of retractions in a short space (like lettering or spires) right before the unload, the filament can get hung up in the hot end and unload can fail. A quick purge will soften the end of the filament so it will retract correctly. This 'quick purge' will take place at the last position of the nozzle.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"enabled": "enable_unload"
|
||||
},
|
||||
"move_to_prime_tower":
|
||||
{
|
||||
"label": "Hidden setting",
|
||||
"description": "Hidden setting that enables 'move_to_prime_tower' for multi extruder machines.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"enabled": false
|
||||
},
|
||||
"is_rectangular":
|
||||
{
|
||||
"label": "Bed is rectangular",
|
||||
"description": "Hidden setting that disables 'purge line length' for elliptical beds.",
|
||||
"type": "bool",
|
||||
"default_value": false,
|
||||
"enabled": false
|
||||
}
|
||||
}
|
||||
}"""
|
||||
|
||||
def execute(self, data):
|
||||
# Exit if the Gcode has already been processed.
|
||||
for num in range(0, len(data)):
|
||||
layer = data[num].split("\n")
|
||||
for line in layer:
|
||||
if ";LAYER:" in line:
|
||||
break
|
||||
elif "PurgeLinesAndUnload" in line:
|
||||
Logger.log("i", "[Add Purge Lines and Unload Filament] has already run on this gcode.")
|
||||
return data
|
||||
# The function also retrieves extruder settings used later in the script
|
||||
# 't0_has_offsets' is used to exit 'Add Purge Lines' and 'Circle around...' because the script is not compatible with machines with the right nozzle as the primary nozzle.
|
||||
self.t0_has_offsets = False
|
||||
self.init_ext_nr = self._get_initial_tool()
|
||||
# Adjust the usable size of the bed per any 'disallowed areas'
|
||||
self._get_build_plate_extents()
|
||||
# The start location changes according to which quadrant the nozzle is in at the beginning
|
||||
self.end_purge_location = self._get_real_start_point(data[1])
|
||||
self.border_distance = self.getSettingValueByKey("border_distance")
|
||||
self.prime_blob_enable = self.getSettingValueByKey("prime_blob_enable")
|
||||
if self.prime_blob_enable:
|
||||
self.prime_blob_distance = self.getSettingValueByKey("prime_blob_distance")
|
||||
else:
|
||||
self.prime_blob_distance = 0
|
||||
# Set the minimum Z to stick the string to the build plate when Move to Start is selected.
|
||||
self.touchdown_z = self.getSettingValueByKey("move_to_start_min_z")
|
||||
|
||||
# Mapping settings to corresponding methods
|
||||
procedures = {
|
||||
"add_purge_lines": self._add_purge_lines,
|
||||
"move_to_prime_tower": self._move_to_prime_tower,
|
||||
"move_to_start": self._move_to_start,
|
||||
"adjust_starting_e": self._adjust_starting_e,
|
||||
"enable_unload": self._unload_filament
|
||||
}
|
||||
# Run selected procedures
|
||||
for setting, method in procedures.items():
|
||||
if self.getSettingValueByKey(setting):
|
||||
method(data)
|
||||
# Format the startup and ending gcodes
|
||||
data[1] = self._format_string(data[1])
|
||||
data[-1] = self._format_string(data[-1])
|
||||
if self.getSettingValueByKey("add_purge_lines"):
|
||||
if self.show_warning:
|
||||
msg_text = ("The printer has ( " + str(len(self.disallowed_areas))
|
||||
+ " ) 'disallowed areas'. That can cause the area available for the purge lines to be small.\nOpen the Gcode file for preview in Cura and check the purge line location to insure it is acceptable.")
|
||||
else:
|
||||
msg_text = "Open the Gcode file for preview in Cura. Make sure the 'Purge Lines' don't run underneath something else and are acceptable."
|
||||
Message(title="[Purge Lines and Unload]", text=msg_text).show()
|
||||
return data
|
||||
|
||||
def _get_real_start_point(self, first_section: str) -> tuple:
|
||||
last_x, last_y = 0.0, 0.0
|
||||
start_quadrant = Position.LEFT_FRONT
|
||||
|
||||
for line in first_section.split("\n"):
|
||||
if line.startswith(";") and not line.startswith(";LAYER_COUNT") or not line:
|
||||
continue
|
||||
|
||||
if line.startswith("G28"):
|
||||
last_x, last_y = 0.0, 0.0
|
||||
elif line[:3] in {"G0 ", "G1 "}:
|
||||
last_x = self.getValue(line, "X") if " X" in line else last_x
|
||||
last_y = self.getValue(line, "Y") if " Y" in line else last_y
|
||||
elif "LAYER_COUNT" in line:
|
||||
break
|
||||
|
||||
midpoint_x, midpoint_y = (0.0, 0.0) if self.origin_at_center else (
|
||||
self.machine_width / 2, self.machine_depth / 2)
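# Quadrant boundary: (0, 0) for origin-at-center machines, otherwise the middle of the bed.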
|
||||
if last_x <= midpoint_x and last_y <= midpoint_y:
|
||||
start_quadrant = Position.LEFT_FRONT
|
||||
elif last_x > midpoint_x and last_y <= midpoint_y:
|
||||
start_quadrant = Position.RIGHT_FRONT
|
||||
elif last_x > midpoint_x and last_y > midpoint_y:
|
||||
start_quadrant = Position.RIGHT_REAR
|
||||
elif last_x <= midpoint_x and last_y > midpoint_y:
|
||||
start_quadrant = Position.LEFT_REAR
|
||||
|
||||
return start_quadrant
|
||||
|
||||
"""
|
||||
For some multi-extruder printers.
|
||||
Takes into account a 'Move to Prime Tower' if there is one and adds orthogonal travel moves to get there.
|
||||
'Move to Prime Tower' does not require that the prime tower is enabled,
|
||||
only that 'machine_extruder_start_position_?' is in the definition file.
|
||||
"""
|
||||
|
||||
def _move_to_prime_tower(self, first_section: str) -> str:
|
||||
if self.extruder_count == 1:
|
||||
return first_section
|
||||
adjustment_lines = ""
|
||||
move_to_prime_present = False
|
||||
prime_tower_x = self.global_stack.getProperty("prime_tower_position_x", "value")
|
||||
prime_tower_y = self.global_stack.getProperty("prime_tower_position_y", "value")
|
||||
prime_tower_loc = self._prime_tower_quadrant(prime_tower_x, prime_tower_y)
|
||||
# Shortstop an error if Start Location comes through as None
|
||||
if self.end_purge_location is None:
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
if prime_tower_loc != self.end_purge_location:
|
||||
startup = first_section[1].split("\n")
|
||||
for index, line in enumerate(startup):
|
||||
if ";LAYER_COUNT:" in line:
|
||||
try:
|
||||
if startup[index + 1].startswith("G0"):
|
||||
prime_move = startup[index + 1] + " ; Move to Prime Tower"
|
||||
adjustment_lines = self._move_to_location("Prime Tower", prime_tower_loc)
|
||||
startup[index + 1] = adjustment_lines + prime_move + "\n;---------------------[End of Prime Tower moves]\n" + startup[index]
|
||||
startup.pop(index)
|
||||
first_section[1] = "\n".join(startup)
|
||||
move_to_prime_present = True
|
||||
except IndexError:
|
||||
pass
|
||||
# The start_location changes to the prime tower location in case 'Move to Start' is enabled.
|
||||
if move_to_prime_present:
|
||||
self.end_purge_location = prime_tower_loc
|
||||
return first_section
|
||||
|
||||
# Determine the quadrant that the prime tower rests in so the orthogonal moves can be calculated
|
||||
def _prime_tower_quadrant(self, prime_tower_x: float, prime_tower_y: float) -> tuple:
|
||||
midpoint_x, midpoint_y = (0.0, 0.0) if self.origin_at_center else (
|
||||
self.machine_width / 2, self.machine_depth / 2)
|
||||
|
||||
if prime_tower_x < midpoint_x and prime_tower_y < midpoint_y:
|
||||
return Position.LEFT_FRONT
|
||||
elif prime_tower_x > midpoint_x and prime_tower_y < midpoint_y:
|
||||
return Position.RIGHT_FRONT
|
||||
elif prime_tower_x > midpoint_x and prime_tower_y > midpoint_y:
|
||||
return Position.RIGHT_REAR
|
||||
elif prime_tower_x < midpoint_x and prime_tower_y > midpoint_y:
|
||||
return Position.LEFT_REAR
|
||||
else:
|
||||
return Position.LEFT_FRONT # return Default in case of no match
|
||||
|
||||
def _move_to_location(self, location_name: str, location: tuple) -> str:
|
||||
"""
|
||||
Compare the input tuple (B) with the end purge location (A) and describe the move from A to B.
|
||||
Parameters:
|
||||
location_name (str): A descriptive name for the target location.
|
||||
location (tuple): The target tuple (e.g., ("right", "front")).
|
||||
Returns:
|
||||
str: G-code for the move from A to B or an empty string if no move is required.
|
||||
"""
|
||||
# Validate input
|
||||
if len(self.end_purge_location) != 2 or len(location) != 2:
|
||||
raise ValueError("Both locations must be tuples of length 2.")
|
||||
|
||||
# Extract components
|
||||
start_side, start_depth = self.end_purge_location
|
||||
target_side, target_depth = location
|
||||
# Start of the moves and a comment to highlight the move
|
||||
moves = [f";MESH:NONMESH---------[Circle around to {location_name}] Start from: {str(start_side)} {str(start_depth)} Go to: {target_side} {target_depth}\nG0 F600 Z2 ; Move up\n"]
|
||||
|
||||
# Helper function to add G-code for moves
|
||||
def add_move(axis: str, position: float) -> None:
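# Each leg: travel to the stop point, drop to the configured touchdown Z to pin the string to the bed,
# then lift back to Z2 before the next leg.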
moves.append(
|
||||
f"G0 F{self.speed_travel} {axis}{position} ; Start move\n"
|
||||
f"G0 F600 Z{self.touchdown_z} ; Nail down the string\n"
|
||||
f"G0 F600 Z2 ; Move up\n"
|
||||
)
|
||||
|
||||
# Move to a corner
|
||||
if start_side == Location.LEFT:
|
||||
moves.append(f"G0 F{self.speed_travel} X{self.machine_left + 6} ; Init move\n")
|
||||
elif start_side == Location.RIGHT:
|
||||
moves.append(f"G0 F{self.speed_travel} X{self.machine_right - 6} ; Init move\n")
|
||||
if start_depth == Location.FRONT:
|
||||
add_move("Y", self.machine_front + 6)
|
||||
elif start_depth == Location.REAR:
|
||||
add_move("Y", self.machine_back - 6)
|
||||
# Compare sides
|
||||
if start_side != target_side:
|
||||
if target_side == Location.RIGHT:
|
||||
add_move("X", self.machine_right)
|
||||
else:
|
||||
add_move("X", self.machine_left)
|
||||
# Compare positions
|
||||
if start_depth != target_depth:
|
||||
if target_depth == Location.REAR:
|
||||
add_move("Y", self.machine_back)
|
||||
else:
|
||||
add_move("Y", self.machine_front)
|
||||
if len(moves) == 1:
|
||||
moves.append(f"G0 F{self.speed_travel} Y{self.start_y} ; Move to start Y\n")
|
||||
# Combine moves into a single G-code string and return
|
||||
return "".join(moves)
|
||||
|
||||
def _get_build_plate_extents(self):
|
||||
"""
|
||||
Machine disallowed areas can be ordered at the whim of the definition author and cannot be counted on when parsed
|
||||
This determines a simple rectangle that will be available for the purge lines. For some machines (Ex: UM3) it can be a small rectangle.
|
||||
If there are "extruder offsets" then use them to adjust the 'machine_right' and 'machine_back' independent of any disallowed areas.
|
||||
"""
|
||||
if self.bed_shape == "rectangular":
|
||||
if self.disallowed_areas:
|
||||
if len(self.disallowed_areas) > 4:
|
||||
self.show_warning = True
|
||||
mid_x = 0
|
||||
mid_y = 0
|
||||
left_x = -(self.machine_width / 2)
|
||||
right_x = (self.machine_width / 2)
|
||||
front_y = (self.machine_depth / 2)
|
||||
back_y = -(self.machine_depth / 2)
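# Walk every corner of every disallowed area (centre-origin coordinates here) and pull each bed limit
# inward to the corner nearest the centre on that side.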
for rect in self.disallowed_areas:
|
||||
for corner in rect:
|
||||
x = corner[0]
|
||||
if mid_x > x > left_x:
|
||||
left_x = x
|
||||
if mid_x < x < right_x:
|
||||
right_x = x
|
||||
y = corner[1]
|
||||
if mid_y < y < front_y:
|
||||
front_y = y
|
||||
if mid_y > y > back_y:
|
||||
back_y = y
|
||||
if self.origin_at_center:
|
||||
self.machine_left = round(left_x, 2)
|
||||
self.machine_right = round(right_x, 2)
|
||||
self.machine_front = round(front_y, 2)
|
||||
self.machine_back = round(back_y, 2)
|
||||
else:
|
||||
self.machine_left = round(left_x + self.machine_width / 2, 2)
|
||||
self.machine_right = round(right_x + self.machine_width / 2, 2)
|
||||
self.machine_front = round((self.machine_depth / 2) - front_y, 2)
|
||||
self.machine_back = round((self.machine_depth / 2) - back_y, 2)
|
||||
else:
|
||||
if self.origin_at_center:
|
||||
self.machine_left = round(-(self.machine_width / 2), 2)
|
||||
self.machine_right = round((self.machine_width / 2) - self.nozzle_offset_x, 2)
|
||||
self.machine_front = round(-(self.machine_depth / 2) + self.nozzle_offset_y, 2)
|
||||
self.machine_back = round((self.machine_depth / 2) - self.nozzle_offset_y, 2)
|
||||
else:
|
||||
self.machine_left = 0
|
||||
self.machine_right = self.machine_width - self.nozzle_offset_x
|
||||
if self.nozzle_offset_y >= 0:
|
||||
self.machine_front = 0
|
||||
self.machine_back = self.machine_depth - self.nozzle_offset_y
|
||||
elif self.nozzle_offset_y < 0:
|
||||
self.machine_front = abs(self.nozzle_offset_y)
|
||||
self.machine_back = self.machine_depth
|
||||
return
|
||||
|
||||
# Add Purge Lines to the user defined position on the build plate
|
||||
def _add_purge_lines(self, data: str):
|
||||
if self.t0_has_offsets:
|
||||
data[0] += "; [Purge Lines and Unload] 'Add Purge Lines' did not run because the assumed primary nozzle (T0) has tool offsets.\n"
|
||||
Message(title = "[Purge Lines and Unload]", text = "'Add Purge Lines' did not run because the assumed primary nozzle (T0) has tool offsets").show()
|
||||
return data
|
||||
|
||||
def calculate_purge_volume(line_width, purge_length, volume_per_mm):
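# Convert the purged road volume to an E distance (mm of filament): line width x the 0.3 mm purge
# height used for these moves x the purge length, with a 1.25 factor (roughly 25% extra), divided by
# 'volume_per_mm' (the filament cross-section, i.e. mm^3 of filament per mm pushed).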
return round((line_width * 0.3 * purge_length) * 1.25 / volume_per_mm, 5)
|
||||
|
||||
def adjust_for_prime_blob_gcode(retract_speed, retract_distance):
|
||||
"""Generates G-code lines for prime blob adjustment."""
gcode_lines = [
|
||||
f"G1 F{retract_speed} E{retract_distance} ; Unretract",
|
||||
"G92 E0 ; Reset extruder\n"
|
||||
]
|
||||
return "\n".join(gcode_lines)
|
||||
|
||||
purge_location = self.getSettingValueByKey("purge_line_location")
|
||||
purge_extrusion_full = True if self.getSettingValueByKey("purge_line_length") == "purge_full" else False
|
||||
purge_str = ";TYPE:CUSTOM----------[Purge Lines]\nG0 F600 Z2 ; Move up\nG92 E0 ; Reset extruder\n"
|
||||
purge_str += self._get_blob_code()
|
||||
# Normal cartesian printer with origin at the left front corner
|
||||
if self.bed_shape == "rectangular" and not self.origin_at_center:
|
||||
if purge_location == Location.LEFT:
|
||||
purge_len = int(self.machine_back - 20) if purge_extrusion_full else int((self.machine_back - self.machine_front) / 2)
|
||||
y_stop = int(self.machine_back - 10) if purge_extrusion_full else int(self.machine_depth / 2)
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
purge_str = purge_str.replace("Lines", "Lines at MinX")
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{self.machine_left + self.border_distance} Y{self.machine_front + 10} ; Move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_left + self.border_distance} Y{y_stop} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{self.machine_left + 3 + self.border_distance} Y{y_stop} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_left + 3 + self.border_distance} Y{self.machine_front + 10} E{round(purge_volume * 2, 5)} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_left + 3 + self.border_distance} Y{self.machine_front + 20} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_left + 3 + self.border_distance} Y{self.machine_front + 35} ; Wipe\n"
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
elif purge_location == Location.RIGHT:
|
||||
purge_len = int(self.machine_depth - 20) if purge_extrusion_full else int((self.machine_back - self.machine_front) / 2)
|
||||
y_stop = int(self.machine_front + 10) if purge_extrusion_full else int(self.machine_depth / 2)
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
purge_str = purge_str.replace("Lines", "Lines at MaxX")
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{self.machine_right - self.border_distance} ; Move\nG0 Y{self.machine_back - 10} ; Move\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_right - self.border_distance} Y{y_stop} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{self.machine_right - 3 - self.border_distance} Y{y_stop} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_right - 3 - self.border_distance} Y{self.machine_back - 10} E{purge_volume * 2} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_right - 3 - self.border_distance} Y{self.machine_back - 20} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_right - 3 - self.border_distance} Y{self.machine_back - 35} ; Wipe\n"
|
||||
self.end_purge_location = Position.RIGHT_REAR
|
||||
elif purge_location == Location.FRONT:
|
||||
purge_len = int(self.machine_width) - self.nozzle_offset_x - 20 if purge_extrusion_full else int(
|
||||
(self.machine_right - self.machine_left) / 2)
|
||||
x_stop = int(self.machine_right - 10) if purge_extrusion_full else int(self.machine_width / 2)
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
purge_str = purge_str.replace("Lines", "Lines at MinY")
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{self.machine_left + 10} Y{self.machine_front + self.border_distance} ; Move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{x_stop} Y{self.machine_front + self.border_distance} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{x_stop} Y{self.machine_front + 3 + self.border_distance} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_left + 10} Y{self.machine_front + 3 + self.border_distance} E{purge_volume * 2} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_left + 20} Y{self.machine_front + 3 + self.border_distance} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_left + 35} Y{self.machine_front + 3 + self.border_distance} ; Wipe\n"
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
elif purge_location == Location.REAR:
|
||||
purge_len = int(self.machine_width - 20) if purge_extrusion_full else int(
|
||||
(self.machine_right - self.machine_left) / 2)
|
||||
x_stop = int(self.machine_left + 10) if purge_extrusion_full else int(self.machine_width / 2)
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
purge_str = purge_str.replace("Lines", "Lines at MaxY")
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} Y{self.machine_back - self.border_distance} ; Ortho Move to back\n"
|
||||
purge_str += f"G0 X{self.machine_right - 10} ; Ortho move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{x_stop} Y{self.machine_back - self.border_distance} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{x_stop} Y{self.machine_back - 3 - self.border_distance} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_right - 10} Y{self.machine_back - 3 - self.border_distance} E{purge_volume * 2} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_right - 20} Y{self.machine_back - 3 - self.border_distance} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_right - 35} Y{self.machine_back - 3 - self.border_distance} ; Wipe\n"
|
||||
self.end_purge_location = Position.RIGHT_REAR
|
||||
# Some cartesian printers (BIBO, Weedo, MethodX, etc.) are Origin at Center
|
||||
elif self.bed_shape == "rectangular" and self.origin_at_center:
|
||||
if purge_location == Location.LEFT:
|
||||
purge_len = int(self.machine_back - self.machine_front - 20) if purge_extrusion_full else abs(
|
||||
int(self.machine_front - 10))
|
||||
y_stop = int(self.machine_back - 10) if purge_extrusion_full else 0
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{self.machine_left + self.border_distance} Y{self.machine_front + 10} ; Move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_left + self.border_distance} Y{y_stop} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{self.machine_left + 3 + self.border_distance} Y{y_stop} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_left + 3 + self.border_distance} Y{self.machine_front + 10} E{round(purge_volume * 2, 5)} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_left + 3 + self.border_distance} Y{self.machine_front + 20} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_left + 3 + self.border_distance} Y{self.machine_front + 35} ; Wipe\n"
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
elif purge_location == Location.RIGHT:
|
||||
purge_len = int(self.machine_back - 20) if purge_extrusion_full else int(
|
||||
(self.machine_back - self.machine_front) / 2)
|
||||
y_stop = int(self.machine_front + 10) if purge_extrusion_full else 0
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{self.machine_right - self.border_distance} Z2 ; Move\nG0 Y{self.machine_back - 10} Z2 ; Move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_right - self.border_distance} Y{y_stop} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{self.machine_right - 3 - self.border_distance} Y{y_stop} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_right - 3 - self.border_distance} Y{self.machine_back - 10} E{purge_volume * 2} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_right - 3 - self.border_distance} Y{self.machine_back - 20} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_right - 3 - self.border_distance} Y{self.machine_back - 35} ; Wipe\n"
|
||||
self.end_purge_location = Position.RIGHT_REAR
|
||||
elif purge_location == Location.FRONT:
|
||||
purge_len = int(self.machine_right - self.machine_left - 20) if purge_extrusion_full else int(
|
||||
(self.machine_right - self.machine_left) / 2)
|
||||
x_stop = int(self.machine_right - 10) if purge_extrusion_full else 0
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{self.machine_left + 10} Z2 ; Move\nG0 Y{self.machine_front + self.border_distance} Z2 ; Move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{x_stop} Y{self.machine_front + self.border_distance} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{x_stop} Y{self.machine_front + 3 + self.border_distance} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_left + 10} Y{self.machine_front + 3 + self.border_distance} E{purge_volume * 2} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_left + 20} Y{self.machine_front + 3 + self.border_distance} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_left + 35} Y{self.machine_front + 3 + self.border_distance} ; Wipe\n"
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
elif purge_location == Location.REAR:
|
||||
purge_len = int(self.machine_right - self.machine_left - 20) if purge_extrusion_full else abs(
|
||||
int(self.machine_right - 10))
|
||||
x_stop = int(self.machine_left + 10) if purge_extrusion_full else 0
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} Y{self.machine_back - self.border_distance} Z2; Ortho Move to back\n"
|
||||
purge_str += f"G0 X{self.machine_right - 10} Z2 ; Ortho Move to start\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
if self.prime_blob_enable:
|
||||
purge_str += adjust_for_prime_blob_gcode(self.retract_speed, self.retract_dist)
|
||||
# Purge two lines
|
||||
purge_str += f"G1 F{self.print_speed} X{x_stop} Y{self.machine_back - self.border_distance} E{purge_volume} ; First line\n"
|
||||
purge_str += f"G0 X{x_stop} Y{self.machine_back - 3 - self.border_distance} ; Move over\n"
|
||||
purge_str += f"G1 F{self.print_speed} X{self.machine_right - 10} Y{self.machine_back - 3 - self.border_distance} E{purge_volume * 2} ; Second line\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round(purge_volume * 2 - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z8 ; Move Up\nG4 S1 ; Wait for 1 second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{self.machine_right - 20} Y{self.machine_back - 3 - self.border_distance} Z0.3 ; Slide over and down\n"
|
||||
purge_str += f"G0 X{self.machine_right - 35} Y{self.machine_back - 3 - self.border_distance} ; Wipe\n"
|
||||
self.end_purge_location = Position.RIGHT_REAR
|
||||
# Elliptic printers with Origin at Center
|
||||
elif self.bed_shape == "elliptic":
|
||||
if purge_location in [Location.LEFT, Location.RIGHT]:
|
||||
radius_1 = round((self.machine_width / 2) - 1, 2)
|
||||
else: # For purge_location in [Location.FRONT, Location.REAR]
|
||||
radius_1 = round((self.machine_depth / 2) - 1, 2)
|
||||
purge_len = int(radius_1) * math.pi / 4
|
||||
purge_volume = calculate_purge_volume(self.init_line_width, purge_len, self.mm3_per_mm)
|
||||
if purge_location == Location.LEFT:
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X-{round(radius_1 * .707, 2)} Y-{round(radius_1 * .707, 2)} ; Travel\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
# Purge two arcs
|
||||
purge_str += f"G2 F{self.print_speed} X-{round(radius_1 * .707, 2)} Y{round(radius_1 * .707, 2)} I{round(radius_1 * .707, 2)} J{round(radius_1 * .707, 2)} E{purge_volume} ; First Arc\n"
|
||||
purge_str += f"G0 X-{round((radius_1 - 3) * .707, 2)} Y{round((radius_1 - 3) * .707, 2)} ; Move Over\n"
|
||||
purge_str += f"G3 F{self.print_speed} X-{round((radius_1 - 3) * .707, 2)} Y-{round((radius_1 - 3) * .707, 2)} I{round((radius_1 - 3) * .707, 2)} J-{round((radius_1 - 3) * .707, 2)} E{purge_volume * 2} ; Second Arc\n"
|
||||
purge_str += f"G1 X-{round((radius_1 - 3) * .707 - 25, 2)} E{round(purge_volume * 2 + 1, 5)} ; Move Over\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round((purge_volume * 2 + 1) - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z5 ; Move Up\nG4 S1 ; Wait 1 Second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X-{round((radius_1 - 3) * .707 - 15, 2)} Z0.3 ; Slide Over\n"
|
||||
purge_str += f"G0 F{self.print_speed} X-{round((radius_1 - 3) * .707, 2)} ; Wipe\n"
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
elif purge_location == Location.RIGHT:
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{round(radius_1 * .707, 2)} Y-{round(radius_1 * .707, 2)} ; Travel\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
# Purge two arcs
|
||||
purge_str += f"G3 F{self.print_speed} X{round(radius_1 * .707, 2)} Y{round(radius_1 * .707, 2)} I-{round(radius_1 * .707, 2)} J{round(radius_1 * .707, 2)} E{purge_volume} ; First Arc\n"
|
||||
purge_str += f"G0 X{round((radius_1 - 3) * .707, 2)} Y{round((radius_1 - 3) * .707, 2)} ; Move Over\n"
|
||||
purge_str += f"G2 F{self.print_speed} X{round((radius_1 - 3) * .707, 2)} Y-{round((radius_1 - 3) * .707, 2)} I-{round((radius_1 - 3) * .707, 2)} J-{round((radius_1 - 3) * .707, 2)} E{purge_volume * 2} ; Second Arc\n"
|
||||
purge_str += f"G1 X{round((radius_1 - 3) * .707 - 25, 2)} E{round(purge_volume * 2 + 1, 5)} ; Move Over\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round((purge_volume * 2 + 1) - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z5 ; Move Up\nG4 S1 ; Wait 1 Second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} X{round((radius_1 - 3) * .707 - 15, 2)} Z0.3 ; Slide Over\n"
|
||||
purge_str += f"G0 F{self.print_speed} X{round((radius_1 - 3) * .707, 2)} ; Wipe\n"
|
||||
self.end_purge_location = Position.RIGHT_REAR
|
||||
elif purge_location == Location.FRONT:
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X-{round(radius_1 * .707, 2)} Y-{round(radius_1 * .707, 2)} ; Travel\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
# Purge two arcs
|
||||
purge_str += f"G3 F{self.print_speed} X{round(radius_1 * .707, 2)} Y-{round(radius_1 * .707, 2)} I{round(radius_1 * .707, 2)} J{round(radius_1 * .707, 2)} E{purge_volume} ; First Arc\n"
|
||||
purge_str += f"G0 X{round((radius_1 - 3) * .707, 2)} Y-{round((radius_1 - 3) * .707, 2)} ; Move Over\n"
|
||||
purge_str += f"G2 F{self.print_speed} X-{round((radius_1 - 3) * .707, 2)} Y-{round((radius_1 - 3) * .707, 2)} I-{round((radius_1 - 3) * .707, 2)} J{round((radius_1 - 3) * .707, 2)} E{purge_volume * 2} ; Second Arc\n"
|
||||
purge_str += f"G1 Y-{round((radius_1 - 3) * .707 - 25, 2)} E{round(purge_volume * 2 + 1, 5)} ; Move Over\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round((purge_volume * 2 + 1) - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z5 ; Move Up\nG4 S1 ; Wait 1 Second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} Y-{round((radius_1 - 3) * .707 - 15, 2)} Z0.3 ; Slide Over\n"
|
||||
purge_str += f"G0 F{self.print_speed} Y-{round((radius_1 - 3) * .707, 2)} ; Wipe\n"
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
elif purge_location == Location.REAR:
|
||||
# Travel to the purge start
|
||||
purge_str += f"G0 F{self.speed_travel} X{round(radius_1 * .707, 2)} Y{round(radius_1 * .707, 2)} ; Travel\n"
|
||||
purge_str += f"G0 F600 Z0.3 ; Move down\n"
|
||||
# Purge two arcs
|
||||
purge_str += f"G3 F{self.print_speed} X-{round(radius_1 * .707, 2)} Y{round(radius_1 * .707, 2)} I-{round(radius_1 * .707, 2)} J-{round(radius_1 * .707, 2)} E{purge_volume} ; First Arc\n"
|
||||
purge_str += f"G0 X-{round((radius_1 - 3) * .707, 2)} Y{round((radius_1 - 3) * .707, 2)} ; Move Over\n"
|
||||
purge_str += f"G2 F{self.print_speed} X{round((radius_1 - 3) * .707, 2)} Y{round((radius_1 - 3) * .707, 2)} I{round((radius_1 - 3) * .707, 2)} J-{round((radius_1 - 3) * .707, 2)} E{purge_volume * 2} ; Second Arc\n"
|
||||
purge_str += f"G1 Y{round((radius_1 - 3) * .707 - 25, 2)} E{round(purge_volume * 2 + 1, 5)} ; Move Over\n"
|
||||
# Retract if enabled
|
||||
purge_str += f"G1 F{int(self.retract_speed)} E{round((purge_volume * 2 + 1) - self.retract_dist, 5)} ; Retract\n" if self.retraction_enable else ""
|
||||
purge_str += "G0 F600 Z5\nG4 S1 ; Wait 1 Second\n"
|
||||
# Wipe
|
||||
purge_str += f"G0 F{self.print_speed} Y{round((radius_1 - 3) * .707 - 15, 2)} Z0.3 ; Slide Over\n"
|
||||
purge_str += f"G0 F{self.print_speed} Y{round((radius_1 - 3) * .707, 2)} ; Wipe\n"
|
||||
self.end_purge_location = Position.RIGHT_REAR
|
||||
|
||||
# Common ending for purge_str
|
||||
purge_str += "G0 F600 Z2 ; Move Z\n;---------------------[End of Purge]"
|
||||
|
||||
# Comment out any existing purge lines in data
|
||||
startup = data[1].split("\n")
|
||||
for index, line in enumerate(startup):
|
||||
if "G1" in line and " E" in line and (" X" in line or " Y" in line):
|
||||
next_line = index
|
||||
try:
|
||||
while not startup[next_line].startswith("G92 E0"):
|
||||
startup[next_line] = ";" + startup[next_line]
|
||||
next_line += 1
|
||||
except IndexError:
|
||||
break
|
||||
data[1] = "\n".join(startup)
|
||||
|
||||
# Find the insertion location in data
|
||||
purge_str = self._format_string(purge_str)
|
||||
startup_section = data[1].split("\n")
|
||||
insert_index = len(startup_section) - 1
|
||||
for num in range(len(startup_section) - 1, 0, -1):
|
||||
# In Absolute Extrusion mode - insert above the last G92 E0 line
|
||||
if "G92 E0" in startup_section[num]:
|
||||
insert_index = num
|
||||
break
|
||||
# In Relative Extrusion mode - insert above the M83 line
|
||||
elif "M83" in startup_section[num]:
|
||||
insert_index = num
|
||||
break
|
||||
startup_section.insert(insert_index, purge_str)
|
||||
data[1] = "\n".join(startup_section)
|
||||
return data
|
||||
|
||||
# Travel moves around the bed periphery to keep strings from crossing the footprint of the model.
|
||||
def _move_to_start(self, data: str) -> str:
|
||||
if self.t0_has_offsets:
|
||||
data[0] += "; [Purge Lines and Unload] 'Circle Around to Layer Start' did not run because the assumed primary nozzle (T0) has tool offsets.\n"
|
||||
Message(title = "[Purge Lines and Unload]", text = "'Circle Around to Layer Start' did not run because the assumed primary nozzle (T0) has tool offsets.").show()
|
||||
return data
|
||||
self.start_x = None
|
||||
self.start_y = None
|
||||
move_str = None
|
||||
layer = data[2].split("\n")
|
||||
for line in layer:
|
||||
if line.startswith("G0") and " X" in line and " Y" in line:
|
||||
self.start_x = self.getValue(line, "X")
|
||||
self.start_y = self.getValue(line, "Y")
|
||||
break
|
||||
self.start_x = self.start_x or 0
|
||||
self.start_y = self.start_y or 0
|
||||
if self.end_purge_location is None:
|
||||
self.end_purge_location = Position.LEFT_FRONT
|
||||
midpoint_x = self.machine_width / 2
|
||||
midpoint_y = self.machine_depth / 2
|
||||
if not self.origin_at_center:
|
||||
if float(self.start_x) <= float(midpoint_x):
|
||||
x_target = Location.LEFT
|
||||
else:
|
||||
x_target = Location.RIGHT
|
||||
if float(self.start_y) <= float(midpoint_y):
|
||||
y_target = Location.FRONT
|
||||
else:
|
||||
y_target = Location.REAR
|
||||
else:
|
||||
if float(self.start_x) <= 0:
|
||||
x_target = Location.LEFT
|
||||
else:
|
||||
x_target = Location.RIGHT
|
||||
if float(self.start_y) <= 0:
|
||||
y_target = Location.FRONT
|
||||
else:
|
||||
y_target = Location.REAR
|
||||
target_location = (x_target, y_target)
|
||||
if self.bed_shape == "rectangular":
|
||||
move_str = self._move_to_location("Layer Start", target_location)
|
||||
elif self.bed_shape == "elliptic" and self.origin_at_center:
|
||||
move_str = f";MESH:NONMESH---------[Travel to Layer Start]\nG0 F600 Z2 ; Move up\n"
|
||||
radius = self.machine_width / 2
|
||||
offset_sin = round(2 ** .5 / 2 * radius, 2)
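# sin(45°) ≈ 0.707: the X/Y component of a point on the round bed edge at 45°, used as a safe travel target.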
if target_location == Position.LEFT_FRONT:
|
||||
move_str += f"G0 F{self.speed_travel} X-{offset_sin} Z2 ; Move\nG0 Y-{offset_sin} Z2 ; Move to start\n"
|
||||
elif target_location == Position.LEFT_REAR:
|
||||
if self.end_purge_location == Position.LEFT_REAR:
|
||||
move_str += f"G2 X0 Y{offset_sin} I{offset_sin} J{offset_sin} ; Move around to start\n"
|
||||
else:
|
||||
move_str += f"G0 F{self.speed_travel} X-{offset_sin} Z2 ; Ortho move\nG0 Y{offset_sin} Z2 ; Ortho move\n"
|
||||
elif target_location == Position.RIGHT_FRONT:
|
||||
move_str += f"G0 F{self.speed_travel} X{offset_sin} Z2 ; Ortho move\nG0 Y-{offset_sin} Z2 ; Ortho move\n"
|
||||
elif target_location == Position.RIGHT_REAR:
|
||||
move_str += f"G0 F{self.speed_travel} X{offset_sin} Z2 ; Ortho move\nG0 Y{offset_sin} Z2 ; Ortho move\n"
|
||||
move_str += ";---------------------[End of layer start travels]"
|
||||
# Add the move_str to the end of the StartUp section and move 'LAYER_COUNT' to the end.
|
||||
startup = data[1].split("\n")
|
||||
move_str = self._format_string(move_str)
|
||||
if move_str.startswith("\n"):
|
||||
move_str = move_str[1:]
|
||||
startup.append(move_str)
|
||||
# Move the 'LAYER_COUNT' line so it's at the end of data[1]
|
||||
for index, line in enumerate(startup):
|
||||
if "LAYER_COUNT" in line:
|
||||
lay_count = startup.pop(index) + "\n"
|
||||
startup.append(lay_count)
|
||||
break
|
||||
|
||||
data[1] = "\n".join(startup)
|
||||
# Remove any double-spaced lines
|
||||
data[1] = data[1].replace("\n\n", "\n")
|
||||
return data
|
||||
|
||||
# Unloading a large amount of filament in a single command can trip the 'Overlong Extrusion' warning in some firmware. Unloads longer than 150mm are split into individual 150mm segments.
|
||||
def _unload_filament(self, data: str) -> str:
|
||||
extrude_speed = 3000
|
||||
quick_purge_speed = round(float(self.nozzle_size) * 500)
|
||||
if self.material_diameter > 2: quick_purge_speed *= .38 # Adjustment for 2.85 filament
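# 500 x nozzle size gives the purge feed rate for 1.75 mm filament; the 0.38 factor for 2.85 mm filament
# is roughly the cross-section ratio (1.75/2.85)^2, which keeps the volumetric rate about the same.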
retract_amount = self.extruder[0].getProperty("retraction_amount", "value")
|
||||
quick_purge_amount = retract_amount + 5 if retract_amount < 2.0 else retract_amount * 2
|
||||
unload_distance = self.getSettingValueByKey("unload_distance")
|
||||
quick_purge = self.getSettingValueByKey("unload_quick_purge")
|
||||
lines = data[-1].split("\n")
|
||||
for index, line in enumerate(lines):
|
||||
# Unload the filament just before the hot end turns off.
|
||||
if line.startswith("M104") and "S0" in line:
|
||||
filament_str = (
|
||||
"M83 ; [Unload] Relative extrusion\n"
|
||||
"M400 ; Complete all moves\n"
|
||||
)
|
||||
if quick_purge:
|
||||
filament_str += f"G1 F{quick_purge_speed} E{quick_purge_amount} ; Quick Purge before unload\n"
|
||||
if unload_distance > 150:
|
||||
filament_str += "".join(
|
||||
f"G1 F{extrude_speed} E-150 ; Unload some\n"
|
||||
for _ in range(unload_distance // 150)
|
||||
)
|
||||
remaining_unload = unload_distance % 150
|
||||
if remaining_unload > 0:
|
||||
filament_str += f"G1 F{extrude_speed} E-{remaining_unload} ; Unload the remainder\n"
|
||||
else:
|
||||
filament_str += f"G1 F{extrude_speed} E-{unload_distance} ; Unload\n"
|
||||
filament_str += (
|
||||
"M82 ; Absolute Extrusion\n"
|
||||
"G92 E0 ; Reset Extruder\n"
|
||||
)
|
||||
lines[index] = filament_str + line
|
||||
break
|
||||
data[-1] = "\n".join(lines)
|
||||
return data
|
||||
|
||||
# Make an adjustment to the starting E location so the skirt/brim/raft starts out when the nozzle starts out.
|
||||
def _adjust_starting_e(self, data: str) -> str:
|
||||
if not self.extruder[0].getProperty("retraction_enable", "value"):
|
||||
return data
|
||||
adjust_amount = self.getSettingValueByKey("adjust_e_loc_to")
|
||||
lines = data[1].split("\n")
|
||||
lines.reverse()
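# Search backwards from the end of the StartUp section so only the last retraction (the one Cura adds
# just before LAYER:0) is replaced with the G92.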
if self.global_stack.getProperty("machine_firmware_retract", "value"):
|
||||
search_pattern = r"G10"
|
||||
else:
|
||||
search_pattern = r"G1 F(\d*) E-(\d.*)"
|
||||
for index, line in enumerate(lines):
|
||||
if re.search(search_pattern, line):
|
||||
lines[index] = re.sub(search_pattern, f"G92 E{adjust_amount}", line)
|
||||
lines.reverse()
|
||||
data[1] = "\n".join(lines)
|
||||
break
|
||||
return data
|
||||
|
||||
# Format the purge or travel-to-start strings. No reason they shouldn't look nice.
|
||||
def _format_string(self, any_gcode_str: str):
|
||||
temp_lines = any_gcode_str.split("\n")
|
||||
gap_len = 0
|
||||
for temp_line in temp_lines:
|
||||
if ";" in temp_line and not temp_line.startswith(";"):
|
||||
if gap_len - len(temp_line.split(";")[0]) + 1 < 0:
|
||||
gap_len = len(temp_line.split(";")[0]) + 1
|
||||
if gap_len < 30: gap_len = 30
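# Pad each command out to a common column (at least 30 characters) so the trailing ';' comments line up.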
for temp_index, temp_line in enumerate(temp_lines):
|
||||
if ";" in temp_line and not temp_line.startswith(";"):
|
||||
temp_lines[temp_index] = temp_line.replace(temp_line.split(";")[0], temp_line.split(";")[0] + str(
|
||||
" " * (gap_len - len(temp_line.split(";")[0]))), 1)
|
||||
# This formats lines that are commented out but contain additional comments Ex: ;M420 ; leveling mesh
|
||||
elif temp_line.startswith(";") and ";" in temp_line[1:]:
|
||||
temp_lines[temp_index] = temp_line[1:].replace(temp_line[1:].split(";")[0],
|
||||
";" + temp_line[1:].split(";")[0] + str(" " * (
|
||||
gap_len - 1 - len(
|
||||
temp_line[1:].split(";")[0]))), 1)
|
||||
any_gcode_str = "\n".join(temp_lines)
|
||||
return any_gcode_str
|
||||
|
||||
def _get_initial_tool(self) -> int:
|
||||
# Get the Initial Extruder
|
||||
num = Application.getInstance().getExtruderManager().getInitialExtruderNr()
|
||||
if num is None or num == -1:
|
||||
num = 0
|
||||
# If there is an extruder offset X then it will be used to adjust the "machine_right" and a Y offset will adjust the "machine_back"
|
||||
if self.extruder_count > 1 and bool(self.global_stack.getProperty("machine_use_extruder_offset_to_offset_coords", "value")):
|
||||
self.nozzle_offset_x = self.extruder[1].getProperty("machine_nozzle_offset_x", "value")
|
||||
self.nozzle_offset_y = self.extruder[1].getProperty("machine_nozzle_offset_y", "value")
|
||||
else:
|
||||
self.nozzle_offset_x = 0.0
|
||||
self.nozzle_offset_y = 0.0
|
||||
self.material_diameter = self.extruder[num].getProperty("material_diameter", "value")
|
||||
self.nozzle_size = self.extruder[num].getProperty("machine_nozzle_size", "value")
|
||||
self.init_line_width = self.extruder[num].getProperty("skirt_brim_line_width", "value")
|
||||
self.print_speed = round(self.extruder[num].getProperty("speed_print", "value") * 60 * .75)
|
||||
self.speed_travel = round(self.extruder[num].getProperty("speed_travel", "value") * 60)
|
||||
self.retract_dist = self.extruder[num].getProperty("retraction_amount", "value")
|
||||
self.retraction_enable = self.extruder[num].getProperty("retraction_enable", "value")
|
||||
self.retract_speed = self.extruder[num].getProperty("retraction_retract_speed", "value") * 60
|
||||
self.mm3_per_mm = (self.material_diameter / 2) ** 2 * math.pi
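# Filament cross-sectional area (mm^2), i.e. mm^3 of filament per mm of E movement; used by
# calculate_purge_volume() to turn a purged road volume into an E distance.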
# Don't add purge lines if 'T0' has offsets.
|
||||
t0_x_offset = self.extruder[0].getProperty("machine_nozzle_offset_x", "value")
|
||||
t0_y_offset = self.extruder[0].getProperty("machine_nozzle_offset_y", "value")
|
||||
if t0_x_offset or t0_y_offset:
|
||||
self.t0_has_offsets = True
|
||||
return num
|
||||
|
||||
def _get_blob_code(self) -> str:
|
||||
if not self.prime_blob_enable or self.prime_blob_distance == 0 or self.getSettingValueByKey("purge_line_location") not in ["front", "left"]:
|
||||
return ""
|
||||
# Set extruder speed for 1.75 filament
|
||||
speed_blob = round(float(self.nozzle_size) * 500)
|
||||
# Adjust speed if 2.85 filament
|
||||
if self.material_diameter > 2: speed_blob *= .4
|
||||
blob_x = self.getSettingValueByKey("prime_blob_loc_x")
|
||||
blob_y = self.getSettingValueByKey("prime_blob_loc_y")
|
||||
blob_string = "G0 F1200 Z20 ; Move up\n"
|
||||
blob_string += f"G0 F{self.speed_travel} X{blob_x} Y{blob_y} ; Move to blob location\n"
|
||||
blob_string += f"G1 F{speed_blob} E{self.prime_blob_distance} ; Blob\n"
|
||||
blob_string += f"G1 F{self.retract_speed} E{self.prime_blob_distance - self.retract_dist} ; Retract\n"
|
||||
blob_string += "G92 E0 ; Reset extruder\n"
|
||||
blob_string += "M300 P500 S600 ; Beep\n"
|
||||
blob_string += "G4 S2 ; Wait\n"
|
||||
return blob_string
|
|
@@ -1,20 +1,20 @@
# Copyright (c) 2017 Ghostkeeper
# The PostProcessingPlugin is released under the terms of the LGPLv3 or higher.
# Altered by GregValiant (Greg Foresi) February, 2025.
#   Added option for "first instance only"
#   Added option for a layer search with a Start Layer and an End layer.
#   Added 'Ignore StartUp G-code' and 'Ignore Ending G-code' options

import re # To perform the search and replace.

import re
from ..Script import Script

from UM.Application import Application

class SearchAndReplace(Script):
"""Performs a search-and-replace on all g-code.

Due to technical limitations, the search can't cross the border between
layers.
"""Performs a search-and-replace on the g-code.
"""

def getSettingDataString(self):
return """{
return r"""{
"name": "Search and Replace",
"key": "SearchAndReplace",
"metadata": {},

@@ -23,37 +23,145 @@ class SearchAndReplace(Script):
{
"search":
{
"label": "Search",
"description": "All occurrences of this text will get replaced by the replacement text.",
"label": "Search for:",
"description": "All occurrences of this text (within the search range) will be replaced by the 'Replace with' string. The search string is 'Case Sensitive' and 'Layer' is not the same as 'layer'.",
"type": "str",
"default_value": ""
},
"replace":
{
"label": "Replace",
"description": "The search text will get replaced by this text.",
"label": "Replace with:",
"description": "The 'Search For' text will get replaced by this text. For MultiLine insertions use the newline character '\\n' as the delimiter. If your Search term ends with a '\\n' remember to add '\\n' to the end of this Replace term.",
"type": "str",
"default_value": ""
},
"is_regex":
{
"label": "Use Regular Expressions",
"description": "When enabled, the search text will be interpreted as a regular expression.",
"description": "When disabled the search string is treated as a simple text string. When enabled, the search text will be interpreted as a Python regular expression.",
"type": "bool",
"default_value": false
},
"enable_layer_search":
{
"label": "Enable search within a Layer Range:",
"description": "When enabled, You can choose a Start and End layer for the search. When 'Layer Search' is enabled the StartUp and Ending gcodes are always ignored.",
"type": "bool",
"default_value": false,
"enabled": true
},
"search_start":
{
"label": "Start S&R at Layer:",
"description": "Use the Cura Preview layer numbering.",
"type": "int",
"default_value": 1,
"minimum_value": 1,
"enabled": "enable_layer_search"
},
"search_end":
{
"label": "Stop S&R at end of Layer:",
"description": "Use the Cura Preview layer numbering. The replacements will conclude at the end of this layer. If the End Layer is equal to the Start Layer then only that single layer is searched.",
"type": "int",
"default_value": 2,
"minimum_value": 1,
"enabled": "enable_layer_search"
},
"first_instance_only":
{
"label": "Replace first instance only:",
"description": "When enabled only the first instance is replaced.",
"type": "bool",
"default_value": false,
"enabled": true
},
"ignore_start":
{
"label": "Ignore StartUp G-code:",
"description": "When enabled the StartUp Gcode is unaffected. The StartUp Gcode is everything from ';generated with Cura...' to ';LAYER_COUNT:' inclusive.",
"type": "bool",
"default_value": true,
"enabled": "not enable_layer_search"
},
"ignore_end":
{
"label": "Ignore Ending G-code:",
"description": "When enabled the Ending Gcode is unaffected.",
"type": "bool",
"default_value": true,
"enabled": "not enable_layer_search"
}
}
}"""

def execute(self, data):
global_stack = Application.getInstance().getGlobalContainerStack()
extruder = global_stack.extruderList
retract_enabled = bool(extruder[0].getProperty("retraction_enable", "value"))
search_string = self.getSettingValueByKey("search")
if not self.getSettingValueByKey("is_regex"):
search_string = re.escape(search_string) #Need to search for the actual string, not as a regex.
search_regex = re.compile(search_string)

replace_string = self.getSettingValueByKey("replace")
is_regex = self.getSettingValueByKey("is_regex")
enable_layer_search = self.getSettingValueByKey("enable_layer_search")
start_layer = self.getSettingValueByKey("search_start")
end_layer = self.getSettingValueByKey("search_end")
ignore_start = self.getSettingValueByKey("ignore_start")
ignore_end = self.getSettingValueByKey("ignore_end")
if enable_layer_search:
ignore_start = True
ignore_end = True
first_instance_only = bool(self.getSettingValueByKey("first_instance_only"))

for layer_number, layer in enumerate(data):
data[layer_number] = re.sub(search_regex, replace_string, layer) #Replace all.
# Account for missing layer numbers when a raft is used
start_index = 1
end_index = len(data) - 1
data_list = [0,1]
layer_list = [-1,0]
lay_num = 1
for index, layer in enumerate(data):
if re.search(r";LAYER:(-?\d+)", layer):
data_list.append(index)
layer_list.append(lay_num)
lay_num += 1

return data
# Get the start and end indexes within the data
if not enable_layer_search:
if ignore_start:
start_index = 2
else:
start_index = 1

if ignore_end:
end_index = data_list[len(data_list) - 1]
else:
# Account for the extra data item when retraction is enabled
end_index = data_list[len(data_list) - 1] + (2 if retract_enabled else 1)

elif enable_layer_search:
for index, num in enumerate(layer_list):
if num == start_layer:
start_index = data_list[index]
if num == end_layer:
end_index = data_list[index]

# Make replacements
replace_one = False
if not is_regex:
search_string = re.escape(search_string)
search_regex = re.compile(search_string)
for num in range(start_index, end_index + 1, 1):
layer = data[num]
# First_instance only
if first_instance_only:
if re.search(search_regex, layer) and replace_one == False:
data[num] = re.sub(search_regex, replace_string, data[num], 1)
replace_one = True
break
# All instances
else:
if end_index > start_index:
data[num] = re.sub(search_regex, replace_string, layer)
elif end_index == start_index:
layer = data[start_index]
data[start_index] = re.sub(search_regex, replace_string, layer)
return data

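To make the layer-range handling above concrete, here is a small standalone sketch of the same index-mapping idea: walk `data`, record which element carries each `;LAYER:` marker, then translate Cura preview layer numbers (1-based) into indices of `data`. The miniature `data` list is invented for illustration.

```python
import re

# Invented miniature 'data' list: [header, start-up, raft, layer 0, layer 1, ending]
data = [";FLAVOR", ";LAYER_COUNT:2", ";LAYER:-1\nG1 ...", ";LAYER:0\nG1 ...", ";LAYER:1\nG1 ...", ";End"]

data_list, layer_list, lay_num = [0, 1], [-1, 0], 1
for index, block in enumerate(data):
    if re.search(r";LAYER:(-?\d+)", block):
        data_list.append(index)
        layer_list.append(lay_num)
        lay_num += 1

# Preview layer 1 (the first ';LAYER:' block, here the raft) maps to data[2],
# preview layer 3 maps to data[4]:
start_index = data_list[layer_list.index(1)]   # -> 2
end_index = data_list[layer_list.index(3)]     # -> 4
```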
@ -1,9 +1,15 @@
|
|||
# Copyright (c) 2020 Ultimaker B.V.
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
# Created by Wayne Porter
|
||||
# Modified 5/15/2023 - Greg Valiant (Greg Foresi)
|
||||
# Created by Wayne Porter
|
||||
# Added insertion frequency
|
||||
# Adjusted for use with Relative Extrusion
|
||||
# Changed Retract to a boolean and when true use the regular Cura retract settings.
|
||||
# Use the regular Cura settings for Travel Speed and Speed_Z instead of asking.
|
||||
# Added code to check the E location to prevent retracts if the filament was already retracted.
|
||||
# Added 'Pause before image' per LemanRus
|
||||
|
||||
from ..Script import Script
|
||||
|
||||
from UM.Application import Application
|
||||
from UM.Logger import Logger
|
||||
|
||||
class TimeLapse(Script):
|
||||
def __init__(self):
|
||||
|
@ -11,7 +17,7 @@ class TimeLapse(Script):
|
|||
|
||||
def getSettingDataString(self):
|
||||
return """{
|
||||
"name": "Time Lapse",
|
||||
"name": "Time Lapse Camera",
|
||||
"key": "TimeLapse",
|
||||
"metadata": {},
|
||||
"version": 2,
|
||||
|
@ -19,24 +25,49 @@ class TimeLapse(Script):
|
|||
{
|
||||
"trigger_command":
|
||||
{
|
||||
"label": "Trigger camera command",
|
||||
"description": "G-code command used to trigger camera.",
|
||||
"label": "Camera Trigger Command",
|
||||
"description": "G-code command used to trigger the camera. The setting box will take any command and parameters.",
|
||||
"type": "str",
|
||||
"default_value": "M240"
|
||||
},
|
||||
"insert_frequency":
|
||||
{
|
||||
"label": "How often (layers)",
|
||||
"description": "Every so many layers (always starts at the first layer whether it's the model or a raft).",
|
||||
"type": "enum",
|
||||
"options": {
|
||||
"every_layer": "Every Layer",
|
||||
"every_2nd": "Every 2nd",
|
||||
"every_3rd": "Every 3rd",
|
||||
"every_5th": "Every 5th",
|
||||
"every_10th": "Every 10th",
|
||||
"every_25th": "Every 25th",
|
||||
"every_50th": "Every 50th",
|
||||
"every_100th": "Every 100th"},
|
||||
"default_value": "every_layer"
|
||||
},
|
||||
"anti_shake_length":
|
||||
{
|
||||
"label": "Pause before image",
|
||||
"description": "How long to wait (in ms) before capturing the image. This is to allow the printer to 'settle down' after movement. To disable set this to '0'.",
|
||||
"type": "int",
|
||||
"default_value": 0,
|
||||
"minimum_value": 0,
|
||||
"unit": "ms"
|
||||
},
|
||||
"pause_length":
|
||||
{
|
||||
"label": "Pause length",
|
||||
"label": "Pause after image",
|
||||
"description": "How long to wait (in ms) after camera was triggered.",
|
||||
"type": "int",
|
||||
"default_value": 700,
|
||||
"default_value": 500,
|
||||
"minimum_value": 0,
|
||||
"unit": "ms"
|
||||
},
|
||||
"park_print_head":
|
||||
{
|
||||
"label": "Park Print Head",
|
||||
"description": "Park the print head out of the way. Assumes absolute positioning.",
|
||||
"description": "Park the print head out of the way.",
|
||||
"type": "bool",
|
||||
"default_value": true
|
||||
},
|
||||
|
@ -55,90 +86,166 @@ class TimeLapse(Script):
|
|||
"description": "What Y location does the head move to for photo.",
|
||||
"unit": "mm",
|
||||
"type": "float",
|
||||
"default_value": 190,
|
||||
"enabled": "park_print_head"
|
||||
},
|
||||
"park_feed_rate":
|
||||
{
|
||||
"label": "Park Feed Rate",
|
||||
"description": "How fast does the head move to the park coordinates.",
|
||||
"unit": "mm/s",
|
||||
"type": "float",
|
||||
"default_value": 9000,
|
||||
"default_value": 0,
|
||||
"enabled": "park_print_head"
|
||||
},
|
||||
"retract":
|
||||
{
|
||||
"label": "Retraction Distance",
|
||||
"description": "Filament retraction distance for camera trigger.",
|
||||
"unit": "mm",
|
||||
"type": "int",
|
||||
"default_value": 0
|
||||
"label": "Retract when required",
|
||||
"description": "Retract if there isn't already a retraction. If unchecked then there will be no retraction even if there is none in the gcode. If retractions are not enabled in Cura there won't be a retraction. regardless of this setting.",
|
||||
"type": "bool",
|
||||
"default_value": true
|
||||
},
|
||||
"zhop":
|
||||
{
|
||||
"label": "Z-Hop Height When Parking",
|
||||
"description": "Z-hop length before parking",
|
||||
"description": "The height to lift the nozzle off the print before parking.",
|
||||
"unit": "mm",
|
||||
"type": "float",
|
||||
"default_value": 0
|
||||
"default_value": 2.0,
|
||||
"minimum_value": 0.0
|
||||
},
|
||||
"ensure_final_image":
|
||||
{
|
||||
"label": "Ensure Final Image",
|
||||
"description": "Depending on how the layer numbers work out with the 'How Often' frequency there might not be an image taken at the end of the last layer. This will ensure that one is taken. There is no parking as the Ending Gcode comes right up.",
|
||||
"type": "bool",
|
||||
"default_value": false
|
||||
}
|
||||
}
|
||||
}"""
|
||||
|
||||
def execute(self, data):
|
||||
feed_rate = self.getSettingValueByKey("park_feed_rate")
|
||||
mycura = Application.getInstance().getGlobalContainerStack()
|
||||
relative_extrusion = bool(mycura.getProperty("relative_extrusion", "value"))
|
||||
extruder = mycura.extruderList
|
||||
retract_speed = int(extruder[0].getProperty("retraction_speed", "value"))*60
|
||||
retract_dist = round(float(extruder[0].getProperty("retraction_amount", "value")), 2)
|
||||
retract_enabled = bool(extruder[0].getProperty("retraction_enable", "value"))
|
||||
firmware_retract = bool(mycura.getProperty("machine_firmware_retract", "value"))
|
||||
speed_z = int(extruder[0].getProperty("speed_z_hop", "value"))*60
|
||||
if relative_extrusion:
|
||||
rel_cmd = 83
|
||||
else:
|
||||
rel_cmd = 82
|
||||
travel_speed = int(extruder[0].getProperty("speed_travel", "value"))*60
|
||||
park_print_head = self.getSettingValueByKey("park_print_head")
|
||||
x_park = self.getSettingValueByKey("head_park_x")
|
||||
y_park = self.getSettingValueByKey("head_park_y")
|
||||
trigger_command = self.getSettingValueByKey("trigger_command")
|
||||
pause_length = self.getSettingValueByKey("pause_length")
|
||||
retract = int(self.getSettingValueByKey("retract"))
|
||||
retract = bool(self.getSettingValueByKey("retract"))
|
||||
zhop = self.getSettingValueByKey("zhop")
|
||||
gcode_to_append = ";TimeLapse Begin\n"
|
||||
ensure_final_image = bool(self.getSettingValueByKey("ensure_final_image"))
|
||||
when_to_insert = self.getSettingValueByKey("insert_frequency")
|
||||
last_x = 0
|
||||
last_y = 0
|
||||
last_z = 0
|
||||
|
||||
last_e = 0
|
||||
prev_e = 0
|
||||
is_retracted = False
|
||||
gcode_to_append = ""
|
||||
if park_print_head:
|
||||
gcode_to_append += self.putValue(G=1, F=feed_rate,
|
||||
X=x_park, Y=y_park) + " ;Park print head\n"
|
||||
gcode_to_append += self.putValue(M=400) + " ;Wait for moves to finish\n"
|
||||
gcode_to_append += trigger_command + " ;Snap Photo\n"
|
||||
gcode_to_append += self.putValue(G=4, P=pause_length) + " ;Wait for camera\n"
|
||||
|
||||
for idx, layer in enumerate(data):
|
||||
for line in layer.split("\n"):
|
||||
if self.getValue(line, "G") in {0, 1}: # Track X,Y,Z location.
|
||||
last_x = self.getValue(line, "X", last_x)
|
||||
last_y = self.getValue(line, "Y", last_y)
|
||||
last_z = self.getValue(line, "Z", last_z)
|
||||
# Check that a layer is being printed
|
||||
lines = layer.split("\n")
|
||||
for line in lines:
|
||||
if ";LAYER:" in line:
|
||||
if retract != 0: # Retract the filament so no stringing happens
|
||||
layer += self.putValue(M=83) + " ;Extrude Relative\n"
|
||||
layer += self.putValue(G=1, E=-retract, F=3000) + " ;Retract filament\n"
|
||||
layer += self.putValue(M=82) + " ;Extrude Absolute\n"
|
||||
layer += self.putValue(M=400) + " ;Wait for moves to finish\n" # Wait to fully retract before hopping
|
||||
|
||||
if zhop != 0:
|
||||
layer += self.putValue(G=1, Z=last_z+zhop, F=3000) + " ;Z-Hop\n"
|
||||
|
||||
layer += gcode_to_append
|
||||
|
||||
if zhop != 0:
|
||||
layer += self.putValue(G=0, X=last_x, Y=last_y, Z=last_z) + "; Restore position \n"
|
||||
else:
|
||||
layer += self.putValue(G=0, X=last_x, Y=last_y) + "; Restore position \n"
|
||||
|
||||
if retract != 0:
|
||||
layer += self.putValue(M=400) + " ;Wait for moves to finish\n"
|
||||
layer += self.putValue(M=83) + " ;Extrude Relative\n"
|
||||
layer += self.putValue(G=1, E=retract, F=3000) + " ;Retract filament\n"
|
||||
layer += self.putValue(M=82) + " ;Extrude Absolute\n"
|
||||
|
||||
data[idx] = layer
|
||||
break
|
||||
gcode_to_append += f"G0 F{travel_speed} X{x_park} Y{y_park} ;Park print head\n"
|
||||
gcode_to_append += "M400 ;Wait for moves to finish\n"
|
||||
anti_shake_length = self.getSettingValueByKey("anti_shake_length")
|
||||
if anti_shake_length > 0:
|
||||
gcode_to_append += f"G4 P{anti_shake_length} ;Wait for printer to settle down\n"
|
||||
gcode_to_append += trigger_command + " ;Snap the Image\n"
|
||||
gcode_to_append += f"G4 P{pause_length} ;Wait for camera to finish\n"
|
||||
match when_to_insert:
|
||||
case "every_layer":
|
||||
step_freq = 1
|
||||
case "every_2nd":
|
||||
step_freq = 2
|
||||
case "every_3rd":
|
||||
step_freq = 3
|
||||
case "every_5th":
|
||||
step_freq = 5
|
||||
case "every_10th":
|
||||
step_freq = 10
|
||||
case "every_25th":
|
||||
step_freq = 25
|
||||
case "every_50th":
|
||||
step_freq = 50
|
||||
case "every_100th":
|
||||
step_freq = 100
|
||||
case _:
|
||||
step_freq = 1
|
||||
# Use the step_freq to index through the layers----------------------------------------
|
||||
for num in range(2,len(data)-1,step_freq):
|
||||
layer = data[num]
|
||||
try:
|
||||
# Track X,Y,Z location.--------------------------------------------------------
|
||||
for line in layer.split("\n"):
|
||||
if self.getValue(line, "G") in {0, 1}:
|
||||
last_x = self.getValue(line, "X", last_x)
|
||||
last_y = self.getValue(line, "Y", last_y)
|
||||
last_z = self.getValue(line, "Z", last_z)
|
||||
#Track the E location so that if there is already a retraction we don't double dip.
|
||||
if rel_cmd == 82:
|
||||
if " E" in line:
|
||||
last_e = line.split("E")[1]
|
||||
if float(last_e) < float(prev_e):
|
||||
is_retracted = True
|
||||
else:
|
||||
is_retracted = False
|
||||
prev_e = last_e
|
||||
elif rel_cmd == 83:
|
||||
if " E" in line:
|
||||
last_e = line.split("E")[1]
|
||||
if float(last_e) < 0:
|
||||
is_retracted = True
|
||||
else:
|
||||
is_retracted = False
|
||||
prev_e = last_e
|
||||
if firmware_retract and self.getValue(line, "G") in {10, 11}:
|
||||
if self.getValue(line, "G") == 10:
|
||||
is_retracted = True
|
||||
last_e = float(prev_e) - float(retract_dist)
|
||||
if self.getValue(line, "G") == 11:
|
||||
is_retracted = False
|
||||
last_e = float(prev_e) + float(retract_dist)
|
||||
prev_e = last_e
|
||||
lines = layer.split("\n")
|
||||
# Insert the code----------------------------------------------------
|
||||
camera_code = ""
|
||||
for line in lines:
|
||||
if ";LAYER:" in line:
|
||||
if retract and not is_retracted and retract_enabled: # Retract unless already retracted
|
||||
camera_code += ";TYPE:CUSTOM-----------------TimeLapse Begin\n"
|
||||
camera_code += "M83 ;Extrude Relative\n"
|
||||
if not firmware_retract:
|
||||
camera_code += f"G1 F{retract_speed} E-{retract_dist} ;Retract filament\n"
|
||||
else:
|
||||
camera_code += "G10 ;Retract filament\n"
|
||||
else:
|
||||
camera_code += ";TYPE:CUSTOM-----------------TimeLapse Begin\n"
|
||||
if zhop != 0:
|
||||
camera_code += f"G1 F{speed_z} Z{round(last_z + zhop,2)} ;Z-Hop\n"
|
||||
camera_code += gcode_to_append
|
||||
camera_code += f"G0 F{travel_speed} X{last_x} Y{last_y} ;Restore XY position\n"
|
||||
if zhop != 0:
|
||||
camera_code += f"G0 F{speed_z} Z{last_z} ;Restore Z position\n"
|
||||
if retract and not is_retracted and retract_enabled:
|
||||
if not firmware_retract:
|
||||
camera_code += f"G1 F{retract_speed} E{retract_dist} ;Un-Retract filament\n"
|
||||
else:
|
||||
camera_code += "G11 ;Un-Retract filament\n"
|
||||
camera_code += f"M{rel_cmd} ;Extrude Mode\n"
|
||||
camera_code += f";{'-' * 28}TimeLapse End"
|
||||
# Format the camera code to be inserted
|
||||
temp_lines = camera_code.split("\n")
|
||||
for temp_index, temp_line in enumerate(temp_lines):
|
||||
if ";" in temp_line and not temp_line.startswith(";"):
|
||||
temp_lines[temp_index] = temp_line.replace(temp_line.split(";")[0], temp_line.split(";")[0] + str(" " * (29 - len(temp_line.split(";")[0]))),1)
|
||||
temp_lines = "\n".join(temp_lines)
|
||||
lines.insert(len(lines) - 2, temp_lines)
|
||||
data[num] = "\n".join(lines)
|
||||
break
|
||||
except Exception as e:
|
||||
Logger.log("w", "TimeLapse Error: " + repr(e))
|
||||
# Take a final image if there was no camera shot at the end of the last layer.
|
||||
if "TimeLapse Begin" not in data[len(data) - (3 if retract_enabled else 2)] and ensure_final_image:
|
||||
data[len(data)-1] = "M400 ; Wait for all moves to finish\n" + trigger_command + " ;Snap the final Image\n" + f"G4 P{pause_length} ;Wait for camera\n" + data[len(data)-1]
|
||||
return data
|
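The retract handling above hinges on tracking the E axis so the script does not retract a second time when the slicer already did. A condensed sketch of that detection logic on its own, assuming absolute extrusion (M82) and a plain list of G-code lines; the sample values are invented.

```python
def already_retracted(lines: list) -> bool:
    """Illustrative: with absolute E, a drop in the E value means the
    filament is currently retracted."""
    prev_e = last_e = 0.0
    retracted = False
    for line in lines:
        if " E" in line and (line.startswith("G1") or line.startswith("G0")):
            last_e = float(line.split("E")[1].split()[0])
            retracted = last_e < prev_e
            prev_e = last_e
    return retracted

print(already_retracted(["G1 X10 Y10 E10.5", "G1 F2700 E9.0"]))  # True
```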
||||
|
|
|
@@ -101,7 +101,8 @@ class RemovableDriveOutputDevice(OutputDevice):
self._stream = open(file_name, "wt", buffering = 1, encoding = "utf-8")
else: #Binary mode.
self._stream = open(file_name, "wb", buffering = 1)
job = WriteFileJob(writer, self._stream, nodes, preferred_format["mode"])
writer_args = {"mime_type": preferred_format["mime_type"]}
job = WriteFileJob(writer, self._stream, nodes, preferred_format["mode"], writer_args)
job.setFileName(file_name)
job.progress.connect(self._onProgress)
job.finished.connect(self._onFinished)
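The extra `writer_args` dictionary is forwarded by `WriteFileJob` to the writer. A minimal sketch of the receiving side, mirroring the `**kwargs` form the UFPWriter change further down uses; the writer class here is illustrative, not part of Cura.

```python
from UM.Mesh.MeshWriter import MeshWriter

class ExampleWriter(MeshWriter):  # illustrative subclass, not shipped code
    def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode, **kwargs):
        mime_type = kwargs.get("mime_type", "application/octet-stream")
        # ... serialize 'nodes' to 'stream' for the requested mime type ...
        return True
```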
||||
|
|
|
@@ -67,39 +67,40 @@ class SimulationPass(RenderPass):
if not self._compatibility_mode:
self._layer_shader.setUniformValue("u_starts_color", Color(*Application.getInstance().getTheme().getColor("layerview_starts").getRgb()))

if self._layer_view:
self._layer_shader.setUniformValue("u_max_feedrate", self._layer_view.getMaxFeedrate())
self._layer_shader.setUniformValue("u_min_feedrate", self._layer_view.getMinFeedrate())
self._layer_shader.setUniformValue("u_max_thickness", self._layer_view.getMaxThickness())
self._layer_shader.setUniformValue("u_min_thickness", self._layer_view.getMinThickness())
self._layer_shader.setUniformValue("u_max_line_width", self._layer_view.getMaxLineWidth())
self._layer_shader.setUniformValue("u_min_line_width", self._layer_view.getMinLineWidth())
self._layer_shader.setUniformValue("u_max_flow_rate", self._layer_view.getMaxFlowRate())
self._layer_shader.setUniformValue("u_min_flow_rate", self._layer_view.getMinFlowRate())
self._layer_shader.setUniformValue("u_layer_view_type", self._layer_view.getSimulationViewType())
self._layer_shader.setUniformValue("u_extruder_opacity", self._layer_view.getExtruderOpacities())
self._layer_shader.setUniformValue("u_show_travel_moves", self._layer_view.getShowTravelMoves())
self._layer_shader.setUniformValue("u_show_helpers", self._layer_view.getShowHelpers())
self._layer_shader.setUniformValue("u_show_skin", self._layer_view.getShowSkin())
self._layer_shader.setUniformValue("u_show_infill", self._layer_view.getShowInfill())
self._layer_shader.setUniformValue("u_show_starts", self._layer_view.getShowStarts())
else:
#defaults
self._layer_shader.setUniformValue("u_max_feedrate", 1)
self._layer_shader.setUniformValue("u_min_feedrate", 0)
self._layer_shader.setUniformValue("u_max_thickness", 1)
self._layer_shader.setUniformValue("u_min_thickness", 0)
self._layer_shader.setUniformValue("u_max_flow_rate", 1)
self._layer_shader.setUniformValue("u_min_flow_rate", 0)
self._layer_shader.setUniformValue("u_max_line_width", 1)
self._layer_shader.setUniformValue("u_min_line_width", 0)
self._layer_shader.setUniformValue("u_layer_view_type", 1)
self._layer_shader.setUniformValue("u_extruder_opacity", [[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]])
self._layer_shader.setUniformValue("u_show_travel_moves", 0)
self._layer_shader.setUniformValue("u_show_helpers", 1)
self._layer_shader.setUniformValue("u_show_skin", 1)
self._layer_shader.setUniformValue("u_show_infill", 1)
self._layer_shader.setUniformValue("u_show_starts", 1)
for shader in [self._layer_shader, self._layer_shadow_shader]:
if self._layer_view:
shader.setUniformValue("u_max_feedrate", self._layer_view.getMaxFeedrate())
shader.setUniformValue("u_min_feedrate", self._layer_view.getMinFeedrate())
shader.setUniformValue("u_max_thickness", self._layer_view.getMaxThickness())
shader.setUniformValue("u_min_thickness", self._layer_view.getMinThickness())
shader.setUniformValue("u_max_line_width", self._layer_view.getMaxLineWidth())
shader.setUniformValue("u_min_line_width", self._layer_view.getMinLineWidth())
shader.setUniformValue("u_max_flow_rate", self._layer_view.getMaxFlowRate())
shader.setUniformValue("u_min_flow_rate", self._layer_view.getMinFlowRate())
shader.setUniformValue("u_layer_view_type", self._layer_view.getSimulationViewType())
shader.setUniformValue("u_extruder_opacity", self._layer_view.getExtruderOpacities())
shader.setUniformValue("u_show_travel_moves", self._layer_view.getShowTravelMoves())
shader.setUniformValue("u_show_helpers", self._layer_view.getShowHelpers())
shader.setUniformValue("u_show_skin", self._layer_view.getShowSkin())
shader.setUniformValue("u_show_infill", self._layer_view.getShowInfill())
shader.setUniformValue("u_show_starts", self._layer_view.getShowStarts())
else:
#defaults
shader.setUniformValue("u_max_feedrate", 1)
shader.setUniformValue("u_min_feedrate", 0)
shader.setUniformValue("u_max_thickness", 1)
shader.setUniformValue("u_min_thickness", 0)
shader.setUniformValue("u_max_flow_rate", 1)
shader.setUniformValue("u_min_flow_rate", 0)
shader.setUniformValue("u_max_line_width", 1)
shader.setUniformValue("u_min_line_width", 0)
shader.setUniformValue("u_layer_view_type", 1)
shader.setUniformValue("u_extruder_opacity", [[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]])
shader.setUniformValue("u_show_travel_moves", 0)
shader.setUniformValue("u_show_helpers", 1)
shader.setUniformValue("u_show_skin", 1)
shader.setUniformValue("u_show_infill", 1)
shader.setUniformValue("u_show_starts", 1)

if not self._tool_handle_shader:
self._tool_handle_shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "toolhandle.shader"))
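The refactor above replaces two copy-pasted uniform blocks with one loop over `[self._layer_shader, self._layer_shadow_shader]`. If the default values ever need to live in one place, the same idea extends naturally to a dict of uniforms; a small illustrative sketch only, not code from the plugin:

```python
DEFAULT_UNIFORMS = {
    "u_max_feedrate": 1, "u_min_feedrate": 0,
    "u_max_thickness": 1, "u_min_thickness": 0,
    "u_show_travel_moves": 0, "u_show_helpers": 1,
}

def apply_uniforms(shaders, values):
    # Apply the same uniform values to every shader in one place.
    for shader in shaders:
        for name, value in values.items():
            shader.setUniformValue(name, value)
```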
||||
|
|
|
@@ -222,12 +222,11 @@ class SimulationView(CuraView):
self.setPath(i + fractional_value)

def advanceTime(self, time_increase: float) -> bool:
def advanceTime(self, time_increase: float) -> None:
"""
Advance the time by the given amount.

:param time_increase: The amount of time to advance (in seconds).
:return: True if the time was advanced, False if the end of the simulation was reached.
"""
total_duration = 0.0
if len(self.cumulativeLineDuration()) > 0:
@@ -237,15 +236,13 @@
# If we have reached the end of the simulation, go to the next layer.
if self.getCurrentLayer() == self.getMaxLayers():
# If we are already at the last layer, go to the first layer.
self.setTime(total_duration)
return False

# advance to the next layer, and reset the time
self.setLayer(self.getCurrentLayer() + 1)
self.setLayer(0)
else:
# advance to the next layer, and reset the time
self.setLayer(self.getCurrentLayer() + 1)
self.setTime(0.0)
else:
self.setTime(self._current_time + time_increase)
return True

def cumulativeLineDuration(self) -> List[float]:
# Make sure _cumulative_line_duration is initialized properly
@@ -256,9 +253,19 @@
polylines = self.getLayerData()
if polylines is not None:
for polyline in polylines.polygons:
for line_duration in list((polyline.lineLengths / polyline.lineFeedrates)[0]):
for line_index in range(len(polyline.lineLengths)):
line_length = polyline.lineLengths[line_index]
line_feedrate = polyline.lineFeedrates[line_index][0]

if line_feedrate > 0.0:
line_duration = line_length / line_feedrate
else:
# Something is wrong with this line, set an arbitrary non-null duration
line_duration = 0.1

total_duration += line_duration / SimulationView.SIMULATION_FACTOR
self._cumulative_line_duration.append(total_duration)

# for tool change we add an extra tool path
self._cumulative_line_duration.append(total_duration)
# set current cached layer
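A worked miniature of the duration bookkeeping above, with invented numbers and the same zero-feedrate guard (the division by SIMULATION_FACTOR is left out for clarity):

```python
line_lengths   = [10.0, 5.0, 8.0]   # mm, invented
line_feedrates = [50.0, 0.0, 40.0]  # mm/s; the 0.0 is a degenerate line

cumulative, total = [], 0.0
for length, feedrate in zip(line_lengths, line_feedrates):
    duration = length / feedrate if feedrate > 0.0 else 0.1  # arbitrary non-null fallback
    total += duration
    cumulative.append(total)

print(cumulative)  # [0.2, 0.30000000000000004, 0.5]
```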
@@ -583,7 +590,7 @@ class SimulationView(CuraView):
self._max_thickness = sys.float_info.min
self._min_flow_rate = sys.float_info.max
self._max_flow_rate = sys.float_info.min
self._cumulative_line_duration = {}
self._cumulative_line_duration = []

# The colour scheme is only influenced by the visible lines, so filter the lines by if they should be visible.
visible_line_types = []
|
@@ -144,9 +144,7 @@ Item
{
// divide by 1000 to account for ms to s conversion
const advance_time = simulationTimer.interval / 1000.0;
if (!UM.SimulationView.advanceTime(advance_time)) {
playButton.pauseSimulation();
}
UM.SimulationView.advanceTime(advance_time);
// The status must be set here instead of in the resumeSimulation function otherwise it won't work
// correctly, because part of the logic is in this trigger function.
isSimulationPlaying = true;
|
@@ -54,9 +54,9 @@ class SimulationViewProxy(QObject):
def currentPath(self):
return self._simulation_view.getCurrentPath()

@pyqtSlot(float, result=bool)
def advanceTime(self, duration: float) -> bool:
return self._simulation_view.advanceTime(duration)
@pyqtSlot(float)
def advanceTime(self, duration: float) -> None:
self._simulation_view.advanceTime(duration)

@pyqtProperty(int, notify=currentPathChanged)
def minimumPath(self):
|
@@ -360,8 +360,8 @@ geometry41core =
((v_prev_line_type[0] != 1) && (v_line_type[0] == 1)) ||
((v_prev_line_type[0] != 4) && (v_line_type[0] == 4))
)) {
float w = size_x;
float h = size_y;
float w = max(0.05, size_x);
float h = max(0.05, size_y);

myEmitVertex(v_vertex[0] + vec3( w, h, w), u_starts_color, normalize(vec3( 1.0, 1.0, 1.0)), viewProjectionMatrix * (gl_in[0].gl_Position + vec4( w, h, w, 0.0))); // Front-top-left
myEmitVertex(v_vertex[0] + vec3(-w, h, w), u_starts_color, normalize(vec3(-1.0, 1.0, 1.0)), viewProjectionMatrix * (gl_in[0].gl_Position + vec4(-w, h, w, 0.0))); // Front-top-right
|
@@ -1,11 +1,12 @@
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import datetime

import json
import os
import platform
import time
from typing import Optional, Set, TYPE_CHECKING
from typing import Any, Optional, Set, TYPE_CHECKING

from PyQt6.QtCore import pyqtSlot, QObject
from PyQt6.QtNetwork import QNetworkRequest

@@ -33,7 +34,18 @@ class SliceInfo(QObject, Extension):
no model files are being sent (Just a SHA256 hash of the model).
"""

info_url = "https://stats.ultimaker.com/api/cura"
info_url = "https://statistics.ultimaker.com/api/v2/cura/slice"

_adjust_flattened_names = {
"extruders_extruder": "extruders",
"extruders_settings": "extruders",
"models_model": "models",
"models_transformation_data": "models_transformation",
"print_settings_": "",
"print_times": "print_time",
"active_machine_": "",
"slice_uuid": "slice_id",
}

def __init__(self, parent = None):
QObject.__init__(self, parent)

@@ -112,6 +124,26 @@ class SliceInfo(QObject, Extension):
return list(sorted(user_modified_setting_keys))

def _flattenData(self, data: Any, result: dict, current_flat_key: Optional[str] = None, lift_list: bool = False) -> None:
if isinstance(data, dict):
for key, value in data.items():
total_flat_key = key if current_flat_key is None else f"{current_flat_key}_{key}"
self._flattenData(value, result, total_flat_key, lift_list)
elif isinstance(data, list):
for item in data:
self._flattenData(item, result, current_flat_key, True)
else:
actual_flat_key = current_flat_key.lower()
for key, value in self._adjust_flattened_names.items():
if actual_flat_key.startswith(key):
actual_flat_key = actual_flat_key.replace(key, value)
if lift_list:
if actual_flat_key not in result:
result[actual_flat_key] = []
result[actual_flat_key].append(data)
else:
result[actual_flat_key] = data

def _onWriteStarted(self, output_device):
try:
if not self._application.getPreferences().getValue("info/send_slice_info"):

@@ -125,8 +157,7 @@ class SliceInfo(QObject, Extension):
global_stack = machine_manager.activeMachine

data = dict() # The data that we're going to submit.
data["time_stamp"] = time.time()
data["schema_version"] = 0
data["schema_version"] = 1000
data["cura_version"] = self._application.getVersion()
data["cura_build_type"] = ApplicationMetadata.CuraBuildType
org_id = user_profile.get("organization_id", None) if user_profile else None

@@ -298,6 +329,11 @@ class SliceInfo(QObject, Extension):
"time_backend": int(round(time_backend)),
}

# Massage data into format used in the DB:
flat_data = dict()
self._flattenData(data, flat_data)
data = flat_data

# Convert data to bytes
binary_data = json.dumps(data).encode("utf-8")
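To illustrate what `_flattenData` produces, a tiny invented payload and the flat dict that results: list items are lifted into a list under one key, and the prefix rewrites from `_adjust_flattened_names` are then applied to the flattened key.

```python
nested = {
    "print_settings": {"layer_height": 0.2},
    "models": [{"hash": "abc"}, {"hash": "def"}],
}
flat = {}
slice_info._flattenData(nested, flat)  # slice_info: the SliceInfo extension instance (assumed in scope)
# flat == {"layer_height": 0.2, "models_hash": ["abc", "def"]}
```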
|
|
@ -24,6 +24,7 @@ from UM.Settings.InstanceContainer import InstanceContainer
|
|||
from cura.CuraApplication import CuraApplication
|
||||
from cura.Settings.GlobalStack import GlobalStack
|
||||
from cura.Utils.Threading import call_on_qt_thread
|
||||
from cura.API import CuraAPI
|
||||
|
||||
from UM.i18n import i18nCatalog
|
||||
|
||||
|
@ -50,7 +51,7 @@ class UFPWriter(MeshWriter):
|
|||
# Qt thread. The File read/write operations right now are executed on separated threads because they are scheduled
|
||||
# by the Job class.
|
||||
@call_on_qt_thread
|
||||
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode):
|
||||
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode, **kwargs):
|
||||
archive = VirtualFile()
|
||||
archive.openStream(stream, "application/x-ufp", OpenMode.WriteOnly)
|
||||
|
||||
|
@ -85,7 +86,8 @@ class UFPWriter(MeshWriter):
|
|||
try:
|
||||
archive.addContentType(extension="json", mime_type="application/json")
|
||||
setting_textio = StringIO()
|
||||
json.dump(self._getSliceMetadata(), setting_textio, separators=(", ", ": "), indent=4)
|
||||
api = CuraApplication.getInstance().getCuraAPI()
|
||||
json.dump(api.interface.settings.getSliceMetadata(), setting_textio, separators=(", ", ": "), indent=4)
|
||||
steam = archive.getStream(SLICE_METADATA_PATH)
|
||||
steam.write(setting_textio.getvalue().encode("UTF-8"))
|
||||
except EnvironmentError as e:
|
||||
|
@ -210,57 +212,3 @@ class UFPWriter(MeshWriter):
|
|||
return [{"name": item.getName()}
|
||||
for item in DepthFirstIterator(node)
|
||||
if item.getMeshData() is not None and not item.callDecoration("isNonPrintingMesh")]
|
||||
|
||||
def _getSliceMetadata(self) -> Dict[str, Dict[str, Dict[str, str]]]:
|
||||
"""Get all changed settings and all settings. For each extruder and the global stack"""
|
||||
print_information = CuraApplication.getInstance().getPrintInformation()
|
||||
machine_manager = CuraApplication.getInstance().getMachineManager()
|
||||
settings = {
|
||||
"material": {
|
||||
"length": print_information.materialLengths,
|
||||
"weight": print_information.materialWeights,
|
||||
"cost": print_information.materialCosts,
|
||||
},
|
||||
"global": {
|
||||
"changes": {},
|
||||
"all_settings": {},
|
||||
},
|
||||
"quality": asdict(machine_manager.activeQualityDisplayNameMap()),
|
||||
}
|
||||
|
||||
def _retrieveValue(container: InstanceContainer, setting_: str):
|
||||
value_ = container.getProperty(setting_, "value")
|
||||
for _ in range(0, 1024): # Prevent a possibly endless loop by capping the number of unwrap iterations.
|
||||
if not isinstance(value_, SettingFunction):
|
||||
return value_ # Success!
|
||||
value_ = value_(container)
|
||||
return 0 # Fallback value after breaking possibly endless loop.
|
||||
|
||||
global_stack = cast(GlobalStack, Application.getInstance().getGlobalContainerStack())
|
||||
|
||||
# Add global user or quality changes
|
||||
global_flattened_changes = InstanceContainer.createMergedInstanceContainer(global_stack.userChanges, global_stack.qualityChanges)
|
||||
for setting in global_flattened_changes.getAllKeys():
|
||||
settings["global"]["changes"][setting] = _retrieveValue(global_flattened_changes, setting)
|
||||
|
||||
# Get global all settings values without user or quality changes
|
||||
for setting in global_stack.getAllKeys():
|
||||
settings["global"]["all_settings"][setting] = _retrieveValue(global_stack, setting)
|
||||
|
||||
for i, extruder in enumerate(global_stack.extruderList):
|
||||
# Add extruder fields to settings dictionary
|
||||
settings[f"extruder_{i}"] = {
|
||||
"changes": {},
|
||||
"all_settings": {},
|
||||
}
|
||||
|
||||
# Add extruder user or quality changes
|
||||
extruder_flattened_changes = InstanceContainer.createMergedInstanceContainer(extruder.userChanges, extruder.qualityChanges)
|
||||
for setting in extruder_flattened_changes.getAllKeys():
|
||||
settings[f"extruder_{i}"]["changes"][setting] = _retrieveValue(extruder_flattened_changes, setting)
|
||||
|
||||
# Get extruder all settings values without user or quality changes
|
||||
for setting in extruder.getAllKeys():
|
||||
settings[f"extruder_{i}"]["all_settings"][setting] = _retrieveValue(extruder, setting)
|
||||
|
||||
return settings
|
||||
|
|
Binary image assets: eleven printer-preview PNGs re-compressed (sizes reduced, e.g. 202 KiB -> 90 KiB, 620 KiB -> 64 KiB, 1.2 MiB -> 90 KiB).
BIN plugins/UM3NetworkPrinting/resources/png/Ultimaker S6.png (new file, 107 KiB)
BIN plugins/UM3NetworkPrinting/resources/png/Ultimaker S8.png (new file, 141 KiB)
|
@@ -42,7 +42,7 @@ class CloudApiClient:
CLUSTER_API_ROOT = f"{ROOT_PATH}/connect/v1"
CURA_API_ROOT = f"{ROOT_PATH}/cura/v1"

DEFAULT_REQUEST_TIMEOUT = 10 # seconds
DEFAULT_REQUEST_TIMEOUT = 30 # seconds

# In order to avoid garbage collection we keep the callbacks in this list.
_anti_gc_callbacks = [] # type: List[Callable[[Any], None]]
@@ -331,7 +331,7 @@ class CloudOutputDevice(UltimakerNetworkedPrinterOutputDevice):
return False

[printer, *_] = self._printers
return printer.type in ("MakerBot Method X", "MakerBot Method XL", "MakerBot Sketch")
return printer.type in ("MakerBot Method", "MakerBot Method X", "MakerBot Method XL", "MakerBot Sketch", "MakerBot Sketch Large", "MakerBot Sketch Sprint")

@pyqtProperty(bool, notify=_cloudClusterPrintersChanged)
def supportsPrintJobActions(self) -> bool:
@@ -3,5 +3,7 @@
"ultimaker_methodx": "MakerBot Method X",
"ultimaker_methodxl": "MakerBot Method XL",
"ultimaker_factor4": "Ultimaker Factor 4",
"ultimaker_sketch": "MakerBot Sketch"
"ultimaker_sketch": "MakerBot Sketch",
"ultimaker_sketch_large": "MakerBot Sketch Large",
"ultimaker_sketch_sprint": "MakerBot Sketch Sprint"
}
|
@@ -28,7 +28,7 @@ class ExportFileJob(WriteFileJob):

# Determine the filename.
job_name = CuraApplication.getInstance().getPrintInformation().jobName
job_name = re.sub("[^\w\-. ()]", "-", job_name)
job_name = re.sub(r"[^\w\-. ()]", "-", job_name)
extension = self._mesh_format_handler.preferred_format.get("extension", "")
self.setFileName("{}.{}".format(job_name, extension))

@@ -97,6 +97,8 @@ class USBPrinterOutputDevice(PrinterOutputDevice):

CuraApplication.getInstance().getOnExitCallbackManager().addCallback(self._checkActivePrintingUponAppExit)

CuraApplication.getInstance().getPreferences().addPreference("usb_printing/enabled", False)

# This is a callback function that checks if there is any printing in progress via USB when the application tries
# to exit. If so, it will show a confirmation before
def _checkActivePrintingUponAppExit(self) -> None:

@@ -144,6 +146,8 @@ class USBPrinterOutputDevice(PrinterOutputDevice):

CuraApplication.getInstance().getController().setActiveStage("MonitorStage")

CuraApplication.getInstance().getPreferences().setValue("usb_printing/enabled", True)

#Find the g-code to print.
gcode_textio = StringIO()
gcode_writer = cast(MeshWriter, PluginRegistry.getInstance().getPluginObject("GCodeWriter"))
|
@ -0,0 +1,103 @@
|
|||
# Copyright (c) 2024 UltiMaker
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
|
||||
import configparser
|
||||
from typing import Dict, List, Set, Tuple
|
||||
import io
|
||||
from UM.VersionUpgrade import VersionUpgrade
|
||||
|
||||
# Just to be sure, since in my testing there were both 0.1.0 and 0.2.0 settings about.
|
||||
_PLUGIN_NAME = "_plugin__curaenginegradualflow"
|
||||
_FROM_PLUGINS_SETTINGS = {
|
||||
"gradual_flow_enabled",
|
||||
"max_flow_acceleration",
|
||||
"layer_0_max_flow_acceleration",
|
||||
"gradual_flow_discretisation_step_size",
|
||||
"reset_flow_duration",
|
||||
} # type: Set[str]
|
||||
|
||||
_NEW_SETTING_VERSION = "24"
|
||||
|
||||
|
||||
class VersionUpgrade58to59(VersionUpgrade):
|
||||
def upgradePreferences(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
|
||||
"""
|
||||
Upgrades preferences to remove from the visibility list the settings that were removed in this version.
|
||||
It also changes the preferences to have the new version number.
|
||||
|
||||
This removes any settings that were removed in the new Cura version.
|
||||
:param serialized: The original contents of the preferences file.
|
||||
:param filename: The file name of the preferences file.
|
||||
:return: A list of new file names, and a list of the new contents for
|
||||
those files.
|
||||
"""
|
||||
parser = configparser.ConfigParser(interpolation = None)
|
||||
parser.read_string(serialized)
|
||||
|
||||
# Update version number.
|
||||
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
|
||||
|
||||
# Fix renamed settings for visibility
|
||||
if "visible_settings" in parser["general"]:
|
||||
all_setting_keys = parser["general"]["visible_settings"].strip().split(";")
|
||||
if all_setting_keys:
|
||||
for idx, key in enumerate(all_setting_keys):
|
||||
if key.startswith(_PLUGIN_NAME):
|
||||
all_setting_keys[idx] = key.split("__")[-1]
|
||||
parser["general"]["visible_settings"] = ";".join(all_setting_keys)
|
||||
|
||||
result = io.StringIO()
|
||||
parser.write(result)
|
||||
return [filename], [result.getvalue()]
|
||||
|
||||
def upgradeInstanceContainer(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
|
||||
"""
|
||||
Upgrades instance containers to remove the settings that were removed in this version.
|
||||
It also changes the instance containers to have the new version number.
|
||||
|
||||
This removes any settings that were removed in the new Cura version and updates settings that need to be updated
|
||||
with a new value.
|
||||
|
||||
:param serialized: The original contents of the instance container.
|
||||
:param filename: The original file name of the instance container.
|
||||
:return: A list of new file names, and a list of the new contents for
|
||||
those files.
|
||||
"""
|
||||
parser = configparser.ConfigParser(interpolation = None, comment_prefixes = ())
|
||||
parser.read_string(serialized)
|
||||
|
||||
# Update version number.
|
||||
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
|
||||
|
||||
# Rename settings.
|
||||
if "values" in parser:
|
||||
for key, value in parser["values"].items():
|
||||
if key.startswith(_PLUGIN_NAME):
|
||||
parser["values"][key.split("__")[-1]] = parser["values"][key]
|
||||
del parser["values"][key]
|
||||
|
||||
result = io.StringIO()
|
||||
parser.write(result)
|
||||
return [filename], [result.getvalue()]
|
||||
|
||||
def upgradeStack(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
|
||||
"""
|
||||
Upgrades stacks to have the new version number.
|
||||
|
||||
:param serialized: The original contents of the stack.
|
||||
:param filename: The original file name of the stack.
|
||||
:return: A list of new file names, and a list of the new contents for
|
||||
those files.
|
||||
"""
|
||||
parser = configparser.ConfigParser(interpolation = None)
|
||||
parser.read_string(serialized)
|
||||
|
||||
# Update version number.
|
||||
if "metadata" not in parser:
|
||||
parser["metadata"] = {}
|
||||
|
||||
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
|
||||
|
||||
result = io.StringIO()
|
||||
parser.write(result)
|
||||
return [filename], [result.getvalue()]
|
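One detail worth noting in `upgradeInstanceContainer` above: it deletes keys from `parser["values"]` while iterating `.items()` over that same section. A minimal sketch of the same rename done over a snapshot of the keys, in case the underlying mapping objects to being mutated mid-iteration; this is illustrative, not the shipped code.

```python
if "values" in parser:
    for key in list(parser["values"].keys()):          # take a snapshot first
        if key.startswith(_PLUGIN_NAME):
            parser["values"][key.split("__")[-1]] = parser["values"][key]
            del parser["values"][key]
```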
61
plugins/VersionUpgrade/VersionUpgrade58to59/__init__.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
# Copyright (c) 2024 UltiMaker
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
|
||||
from typing import Any, Dict, TYPE_CHECKING
|
||||
|
||||
from . import VersionUpgrade58to59
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from UM.Application import Application
|
||||
|
||||
upgrade = VersionUpgrade58to59.VersionUpgrade58to59()
|
||||
|
||||
|
||||
def getMetaData() -> Dict[str, Any]:
|
||||
return {
|
||||
"version_upgrade": {
|
||||
# From To Upgrade function
|
||||
("preferences", 7000023): ("preferences", 7000024, upgrade.upgradePreferences),
|
||||
("machine_stack", 6000023): ("machine_stack", 6000024, upgrade.upgradeStack),
|
||||
("extruder_train", 6000023): ("extruder_train", 6000024, upgrade.upgradeStack),
|
||||
("definition_changes", 4000023): ("definition_changes", 4000024, upgrade.upgradeInstanceContainer),
|
||||
("quality_changes", 4000023): ("quality_changes", 4000024, upgrade.upgradeInstanceContainer),
|
||||
("quality", 4000023): ("quality", 4000024, upgrade.upgradeInstanceContainer),
|
||||
("user", 4000023): ("user", 4000024, upgrade.upgradeInstanceContainer),
|
||||
("intent", 4000023): ("intent", 4000024, upgrade.upgradeInstanceContainer),
|
||||
},
|
||||
"sources": {
|
||||
"preferences": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"."}
|
||||
},
|
||||
"machine_stack": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./machine_instances"}
|
||||
},
|
||||
"extruder_train": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./extruders"}
|
||||
},
|
||||
"definition_changes": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./definition_changes"}
|
||||
},
|
||||
"quality_changes": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./quality_changes"}
|
||||
},
|
||||
"quality": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./quality"}
|
||||
},
|
||||
"user": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./user"}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def register(app: "Application") -> Dict[str, Any]:
|
||||
return {"version_upgrade": upgrade}
|
8
plugins/VersionUpgrade/VersionUpgrade58to59/plugin.json
Normal file
|
@@ -0,0 +1,8 @@
{
    "name": "Version Upgrade 5.8 to 5.9",
    "author": "UltiMaker",
    "version": "1.0.0",
    "description": "Upgrades configurations from Cura 5.8 to Cura 5.9.",
    "api": 8,
    "i18n-catalog": "cura"
}

@ -0,0 +1,68 @@
|
|||
import configparser
|
||||
import io
|
||||
from typing import Dict, Tuple, List
|
||||
|
||||
from UM.VersionUpgrade import VersionUpgrade
|
||||
|
||||
_RENAMED_SETTINGS = {
|
||||
"wall_overhang_speed_factor": "wall_overhang_speed_factors"
|
||||
} # type: Dict[str, str]
|
||||
|
||||
_NEW_SETTING_VERSION = "25"
|
||||
|
||||
|
||||
class VersionUpgrade59to510(VersionUpgrade):
|
||||
def upgradePreferences(self, serialized: str, filename: str):
|
||||
parser = configparser.ConfigParser(interpolation = None)
|
||||
parser.read_string(serialized)
|
||||
|
||||
# Fix 'renamed'(ish) settings for visibility
|
||||
if "visible_settings" in parser["general"]:
|
||||
all_setting_keys = parser["general"]["visible_settings"].strip().split(";")
|
||||
if all_setting_keys:
|
||||
for idx, key in enumerate(all_setting_keys):
|
||||
if key in _RENAMED_SETTINGS:
|
||||
all_setting_keys[idx] = _RENAMED_SETTINGS[key]
|
||||
parser["general"]["visible_settings"] = ";".join(all_setting_keys)
|
||||
|
||||
# Update version number.
|
||||
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
|
||||
|
||||
result = io.StringIO()
|
||||
parser.write(result)
|
||||
return [filename], [result.getvalue()]
|
||||
|
||||
def upgradeInstanceContainer(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
|
||||
parser = configparser.ConfigParser(interpolation = None, comment_prefixes = ())
|
||||
parser.read_string(serialized)
|
||||
|
||||
# Update version number.
|
||||
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
|
||||
|
||||
if "values" in parser:
|
||||
for old_name, new_name in _RENAMED_SETTINGS.items():
|
||||
if old_name in parser["values"]:
|
||||
parser["values"][new_name] = parser["values"][old_name]
|
||||
del parser["values"][old_name]
|
||||
if "wall_overhang_speed_factors" in parser["values"]:
|
||||
old_value = float(parser["values"]["wall_overhang_speed_factors"])
|
||||
new_value = [max(1, int(round(old_value)))]
|
||||
parser["values"]["wall_overhang_speed_factor"] = str(new_value)
|
||||
|
||||
result = io.StringIO()
|
||||
parser.write(result)
|
||||
return [filename], [result.getvalue()]
|
||||
|
||||
def upgradeStack(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
|
||||
parser = configparser.ConfigParser(interpolation = None)
|
||||
parser.read_string(serialized)
|
||||
|
||||
# Update version number.
|
||||
if "metadata" not in parser:
|
||||
parser["metadata"] = {}
|
||||
|
||||
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
|
||||
|
||||
result = io.StringIO()
|
||||
parser.write(result)
|
||||
return [filename], [result.getvalue()]
|
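Worked through on a sample profile entry, the single-value-to-list conversion above does the following (note that the converted string is written back under the key shown in the code):

```python
old_value = float("35")                     # value stored by a Cura 5.9 profile
new_value = [max(1, int(round(old_value)))]
print(str(new_value))                       # "[35]"
```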
60
plugins/VersionUpgrade/VersionUpgrade59to510/__init__.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
# Copyright (c) 2024 UltiMaker
|
||||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
|
||||
from typing import Any, Dict, TYPE_CHECKING
|
||||
|
||||
from . import VersionUpgrade59to510
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from UM.Application import Application
|
||||
|
||||
upgrade = VersionUpgrade59to510.VersionUpgrade59to510()
|
||||
|
||||
def getMetaData() -> Dict[str, Any]:
|
||||
return {
|
||||
"version_upgrade": {
|
||||
# From To Upgrade function
|
||||
("preferences", 7000024): ("preferences", 7000025, upgrade.upgradePreferences),
|
||||
("machine_stack", 6000024): ("machine_stack", 6000025, upgrade.upgradeStack),
|
||||
("extruder_train", 6000024): ("extruder_train", 6000025, upgrade.upgradeStack),
|
||||
("definition_changes", 4000024): ("definition_changes", 4000025, upgrade.upgradeInstanceContainer),
|
||||
("quality_changes", 4000024): ("quality_changes", 4000025, upgrade.upgradeInstanceContainer),
|
||||
("quality", 4000024): ("quality", 4000025, upgrade.upgradeInstanceContainer),
|
||||
("user", 4000024): ("user", 4000025, upgrade.upgradeInstanceContainer),
|
||||
("intent", 4000024): ("intent", 4000025, upgrade.upgradeInstanceContainer),
|
||||
},
|
||||
"sources": {
|
||||
"preferences": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"."}
|
||||
},
|
||||
"machine_stack": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./machine_instances"}
|
||||
},
|
||||
"extruder_train": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./extruders"}
|
||||
},
|
||||
"definition_changes": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./definition_changes"}
|
||||
},
|
||||
"quality_changes": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./quality_changes"}
|
||||
},
|
||||
"quality": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./quality"}
|
||||
},
|
||||
"user": {
|
||||
"get_version": upgrade.getCfgVersion,
|
||||
"location": {"./user"}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def register(app: "Application") -> Dict[str, Any]:
|
||||
return {"version_upgrade": upgrade}
|
8
plugins/VersionUpgrade/VersionUpgrade59to510/plugin.json
Normal file
|
@@ -0,0 +1,8 @@
{
    "name": "Version Upgrade 5.9 to 5.10",
    "author": "Ultimaker B.V.",
    "version": "1.0.0",
    "description": "Upgrades configurations from Cura 5.9 to Cura 5.10",
    "api": 8,
    "i18n-catalog": "cura"
}

@ -11,12 +11,14 @@ import xml.etree.ElementTree as ET
|
|||
from UM.PluginRegistry import PluginRegistry
|
||||
from UM.Resources import Resources
|
||||
from UM.Logger import Logger
|
||||
from UM.Decorators import CachedMemberFunctions
|
||||
import UM.Dictionary
|
||||
from UM.Settings.InstanceContainer import InstanceContainer
|
||||
from UM.Settings.ContainerRegistry import ContainerRegistry
|
||||
from UM.ConfigurationErrorMessage import ConfigurationErrorMessage
|
||||
|
||||
from cura.CuraApplication import CuraApplication
|
||||
from cura.PrinterOutput.FormatMaps import FormatMaps
|
||||
from cura.Machines.VariantType import VariantType
|
||||
|
||||
try:
|
||||
|
@ -70,6 +72,8 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
Logger.log("w", "Can't change metadata {key} of material {material_id} because it's read-only.".format(key = key, material_id = self.getId()))
|
||||
return
|
||||
|
||||
CachedMemberFunctions.clearInstanceCache(self)
|
||||
|
||||
# Some metadata such as diameter should also be instantiated to be a setting. Go though all values for the
|
||||
# "properties" field and apply the new values to SettingInstances as well.
|
||||
new_setting_values_dict = {}
|
||||
|
@ -249,7 +253,7 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
machine_variant_map[definition_id][variant_name] = variant_dict
|
||||
|
||||
# Map machine human-readable names to IDs
|
||||
product_id_map = self.getProductIdMap()
|
||||
product_id_map = FormatMaps.getProductIdMap()
|
||||
|
||||
for definition_id, container in machine_container_map.items():
|
||||
definition_id = container.getMetaDataEntry("definition")
|
||||
|
@ -479,6 +483,7 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
first.append(element)
|
||||
|
||||
def clearData(self):
|
||||
CachedMemberFunctions.clearInstanceCache(self)
|
||||
self._metadata = {
|
||||
"id": self.getId(),
|
||||
"name": ""
|
||||
|
@ -518,6 +523,8 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
def deserialize(self, serialized, file_name = None):
|
||||
"""Overridden from InstanceContainer"""
|
||||
|
||||
CachedMemberFunctions.clearInstanceCache(self)
|
||||
|
||||
containers_to_add = []
|
||||
# update the serialized data first
|
||||
from UM.Settings.Interfaces import ContainerInterface
|
||||
|
@ -647,7 +654,7 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
self._dirty = False
|
||||
|
||||
# Map machine human-readable names to IDs
|
||||
product_id_map = self.getProductIdMap()
|
||||
product_id_map = FormatMaps.getProductIdMap()
|
||||
|
||||
machines = data.iterfind("./um:settings/um:machine", self.__namespaces)
|
||||
for machine in machines:
|
||||
|
@ -911,9 +918,6 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
base_metadata["properties"] = property_values
|
||||
base_metadata["definition"] = "fdmprinter"
|
||||
|
||||
# Certain materials are loaded but should not be visible / selectable to the user.
|
||||
base_metadata["visible"] = not base_metadata.get("abstract_color", False)
|
||||
|
||||
compatible_entries = data.iterfind("./um:settings/um:setting[@key='hardware compatible']", cls.__namespaces)
|
||||
try:
|
||||
common_compatibility = cls._parseCompatibleValue(next(compatible_entries).text) # type: ignore
|
||||
|
@ -923,7 +927,7 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
result_metadata.append(base_metadata)
|
||||
|
||||
# Map machine human-readable names to IDs
|
||||
product_id_map = cls.getProductIdMap()
|
||||
product_id_map = FormatMaps.getProductIdMap()
|
||||
|
||||
for machine in data.iterfind("./um:settings/um:machine", cls.__namespaces):
|
||||
machine_compatibility = common_compatibility
|
||||
|
@ -1083,10 +1087,8 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
# Skip material properties (eg diameter) or metadata (eg GUID)
|
||||
return
|
||||
|
||||
if instance.value is True:
|
||||
data = "yes"
|
||||
elif instance.value is False:
|
||||
data = "no"
|
||||
if tag_name != "cura:setting" and isinstance(instance.value, bool):
|
||||
data = "yes" if instance.value else "no"
|
||||
else:
|
||||
data = str(instance.value)
|
||||
|
||||
|
@ -1129,29 +1131,6 @@ class XmlMaterialProfile(InstanceContainer):
|
|||
id_list = list(id_list)
|
||||
return id_list
|
||||
|
||||
__product_to_id_map: Optional[Dict[str, List[str]]] = None
|
||||
|
||||
@classmethod
|
||||
def getProductIdMap(cls) -> Dict[str, List[str]]:
|
||||
"""Gets a mapping from product names in the XML files to their definition IDs.
|
||||
|
||||
This loads the mapping from a file.
|
||||
"""
|
||||
if cls.__product_to_id_map is not None:
|
||||
return cls.__product_to_id_map
|
||||
|
||||
plugin_path = cast(str, PluginRegistry.getInstance().getPluginPath("XmlMaterialProfile"))
|
||||
product_to_id_file = os.path.join(plugin_path, "product_to_id.json")
|
||||
with open(product_to_id_file, encoding = "utf-8") as f:
|
||||
contents = ""
|
||||
for line in f:
|
||||
contents += line if "#" not in line else "".join([line.replace("#", str(n)) for n in range(1, 12)])
|
||||
cls.__product_to_id_map = json.loads(contents)
|
||||
cls.__product_to_id_map = {key: [value] for key, value in cls.__product_to_id_map.items()}
|
||||
#This also loads "Ultimaker S5" -> "ultimaker_s5" even though that is not strictly necessary with the default to change spaces into underscores.
|
||||
#However it is not always loaded with that default; this mapping is also used in serialize() without that default.
|
||||
return cls.__product_to_id_map
|
||||
|
||||
@staticmethod
|
||||
def _parseCompatibleValue(value: str):
|
||||
"""Parse the value of the "material compatible" property."""
|
||||
|
|
|
@@ -1,23 +0,0 @@
{
    "Ultimaker #": "ultimaker#",
    "Ultimaker # Extended": "ultimaker#_extended",
    "Ultimaker # Extended+": "ultimaker#_extended_plus",
    "Ultimaker # Go": "ultimaker#_go",
    "Ultimaker #+": "ultimaker#_plus",
    "Ultimaker #+ Connect": "ultimaker#_plus_connect",
    "Ultimaker S#": "ultimaker_s#",
    "Ultimaker Factor #": "ultimaker_factor#",
    "Ultimaker Original": "ultimaker_original",
    "Ultimaker Original+": "ultimaker_original_plus",
    "Ultimaker Original Dual Extrusion": "ultimaker_original_dual",
    "IMADE3D JellyBOX": "imade3d_jellybox",
    "DUAL600": "strateo3d",
    "IDEX420": "strateo3d_IDEX420",
    "IDEX420 Duplicate": "strateo3d_IDEX420_duplicate",
    "IDEX420 Mirror": "strateo3d_IDEX420_mirror",
    "UltiMaker Method": "ultimaker_method",
    "UltiMaker Method X": "ultimaker_methodx",
    "UltiMaker Method XL": "ultimaker_methodxl",
    "UltiMaker Sketch": "ultimaker_sketch",
    "UltiMaker Sketch Large": "ultimaker_sketch_large"
}
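For reference, the "#" placeholders in the deleted product_to_id.json were expanded by the (also removed) loader into one entry per model number; a small sketch of that expansion, using one line of the file:

```python
line = '"Ultimaker S#": "ultimaker_s#",'
expanded = "".join(line.replace("#", str(n)) for n in range(1, 12))
# -> '"Ultimaker S1": "ultimaker_s1","Ultimaker S2": "ultimaker_s2", ... "Ultimaker S11": "ultimaker_s11",'
```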