mirror of
https://github.com/Ultimaker/Cura.git
synced 2025-07-06 22:47:29 -06:00
CURA-5128 Modify the GCode parser to use a stream instead of a file so
we can reuse methods for the GCodeGZReader.
This commit is contained in:
parent c2888529cb
commit dd0d0d20e9
4 changed files with 135 additions and 131 deletions
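In short, the plain-text GCodeReader plugin now exposes stream-based entry points (preReadFromStream / readFromStream) that parse g-code handed to them as a string, and GCodeGzReader decompresses the .gcode.gz payload in memory and feeds the resulting text into those same methods instead of writing it to a temporary file first. Below is a minimal sketch of that sharing pattern, with illustrative class and method names rather than the actual Cura plugin classes:

import gzip


class PlainGCodeReader:
    def read_from_stream(self, stream: str):
        # Parse the g-code text directly; no temporary file is needed.
        return [line for line in stream.split("\n") if line and not line.startswith(";")]

    def read(self, file_name: str):
        with open(file_name, "r", encoding = "utf-8") as f:
            return self.read_from_stream(f.read())


class GzippedGCodeReader:
    def __init__(self, plain_reader: PlainGCodeReader):
        self._plain_reader = plain_reader

    def read(self, file_name: str):
        with open(file_name, "rb") as f:
            text = gzip.decompress(f.read()).decode("utf-8")
        # Reuse the same parsing code instead of round-tripping through a temp file.
        return self._plain_reader.read_from_stream(text)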
@@ -170,6 +170,7 @@ class MachineErrorChecker(QObject):
         self._application.callLater(self._checkStack)
 
     def _setResult(self, result: bool):
+        return
         if result != self._has_errors:
             self._has_errors = result
             self.hasErrorUpdated.emit()
@@ -2,15 +2,11 @@
 # Cura is released under the terms of the LGPLv3 or higher.
 
 import gzip
-import tempfile
 
-from io import StringIO, BufferedIOBase #To write the g-code to a temporary buffer, and for typing.
-from typing import List
+from io import TextIOWrapper
 
-from UM.Logger import Logger
 from UM.Mesh.MeshReader import MeshReader #The class we're extending/implementing.
 from UM.PluginRegistry import PluginRegistry
-from UM.Scene.SceneNode import SceneNode #For typing.
 
 ## A file writer that writes gzipped g-code.
 #

@@ -24,10 +20,8 @@ class GCodeGzReader(MeshReader):
     def read(self, file_name):
         with open(file_name, "rb") as file:
             file_data = file.read()
-        uncompressed_gcode = gzip.decompress(file_data)
-        with tempfile.NamedTemporaryFile() as temp_file:
-            temp_file.write(uncompressed_gcode)
-            PluginRegistry.getInstance().getPluginObject("GCodeReader").preRead(temp_file.name)
-            result = PluginRegistry.getInstance().getPluginObject("GCodeReader").read(temp_file.name)
+        uncompressed_gcode = gzip.decompress(file_data).decode("utf-8")
+        PluginRegistry.getInstance().getPluginObject("GCodeReader").preReadFromStream(uncompressed_gcode)
+        result = PluginRegistry.getInstance().getPluginObject("GCodeReader").readFromStream(uncompressed_gcode)
 
         return result
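The new read() keeps the gzipped file entirely in memory: gzip.decompress() returns bytes, which are decoded as UTF-8 and handed straight to the GCodeReader plugin's stream methods, so the tempfile round trip and its imports can go. A small hedged helper illustrating just that decompression step (hypothetical function name; it assumes UTF-8 encoded g-code, matching the decode("utf-8") call above):

import gzip

def load_gcode_text(file_name):
    # Read the compressed bytes and decode the decompressed payload to text.
    with open(file_name, "rb") as f:
        return gzip.decompress(f.read()).decode("utf-8")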
@@ -286,8 +286,8 @@ class FlavorParser:
                       extruder.getProperty("machine_nozzle_offset_y", "value")]
         return result
 
-    def processGCodeFile(self, file_name):
-        Logger.log("d", "Preparing to load %s" % file_name)
+    def processGCodeFile(self, stream):
+        Logger.log("d", "Preparing to load GCode")
         self._cancelled = False
         # We obtain the filament diameter from the selected printer to calculate line widths
         self._filament_diameter = Application.getInstance().getGlobalContainerStack().getProperty("material_diameter", "value")
@@ -300,124 +300,124 @@ class FlavorParser:
         gcode_list = []
         self._is_layers_in_file = False
 
-        Logger.log("d", "Opening file %s" % file_name)
-
         self._extruder_offsets = self._extruderOffsets() # dict with index the extruder number. can be empty
 
-        with open(file_name, "r") as file:
+        ##############################################################################################
+        ##  This part is where the action starts
+        ##############################################################################################
         file_lines = 0
         current_line = 0
-            for line in file:
+        for line in stream.split("\n"):
             file_lines += 1
             gcode_list.append(line)
             if not self._is_layers_in_file and line[:len(self._layer_keyword)] == self._layer_keyword:
                 self._is_layers_in_file = True
-            file.seek(0)
+        # stream.seek(0)
 
         file_step = max(math.floor(file_lines / 100), 1)
 
         self._clearValues()
 
         self._message = Message(catalog.i18nc("@info:status", "Parsing G-code"),
                                 lifetime=0,
                                 title = catalog.i18nc("@info:title", "G-code Details"))
 
         self._message.setProgress(0)
         self._message.show()
 
-            Logger.log("d", "Parsing %s..." % file_name)
+        Logger.log("d", "Parsing Gcode...")
 
         current_position = self._position(0, 0, 0, 0, [0])
         current_path = []
         min_layer_number = 0
         negative_layers = 0
         previous_layer = 0
 
-            for line in file:
+        for line in stream.split("\n"):
             if self._cancelled:
-                    Logger.log("d", "Parsing %s cancelled" % file_name)
+                Logger.log("d", "Parsing Gcode file cancelled")
                 return None
             current_line += 1
 
             if current_line % file_step == 0:
                 self._message.setProgress(math.floor(current_line / file_lines * 100))
                 Job.yieldThread()
             if len(line) == 0:
                 continue
 
             if line.find(self._type_keyword) == 0:
                 type = line[len(self._type_keyword):].strip()
                 if type == "WALL-INNER":
                     self._layer_type = LayerPolygon.InsetXType
                 elif type == "WALL-OUTER":
                     self._layer_type = LayerPolygon.Inset0Type
                 elif type == "SKIN":
                     self._layer_type = LayerPolygon.SkinType
                 elif type == "SKIRT":
                     self._layer_type = LayerPolygon.SkirtType
                 elif type == "SUPPORT":
                     self._layer_type = LayerPolygon.SupportType
                 elif type == "FILL":
                     self._layer_type = LayerPolygon.InfillType
                 else:
                     Logger.log("w", "Encountered a unknown type (%s) while parsing g-code.", type)
 
             # When the layer change is reached, the polygon is computed so we have just one layer per layer per extruder
             if self._is_layers_in_file and line[:len(self._layer_keyword)] == self._layer_keyword:
                 try:
                     layer_number = int(line[len(self._layer_keyword):])
                     self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0]))
                     current_path.clear()
 
                     # When using a raft, the raft layers are stored as layers < 0, it mimics the same behavior
                     # as in ProcessSlicedLayersJob
                     if layer_number < min_layer_number:
                         min_layer_number = layer_number
                     if layer_number < 0:
                         layer_number += abs(min_layer_number)
                         negative_layers += 1
                     else:
                         layer_number += negative_layers
 
                     # In case there is a gap in the layer count, empty layers are created
                     for empty_layer in range(previous_layer + 1, layer_number):
                         self._createEmptyLayer(empty_layer)
 
                     self._layer_number = layer_number
                     previous_layer = layer_number
                 except:
                     pass
 
             # This line is a comment. Ignore it (except for the layer_keyword)
             if line.startswith(";"):
                 continue
 
             G = self._getInt(line, "G")
             if G is not None:
                 # When find a movement, the new posistion is calculated and added to the current_path, but
                 # don't need to create a polygon until the end of the layer
                 current_position = self.processGCode(G, line, current_position, current_path)
                 continue
 
             # When changing the extruder, the polygon with the stored paths is computed
             if line.startswith("T"):
                 T = self._getInt(line, "T")
                 if T is not None:
                     self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0]))
                     current_path.clear()
 
                     current_position = self.processTCode(T, line, current_position, current_path)
 
             if line.startswith("M"):
                 M = self._getInt(line, "M")
                 self.processMCode(M, line, current_position, current_path)
 
         # "Flush" leftovers. Last layer paths are still stored
         if len(current_path) > 1:
             if self._createPolygon(self._current_layer_thickness, current_path, self._extruder_offsets.get(self._extruder_number, [0, 0])):
                 self._layer_number += 1
                 current_path.clear()
 
         material_color_map = numpy.zeros((8, 4), dtype = numpy.float32)
         material_color_map[0, :] = [0.0, 0.7, 0.9, 1.0]
         material_color_map[1, :] = [0.7, 0.9, 0.0, 1.0]
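processGCodeFile() now receives the entire g-code document as one string. The first pass over stream.split("\n") counts lines and detects the layer keyword, and because a string has no file position there is nothing to rewind: the old file.seek(0) survives only as the commented-out # stream.seek(0), and the second pass simply splits the string again. A standalone sketch of that two-pass pattern (hypothetical function, not the FlavorParser method itself; the ";LAYER:" default is an assumption about the layer marker):

import math

def parse_gcode_text(stream, layer_keyword = ";LAYER:"):
    lines = stream.split("\n")

    # First pass: count lines and detect whether layer markers are present.
    file_lines = len(lines)
    layers_in_file = any(line.startswith(layer_keyword) for line in lines)

    # Second pass: no seek(0) is needed, the list of lines is just walked again.
    file_step = max(math.floor(file_lines / 100), 1)
    for current_line, line in enumerate(lines, start = 1):
        if current_line % file_step == 0:
            pass  # report progress roughly every 1% of the file
        if not line or line.startswith(";"):
            continue
        # ... interpret G/T/M codes per line here ...

    return layers_in_file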
@@ -441,11 +441,11 @@ class FlavorParser:
         gcode_dict = {active_build_plate_id: gcode_list}
         Application.getInstance().getController().getScene().gcode_dict = gcode_dict
 
-        Logger.log("d", "Finished parsing %s" % file_name)
+        Logger.log("d", "Finished parsing Gcode")
         self._message.hide()
 
         if self._layer_number == 0:
-            Logger.log("w", "File %s doesn't contain any valid layers" % file_name)
+            Logger.log("w", "File doesn't contain any valid layers")
 
         settings = Application.getInstance().getGlobalContainerStack()
         machine_width = settings.getProperty("machine_width", "value")

@@ -454,7 +454,7 @@ class FlavorParser:
         if not self._center_is_zero:
             scene_node.setPosition(Vector(-machine_width / 2, 0, machine_depth / 2))
 
-        Logger.log("d", "Loaded %s" % file_name)
+        Logger.log("d", "GCode loading finished")
 
         if Preferences.getInstance().getValue("gcodereader/show_caution"):
             caution_message = Message(catalog.i18nc(
@@ -24,21 +24,30 @@ class GCodeReader(MeshReader):
 
         Preferences.getInstance().addPreference("gcodereader/show_caution", True)
 
+    def preReadFromStream(self, stream, *args, **kwargs):
+        for line in stream.split("\n"):
+            if line[:len(self._flavor_keyword)] == self._flavor_keyword:
+                try:
+                    self._flavor_reader = self._flavor_readers_dict[line[len(self._flavor_keyword):].rstrip()]
+                    return FileReader.PreReadResult.accepted
+                except:
+                    # If there is no entry in the dictionary for this flavor, just skip and select the by-default flavor
+                    break
+
+        # If no flavor is found in the GCode, then we use the by-default
+        self._flavor_reader = self._flavor_readers_dict[self._flavor_default]
+        return FileReader.PreReadResult.accepted
+
     # PreRead is used to get the correct flavor. If not, Marlin is set by default
     def preRead(self, file_name, *args, **kwargs):
         with open(file_name, "r", encoding = "utf-8") as file:
-            for line in file:
-                if line[:len(self._flavor_keyword)] == self._flavor_keyword:
-                    try:
-                        self._flavor_reader = self._flavor_readers_dict[line[len(self._flavor_keyword):].rstrip()]
-                        return FileReader.PreReadResult.accepted
-                    except:
-                        # If there is no entry in the dictionary for this flavor, just skip and select the by-default flavor
-                        break
+            file_data = file.read()
+            return self.preReadFromStream(file_data, args, kwargs)
 
-        # If no flavor is found in the GCode, then we use the by-default
-        self._flavor_reader = self._flavor_readers_dict[self._flavor_default]
-        return FileReader.PreReadResult.accepted
+    def readFromStream(self, stream):
+        return self._flavor_reader.processGCodeFile(stream)
 
     def read(self, file_name):
-        return self._flavor_reader.processGCodeFile(file_name)
+        with open(file_name, "r", encoding = "utf-8") as file:
+            file_data = file.read()
+            return self.readFromStream(file_data)
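With these methods in place, preRead() and read() are thin wrappers that load the file into a string and delegate to preReadFromStream() and readFromStream(), which is exactly what GCodeGzReader now calls through the plugin registry after decompressing. A hedged usage sketch of that reuse (hypothetical helper; it only works inside a running Cura where the plugin registry is populated, and the calls mirror the GCodeGzReader change above):

from UM.PluginRegistry import PluginRegistry

def read_gcode_text(gcode_text):
    gcode_reader = PluginRegistry.getInstance().getPluginObject("GCodeReader")
    gcode_reader.preReadFromStream(gcode_text)      # selects the flavor parser
    return gcode_reader.readFromStream(gcode_text)  # parses the text and returns the result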