Revert formatting, move to other PR, add fixes

classicrocker883 2025-03-16 06:08:37 -04:00
parent 81f96c2cf5
commit 3fbdc04d54
No known key found for this signature in database
GPG key ID: 11D10A09D3B1FFC2
18 changed files with 136 additions and 115 deletions

View file

@ -225,7 +225,7 @@ struct Flags<N, false> {
// Flag bits for more than 64 states
template<size_t N>
struct Flags<N, true> {
uint8_t bitmask[(N + 7) >> 3];
uint8_t bitmask[(N+7)>>3];
// Proxy class for handling bit assignment
class BitProxy {
public:
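For reference, the (N + 7) >> 3 above is simply the number of bytes needed to hold N one-bit flags. The following is an illustrative Python sketch of that packing arithmetic only (the byte/bit split is assumed, since the accessors are outside this hunk):

class PackedFlags:
    """Hold N boolean flags in (N + 7) >> 3 bytes, like uint8_t bitmask[(N + 7) >> 3]."""
    def __init__(self, n):
        self.bitmask = bytearray((n + 7) >> 3)      # e.g. 100 flags -> 13 bytes
    def set(self, b, value=True):
        byte, mask = b >> 3, 1 << (b & 7)           # assumed mapping of flag b to a byte and bit
        if value: self.bitmask[byte] |= mask
        else:     self.bitmask[byte] &= ~mask & 0xFF
    def test(self, b):
        return bool(self.bitmask[b >> 3] & (1 << (b & 7)))

flags = PackedFlags(100)
flags.set(70)
assert flags.test(70) and not flags.test(71)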

View file

@ -86,11 +86,15 @@ constexpr uint8_t ui8_to_percent(const uint8_t i) { return (int(i) * 100 + 127)
const xyze_char_t axis_codes LOGICAL_AXIS_ARRAY('E', 'X', 'Y', 'Z', AXIS4_NAME, AXIS5_NAME, AXIS6_NAME, AXIS7_NAME, AXIS8_NAME, AXIS9_NAME);
#if NUM_AXES <= XYZ && !HAS_EXTRUDERS
#define AXIS_CHAR(A) ((char)('X' + A))
#define IAXIS_CHAR AXIS_CHAR
#if ANY(HAS_MOTOR_CURRENT_SPI, HAS_MOTOR_CURRENT_I2C, HAS_MOTOR_CURRENT_DAC)
#define IAXIS_CHAR AXIS_CHAR
#endif
#else
const xyze_char_t iaxis_codes LOGICAL_AXIS_ARRAY('E', 'X', 'Y', 'Z', 'I', 'J', 'K', 'U', 'V', 'W');
#if ANY(HAS_MOTOR_CURRENT_SPI, HAS_MOTOR_CURRENT_I2C, HAS_MOTOR_CURRENT_DAC)
const xyze_char_t iaxis_codes LOGICAL_AXIS_ARRAY('E', 'X', 'Y', 'Z', 'I', 'J', 'K', 'U', 'V', 'W');
#define IAXIS_CHAR(A) iaxis_codes[A]
#endif
#define AXIS_CHAR(A) axis_codes[A]
#define IAXIS_CHAR(A) iaxis_codes[A]
#endif
#if ENABLED(MARLIN_DEV_MODE)

View file

@ -611,7 +611,6 @@
* (calling advance_status_scroll, status_and_len for a scrolling status message)
*/
#define HAS_DISPLAY 1
#define HAS_UI_UPDATE 1
#define HAS_STATUS_MESSAGE 1
#endif

View file

@ -205,14 +205,6 @@ public:
static void init();
static void reinit_lcd() { TERN_(REINIT_NOISY_LCD, init_lcd()); }
#if HAS_WIRED_LCD
static bool detected();
#else
static bool detected() { return true; }
#endif
#if HAS_MULTI_LANGUAGE
static uint8_t language;
static void set_language(const uint8_t lang);
@ -490,8 +482,8 @@ public:
* @param cstr A C-string to set as the status.
*/
static void set_status_no_expire_P(PGM_P const pstr) { set_status_P(pstr, true); }
static void set_status_no_expire(const char * const cstr) { set_status(cstr, true); }
static void set_status_no_expire(FSTR_P const fstr) { set_status(fstr, true); }
static void set_status_no_expire(const char * const cstr) { set_status(cstr, true); }
static void set_status_no_expire(FSTR_P const fstr) { set_status(fstr, true); }
/**
* @brief Set a status with a format string and parameters.
@ -503,9 +495,6 @@ public:
template<typename... Args>
static void status_printf(int8_t level, FSTR_P const ffmt, Args... more) { status_printf_P(level, FTOP(ffmt), more...); }
// Periodic or as-needed display update
static void update() IF_DISABLED(HAS_UI_UPDATE, {});
// Tell the screen to redraw on the next call
FORCE_INLINE static void refresh() {
TERN_(HAS_WIRED_LCD, refresh(LCDVIEW_CLEAR_CALL_REDRAW));
@ -513,6 +502,9 @@ public:
#if HAS_DISPLAY
// Periodic or as-needed display update
static void update();
static void init_lcd();
// Erase the LCD contents. Do the lowest-level thing required to clear the LCD.
@ -606,6 +598,10 @@ public:
static void status_screen();
#else
static void quick_feedback(const bool=true) {}
#endif // HAS_WIRED_LCD
#if HAS_MARLINUI_U8GLIB
@ -631,6 +627,7 @@ public:
#else // No LCD
static void update() {}
static void init_lcd() {}
static void clear_lcd() {}
static void clear_for_drawing() {}
@ -638,9 +635,8 @@ public:
#endif
#if !HAS_WIRED_LCD
static void quick_feedback(const bool=true) {}
#endif
static bool detected() IF_DISABLED(HAS_WIRED_LCD, { return true; });
static void reinit_lcd() { TERN_(REINIT_NOISY_LCD, init_lcd()); }
static void completion_feedback(const bool good=true);

View file

@ -97,7 +97,7 @@
static uint8_t CRC7(const uint8_t *data, uint8_t n) {
uint8_t crc = 0;
while (n > 0) {
crc = pgm_read_byte(&crctab7[(crc << 1) ^ *data++ ]);
crc = pgm_read_byte(&crctab7[ (crc << 1) ^ *data++ ]);
n--;
}
return (crc << 1) | 1;
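The lookup above is the SD-card CRC-7 (polynomial x^7 + x^3 + 1): the table is indexed by (crc << 1) ^ byte and the final (crc << 1) | 1 appends the SD end bit. A small Python sketch of the same scheme, deriving the table from the polynomial instead of storing it (illustrative, not the firmware's table generator):

CRC7_POLY = 0x89   # x^7 + x^3 + 1, aligned to the top bit of a byte

def make_crc7_table():
    table = []
    for i in range(256):
        crc = i ^ CRC7_POLY if i & 0x80 else i
        for _ in range(7):
            crc = (crc << 1) & 0xFF
            if crc & 0x80:
                crc ^= CRC7_POLY
        table.append(crc)
    return table

CRC7_TABLE = make_crc7_table()

def crc7(data):
    crc = 0
    for b in data:
        crc = CRC7_TABLE[((crc << 1) ^ b) & 0xFF]   # same update as the C loop above
    return (crc << 1) | 1                           # 7-bit CRC plus the SD end bit

assert crc7(bytes([0x40, 0, 0, 0, 0])) == 0x95      # CMD0 and its well-known CRC byte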

View file

@ -160,7 +160,7 @@ uint8_t UHS_USB_HOST_BASE::doSoftReset(uint8_t parent, uint8_t port, uint8_t add
return rcode;
}
/**
/*
* Pseudo code so you may understand the code flow.
*
* reset; (happens at the lower level)
@ -168,7 +168,7 @@ uint8_t UHS_USB_HOST_BASE::doSoftReset(uint8_t parent, uint8_t port, uint8_t add
* reset;
* If there are no configuration descriptors {
* //
* // NOTE: I know of no device that does this.
* // Note: I know of no device that does this.
* // I suppose there could be one though.
* //
* try to enumerate.
@ -1003,7 +1003,7 @@ uint8_t UHS_USB_HOST_BASE::ctrlReq(uint8_t addr, uint64_t Request, uint16_t nbyt
rcode = ctrlReqRead(pep, &left, &read, nbytes, dataptr);
#if UHS_DEVICE_WINDOWS_USB_SPEC_VIOLATION_DESCRIPTOR_DEVICE
HOST_DEBUG("RESULT: 0x%2.2x 0x%2.2x 0x%2.2x 0x%8.8lx%8.8lx\r\n", rcode, addr, read, (uint32_t)((Request >> 32) & 0xFFFFFFFFLU), (uint32_t)(Request & 0xFFFFFFFFLU));
HOST_DEBUG("RESULT: 0x%2.2x 0x%2.2x 0x%2.2x 0x%8.8lx%8.8lx\r\n", rcode, addr, read, (uint32_t)((Request>>32)&0xFFFFFFFFLU), (uint32_t)(Request&0xFFFFFFFFLU));
// Should only be used for GET_DESCRIPTOR USB_DESCRIPTOR_DEVICE
constexpr uint32_t req_match = ((uint32_t)USB_DESCRIPTOR_DEVICE << 24) |
((uint32_t)USB_REQUEST_GET_DESCRIPTOR << 8);
@ -1167,11 +1167,10 @@ uint8_t UHS_NI UHS_VSI::SetInterface(ENUMERATION_INFO *ei) {
#if 0
/**
* TODO: Move this silliness to a NONE driver.
/* TO-DO: Move this silliness to a NONE driver.
* When we have a generic NONE driver we can:
* - Extract ALL device information to help users with a new device.
* - Use an unknown device from a sketch, kind of like usblib does.
* o Extract ALL device information to help users with a new device.
* o Use an unknown device from a sketch, kind of like usblib does.
* This will aid in making more drivers in a faster way.
*/
uint8_t UHS_USB_HOST_BASE::DefaultAddressing(uint8_t parent, uint8_t port, uint8_t speed) {

View file

@ -15,15 +15,19 @@ if __name__ == "__main__":
"-Os",
"-mcpu=cortex-m3",
"-mthumb",
"-fsigned-char",
"-fno-move-loop-invariants",
"-fno-strict-aliasing",
"-fsingle-precision-constant",
"--specs=nano.specs",
"--specs=nosys.specs",
"-MMD",
"-MP",
"-MMD", "-MP",
"-IMarlin/src/HAL/STM32F1",
"-DTARGET_STM32F1",
"-DARDUINO_ARCH_STM32",
"-DPLATFORM_M997_SUPPORT"
@ -39,7 +43,9 @@ else:
if pioutil.is_pio_build():
pioutil.env.Append(
ARFLAGS=["rcs"],
ASFLAGS=["-x", "assembler-with-cpp"],
CXXFLAGS=[
"-fabi-version=0",
"-fno-use-cxa-atexit",

View file

@ -71,7 +71,7 @@ def apply_opt(name, val, conf=None):
prefix = ""
if val == "off":
prefix, val = "//", "" # Item doesn't appear in config dump
# val = "false" # Item appears in config dump
#val = "false" # Item appears in config dump
# Uppercase the option unless already mixed/uppercase
added = name.upper() if name.islower() else name

View file

@ -47,8 +47,8 @@ if pioutil.is_pio_build():
#
board = env.BoardConfig()
variant = board.get("build.variant")
# mcu_type = board.get("build.mcu")[:-2]
# series = mcu_type[:7].upper() + "xx"
#mcu_type = board.get("build.mcu")[:-2]
#series = mcu_type[:7].upper() + "xx"
# Make sure the local variant sub-folder exists
if marlin_variant_pattern.match(str(variant).lower()):

View file

@ -96,5 +96,5 @@ def main():
else:
apply_config(conf)
if __name__ == "__main__":
if __name__ == '__main__':
main()

View file

@ -33,7 +33,7 @@ if pioutil.is_pio_build():
found_envs = re.match(r"\s*#include .+" + envregex, line)
if found_envs:
envlist = re.findall(envregex + r"(\w+)", line)
return ["env:" + s for s in envlist]
return [ "env:"+s for s in envlist ]
return []
def check_envs(build_env, board_envs, config):
@ -91,13 +91,13 @@ if pioutil.is_pio_build():
motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
board_envs = get_envs_for_board(motherboard)
config = env.GetProjectConfig()
result = check_envs("env:" + build_env, board_envs, config)
result = check_envs("env:"+build_env, board_envs, config)
# Make sure board is compatible with the build environment. Skip for _test,
# since the board is manipulated as each unit test is executed.
if not result and build_env != "linux_native_test":
err = "Error: Build environment '%s' is incompatible with %s. Use one of these environments: %s" % \
(build_env, motherboard, ", ".join([e[4:] for e in board_envs if e.startswith("env:")]))
( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
raise SystemExit(err)
#
@ -136,13 +136,13 @@ if pioutil.is_pio_build():
#
mixedin = []
p = project_dir / "Marlin/src/lcd/dogm"
for f in ["ultralcd_DOGM.cpp", "ultralcd_DOGM.h"]:
for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
if (p / f).is_file():
mixedin += [f]
mixedin += [ f ]
p = project_dir / "Marlin/src/feature/bedlevel/abl"
for f in ["abl.cpp", "abl.h"]:
for f in [ "abl.cpp", "abl.h" ]:
if (p / f).is_file():
mixedin += [f]
mixedin += [ f ]
if mixedin:
err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
raise SystemExit(err)
@ -158,4 +158,5 @@ if pioutil.is_pio_build():
err = "ERROR: FILAMENT_RUNOUT_SCRIPT needs a %c parameter (e.g., \"M600 T%c\") when NUM_RUNOUT_SENSORS is > 1"
raise SystemExit(err)
sanity_check_target()

View file

@ -14,7 +14,7 @@
import re, json
from pathlib import Path
def extend_dict(d: dict, k: tuple):
def extend_dict(d:dict, k:tuple):
if len(k) >= 1 and k[0] not in d:
d[k[0]] = {}
if len(k) >= 2 and k[1] not in d[k[0]]:
@ -31,7 +31,6 @@ grouping_patterns = [
re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
]
# If the indexed part of the option name matches a pattern
# then add it to the dictionary.
def find_grouping(gdict, filekey, sectkey, optkey, pindex):
@ -56,7 +55,7 @@ def group_options(schema):
for optkey in s:
find_grouping(found_groups, filekey, sectkey, optkey, pindex)
fkeys = [k for k in found_groups.keys()]
fkeys = [ k for k in found_groups.keys() ]
for kkey in fkeys:
items = found_groups[kkey]
if len(items) > 1:
@ -116,7 +115,7 @@ def extract_files(filekey):
ERROR = 9 # Syntax error
# A JSON object to store the data
sch_out = {key: {} for key in filekey.values()}
sch_out = { key:{} for key in filekey.values() }
# Regex for #define NAME [VALUE] [COMMENT] with sanitized line
defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
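# Illustration only (not code from this commit): how the defgrep pattern splits a line.
#   defgrep.match('//#define TEMP_SENSOR_0 1  // note').groups()
#     -> ('//', '#define', 'TEMP_SENSOR_0', '1', '// note')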
# Pattern to match a float value
@ -145,7 +144,7 @@ def extract_files(filekey):
# Clean the line for easier parsing
the_line = the_line.strip()
if join_line: # A previous line is being made longer
if join_line: # A previous line is being made longer
line += (' ' if line else '') + the_line
else: # Otherwise, start the line anew
line, line_start = the_line, line_number
@ -205,7 +204,7 @@ def extract_files(filekey):
if m:
sec = m[1]
elif not sc.startswith('========'):
bufref.append(c) # Anything else is part of the comment
bufref.append(c) # Anything else is part of the comment
return opt, sec
# For slash comments, capture consecutive slash comments.
@ -224,7 +223,7 @@ def extract_files(filekey):
if endpos < 0:
cline = line
else:
cline, line = line[:endpos].strip(), line[endpos + 2 :].strip()
cline, line = line[:endpos].strip(), line[endpos+2:].strip()
# Temperature sensors are done
if state == Parse.GET_SENSORS:
@ -253,8 +252,8 @@ def extract_files(filekey):
elif state == Parse.NORMAL:
# Skip a commented define when evaluating comment opening
st = 2 if re.match(r'^//\s*#define', line) else 0
cpos1 = line.find('/*') # Start a block comment on the line?
cpos2 = line.find('//', st) # Start an end of line comment on the line?
cpos1 = line.find('/*') # Start a block comment on the line?
cpos2 = line.find('//', st) # Start an end of line comment on the line?
# Only the first comment starter gets evaluated
cpos = -1
@ -277,7 +276,7 @@ def extract_files(filekey):
# Process the start of a new comment
if cpos != -1:
comment_buff = []
cline, line = line[cpos + 2 :].strip(), line[:cpos].strip()
cline, line = line[cpos+2:].strip(), line[:cpos].strip()
if state == Parse.BLOCK_COMMENT:
# Strip leading '*' from block comments
@ -327,11 +326,11 @@ def extract_files(filekey):
conditions.append(prev)
elif cparts[0] == '#if':
conditions.append([atomize(line[3:].strip())])
conditions.append([ atomize(line[3:].strip()) ])
elif cparts[0] == '#ifdef':
conditions.append([f'defined({line[6:].strip()})'])
conditions.append([ f'defined({line[6:].strip()})' ])
elif cparts[0] == '#ifndef':
conditions.append([f'!defined({line[7:].strip()})'])
conditions.append([ f'!defined({line[7:].strip()})' ])
# Handle a complete #define line
elif defmatch is not None:
@ -345,10 +344,10 @@ def extract_files(filekey):
# Create a new dictionary for the current #define
define_info = {
'section': section,
'name' : define_name,
'name': define_name,
'enabled': enabled,
'line' : line_start,
'sid' : sid
'line': line_start,
'sid': sid
}
# Type is based on the value
@ -420,7 +419,7 @@ def extract_files(filekey):
# If define has already been seen...
if define_name in sch_out[fk][section]:
info = sch_out[fk][section][define_name]
if isinstance(info, dict): info = [info] # Convert a single dict into a list
if isinstance(info, dict): info = [ info ] # Convert a single dict into a list
info.append(define_info) # Add to the list
else:
# Add the define dict with name as key
@ -511,5 +510,5 @@ def main():
print("Generating YML ...")
dump_yaml(schema, Path('schema.yml'))
if __name__ == "__main__":
if __name__ == '__main__':
main()

View file

@ -254,16 +254,16 @@ def compute_build_signature(env):
sections = {}
for header in real_config:
for name in real_config[header]:
# print(f" name: {name}")
#print(f" name: {name}")
if name not in ignore:
ddict = real_config[header][name]
# print(f" real_config[{header}][{name}]:", ddict)
#print(f" real_config[{header}][{name}]:", ddict)
sect = ddict['section']
if sect not in sections: sections[sect] = {}
sections[sect][name] = ddict
# Get all sections as a list of strings, with spaces and dashes replaced by underscores
long_list = [re.sub(r'[- ]+', '_', x).lower() for x in sections.keys()]
long_list = [ re.sub(r'[- ]+', '_', x).lower() for x in sections.keys() ]
# Make comma-separated lists of sections with 64 characters or less
sec_lines = []
while len(long_list):
@ -271,7 +271,7 @@ def compute_build_signature(env):
while len(long_list) and len(line) + len(long_list[0]) < 64 - 1:
line += long_list.pop(0) + ', '
sec_lines.append(line.strip())
sec_lines[-1] = sec_lines[-1][:-1] # Remove the last comma
sec_lines[-1] = sec_lines[-1][:-1] # Remove the last comma
else:
sec_lines = ['all']

View file

@ -13,6 +13,7 @@ if pioutil.is_pio_build():
#
# Give the binary a distinctive name
#
env['PROGNAME'] = "MarlinSimulator"
#
@ -20,23 +21,25 @@ if pioutil.is_pio_build():
#
emsg = ''
fatal = 0
import sys
if sys.platform == 'darwin':
import shutil
gcc = shutil.which('gcc')
if not gcc or gcc == '/usr/bin/gcc':
if not gcc:
if gcc == '' or gcc == '/usr/bin/gcc':
if gcc == '':
emsg = "\u001b[31mNo GCC found in your configured shell PATH."
elif gcc == '/usr/bin/gcc':
emsg = "\u001b[31mCan't build Marlin Native on macOS using the included version of GCC (clang)."
emsg += "\n\u001b[31mSee 'native.ini' for instructions to install GCC with MacPorts or Homebrew."
fatal = 1
else:
#
# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
#
env['RANLIBFLAGS'] += ["-no_warning_for_no_symbols"]
env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]
# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
@ -45,12 +48,17 @@ if pioutil.is_pio_build():
import os.path
if os.path.exists(xcode_path):
env['BUILD_FLAGS'] += ["-F" + xcode_path]
env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
emsg = "\u001b[33mUsing OpenGL framework headers from Xcode.app"
elif os.path.exists(mesa_path):
env['BUILD_FLAGS'] += ['-D__MESA__']
env['BUILD_FLAGS'] += [ '-D__MESA__' ]
emsg = f"\u001b[33mUsing OpenGL header from {mesa_path}"
else:
emsg = "\u001b[31mNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h."
fatal = 1

View file

@ -47,7 +47,7 @@ if pioutil.is_pio_build():
#
# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
# defined, the first of these values will be used as the minimum.
build_flags = env.ParseFlags(env.get("BUILD_FLAGS"))["CPPDEFINES"]
build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
mf = env["MARLIN_FEATURES"]
# Get the largest defined buffer sizes for RX or TX, using defaults for undefined
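As an aside, the CPPDEFINES list returned by env.ParseFlags() holds plain names and (name, value) pairs; the sketch below is a hypothetical illustration of the kind of scan the comments above describe (helper name and regex are made up, not the script's actual code):

import re

def largest_buffer_size(cppdefines, pattern, default):
    sizes = []
    for d in cppdefines:
        if isinstance(d, (list, tuple)):                 # ('NAME', value)
            name, value = d[0], d[1] if len(d) > 1 else None
        else:                                            # bare 'NAME'
            name, value = d, None
        if value is not None and re.fullmatch(pattern, str(name)):
            try:
                sizes.append(int(value))
            except ValueError:
                pass
    return max(sizes, default=default)

# e.g. largest_buffer_size(build_flags, r'(MF|SERIAL)_RX_BUFFER_SIZE', 128)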

View file

@ -18,7 +18,7 @@ def glyph_bits(size_x, size_y, font, glyph_ord):
v = row
rpad = size_x - glyph.bbW
if rpad < 0: rpad = 0
if glyph.bbW > size_x: v = v >> (glyph.bbW - size_x) # some glyphs are actually too wide to fit!
if glyph.bbW > size_x: v = v >> (glyph.bbW - size_x) # some glyphs are actually too wide to fit!
v = v << (glyph_bytes * 8) - size_x + rpad
v = v >> glyph.bbX
bits[y + desc + glyph.bbY] |= v
@ -30,26 +30,26 @@ def glyph_bits(size_x, size_y, font, glyph_ord):
def marlin_font_hzk():
fonts = [
[ 6, 12, 'marlin-6x12-3.bdf'],
[ 8, 16, 'marlin-8x16.bdf'],
[10, 20, 'marlin-10x20.bdf'],
[12, 24, 'marlin-12x24.bdf'],
[14, 28, 'marlin-14x28.bdf'],
[16, 32, 'marlin-16x32.bdf'],
[20, 40, 'marlin-20x40.bdf'],
[24, 48, 'marlin-24x48.bdf'],
[28, 56, 'marlin-28x56.bdf'],
[32, 64, 'marlin-32x64.bdf']
[6,12,'marlin-6x12-3.bdf'],
[8,16,'marlin-8x16.bdf'],
[10,20,'marlin-10x20.bdf'],
[12,24,'marlin-12x24.bdf'],
[14,28,'marlin-14x28.bdf'],
[16,32,'marlin-16x32.bdf'],
[20,40,'marlin-20x40.bdf'],
[24,48,'marlin-24x48.bdf'],
[28,56,'marlin-28x56.bdf'],
[32,64,'marlin-32x64.bdf']
]
with open('marlin_fixed.hzk', 'wb') as output:
with open('marlin_fixed.hzk','wb') as output:
for f in fonts:
with open(f[2], 'rb') as file:
print(f"{f[0]}x{f[1]}")
print(f'{f[0]}x{f[1]}')
font = bdflib.reader.read_bdf(file)
if font is None:
print(f"Failed to read font from {f[2]}")
print(f'Failed to read font from {f[2]}')
continue # Skip this font and move to the next one
for glyph in range(128):
@ -61,9 +61,9 @@ def marlin_font_hzk():
z = b.to_bytes(glyph_bytes, 'big')
output.write(z)
except OverflowError:
print("Overflow")
print(f"{glyph}")
print('Overflow')
print(f'{glyph}')
if font and glyph in font: print(font[glyph])
else: print(f"Glyph {glyph} not found in the font or font is None")
for b in bits: print(f"{b:0{f[0]}b}")
else: print(f'Glyph {glyph} not found in the font or font is None')
for b in bits: print(f'{b:0{f[0]}b}')
return

View file

@ -1,5 +1,6 @@
#!/usr/bin/env python
"""Thermistor Value Lookup Table Generator
"""
Thermistor Value Lookup Table Generator
Generates lookup to temperature values for use in a microcontroller in C format based on:
https://en.wikipedia.org/wiki/Steinhart-Hart_equation
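For reference, the Steinhart-Hart relation this script is based on is 1/T = A + B*ln(R) + C*ln(R)^3, with T in kelvin and R in ohms. A minimal sketch using textbook example coefficients for a generic 10k NTC (illustrative values, not taken from any Marlin thermistor table):

import math

A, B, C = 1.129148e-3, 2.34125e-4, 8.76741e-8   # example coefficients, illustrative only

def steinhart_hart_celsius(resistance_ohms):
    ln_r = math.log(resistance_ohms)
    return 1.0 / (A + B * ln_r + C * ln_r ** 3) - 273.15

print(round(steinhart_hart_celsius(10000), 1))  # about 25.0 degC for these coefficients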

View file

@ -8,8 +8,8 @@
from __future__ import print_function
# Your G-code file/folder
folder = "../"
my_file = "test.gcode"
folder = '../'
my_file = 'test.gcode'
# this is the minimum of G1 instructions which should be between 2 different heights
min_g1 = 3
@ -19,10 +19,10 @@ min_g1 = 3
max_g1 = 100000000
# g29 keyword
g29_keyword = "G29"
g29_keyword = 'G29'
# output filename
output_file = folder + "g29_" + my_file
output_file = folder + 'g29_' + my_file
# input filename
input_file = folder + my_file
@ -42,6 +42,7 @@ lines_of_g1 = 0
gcode = []
# return only G0-G1-lines
def has_g_move(line):
return line[:2].upper() in ("G0", "G1")
@ -69,12 +70,13 @@ def find_axis(line, axis):
except ValueError:
return None
# save the min or max-values for each axis
def set_mima(line):
global min_x, max_x, min_y, max_y, last_z
current_x = find_axis(line, "x")
current_y = find_axis(line, "y")
current_x = find_axis(line, 'x')
current_y = find_axis(line, 'y')
if current_x is not None:
min_x = min(current_x, min_x)
@ -89,10 +91,11 @@ def set_mima(line):
# find z in the code and return it
def find_z(gcode, start_at_line=0):
for i in range(start_at_line, len(gcode)):
my_z = find_axis(gcode[i], "Z")
my_z = find_axis(gcode[i], 'Z')
if my_z is not None:
return my_z, i
def z_parse(gcode, start_at_line=0, end_at_line=0):
i = start_at_line
all_z = []
@ -111,29 +114,31 @@ def z_parse(gcode, start_at_line=0, end_at_line=0):
all_z.append(z)
z_at_line.append(i)
temp_line = i - last_i - 1
temp_line = i - last_i -1
line_between_z.append(i - last_i - 1)
# last_z = z
last_i = i
if 0 < end_at_line <= i or temp_line >= min_g1:
# print("break at line {} at height {}"".format(i, z))
# print('break at line {} at height {}'.format(i, z))
break
line_between_z = line_between_z[1:]
return all_z, line_between_z, z_at_line
# get the lines which should be the first layer
def get_lines(gcode, minimum):
i = 0
all_z, line_between_z, z_at_line = z_parse(gcode, end_at_line=max_g1)
# print("Detected Z heights:", all_z)
# print('Detected Z heights:', all_z)
for count in line_between_z:
i += 1
if count > minimum:
# print("layer: {}:{}".format(z_at_line[i-1], z_at_line[i]))
# print('layer: {}:{}'.format(z_at_line[i-1], z_at_line[i]))
return z_at_line[i - 1], z_at_line[i]
with open(input_file, "r", encoding="utf_8") as file:
with open(input_file, 'r', encoding='utf_8') as file:
lines = 0
for line in file:
lines += 1
@ -146,7 +151,7 @@ file.close()
layer_range = get_lines(gcode, min_g1)
if layer_range is None:
print("Error: Unable to determine layer range. Exiting.")
print('Error: Unable to determine layer range. Exiting.')
exit(1)
start, end = layer_range
@ -154,45 +159,48 @@ start, end = layer_range
for i in range(start, end):
set_mima(gcode[i])
print("x_min:{} x_max:{}\ny_min:{} y_max:{}".format(min_x, max_x, min_y, max_y))
print('x_min:{} x_max:{}\ny_min:{} y_max:{}'.format(min_x, max_x, min_y, max_y))
# resize min/max - values for minimum scan
if max_x - min_x < min_size:
offset_x = int((min_size - (max_x - min_x)) / 2 + 0.5) # int round up
# print("min_x! with {}".format(int(max_x - min_x)))
# print('min_x! with {}'.format(int(max_x - min_x)))
min_x = int(min_x) - offset_x
max_x = int(max_x) + offset_x
if max_y - min_y < min_size:
offset_y = int((min_size - (max_y - min_y)) / 2 + 0.5) # int round up
# print("min_y! with {}".format(int(max_y - min_y)))
# print('min_y! with {}'.format(int(max_y - min_y)))
min_y = int(min_y) - offset_y
max_y = int(max_y) + offset_y
new_command = "G29 L{0} R{1} F{2} B{3} P{4}\n".format(
min_x, max_x, min_y, max_y, probing_points
)
new_command = 'G29 L{0} R{1} F{2} B{3} P{4}\n'.format(min_x,
max_x,
min_y,
max_y,
probing_points)
g29_found = False
g28_found = False
with open(input_file, "r", encoding="utf_8") as in_file, open(output_file, "w", encoding="utf_8") as out_file:
with open(input_file, 'r', encoding='utf_8') as in_file, open(output_file, "w", encoding="utf_8") as out_file:
for line in in_file:
# Check if G29 already exists
if line.strip().upper().startswith(g29_keyword):
g29_found = True
out_file.write(new_command)
print("Write G29.")
print('Write G29.')
else:
out_file.write(line)
# If we find G28 and G29 wasn't found earlier, insert G29 after G28
if not g29_found and line.strip().upper().startswith("G28"):
if not g29_found and line.strip().upper().startswith('G28'):
g28_found = True # Mark that G28 was found
out_file.write(new_command) # Insert G29 command
print("Note: G29 was not found.\nInserted G29 after G28.")
print('Note: G29 was not found.\nInserted G29 after G28.')
# Debugging messages
if not g28_found and not g29_found:
print("Warning: G28 not found! G29 was not added.")
print('Warning: G28 not found! G29 was not added.')
print("auto G29 finished!")
print('auto G29 finished!')
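To make the command construction concrete, here is a small worked example of the bounding-box-to-G29 mapping with made-up first-layer extents (numbers are illustrative only):

min_x, max_x, min_y, max_y = 42.7, 47.3, 60.0, 140.0
probing_points, min_size = 5, 10

# Widen an axis that is narrower than min_size, rounding the offset up as the script does
if max_x - min_x < min_size:
    offset_x = int((min_size - (max_x - min_x)) / 2 + 0.5)        # (10 - 4.6) / 2 + 0.5 -> 3
    min_x, max_x = int(min_x) - offset_x, int(max_x) + offset_x   # 42.7..47.3 becomes 39..50

new_command = 'G29 L{0} R{1} F{2} B{3} P{4}\n'.format(min_x, max_x, min_y, max_y, probing_points)
print(new_command)   # G29 L39 R50 F60.0 B140.0 P5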