various fixes, spacing, typos

classicrocker883 2025-02-24 16:10:39 -05:00
parent efba06c4ce
commit 137f0f830a
15 changed files with 109 additions and 124 deletions

View file

@@ -4286,7 +4286,7 @@ void Temperature::isr() {
// Update lcd buttons 488 times per second
//
static bool do_buttons;
if ((do_buttons ^= true)) ui.update_buttons();
if (FLIP(do_buttons)) ui.update_buttons();
/**
* One sensor is sampled on every other call of the ISR.

View file

@@ -15,19 +15,15 @@ if __name__ == "__main__":
"-Os",
"-mcpu=cortex-m3",
"-mthumb",
"-fsigned-char",
"-fno-move-loop-invariants",
"-fno-strict-aliasing",
"-fsingle-precision-constant",
"--specs=nano.specs",
"--specs=nosys.specs",
"-MMD", "-MP",
"-MMD",
"-MP",
"-IMarlin/src/HAL/STM32F1",
"-DTARGET_STM32F1",
"-DARDUINO_ARCH_STM32",
"-DPLATFORM_M997_SUPPORT"
@@ -43,9 +39,7 @@ else:
if pioutil.is_pio_build():
pioutil.env.Append(
ARFLAGS=["rcs"],
ASFLAGS=["-x", "assembler-with-cpp"],
CXXFLAGS=[
"-fabi-version=0",
"-fno-use-cxa-atexit",

View file

@@ -38,6 +38,6 @@ if pioutil.is_pio_build():
else:
# The following almost works, but __start__ (from wirish/start.S) is not seen by common.inc
board.update("build.variants_dir", source_root_str);
board.update("build.variants_dir", source_root_str)
src = str(source_dir)
env.Append(BUILD_FLAGS=[f"-I{src}", f"-L{src}/ld"]) # Add include path for variant

View file

@@ -71,7 +71,7 @@ def apply_opt(name, val, conf=None):
prefix = ""
if val == "off":
prefix, val = "//", "" # Item doesn't appear in config dump
#val = "false" # Item appears in config dump
# val = "false" # Item appears in config dump
# Uppercase the option unless already mixed/uppercase
added = name.upper() if name.islower() else name
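For illustration, a minimal standalone sketch of the value handling shown above, where "off" comments an option out and a lowercase name is uppercased. The helper name preview_opt is hypothetical and the #define rendering is only illustrative; the real apply_opt rewrites the configuration files in place:

    def preview_opt(name, val):
        prefix = ""
        if val == "off":
            prefix, val = "//", ""   # disabled items get commented out
        # Uppercase the option unless already mixed/uppercase
        added = name.upper() if name.islower() else name
        return f"{prefix}#define {added} {val}".rstrip()

    print(preview_opt("pidtemp", "off"))      # //#define PIDTEMP
    print(preview_opt("TEMP_SENSOR_0", "1"))  # #define TEMP_SENSOR_0 1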
@@ -261,7 +261,7 @@ if __name__ == "__main__":
#
# From command line use the given file name
#
import sys
import sys, os.path
args = sys.argv[1:]
if len(args) > 0:
if args[0].endswith('.ini'):

View file

@@ -47,8 +47,8 @@ if pioutil.is_pio_build():
#
board = env.BoardConfig()
variant = board.get("build.variant")
#mcu_type = board.get("build.mcu")[:-2]
#series = mcu_type[:7].upper() + "xx"
# mcu_type = board.get("build.mcu")[:-2]
# series = mcu_type[:7].upper() + "xx"
# Make sure the local variant sub-folder exists
if marlin_variant_pattern.match(str(variant).lower()):

View file

@@ -96,5 +96,5 @@ def main():
else:
apply_config(conf)
if __name__ == '__main__':
if __name__ == "__main__":
main()

View file

@@ -33,7 +33,7 @@ if pioutil.is_pio_build():
found_envs = re.match(r"\s*#include .+" + envregex, line)
if found_envs:
envlist = re.findall(envregex + r"(\w+)", line)
return [ "env:"+s for s in envlist ]
return ["env:" + s for s in envlist]
return []
def check_envs(build_env, board_envs, config):
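For context, the #include scan in get_envs_for_board above pulls env:NAME tags out of the selected board's include line. A minimal sketch, assuming envregex is simply r"env:" and using a made-up pins include line (neither is taken from this hunk):

    import re

    envregex = r"env:"   # assumption; the real pattern is defined earlier in the script
    line = '#include "ramps/pins_RAMPS.h" // env:mega2560 env:MKS_GEN_L'   # hypothetical line

    if re.match(r"\s*#include .+" + envregex, line):
        envlist = re.findall(envregex + r"(\w+)", line)
        print(["env:" + s for s in envlist])   # ['env:mega2560', 'env:MKS_GEN_L']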
@@ -91,13 +91,13 @@ if pioutil.is_pio_build():
motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
board_envs = get_envs_for_board(motherboard)
config = env.GetProjectConfig()
result = check_envs("env:"+build_env, board_envs, config)
result = check_envs("env:" + build_env, board_envs, config)
# Make sure board is compatible with the build environment. Skip for _test,
# since the board is manipulated as each unit test is executed.
if not result and build_env != "linux_native_test":
err = "Error: Build environment '%s' is incompatible with %s. Use one of these environments: %s" % \
( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
(build_env, motherboard, ", ".join([e[4:] for e in board_envs if e.startswith("env:")]))
raise SystemExit(err)
#
@@ -136,13 +136,13 @@ if pioutil.is_pio_build():
#
mixedin = []
p = project_dir / "Marlin/src/lcd/dogm"
for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
for f in ["ultralcd_DOGM.cpp", "ultralcd_DOGM.h"]:
if (p / f).is_file():
mixedin += [ f ]
mixedin += [f]
p = project_dir / "Marlin/src/feature/bedlevel/abl"
for f in [ "abl.cpp", "abl.h" ]:
for f in ["abl.cpp", "abl.h"]:
if (p / f).is_file():
mixedin += [ f ]
mixedin += [f]
if mixedin:
err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
raise SystemExit(err)
@@ -158,5 +158,4 @@ if pioutil.is_pio_build():
err = "ERROR: FILAMENT_RUNOUT_SCRIPT needs a %c parameter (e.g., \"M600 T%c\") when NUM_RUNOUT_SENSORS is > 1"
raise SystemExit(err)
sanity_check_target()

View file

@@ -14,7 +14,7 @@
import re, json
from pathlib import Path
def extend_dict(d:dict, k:tuple):
def extend_dict(d: dict, k: tuple):
if len(k) >= 1 and k[0] not in d:
d[k[0]] = {}
if len(k) >= 2 and k[1] not in d[k[0]]:
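extend_dict makes sure every level named by the key tuple exists before data is stored under it. A small usage sketch, assuming the body continues in the same pattern for a third key level (the rest of the function lies outside this hunk):

    def extend_dict(d: dict, k: tuple):
        if len(k) >= 1 and k[0] not in d:
            d[k[0]] = {}
        if len(k) >= 2 and k[1] not in d[k[0]]:
            d[k[0]][k[1]] = {}
        if len(k) >= 3 and k[2] not in d[k[0]][k[1]]:   # assumed continuation
            d[k[0]][k[1]][k[2]] = {}

    schema = {}
    extend_dict(schema, ('basic', 'motion'))
    print(schema)   # {'basic': {'motion': {}}}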
@@ -31,6 +31,7 @@ grouping_patterns = [
re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
]
# If the indexed part of the option name matches a pattern
# then add it to the dictionary.
def find_grouping(gdict, filekey, sectkey, optkey, pindex):
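As a quick illustration of which indexed name parts the two patterns above accept (the sample tokens are hypothetical):

    import re
    pats = [re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
            re.compile(r'^[XYZIJKUVW]M(IN|AX)$')]
    for tok in ('BED', 'XMIN', 'UMAX', 'E0'):
        print(tok, any(p.match(tok) for p in pats))
    # BED True, XMIN True, UMAX True, E0 False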
@@ -55,7 +56,7 @@ def group_options(schema):
for optkey in s:
find_grouping(found_groups, filekey, sectkey, optkey, pindex)
fkeys = [ k for k in found_groups.keys() ]
fkeys = [k for k in found_groups.keys()]
for kkey in fkeys:
items = found_groups[kkey]
if len(items) > 1:
@@ -115,7 +116,7 @@ def extract_files(filekey):
ERROR = 9 # Syntax error
# A JSON object to store the data
sch_out = { key:{} for key in filekey.values() }
sch_out = {key: {} for key in filekey.values()}
# Regex for #define NAME [VALUE] [COMMENT] with sanitized line
defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
# Pattern to match a float value
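To show what defgrep captures, here is a small check against two made-up config lines (illustrative samples, not taken from an actual Configuration.h):

    import re
    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')

    for sample in ('#define TEMP_SENSOR_0 1 // analog thermistor', '//#define PIDTEMP'):
        m = defgrep.match(sample)
        print(m.group(1), m.group(3), m.group(4), m.group(5))
    # -> None TEMP_SENSOR_0 1 // analog thermistor
    # -> // PIDTEMP  None       (commented out, empty value, no trailing comment)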
@@ -144,7 +145,7 @@ def extract_files(filekey):
# Clean the line for easier parsing
the_line = the_line.strip()
if join_line: # A previous line is being made longer
if join_line: # A previous line is being made longer
line += (' ' if line else '') + the_line
else: # Otherwise, start the line anew
line, line_start = the_line, line_number
@@ -204,7 +205,7 @@ def extract_files(filekey):
if m:
sec = m[1]
elif not sc.startswith('========'):
bufref.append(c) # Anything else is part of the comment
bufref.append(c) # Anything else is part of the comment
return opt, sec
# For slash comments, capture consecutive slash comments.
@@ -223,7 +224,7 @@ def extract_files(filekey):
if endpos < 0:
cline = line
else:
cline, line = line[:endpos].strip(), line[endpos+2:].strip()
cline, line = line[:endpos].strip(), line[endpos + 2 :].strip()
# Temperature sensors are done
if state == Parse.GET_SENSORS:
@@ -252,8 +253,8 @@ def extract_files(filekey):
elif state == Parse.NORMAL:
# Skip a commented define when evaluating comment opening
st = 2 if re.match(r'^//\s*#define', line) else 0
cpos1 = line.find('/*') # Start a block comment on the line?
cpos2 = line.find('//', st) # Start an end of line comment on the line?
cpos1 = line.find('/*') # Start a block comment on the line?
cpos2 = line.find('//', st) # Start an end of line comment on the line?
# Only the first comment starter gets evaluated
cpos = -1
@@ -276,7 +277,7 @@ def extract_files(filekey):
# Process the start of a new comment
if cpos != -1:
comment_buff = []
cline, line = line[cpos+2:].strip(), line[:cpos].strip()
cline, line = line[cpos + 2 :].strip(), line[:cpos].strip()
if state == Parse.BLOCK_COMMENT:
# Strip leading '*' from block comments
@@ -326,7 +327,7 @@ def extract_files(filekey):
conditions.append(prev)
elif cparts[0] == '#if':
conditions.append([ atomize(line[3:].strip()) ])
conditions.append([atomize(line[3:].strip())])
elif cparts[0] == '#ifdef':
conditions.append([ f'defined({line[6:].strip()})' ])
elif cparts[0] == '#ifndef':
@@ -344,10 +345,10 @@ def extract_files(filekey):
# Create a new dictionary for the current #define
define_info = {
'section': section,
'name': define_name,
'name' : define_name,
'enabled': enabled,
'line': line_start,
'sid': sid
'line' : line_start,
'sid' : sid
}
# Type is based on the value
@@ -419,7 +420,7 @@ def extract_files(filekey):
# If define has already been seen...
if define_name in sch_out[fk][section]:
info = sch_out[fk][section][define_name]
if isinstance(info, dict): info = [ info ] # Convert a single dict into a list
if isinstance(info, dict): info = [info] # Convert a single dict into a list
info.append(define_info) # Add to the list
else:
# Add the define dict with name as key
@@ -510,5 +511,5 @@ def main():
print("Generating YML ...")
dump_yaml(schema, Path('schema.yml'))
if __name__ == '__main__':
if __name__ == "__main__":
main()

View file

@@ -254,16 +254,16 @@ def compute_build_signature(env):
sections = {}
for header in real_config:
for name in real_config[header]:
#print(f" name: {name}")
# print(f" name: {name}")
if name not in ignore:
ddict = real_config[header][name]
#print(f" real_config[{header}][{name}]:", ddict)
# print(f" real_config[{header}][{name}]:", ddict)
sect = ddict['section']
if sect not in sections: sections[sect] = {}
sections[sect][name] = ddict
# Get all sections as a list of strings, with spaces and dashes replaced by underscores
long_list = [ re.sub(r'[- ]+', '_', x).lower() for x in sections.keys() ]
long_list = [re.sub(r'[- ]+', '_', x).lower() for x in sections.keys()]
# Make comma-separated lists of sections with 64 characters or less
sec_lines = []
while len(long_list):
@@ -271,7 +271,7 @@ def compute_build_signature(env):
while len(long_list) and len(line) + len(long_list[0]) < 64 - 1:
line += long_list.pop(0) + ', '
sec_lines.append(line.strip())
sec_lines[-1] = sec_lines[-1][:-1] # Remove the last comma
sec_lines[-1] = sec_lines[-1][:-1] # Remove the last comma
else:
sec_lines = ['all']
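A standalone sketch of the normalization and 64-character wrapping shown above, using made-up section names; the initialization of line falls outside the hunk, so seeding it with the first popped entry is an assumption:

    import re

    sections = {"lcd - menus": {}, "advanced pause": {}, "motion": {}}   # made-up names
    long_list = [re.sub(r'[- ]+', '_', x).lower() for x in sections.keys()]
    # -> ['lcd_menus', 'advanced_pause', 'motion']

    sec_lines = []
    while len(long_list):
        line = long_list.pop(0) + ', '   # assumed seed value (not shown in the hunk)
        while len(long_list) and len(line) + len(long_list[0]) < 64 - 1:
            line += long_list.pop(0) + ', '
        sec_lines.append(line.strip())
    sec_lines[-1] = sec_lines[-1][:-1]   # remove the last comma
    print(sec_lines)                     # ['lcd_menus, advanced_pause, motion']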

View file

@@ -13,7 +13,6 @@ if pioutil.is_pio_build():
#
# Give the binary a distinctive name
#
env['PROGNAME'] = "MarlinSimulator"
#
@@ -21,25 +20,23 @@ if pioutil.is_pio_build():
#
emsg = ''
fatal = 0
import sys
if sys.platform == 'darwin':
import shutil
gcc = shutil.which('gcc')
if gcc == '' or gcc == '/usr/bin/gcc':
if gcc == '':
if not gcc or gcc == '/usr/bin/gcc':
if not gcc:
emsg = "\u001b[31mNo GCC found in your configured shell PATH."
elif gcc == '/usr/bin/gcc':
emsg = "\u001b[31mCan't build Marlin Native on macOS using the included version of GCC (clang)."
emsg += "\n\u001b[31mSee 'native.ini' for instructions to install GCC with MacPorts or Homebrew."
fatal = 1
else:
#
# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
#
env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]
env['RANLIBFLAGS'] += ["-no_warning_for_no_symbols"]
# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
@@ -48,17 +45,12 @@ if pioutil.is_pio_build():
import os.path
if os.path.exists(xcode_path):
env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
env['BUILD_FLAGS'] += ["-F" + xcode_path]
emsg = "\u001b[33mUsing OpenGL framework headers from Xcode.app"
elif os.path.exists(mesa_path):
env['BUILD_FLAGS'] += [ '-D__MESA__' ]
env['BUILD_FLAGS'] += ['-D__MESA__']
emsg = f"\u001b[33mUsing OpenGL header from {mesa_path}"
else:
emsg = "\u001b[31mNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h."
fatal = 1

View file

@@ -47,7 +47,7 @@ if pioutil.is_pio_build():
#
# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
# defined, the first of these values will be used as the minimum.
build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
build_flags = env.ParseFlags(env.get("BUILD_FLAGS"))["CPPDEFINES"]
mf = env["MARLIN_FEATURES"]
# Get the largest defined buffer sizes for RX or TX, using defaults for undefined

View file

@@ -107,10 +107,12 @@ def z_parse(gcode, start_at_line=0, end_at_line=0):
last_i = -1
while len(gcode) > i:
try:
z, i = find_z(gcode, i + 1)
except TypeError:
break
result = find_z(gcode, i + 1)
if result is None:
break # Exit loop if find_z() returns None
z, i = result # Now safe to unpack
all_z.append(z)
z_at_line.append(i)
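The point of this change: when find_z() runs out of Z moves it returns None, and unpacking None into z, i is exactly what raised the TypeError the old code caught. A tiny sketch with a stand-in find_z (hypothetical; it returns a (z, line_index) tuple or None) shows the new pattern:

    def find_z(gcode, start):
        # stand-in: return (z_height, line_index) of the next Z move, or None when exhausted
        for i in range(start, len(gcode)):
            if gcode[i].startswith('G1 ') and 'Z' in gcode[i]:
                return float(gcode[i].split('Z')[1].split()[0]), i
        return None

    gcode = ['G28', 'G1 Z0.2 F600', 'G1 X10 Y10', 'G1 Z0.4']
    i, all_z = -1, []
    while len(gcode) > i:
        result = find_z(gcode, i + 1)
        if result is None:
            break        # no more Z moves, instead of unpacking None and hitting a TypeError
        z, i = result    # safe to unpack now
        all_z.append(z)
    print(all_z)         # [0.2, 0.4]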
@@ -119,7 +121,7 @@ def z_parse(gcode, start_at_line=0, end_at_line=0):
# last_z = z
last_i = i
if 0 < end_at_line <= i or temp_line >= min_g1:
# print('break at line {} at height {}'.format(i, z))
# print("break at line {} at height {}".format(i, z))
break
line_between_z = line_between_z[1:]
@@ -133,7 +135,7 @@ def get_lines(gcode, minimum):
for count in line_between_z:
i += 1
if count > minimum:
# print('layer: {}:{}'.format(z_at_line[i-1], z_at_line[i]))
# print("layer: {}:{}".format(z_at_line[i-1], z_at_line[i]))
return z_at_line[i - 1], z_at_line[i]
@@ -147,21 +149,28 @@ with open(input_file, 'r') as file:
gcode.append(line)
file.close()
start, end = get_lines(gcode, min_g1)
layer_range = get_lines(gcode, min_g1)
if layer_range is None:
print("Error: Unable to determine layer range. Exiting.")
exit(1)
start, end = layer_range
for i in range(start, end):
set_mima(gcode[i])
print('x_min:{} x_max:{}\ny_min:{} y_max:{}'.format(min_x, max_x, min_y, max_y))
print("x_min:{} x_max:{}\ny_min:{} y_max:{}".format(min_x, max_x, min_y, max_y))
# resize min/max - values for minimum scan
if max_x - min_x < min_size:
offset_x = int((min_size - (max_x - min_x)) / 2 + 0.5) # int round up
# print('min_x! with {}'.format(int(max_x - min_x)))
# print("min_x! with {}".format(int(max_x - min_x)))
min_x = int(min_x) - offset_x
max_x = int(max_x) + offset_x
if max_y - min_y < min_size:
offset_y = int((min_size - (max_y - min_y)) / 2 + 0.5) # int round up
# print('min_y! with {}'.format(int(max_y - min_y)))
# print("min_y! with {}".format(int(max_y - min_y)))
min_y = int(min_y) - offset_y
max_y = int(max_y) + offset_y
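A worked example of the "int round up" arithmetic above, with made-up numbers: if min_size is 10 and the part only spans 5 mm in X, the 5 mm deficit is split over both sides and rounded up:

    min_size = 10
    min_x, max_x = 50.0, 55.0                                # made-up extents, span = 5
    offset_x = int((min_size - (max_x - min_x)) / 2 + 0.5)   # (10 - 5) / 2 + 0.5 = 3.0 -> 3
    min_x = int(min_x) - offset_x                            # 47
    max_x = int(max_x) + offset_x                            # 58
    print(offset_x, min_x, max_x)                            # 3 47 58, span is now >= min_size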
@@ -172,17 +181,12 @@ new_command = 'G29 L{0} R{1} F{2} B{3} P{4}\n'.format(min_x,
max_y,
probing_points)
out_file = open(output_file, 'w')
in_file = open(input_file, 'r')
with open(input_file, 'r') as in_file, open(output_file, 'w') as out_file:
for line in in_file:
if line.strip().upper().startswith(g29_keyword): # Improved condition
out_file.write(new_command)
print("write G29")
else:
out_file.write(line)
for line in in_file:
if line[:len(g29_keyword)].upper() == g29_keyword:
out_file.write(new_command)
print('write G29')
else:
out_file.write(line)
file.close()
out_file.close()
print('auto G29 finished')
print("auto G29 finished")
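The "Improved condition" above matters for lines with leading whitespace or lowercase commands: the old slice-and-compare only matched when G29 started at column 0. A quick comparison with a hypothetical input line, assuming g29_keyword is 'G29':

    g29_keyword = 'G29'                 # assumed value of the keyword
    line = '  g29 ; probe the bed\n'    # hypothetical G-code line with leading spaces

    old_match = line[:len(g29_keyword)].upper() == g29_keyword   # False: the slice is '  G'
    new_match = line.strip().upper().startswith(g29_keyword)     # True
    print(old_match, new_match)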

View file

@@ -11,7 +11,6 @@ Use --single to export all languages to a single CSV file.
import re
from pathlib import Path
from sys import argv
from languageUtil import namebyid
LANGHOME = "Marlin/src/lcd/language"
@@ -25,6 +24,39 @@ if not Path(LANGHOME).is_dir():
print("Edit LANGHOME or cd to the root of the repo before running.")
exit(1)
# A dictionary to contain language names
LANGNAME = {
'an': "Aragonese",
'bg': "Bulgarian",
'ca': "Catalan",
'cz': "Czech",
'da': "Danish",
'de': "German",
'el': "Greek", 'el_CY': "Greek (Cyprus)", 'el_gr': "Greek (Greece)",
'en': "English",
'es': "Spanish",
'eu': "Basque-Euskera",
'fi': "Finnish",
'fr': "French", 'fr_na': "French (no accent)",
'gl': "Galician",
'hr': "Croatian (Hrvatski)",
'hu': "Hungarian / Magyar",
'it': "Italian",
'jp_kana': "Japanese (Kana)",
'ko_KR': "Korean",
'nl': "Dutch",
'pl': "Polish",
'pt': "Portuguese", 'pt_br': "Portuguese (Brazil)",
'ro': "Romanian",
'ru': "Russian",
'sk': "Slovak",
'sv': "Swedish",
'tr': "Turkish",
'uk': "Ukrainian",
'vi': "Vietnamese",
'zh_CN': "Simplified Chinese", 'zh_TW': "Traditional Chinese"
}
# A limit just for testing
LIMIT = 0
@@ -112,6 +144,10 @@ langcodes = list(language_strings.keys())
# Report the total number of unique strings
print("Found %s distinct LCD strings." % len(names))
def namebyid(id):
if id in LANGNAME: return LANGNAME[id]
return '<unknown>'
# Write a single language entry to the CSV file with narrow, wide, and tall strings
def write_csv_lang(f, strings, name):
f.write(',')

View file

@@ -17,7 +17,7 @@ TODO: Use the defines and comments above the namespace from existing language fi
"""
import sys, re, requests, csv, datetime
#from languageUtil import namebyid
#from languageExport import namebyid
LANGHOME = "Marlin/src/lcd/language"
OUTDIR = 'out-language'
@@ -62,7 +62,7 @@ for row in reader:
gothead = True
numcols = len(row)
if row[0] != 'name':
print('Error: first column should be "name"')
print("Error: first column should be 'name'")
exit(1)
# The rest of the columns are language codes and names
for i in range(1, numcols):

View file

@@ -1,41 +0,0 @@
#!/usr/bin/env python3
#
# languageUtil.py
#
# A dictionary to contain language names
LANGNAME = {
'an': "Aragonese",
'bg': "Bulgarian",
'ca': "Catalan",
'cz': "Czech",
'da': "Danish",
'de': "German",
'el': "Greek", 'el_CY': "Greek (Cyprus)", 'el_gr': "Greek (Greece)",
'en': "English",
'es': "Spanish",
'eu': "Basque-Euskera",
'fi': "Finnish",
'fr': "French", 'fr_na': "French (no accent)",
'gl': "Galician",
'hr': "Croatian (Hrvatski)",
'hu': "Hungarian / Magyar",
'it': "Italian",
'jp_kana': "Japanese (Kana)",
'ko_KR': "Korean",
'nl': "Dutch",
'pl': "Polish",
'pt': "Portuguese", 'pt_br': "Portuguese (Brazil)",
'ro': "Romanian",
'ru': "Russian",
'sk': "Slovak",
'sv': "Swedish",
'tr': "Turkish",
'uk': "Ukrainian",
'vi': "Vietnamese",
'zh_CN': "Simplified Chinese", 'zh_TW': "Traditional Chinese"
}
def namebyid(id):
if id in LANGNAME: return LANGNAME[id]
return '<unknown>'