From 5c42b396e015da9ebe7d16094f99e014229224fc Mon Sep 17 00:00:00 2001
From: Jack_up <36533859+GiacomoGuaresi@users.noreply.github.com>
Date: Fri, 20 Jun 2025 15:38:02 +0200
Subject: [PATCH] Add check for obsolete keys in profiles (#9955)

* feat: Add check for obsolete keys in filament profiles and improve error handling

* feat: Enhance error handling in machine profile checks and filament name consistency

* feat: Add option to check for obsolete keys in profile validation

* feat: Clarify help message for obsolete keys check in filament profiles
---
 .../How-to-create-profiles.md       |   4 +
 scripts/orca_extra_profile_check.py | 197 +++++++++++++-----
 2 files changed, 150 insertions(+), 51 deletions(-)

diff --git a/doc/developer-reference/How-to-create-profiles.md b/doc/developer-reference/How-to-create-profiles.md
index c84ac037ff..3a3a9adb24 100644
--- a/doc/developer-reference/How-to-create-profiles.md
+++ b/doc/developer-reference/How-to-create-profiles.md
@@ -401,11 +401,15 @@ In addition to the Orca validator, you should run the `orca_extra_profile_check.
 python ./orca_extra_profile_check.py
 ```
 
+You can also enable or disable specific checks:
+- `--help`: displays help information
 - `--vendor` (optional): checks only the specified vendor. If omitted, all vendors are checked.
 - `--check-filaments` (enabled by default): checks `compatible_printers` fields in filament profiles
 - `--check-materials`: checks default material names in machine profiles
+- `--check-obsolete-keys`: checks for obsolete keys in filament profiles
+
 #### Sample usage with all checks enabled
diff --git a/scripts/orca_extra_profile_check.py b/scripts/orca_extra_profile_check.py
index 893ebeb9c2..ff856a0ee3 100644
--- a/scripts/orca_extra_profile_check.py
+++ b/scripts/orca_extra_profile_check.py
@@ -3,6 +3,37 @@ import json
 import argparse
 from pathlib import Path
 
+OBSOLETE_KEYS = {
+    "acceleration", "scale", "rotate", "duplicate", "duplicate_grid",
+    "bed_size", "print_center", "g0", "wipe_tower_per_color_wipe",
+    "support_sharp_tails", "support_remove_small_overhangs", "support_with_sheath",
+    "tree_support_collision_resolution", "tree_support_with_infill",
+    "max_volumetric_speed", "max_print_speed", "support_closing_radius",
+    "remove_freq_sweep", "remove_bed_leveling", "remove_extrusion_calibration",
+    "support_transition_line_width", "support_transition_speed", "bed_temperature",
+    "bed_temperature_initial_layer", "can_switch_nozzle_type", "can_add_auxiliary_fan",
+    "extra_flush_volume", "spaghetti_detector", "adaptive_layer_height",
+    "z_hop_type", "z_lift_type", "bed_temperature_difference", "long_retraction_when_cut",
+    "retraction_distance_when_cut", "extruder_type", "internal_bridge_support_thickness",
+    "extruder_clearance_max_radius", "top_area_threshold", "reduce_wall_solid_infill",
+    "filament_load_time", "filament_unload_time", "smooth_coefficient",
+    "overhang_totally_speed", "silent_mode", "overhang_speed_classic"
+}
+
+# Utility functions for printing messages in different colors.
+def print_error(msg):
+    print(f"\033[91m[ERROR]\033[0m {msg}")  # Red
+
+def print_warning(msg):
+    print(f"\033[93m[WARNING]\033[0m {msg}")  # Yellow
+
+def print_info(msg):
+    print(f"\033[94m[INFO]\033[0m {msg}")  # Blue
+
+def print_success(msg):
+    print(f"\033[92m[SUCCESS]\033[0m {msg}")  # Green
+
+
 # Add helper function for duplicate key detection.
 def no_duplicates_object_pairs_hook(pairs):
     seen = {}
@@ -37,17 +68,17 @@ def check_filament_compatible_printers(vendor_folder):
                         # Use custom hook to detect duplicates.
                         data = json.load(fp, object_pairs_hook=no_duplicates_object_pairs_hook)
                     except ValueError as ve:
-                        print(f"Duplicate key error in {file_path}: {ve}")
+                        print_error(f"Duplicate key error in {file_path}: {ve}")
                         error += 1
                         continue
                     except Exception as e:
-                        print(f"Error processing {file_path}: {e}")
+                        print_error(f"Error processing {file_path}: {e}")
                         error += 1
                         continue
 
                     profile_name = data['name']
                     if profile_name in profiles:
-                        print(f"Duplicated profile {profile_name}: {file_path}")
+                        print_error(f"Duplicated profile {profile_name}: {file_path}")
                         error += 1
                         continue
@@ -76,10 +107,10 @@ def check_filament_compatible_printers(vendor_folder):
         try:
             compatible_printers = get_inherit_property(profile, "compatible_printers")
             if not compatible_printers or (isinstance(compatible_printers, list) and not compatible_printers):
-                print(f"'compatible_printers' missing in {profile['file_path']}")
+                print_error(f"'compatible_printers' missing in {profile['file_path']}")
                 error += 1
         except ValueError as ve:
-            print(f"Unable to parse {profile['file_path']}: {ve}")
+            print_error(f"Unable to parse {profile['file_path']}: {ve}")
             error += 1
             continue
 
@@ -109,7 +140,7 @@ def load_available_filament_profiles(profiles_dir, vendor_name):
                 if "name" in data:
                     profiles.add(data["name"])
         except Exception as e:
-            print(f"Error loading filament profile {file_path}: {e}")
+            print_error(f"Error loading filament profile {file_path}: {e}")
 
     return profiles
 
@@ -124,13 +155,14 @@ def check_machine_default_materials(profiles_dir, vendor_name):
 
     Returns:
         int: Number of missing filament references found
+        int: Number of warnings found (0 or 1)
     """
     error_count = 0
     machine_dir = profiles_dir / vendor_name / "machine"
 
     if not machine_dir.exists():
-        print(f"No machine profiles found for vendor: {vendor_name}")
-        return 0
+        print_warning(f"No machine profiles found for vendor: {vendor_name}")
+        return 0, 1
 
     # Load available filament profiles
     vendor_filaments = load_available_filament_profiles(profiles_dir, vendor_name)
@@ -153,7 +185,7 @@ def check_machine_default_materials(profiles_dir, vendor_name):
                     if isinstance(default_materials, list):
                         for material in default_materials:
                             if material not in all_available_filaments:
-                                print(f"Missing filament profile: '{material}' referenced in {file_path.relative_to(profiles_dir)}")
+                                print_error(f"Missing filament profile: '{material}' referenced in {file_path.relative_to(profiles_dir)}")
                                 error_count += 1
                     else:
                         # Handle semicolon-separated list of materials in a string
@@ -161,43 +193,51 @@ def check_machine_default_materials(profiles_dir, vendor_name):
                             for material in default_materials.split(";"):
                                 material = material.strip()
                                 if material and material not in all_available_filaments:
-                                    print(f"Missing filament profile: '{material}' referenced in {file_path.relative_to(profiles_dir)}")
+                                    print_error(f"Missing filament profile: '{material}' referenced in {file_path.relative_to(profiles_dir)}")
                                     error_count += 1
                         else:
                             # Single material in a string
                             if default_materials not in all_available_filaments:
-                                print(f"Missing filament profile: '{default_materials}' referenced in {file_path.relative_to(profiles_dir)}")
+                                print_error(f"Missing filament profile: '{default_materials}' referenced in {file_path.relative_to(profiles_dir)}")
                                 error_count += 1
         except Exception as e:
-            print(f"Error processing machine profile {file_path}: {e}")
+            print_error(f"Error processing machine profile {file_path}: {e}")
            error_count += 1
 
-    return error_count
+    return error_count, 0
 
 def check_filament_name_consistency(profiles_dir, vendor_name):
     """
     Make sure filament profile names match in both vendor json and subpath files.
     Filament profiles work only if the name in .json matches the name in sub_path file,
     or if it's one of the sub_path file's `renamed_from`.
+
+    Parameters:
+        profiles_dir (Path): Base profiles directory
+        vendor_name (str): Vendor name
+
+    Returns:
+        int: Number of errors found
+        int: Number of warnings found (0 or 1)
     """
     error_count = 0
     vendor_dir = profiles_dir / vendor_name
     vendor_file = profiles_dir / (vendor_name + ".json")
 
     if not vendor_file.exists():
-        print(f"No profiles found for vendor: {vendor_name} at {vendor_file}")
-        return 0
+        print_warning(f"No profiles found for vendor: {vendor_name} at {vendor_file}")
+        return 0, 1
 
     try:
         with open(vendor_file, 'r', encoding='UTF-8') as fp:
             data = json.load(fp)
     except Exception as e:
-        print(f"Error loading vendor profile {vendor_file}: {e}")
-        return 1
+        print_error(f"Error loading vendor profile {vendor_file}: {e}")
+        return 1, 0
 
     if 'filament_list' not in data:
-        return 0
+        return 0, 0
 
     for child in data['filament_list']:
         name_in_vendor = child['name']
@@ -205,7 +245,7 @@ def check_filament_name_consistency(profiles_dir, vendor_name):
         sub_file = vendor_dir / sub_path
 
         if not sub_file.exists():
-            print(f"Missing sub profile: '{sub_path}' declared in {vendor_file.relative_to(profiles_dir)}")
+            print_error(f"Missing sub profile: '{sub_path}' declared in {vendor_file.relative_to(profiles_dir)}")
             error_count += 1
             continue
 
@@ -213,7 +253,7 @@ def check_filament_name_consistency(profiles_dir, vendor_name):
             with open(sub_file, 'r', encoding='UTF-8') as fp:
                 sub_data = json.load(fp)
         except Exception as e:
-            print(f"Error loading profile {sub_file}: {e}")
+            print_error(f"Error loading profile {sub_file}: {e}")
             error_count += 1
             continue
 
@@ -227,10 +267,10 @@ def check_filament_name_consistency(profiles_dir, vendor_name):
         if name_in_vendor in renamed_from:
             continue
 
-        print(f"Filament name mismatch: required '{name_in_vendor}' in {vendor_file.relative_to(profiles_dir)} but found '{name_in_sub}' in {sub_file.relative_to(profiles_dir)}, and none of its `renamed_from` matches the required name either")
+        print_error(f"Filament name mismatch: required '{name_in_vendor}' in {vendor_file.relative_to(profiles_dir)} but found '{name_in_sub}' in {sub_file.relative_to(profiles_dir)}, and none of its `renamed_from` matches the required name either")
         error_count += 1
 
-    return error_count
+    return error_count, 0
 
 def check_filament_id(vendor, vendor_folder):
     """
@@ -251,11 +291,11 @@ def check_filament_id(vendor, vendor_folder):
                         # Use custom hook to detect duplicates.
                         data = json.load(fp, object_pairs_hook=no_duplicates_object_pairs_hook)
                     except ValueError as ve:
-                        print(f"Duplicate key error in {file_path}: {ve}")
+                        print_error(f"Duplicate key error in {file_path}: {ve}")
                         error += 1
                         continue
                     except Exception as e:
-                        print(f"Error processing {file_path}: {e}")
+                        print_error(f"Error processing {file_path}: {e}")
                         error += 1
                         continue
 
@@ -266,52 +306,107 @@ def check_filament_id(vendor, vendor_folder):
 
                     if len(filament_id) > 8:
                         error += 1
-                        print(f"Filament id too long \"{filament_id}\": {file_path}")
+                        print_error(f"Filament id too long \"{filament_id}\": {file_path}")
 
     return error
 
+def check_obsolete_keys(profiles_dir, vendor_name):
+    """
+    Check for obsolete keys in all filament profiles for a vendor.
+
+    Parameters:
+        profiles_dir (Path): Base profiles directory
+        vendor_name (str): Vendor name
+
+    Returns:
+        int: Number of obsolete keys found
+    """
+    error_count = 0
+    vendor_path = profiles_dir / vendor_name / "filament"
+
+    if not vendor_path.exists():
+        return 0
+
+    for file_path in vendor_path.rglob("*.json"):
+        try:
+            with open(file_path, "r", encoding="UTF-8") as fp:
+                data = json.load(fp)
+        except Exception as e:
+            print_warning(f"Error reading profile {file_path.relative_to(profiles_dir)}: {e}")
+            error_count += 1
+            continue
+
+        for key in data.keys():
+            if key in OBSOLETE_KEYS:
+                print_warning(f"Obsolete key: '{key}' found in {file_path.relative_to(profiles_dir)}")
+                error_count += 1
+
+    return error_count
+
 def main():
-    print("Checking profiles ...")
-    parser = argparse.ArgumentParser(description="Check profiles for issues")
-    parser.add_argument("--vendor", type=str, required=False, help="Vendor name")
-    parser.add_argument("--check-filaments", default=True, action="store_true", help="Check compatible_printers in filament profiles")
+    parser = argparse.ArgumentParser(
+        description="Check 3D printer profiles for common issues",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter
+    )
+    parser.add_argument("--vendor", type=str, help="Specify a single vendor to check")
+    parser.add_argument("--check-filaments", action="store_true", help="Check 'compatible_printers' in filament profiles")
     parser.add_argument("--check-materials", action="store_true", help="Check default materials in machine profiles")
+    parser.add_argument("--check-obsolete-keys", action="store_true", help="Warn if obsolete keys are found in filament profiles")
     args = parser.parse_args()
-
+
+    print_info("Checking profiles ...")
+
     script_dir = Path(__file__).resolve().parent
     profiles_dir = script_dir.parent / "resources" / "profiles"
     checked_vendor_count = 0
     errors_found = 0
-
-    if args.vendor:
+    warnings_found = 0
+
+    def run_checks(vendor_name):
+        nonlocal errors_found, warnings_found, checked_vendor_count
+        vendor_path = profiles_dir / vendor_name
+
         if args.check_filaments or not (args.check_materials and not args.check_filaments):
-            errors_found += check_filament_compatible_printers(profiles_dir / args.vendor / "filament")
+            errors_found += check_filament_compatible_printers(vendor_path / "filament")
+
         if args.check_materials:
-            errors_found += check_machine_default_materials(profiles_dir, args.vendor)
-        errors_found += check_filament_name_consistency(profiles_dir, args.vendor)
-        errors_found += check_filament_id(args.vendor, profiles_dir / args.vendor / "filament")
+            new_errors, new_warnings = check_machine_default_materials(profiles_dir, vendor_name)
+            errors_found += new_errors
+            warnings_found += new_warnings
+
+        if args.check_obsolete_keys:
+            warnings_found += check_obsolete_keys(profiles_dir, vendor_name)
+
+        new_errors, new_warnings = check_filament_name_consistency(profiles_dir, vendor_name)
+        errors_found += new_errors
+        warnings_found += new_warnings
+
+        errors_found += check_filament_id(vendor_name, vendor_path / "filament")
         checked_vendor_count += 1
+
+    if args.vendor:
+        run_checks(args.vendor)
     else:
         for vendor_dir in profiles_dir.iterdir():
-            if not vendor_dir.is_dir():
+            if not vendor_dir.is_dir() or vendor_dir.name == "OrcaFilamentLibrary":
                 continue
-            errors_found += check_filament_name_consistency(profiles_dir, vendor_dir.name)
-            errors_found += check_filament_id(vendor_dir.name, vendor_dir / "filament")
-            # skip "OrcaFilamentLibrary" folder
"OrcaFilamentLibrary" folder - if vendor_dir.name == "OrcaFilamentLibrary": - continue - if args.check_filaments or not (args.check_materials and not args.check_filaments): - errors_found += check_filament_compatible_printers(vendor_dir / "filament") - if args.check_materials: - errors_found += check_machine_default_materials(profiles_dir, vendor_dir.name) - checked_vendor_count += 1 + run_checks(vendor_dir.name) + # ✨ Output finale in stile "compilatore" + print("\n==================== SUMMARY ====================") + print_info(f"Checked vendors : {checked_vendor_count}") if errors_found > 0: - print(f"Errors found in {errors_found} profile files") - exit(-1) + print_error(f"Files with errors : {errors_found}") else: - print(f"Checked {checked_vendor_count} vendor files") - exit(0) + print_success("Files with errors : 0") + if warnings_found > 0: + print_warning(f"Files with warnings : {warnings_found}") + else: + print_success("Files with warnings : 0") + print("=================================================") + + exit(-1 if errors_found > 0 else 0) if __name__ == "__main__":