Merge branch 'Ultimaker:main' into sigma_pro

commit 5f8ea4abcb
Author: Orel
Date: 2024-03-01 11:26:47 +01:00 (committed by GitHub)
GPG key ID: B5690EEEBB952194

2608 changed files with 51049 additions and 15982 deletions


@@ -4,7 +4,14 @@ labels: ["Type: Bug", "Status: Triage", "Slicing Error :collision:"]
body:
  - type: markdown
    attributes:
      value: |
+        ### 💥 Slicing Crash Analysis Tool 💥
+        We are taking steps to analyze an increase in reported crashes more systematically. We'll need some help with that. 😇
+        Before filling out the report below, we want you to try a special Cura 5.7 Alpha.
+        This version of Cura has an updated slicing engine that will automatically send a report to the Cura Team for analysis.
+        #### [You can find the downloads here](https://github.com/Ultimaker/Cura/discussions/18080) ####
+        If you still encounter a crash you are still welcome to report the issue so we can use your model as a test case, you can find instructions on how to do that below.
        ### Project File
        **⚠️ Before you continue, we need your project file to troubleshoot a slicing crash.**
        It contains the printer and settings we need for troubleshooting.
@@ -23,14 +30,14 @@ body:
  - type: input
    attributes:
      label: Cura Version
-      placeholder: 5.3.1
+      placeholder: 5.6.0
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
-        We work hard on improving our slicing crashes. Our most recent release is 5.3.1.
-        If you are not on the latest version of Cura, [you can download it here](https://github.com/Ultimaker/Cura/releases/tag/5.3.1)
+        We work hard on improving our slicing crashes. Our most recent release is 5.6.0.
+        If you are not on the latest version of Cura, [you can download it here](https://github.com/Ultimaker/Cura/releases/latest)
  - type: input
    attributes:
      label: Operating System
@@ -68,4 +75,3 @@ body:
      description: You can add the zip file and additional information that is relevant to the issue in the comments below.
    validations:
      required: true


@@ -1,153 +0,0 @@
name: Create and Upload Conan package
on:
workflow_call:
inputs:
project_name:
required: true
type: string
recipe_id_full:
required: true
type: string
build_id:
required: true
type: number
build_info:
required: false
default: true
type: boolean
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_clean_local_cache:
required: false
type: boolean
default: false
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
conan-package-create:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Use Conan download cache (Powershell)
if: ${{ runner.os == 'Windows' }}
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
if: ${{ runner.os == 'Windows' }}
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Install MacOS system requirements
if: ${{ runner.os == 'Macos' }}
run: brew install autoconf automake ninja
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
- name: Install GCC-13 on ubuntu
if: ${{ startsWith(inputs.runs_on, 'ubuntu') }}
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Add Cura private Artifactory remote
run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
- name: Create the Packages
run: conan install ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c


@@ -1,27 +1,6 @@
----
name: conan-package
-# Exports the recipe, sources and binaries for Mac, Windows and Linux and upload these to the server such that these can
-# be used downstream.
-#
-# It should run on pushes against main or CURA-* branches, but it will only create the binaries for main and release branches
on:
-  workflow_dispatch:
-    inputs:
-      create_binaries_windows:
-        required: true
-        default: false
-        description: 'create binaries Windows'
-      create_binaries_linux:
-        required: true
-        default: false
-        description: 'create binaries Linux'
-      create_binaries_macos:
-        required: true
-        default: false
-        description: 'create binaries Macos'
  push:
    paths:
      - 'plugins/**'
@@ -32,114 +11,40 @@ on:
      - 'packaging/**'
      - '.github/workflows/conan-*.yml'
      - '.github/workflows/notify.yml'
-      - '.github/workflows/requirements-conan-package.txt'
+      - '.github/workflows/requirements-runner.txt'
      - 'requirements*.txt'
      - 'conanfile.py'
      - 'conandata.yml'
-      - 'GitVersion.yml'
      - '*.jinja'
    branches:
-      - main
+      - 'main'
      - 'CURA-*'
-      - '[1-9].[0-9]'
-      - '[1-9].[0-9][0-9]'
-    tags:
-      - '[1-9].[0-9].[0-9]*'
-      - '[1-9].[0-9].[0-9]'
-      - '[1-9].[0-9][0-9].[0-9]*'
+      - 'PP-*'
+      - '[0-9].[0-9]*'
+      - '[0-9].[0-9][0-9]*'
env:
  CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
  CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
-  CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
-  CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
-  CONAN_LOG_RUN_TO_OUTPUT: 1
-  CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
-  CONAN_NON_INTERACTIVE: 1
-permissions: { }
jobs:
  conan-recipe-version:
-    permissions:
-      contents: read
-    uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
+    uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
    with:
      project_name: cura
-  conan-package-create-linux:
+  conan-package-export:
    needs: [ conan-recipe-version ]
-    runs-on: 'ubuntu-latest'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-      - name: Cache Conan data
-        id: cache-conan
-        uses: actions/cache@v3
-        with:
-          path: ~/.conan
-          key: ${{ runner.os }}-conan
-      - name: Setup Python and pip
-        uses: actions/setup-python@v4
-        with:
-          python-version: '3.11.x'
-          cache: 'pip'
-          cache-dependency-path: .github/workflows/requirements-conan-package.txt
-      - name: Install Python requirements for runner
-        run: pip install -r .github/workflows/requirements-conan-package.txt
-      # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
-      # This is maybe because grub caches the disk it uses last time, which is recreated each time.
-      - name: Install Linux system requirements
-        if: ${{ runner.os == 'Linux' }}
-        run: |
-          sudo rm /var/cache/debconf/config.dat
-          sudo dpkg --configure -a
-          sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
-          sudo apt update
-          sudo apt upgrade
-          sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
-      - name: Install GCC-13
-        run: |
-          sudo apt install g++-13 gcc-13 -y
-          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
-          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
-      - name: Create the default Conan profile
-        run: conan profile new default --detect --force
-      - name: Get Conan configuration
-        run: |
-          conan config install https://github.com/Ultimaker/conan-config.git
-          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
-      - name: Create the Packages
-        run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o ${{ needs.conan-recipe-version.outputs.project_name }}:devtools=True -c tools.build:skip_test=True
-      - name: Create the latest alias
-        if: always()
-        run: conan alias ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
-      - name: Upload the Package(s)
-        if: always()
-        run: |
-          conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_full }} -r cura --all -c
-          conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} -r cura -c
-  notify-create:
-    if: ${{ always() && (github.event_name == 'push' && (github.ref_name == 'main' || github.ref_name == 'master' || needs.conan-recipe-version.outputs.is_release_branch == 'true')) }}
-    needs: [ conan-recipe-version, conan-package-create-linux ]
-    uses: ultimaker/cura/.github/workflows/notify.yml@main
+    uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-export.yml@main
    with:
-      success: ${{ contains(join(needs.*.result, ','), 'success') }}
-      success_title: "New binaries created in ${{ github.repository }}"
-      success_body: "Created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
-      failure_title: "Failed to create binaries in ${{ github.repository }}"
-      failure_body: "Failed to created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
+      recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
+      recipe_id_latest: ${{ needs.conan-recipe-version.outputs.recipe_id_latest }}
    secrets: inherit
+  conan-package-create:
+    needs: [ conan-recipe-version, conan-package-export ]
+    uses: ultimaker/cura-workflows/.github/workflows/conan-package-create-linux.yml@main
+    with:
+      recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
+      conan_extra_args: "-o cura:enable_i18n=True"
+    secrets: inherit


@@ -1,107 +0,0 @@
name: Export Conan Recipe to server
on:
workflow_call:
inputs:
recipe_id_full:
required: true
type: string
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_export_binaries:
required: false
type: boolean
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
package-export:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout project
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
conan profile new default --detect
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Cache Conan local repository packages
uses: actions/cache@v3
with:
path: $HOME/.conan/data
key: ${{ runner.os }}-conan-export-cache
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Add Cura private Artifactory remote
run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
- name: Export the Package (binaries)
if: ${{ inputs.conan_export_binaries }}
run: conan create . ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
- name: Export the Package
if: ${{ !inputs.conan_export_binaries }}
run: conan export . ${{ inputs.recipe_id_full }}
- name: Create the latest alias
if: always()
run: conan alias ${{ inputs.recipe_id_latest }} ${{ inputs.recipe_id_full }}
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura-private -c


@@ -1,217 +0,0 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
project_name:
required: true
type: string
user:
required: false
default: ultimaker
type: string
additional_buildmetadata:
required: false
default: ""
type: string
outputs:
recipe_id_full:
description: "The full Conan recipe id: <name>/<version>@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_full }}
recipe_id_latest:
description: "The full Conan recipe aliased (latest) id: <name>/(latest)@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_latest }}
recipe_semver_full:
description: "The full semver <Major>.<Minor>.<Patch>-<PreReleaseTag>+<BuildMetaData>"
value: ${{ jobs.get-semver.outputs.semver_full }}
is_release_branch:
description: "is current branch a release branch?"
value: ${{ jobs.get-semver.outputs.release_branch }}
user:
description: "The conan user"
value: ${{ jobs.get-semver.outputs.user }}
channel:
description: "The conan channel"
value: ${{ jobs.get-semver.outputs.channel }}
project_name:
description: "The conan projectname"
value: ${{ inputs.project_name }}
jobs:
get-semver:
runs-on: ubuntu-latest
outputs:
recipe_id_full: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_full }}
recipe_id_latest: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_latest }}
semver_full: ${{ steps.get-conan-broadcast-data.outputs.semver_full }}
is_release_branch: ${{ steps.get-conan-broadcast-data.outputs.is_release_branch }}
user: ${{ steps.get-conan-broadcast-data.outputs.user }}
channel: ${{ steps.get-conan-broadcast-data.outputs.channel }}
steps:
- name: Checkout repo
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
with:
fetch-depth: 0
ref: ${{ github.head_ref }}
- name: Checkout repo PR
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
fetch-depth: 0
ref: ${{ github.base_ref }}
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: "3.11.x"
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r .github/workflows/requirements-conan-package.txt
pip install gitpython
- id: get-conan-broadcast-data
name: Get Conan broadcast data
run: |
import subprocess
import os
from conan.tools.scm import Version
from conan.errors import ConanException
from git import Repo
repo = Repo('.')
user = "${{ inputs.user }}".lower()
project_name = "${{ inputs.project_name }}"
event_name = "${{ github.event_name }}"
issue_number = "${{ github.ref }}".split('/')[2]
is_tag = "${{ github.ref_type }}" == "tag"
is_release_branch = False
ref_name = "${{ github.base_ref }}" if event_name == "pull_request" else "${{ github.ref_name }}"
buildmetadata = "" if "${{ inputs.additional_buildmetadata }}" == "" else "${{ inputs.additional_buildmetadata }}_"
# FIXME: for when we push a tag (such as an release)
channel = "testing"
if is_tag:
branch_version = Version(ref_name)
is_release_branch = True
channel = "_"
user = "_"
actual_version = f"{branch_version}"
else:
try:
branch_version = Version(repo.active_branch.name)
except ConanException:
branch_version = Version('0.0.0')
if ref_name == f"{branch_version.major}.{branch_version.minor}":
channel = 'stable'
is_release_branch = True
elif ref_name in ("main", "master"):
channel = 'testing'
else:
channel = "_".join(repo.active_branch.name.replace("-", "_").split("_")[:2]).lower()
if "pull_request" in event_name:
channel = f"pr_{issue_number}"
# %% Get the actual version
latest_branch_version = Version("0.0.0")
latest_branch_tag = None
for tag in repo.active_branch.repo.tags:
if str(tag).startswith("firmware") or str(tag).startswith("master"):
continue # Quick-fix for the versioning scheme name of the embedded team in fdm_materials(_private) repo
try:
version = Version(tag)
except ConanException:
continue
if version > latest_branch_version and version < Version("6.0.0"):
# FIXME: stupid old Cura tags 13.04 etc. keep popping up, als the fdm_material tag for firmware are messing with this
latest_branch_version = version
latest_branch_tag = repo.tag(tag)
if latest_branch_tag:
# %% Get the actual version
sha_commit = repo.commit().hexsha[:6]
latest_branch_version_prerelease = latest_branch_version.pre
if latest_branch_version.pre and not "." in str(latest_branch_version.pre):
# The prerealese did not contain a version number, default it to 1
latest_branch_version_prerelease = f"{latest_branch_version.pre}.1"
if event_name == "pull_request":
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version_prerelease).lower()}+{buildmetadata}pr_{issue_number}_{sha_commit}"
channel_metadata = f"{channel}_{sha_commit}"
else:
if channel in ("stable", "_", ""):
channel_metadata = f"{sha_commit}"
else:
channel_metadata = f"{channel}_{sha_commit}"
if is_release_branch:
if (latest_branch_version.pre == "" or latest_branch_version.pre is None) and branch_version > latest_branch_version:
actual_version = f"{branch_version.major}.{branch_version.minor}.0-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre == "":
# An actual full release has been created, we are working on patch
bump_up_patch = int(str(latest_branch_version.patch)) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{bump_up_patch}-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre is None:
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{int(latest_branch_version.patch.value) + 1}-beta.1+{buildmetadata}{channel_metadata}"
else:
# An beta release has been created we are working toward a next beta or full release
bump_up_release_tag = int(str(latest_branch_version.pre).split('.')[1]) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version.pre).split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}"
else:
max_branches_version = Version("0.0.0")
for branch in repo.references:
try:
if "remotes/origin" in branch.abspath:
b_version = Version(branch.name.split("/")[-1])
if b_version < Version("6.0.0") and b_version > max_branches_version:
max_branches_version = b_version
except:
pass
if max_branches_version > latest_branch_version:
actual_version = f"{max_branches_version.major}.{int(str(max_branches_version.minor)) + 1}.0-alpha+{buildmetadata}{channel}_{sha_commit}"
else:
actual_version = f"{latest_branch_version.major}.{int(str(latest_branch_version.minor)) + 1}.0-alpha+{buildmetadata}{channel_metadata}"
# %% Set the environment output
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
with open(summary_env, "w") as f:
f.writelines(f"# {project_name}\n")
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
shell: python


@@ -31,31 +31,41 @@ on:
        type: boolean
  schedule:
-    # Daily at 5:20 CET
-    - cron: '20 4 * * *'
+    # Daily at 4:15 CET (main-branch) and 5:15 CET (release-branch)
+    - cron: '15 3 * * *'
+    - cron: '15 4 * * *'
env:
-  CURA_CONAN_VERSION: ${{ inputs.cura_conan_version || 'cura/latest@ultimaker/testing' }}
  CONAN_ARGS: ${{ inputs.conan_args || '' }}
  ENTERPRISE: ${{ inputs.enterprise || false }}
  STAGING: ${{ inputs.staging || false }}
jobs:
-  windows-installer:
-    uses: ./.github/workflows/windows.yml
+  default_values:
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-default-value.yml@main
    with:
-      cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
+      cura_conan_version: ${{ inputs.cura_conan_version }}
+      latest_release: '5.6'
+      latest_release_schedule_hour: 4
+      latest_release_tag: 'nightly'
+  windows-installer:
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
+    needs: [ default_values ]
+    with:
+      cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
      conan_args: ${{ github.event.inputs.conan_args }}
      enterprise: ${{ github.event.inputs.enterprise == 'true' }}
      staging: ${{ github.event.inputs.staging == 'true' }}
      architecture: X64
-      operating_system: windows-2022
+      operating_system: self-hosted-Windows-X64
    secrets: inherit
  linux-installer:
-    uses: ./.github/workflows/linux.yml
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
+    needs: [ default_values ]
    with:
-      cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
+      cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
      conan_args: ${{ github.event.inputs.conan_args }}
      enterprise: ${{ github.event.inputs.enterprise == 'true' }}
      staging: ${{ github.event.inputs.staging == 'true' }}
@@ -64,37 +74,39 @@ jobs:
    secrets: inherit
  macos-installer:
-    uses: ./.github/workflows/macos.yml
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
+    needs: [ default_values ]
    with:
-      cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
+      cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
      conan_args: ${{ github.event.inputs.conan_args }}
      enterprise: ${{ github.event.inputs.enterprise == 'true' }}
      staging: ${{ github.event.inputs.staging == 'true' }}
      architecture: X64
-      operating_system: macos-11.0
+      operating_system: self-hosted-X64
    secrets: inherit
  macos-arm-installer:
-    uses: ./.github/workflows/macos.yml
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
+    needs: [ default_values ]
    with:
-      cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
+      cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
      conan_args: ${{ github.event.inputs.conan_args }}
      enterprise: ${{ github.event.inputs.enterprise == 'true' }}
      staging: ${{ github.event.inputs.staging == 'true' }}
      architecture: ARM64
-      operating_system: self-hosted
+      operating_system: self-hosted-ARM64
    secrets: inherit
  # Run and update nightly release when the nightly input is set to true or if the schedule is triggered
  update-nightly-release:
-    if: ${{ always() && (! cancelled()) && contains(needs.*.result, 'success') && (! contains(needs.*.result, 'failure')) && (inputs.nightly || github.event_name == 'schedule') }}
+    if: ${{ inputs.nightly || github.event_name == 'schedule' }}
    runs-on: ubuntu-latest
-    needs: [ windows-installer, linux-installer, macos-installer, macos-arm-installer ]
+    needs: [ default_values, windows-installer, linux-installer, macos-installer, macos-arm-installer ]
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
-      # It's not necessary to download all three, but it does make sure we have at least one if an OS is skipped.
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
      - name: Download the run info
        uses: actions/download-artifact@v2
@@ -144,13 +156,21 @@
          name: ${{ steps.filename.outputs.LINUX }}-AppImage
          path: installers
+      - name: Download linux installer jobs asc artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ steps.filename.outputs.LINUX }}-asc
+          path: installers
      - name: Rename Linux installer to nightlies
        run: |
          mv installers/${{ steps.filename.outputs.LINUX }}.AppImage installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage
+          mv installers/${{ steps.filename.outputs.LINUX }}.AppImage.asc installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage.asc
      - name: Update nightly release for Linux
        run: |
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage.asc --clobber
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -173,8 +193,8 @@
      - name: Update nightly release for Windows
        run: |
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -197,8 +217,8 @@
      - name: Update nightly release for MacOS (X64)
        run: |
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -208,7 +228,7 @@
          name: ${{ steps.filename.outputs.MAC_ARM_DMG }}-dmg
          path: installers
-      - name: Download acOS (ARM-64) pkg installer jobs artifacts
+      - name: Download MacOS (ARM-64) pkg installer jobs artifacts
        uses: actions/download-artifact@v2
        with:
          name: ${{ steps.filename.outputs.MAC_ARM_PKG }}-pkg
@@ -221,14 +241,32 @@
      - name: Update nightly release for MacOS (ARM-64)
        run: |
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
-          gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
+          gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: create the release notes
+        shell: python
+        run: |
+          import os
+          import datetime
+          from jinja2 import Template
+          with open(".github/workflows/release_notes.md.jinja", "r") as f:
+              release_notes = Template(f.read())
+          current_nightly_beta = "${{ needs.default_values.outputs.release_tag }}".split("nightly-")[-1]
+          with open("release-notes.md", "w") as f:
+              f.write(release_notes.render(
+                  timestamp="${{ steps.filename.outputs.NIGHTLY_TIME }}",
+                  branch="" if "${{ needs.default-values.outputs.release_tag == 'nightly' }}" == 'true' else current_nightly_beta,
+                  branch_specific="" if os.getenv("GITHUB_REF") == "refs/heads/main" else f"?branch={current_nightly_beta}",
+              ))
      - name: Update nightly release description (with date)
        if: always()
        run: |
-          gh release edit nightly --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes "Nightly release created on: ${{ steps.filename.outputs.NIGHTLY_TIME }}"
+          gh release edit ${{ needs.default_values.outputs.release_tag }} --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes-file release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,5 +1,5 @@
name: Linux Installer
-run-name: ${{ inputs.cura_conan_version }} for Linux-${{ inputs.architecture }} by @${{ github.actor }}
+run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
  workflow_dispatch:
@@ -39,247 +39,14 @@
        options:
          - ubuntu-22.04
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'ubuntu-22.04'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
-  cura-installer-create:
-    runs-on: ${{ inputs.operating_system }}
+  linux-installer:
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Hack needed specifically for ubuntu-22.04 from mid-Feb 2023 onwards
if: ${{ startsWith(inputs.operating_system, 'ubuntu-22.04') }}
run: sudo apt remove libodbc2 libodbcinst2 unixodbc-common -y
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf squashfs-tools strace util-linux zsync -y
# Get the AppImage tool
wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
chmod +x $GITHUB_WORKSPACE/appimagetool
echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
# Get the AppImage builder
wget --no-check-certificate --quiet -O $GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
chmod +x appimage-builder-x86_64.AppImage
echo "APPIMAGEBUILDER_LOCATION=$GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage" >> $GITHUB_ENV
# Make sure these tools can be found on the path
echo "$GITHUB_WORKSPACE" >> $GITHUB_PATH
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Configure GPG Key Linux (Bash)
run: echo -n "$GPG_PRIVATE_KEY" | base64 --decode | gpg --import
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python Dependenies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create the Linux AppImage (Bash)
run: |
python ../cura_inst/packaging/AppImage-builder/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
working-directory: dist
- name: Upload the AppImage
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-AppImage
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: linux-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
    with:
-      success: ${{ contains(join(needs.*.result, ','), 'success') }}
-      success_title: "Create the Cura distributions"
-      success_body: "Installers for ${{ inputs.cura_conan_version }}"
-      failure_title: "Failed to create the Cura distributions"
-      failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
-    secrets: inherit
+      cura_conan_version: ${{ inputs.cura_conan_version }}
+      conan_args: ${{ inputs.conan_args }}
+      enterprise: ${{ inputs.enterprise }}
+      staging: ${{ inputs.staging }}
+      architecture: ${{ inputs.architecture }}
+      operating_system: ${{ inputs.operating_system }}
+    secrets: inherit


@@ -1,5 +1,5 @@
-name: Macos Installer
+name: MacOS Installer
-run-name: ${{ inputs.cura_conan_version }} for Macos-${{ inputs.architecture }} by @${{ github.actor }}
+run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
  workflow_dispatch:
@@ -27,7 +27,7 @@
      architecture:
        description: 'Architecture'
        required: true
-        default: 'X64'
+        default: 'ARM64'
        type: choice
        options:
          - X64
@@ -35,260 +35,22 @@
      operating_system:
        description: 'OS'
        required: true
-        default: 'macos-11'
+        default: 'self-hosted-ARM64'
        type: choice
        options:
-          - self-hosted
+          - self-hosted-X64
+          - self-hosted-ARM64
          - macos-11
          - macos-12
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'macos-11'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CODESIGN_IDENTITY: ${{ secrets.CODESIGN_IDENTITY }}
MAC_NOTARIZE_USER: ${{ secrets.MAC_NOTARIZE_USER }}
MAC_NOTARIZE_PASS: ${{ secrets.MAC_NOTARIZE_PASS }}
MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
MACOS_CERT_INSTALLER_P12: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
MACOS_CERT_USER: ${{ secrets.MACOS_CERT_USER }}
MACOS_CERT_PASSPHRASE: ${{ secrets.MACOS_CERT_PASSPHRASE }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
-  cura-installer-create:
-    runs-on: ${{ inputs.operating_system }}
+  macos-installer:
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install MacOS system requirements
run: brew install cmake autoconf automake ninja create-dmg
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Remove Macos keychain (Bash)
run: security delete-keychain signing_temp.keychain || true
- name: Configure Macos keychain Developer Cert(Bash)
id: macos-keychain-developer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
p12-file-base64: ${{ secrets.MACOS_CERT_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Configure Macos keychain Installer Cert (Bash)
id: macos-keychain-installer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
create-keychain: false # keychain is created in previous use of action.
p12-file-base64: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
- name: Unlock Macos keychain (Bash)
run: security unlock -p $TEMP_KEYCHAIN_PASSWORD signing_temp.keychain
env:
TEMP_KEYCHAIN_PASSWORD: ${{ steps.macos-keychain-developer-cert.outputs.keychain-password }}
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python Dependenies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create the Macos dmg (Bash)
run: python ../cura_inst/packaging/MacOS/build_macos.py --source_path ../cura_inst --dist_path . --cura_conan_version $CURA_CONAN_VERSION --filename "${{ steps.filename.outputs.INSTALLER_FILENAME }}" --build_dmg --build_pkg --app_name "$CURA_APP_NAME"
working-directory: dist
- name: Upload the dmg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-dmg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.dmg
retention-days: 5
- name: Upload the pkg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-pkg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.pkg
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: macos-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
    with:
-      success: ${{ contains(join(needs.*.result, ','), 'success') }}
-      success_title: "Create the Cura distributions"
-      success_body: "Installers for ${{ inputs.cura_conan_version }}"
-      failure_title: "Failed to create the Cura distributions"
-      failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
-    secrets: inherit
+      cura_conan_version: ${{ inputs.cura_conan_version }}
+      conan_args: ${{ inputs.conan_args }}
+      enterprise: ${{ inputs.enterprise }}
+      staging: ${{ inputs.staging }}
+      architecture: ${{ inputs.architecture }}
+      operating_system: ${{ inputs.operating_system }}
+    secrets: inherit


@@ -1,54 +0,0 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
success:
required: true
type: boolean
success_title:
required: true
type: string
success_body:
required: true
type: string
failure_title:
required: true
type: string
failure_body:
required: true
type: string
jobs:
slackNotification:
name: Slack Notification
runs-on: ubuntu-latest
steps:
- name: Slack notify on-success
if: ${{ inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: green
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.success_title }}
SLACK_MESSAGE: ${{ inputs.success_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
- name: Slack notify on-failure
if: ${{ !inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: red
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.failure_title }}
SLACK_MESSAGE: ${{ inputs.failure_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}


@@ -1,15 +1,10 @@
name: process-pull-request
on:
  pull_request_target:
-    types: [opened, reopened, edited, synchronize, review_requested, ready_for_review, assigned]
+    types: [ opened, reopened, edited, review_requested, ready_for_review, assigned ]
jobs:
  add_label:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-ecosystem/action-add-labels@v1
-        if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
-        with:
-          labels: 'PR: Community Contribution :crown:'
+    uses: ultimaker/cura-workflows/.github/workflows/process-pull-request.yml@main
+    secrets: inherit


@@ -0,0 +1,39 @@
# Nightlies
> :clock12: Created at: {{ timestamp }}
| | |
|--------------:|--------------------------------------------------------------------------------------------|
| **Nightlies** | [![nightly {{ branch }}](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml/badge.svg{{ branch_specific }}?event=schedule)](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml) |
# Unit Test results
| | |
|-------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **Cura {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml) |
| **CuraEngine {{ branch }}** | [![unit-test](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml) |
| **Uranium {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml) |
| **CuraEngine GradualFlow 0.1** | [![unit-test](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml) |
| **synsepalum-dulcificum 0.1** | [![unit-test](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml) |
| **libSavitar** | [![unit-test](https://github.com/Ultimaker/libSavitar/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/libSavitar/actions/workflows/unit-test.yml) |
| **libnest2d** | [![unit-test](https://github.com/Ultimaker/libnest2d/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/libnest2d/actions/workflows/unit-test.yml) |
# Conan packages
| | |
|------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **Cura {{ branch }}** | [![conan-package](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml) |
| **CuraEngine {{ branch }}** | [![conan-package](https://github.com/Ultimaker/CuraEngine/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/conan-package.yml) |
| **Uranium {{ branch }}** | [![conan-package](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml) |
| **fdm_materials {{ branch }}** | [![conan-package](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml) |
| **cura-binary-data {{ branch }}** | [![conan-package](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml) |
| **CuraEngine GradualFlow 0.1** | [![conan-package](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml) |
| **synsepalum-dulcificum 0.1** | [![conan-package](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml) |
| **CuraEngine gRPC definitions 0.1** | [![conan-package](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml) |
| **libArcus** | [![conan-package](https://github.com/Ultimaker/libArcus/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libArcus/actions/workflows/conan-package.yml) |
| **pyArcus** | [![conan-package](https://github.com/Ultimaker/pyArcus/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pyArcus/actions/workflows/conan-package.yml) |
| **libSavitar** | [![conan-package](https://github.com/Ultimaker/libSavitar/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libSavitar/actions/workflows/conan-package.yml) |
| **pySavitar** | [![conan-package](https://github.com/Ultimaker/pySavitar/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pySavitar/actions/workflows/conan-package.yml) |
| **libnest2d** | [![conan-package](https://github.com/Ultimaker/libnest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libnest2d/actions/workflows/conan-package.yml) |
| **pynest2d** | [![conan-package](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml) |
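The {{ timestamp }}, {{ branch }} and {{ branch_specific }} placeholders suggest this status page is rendered from a Jinja-style template. A rough sketch of how such a render could look, assuming a jinja2 template file named nightlies.md.jinja; the file name and the values passed in are illustrative only:

from datetime import datetime, timezone
from jinja2 import Template

with open("nightlies.md.jinja") as f:          # hypothetical location of the template above
    template = Template(f.read())

rendered = template.render(
    timestamp=datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
    branch="main",
    branch_specific="?branch=main",            # empty string would drop the branch filter
)

with open("nightlies.md", "w") as f:
    f.write(rendered)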

View file

@ -1,2 +1,2 @@
conan>=1.60.2,<2.0.0 conan>=1.60.2,<2.0.0
sip sip<=6.7.12

View file

View file

@ -1,82 +1,14 @@
name: unit-test-post
on:
  workflow_run:
    workflows: [ "unit-test" ]
    types: [ completed ]
jobs:
  publish-test-results:
-    if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
-    runs-on: ubuntu-latest
-    steps:
+    uses: ultimaker/cura-workflows/.github/workflows/unit-test-post.yml@main
+    with:
+      event: ${{ github.event.workflow_run.event }}
+      conclusion: ${{ github.event.workflow_run.conclusion }}
+    secrets: inherit
- name: Download analysis results secrets: inherit
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Set environment variables
run: |
mkdir pr_env
unzip test-result.zip -d pr_env
echo "pr_id=$(cat pr_env/pr-id.txt)" >> $GITHUB_ENV
echo "pr_head_repo=$(cat pr_env/pr-head-repo.txt)" >> $GITHUB_ENV
echo "pr_head_ref=$(cat pr_env/pr-head-ref.txt)" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: ${{ env.pr_head_repo }}
ref: ${{ env.pr_head_ref }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir -p tests
unzip test-result.zip -d tests
- name: Publish Unit Test Results
id: test-results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
files: "tests/**/*.xml"
- name: Conclusion
run: echo "Conclusion is ${{ steps.test-results.outputs.json && fromJSON( steps.test-results.outputs.json ).conclusion }}"
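The two actions/github-script steps above map onto two GitHub REST calls: list the artifacts of a workflow run, then download the matching artifact as a zip. A hedged Python sketch of the same flow using requests; the token, owner/repo and run id are placeholders:

import io
import os
import zipfile
import requests

def download_test_results(owner: str, repo: str, run_id: int, token: str, dest: str = "tests") -> None:
    """Fetch the 'test-result' artifact of a workflow run and unpack it into dest."""
    headers = {"Authorization": f"Bearer {token}", "Accept": "application/vnd.github+json"}

    # List all artifacts produced by the run and pick the one named "test-result".
    listing = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts",
        headers=headers, timeout=30)
    listing.raise_for_status()
    artifact = next(a for a in listing.json()["artifacts"] if a["name"] == "test-result")

    # Download the artifact archive and extract the junit XML and pr-*.txt metadata files.
    archive = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/actions/artifacts/{artifact['id']}/zip",
        headers=headers, timeout=60)
    archive.raise_for_status()
    zipfile.ZipFile(io.BytesIO(archive.content)).extractall(dest)

if __name__ == "__main__":
    download_test_results("Ultimaker", "Cura", run_id=123456789, token=os.environ["GITHUB_TOKEN"])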

View file

@ -1,4 +1,3 @@
---
name: unit-test
on:
@ -9,23 +8,18 @@ on:
      - 'cura/**'
      - 'icons/**'
      - 'tests/**'
-      - 'packaging/**'
-      - '.github/workflows/conan-*.yml'
      - '.github/workflows/unit-test.yml'
-      - '.github/workflows/notify.yml'
-      - '.github/workflows/requirements-conan-package.txt'
+      - '.github/workflows/requirements-runner.txt'
      - 'requirements*.txt'
      - 'conanfile.py'
      - 'conandata.yml'
-      - 'GitVersion.yml'
      - '*.jinja'
    branches:
      - main
      - 'CURA-*'
-      - '[1-9]+.[0-9]+'
-    tags:
-      - '[0-9]+.[0-9]+.[0-9]+'
-      - '[0-9]+.[0-9]+-beta'
+      - 'PP-*'
+      - '[0-9]+.[0-9]+'
  pull_request:
    paths:
      - 'plugins/**'
@ -33,134 +27,36 @@ on:
      - 'cura/**'
      - 'icons/**'
      - 'tests/**'
-      - 'packaging/**'
-      - '.github/workflows/conan-*.yml'
      - '.github/workflows/unit-test.yml'
-      - '.github/workflows/notify.yml'
-      - '.github/workflows/requirements-conan-package.txt'
+      - '.github/workflows/requirements-runner.txt'
      - 'requirements*.txt'
      - 'conanfile.py'
      - 'conandata.yml'
-      - 'GitVersion.yml'
      - '*.jinja'
    branches:
      - main
-      - '[1-9]+.[0-9]+'
-    tags:
-      - '[0-9]+.[0-9]+.[0-9]+'
-      - '[0-9]+.[0-9]+-beta'
+      - '[0-9]+.[0-9]+'
-env:
-  CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
-  CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
-  CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
-  CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
-  CONAN_LOG_RUN_TO_OUTPUT: 1
-  CONAN_LOGGING_LEVEL: info
-  CONAN_NON_INTERACTIVE: 1
permissions:
  contents: read
+env:
+  CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
jobs:
  conan-recipe-version:
-    uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
+    uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
    with:
      project_name: cura
  testing:
-    runs-on: ubuntu-22.04
+    uses: ultimaker/cura-workflows/.github/workflows/unit-test.yml@main
    needs: [ conan-recipe-version ]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
+    with:
+      recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
+      conan_extra_args: '-g VirtualPythonEnv -o cura:devtools=True -c tools.build:skip_test=False --options "*:enable_sentry=False"'
+      unit_test_cmd: 'pytest --junitxml=junit_cura.xml'
+      unit_test_dir: 'tests'
+      conan_generator_dir: './venv/bin'
+    secrets: inherit
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
architecture: 'x64'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: pip install -r requirements-conan-package.txt
working-directory: .github/workflows/
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-unit-cache
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Get Conan profile
run: conan profile new default --detect --force
- name: Install dependencies
run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o cura:devtools=True -g VirtualPythonEnv -if venv
- name: Upload the Dependency package(s)
run: conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
if: ${{ runner.os != 'Windows' }}
run: |
. ./venv/bin/activate_github_actions_env.sh
- name: Run Unit Test
id: run-test
run: |
pytest --junitxml=junit_cura.xml
working-directory: tests
- name: Save PR metadata
if: always()
run: |
echo ${{ github.event.number }} > pr-id.txt
echo ${{ github.event.pull_request.head.repo.full_name }} > pr-head-repo.txt
echo ${{ github.event.pull_request.head.ref }} > pr-head-ref.txt
working-directory: tests
- name: Upload Test Results
if: always()
uses: actions/upload-artifact@v3
with:
name: test-result
path: |
tests/**/*.xml
tests/pr-id.txt
tests/pr-head-repo.txt
tests/pr-head-ref.txt

View file

@ -1,5 +1,5 @@
name: Windows Installer
-run-name: ${{ inputs.cura_conan_version }} for Windows-${{ inputs.architecture }} by @${{ github.actor }}
+run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
  workflow_dispatch:
@ -34,254 +34,20 @@ on:
      operating_system:
        description: 'OS'
        required: true
-        default: 'windows-2022'
+        default: 'self-hosted-Windows-X64'
        type: choice
        options:
+          - self-hosted-Windows-X64
          - windows-2022
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
WIN_CERT_INSTALLER_CER: ${{ secrets.WIN_CERT_INSTALLER_CER }}
WIN_CERT_INSTALLER_CER_PASS: ${{ secrets.WIN_CERT_INSTALLER_CER_PASS }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
-  cura-installer-create:
-    runs-on: ${{ inputs.operating_system }}
+  windows-installer:
+    uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Powershell)
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Create the Packages (Powershell)
run: conan install $Env:CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$Env:ENTERPRISE -o cura:staging=$Env:STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (Powershell)
run: |
echo "${Env:WIX}\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
.\cura_inst\Scripts\activate_github_actions_env.ps1
.\cura_inst\Scripts\activate_github_actions_version_env.ps1
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Powershell)
run: |
cp openssl/bin/*.dll ./cura_inst/Scripts/
cp openssl/lib/*.lib ./cura_inst/Lib/
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create PFX certificate from BASE64_PFX_CONTENT secret
id: create-pfx
env:
PFX_CONTENT: ${{ secrets.WIN_CERT_INSTALLER_CER }}
run: |
$pfxPath = Join-Path -Path $env:RUNNER_TEMP -ChildPath "cert.pfx";
$encodedBytes = [System.Convert]::FromBase64String($env:PFX_CONTENT);
Set-Content $pfxPath -Value $encodedBytes -AsByteStream;
echo "PFX_PATH=$pfxPath" >> $env:GITHUB_OUTPUT;
- name: Create the Windows msi installer (Powershell)
run: |
python ..\cura_inst\packaging\msi\create_windows_msi.py ..\cura_inst .\UltiMaker-Cura "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi" "$Env:CURA_APP_NAME"
working-directory: dist
- name: Sign the Windows msi installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi"
working-directory: dist
- name: Create the Windows exe installer (Powershell)
run: |
python ..\cura_inst\packaging\NSIS\create_windows_installer.py ../cura_inst . "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Sign the Windows exe installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Upload the msi
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-msi
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.msi
retention-days: 5
- name: Upload the exe
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-exe
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.exe
retention-days: 5
# NOTE: The extension is .sh, since this isn't going to build-environment, so not on the Win build image.
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
# NOTE: The extension is .sh, since this isn't going to build-environment, so not on the Win build image.
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: windows-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
    with:
-      success: ${{ contains(join(needs.*.result, ','), 'success') }}
-      success_title: "Create the Cura distributions"
-      success_body: "Installers for ${{ inputs.cura_conan_version }}"
-      failure_title: "Failed to create the Cura distributions"
-      failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
-    secrets: inherit
+      cura_conan_version: ${{ inputs.cura_conan_version }}
+      conan_args: ${{ inputs.conan_args }}
+      enterprise: ${{ inputs.enterprise }}
+      staging: ${{ inputs.staging }}
+      architecture: ${{ inputs.architecture }}
+      operating_system: ${{ inputs.operating_system }}
+    secrets: inherit
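For reference, the removed inline step that produced INSTALLER_FILENAME boils down to composing a name from the Cura version, edition and architecture and appending one KEY=value line to GITHUB_OUTPUT. A simplified sketch; environment variable names other than CURA_VERSION_FULL and GITHUB_OUTPUT are illustrative:

import os

# Values the workflow exposes through the Conan/PyInstaller environment; defaults here are
# placeholders for illustration only.
cura_version_full = os.getenv("CURA_VERSION_FULL", "5.7.0-alpha.1")
enterprise = os.getenv("ENTERPRISE", "false") == "true"
architecture = os.getenv("ARCHITECTURE", "X64")

suffix = "-Enterprise" if enterprise else ""
installer_filename = f"UltiMaker-Cura-{cura_version_full}{suffix}-win64-{architecture}"

# GITHUB_OUTPUT is a plain file; appending one KEY=value line per output is sufficient,
# which is simpler than the read-then-rewrite dance in the removed step above.
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
    f.write(f"INSTALLER_FILENAME={installer_filename}\n")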

.gitignore (vendored, 3 changed lines)
View file

@ -101,6 +101,7 @@ graph_info.json
Ultimaker-Cura.spec
.run/
/printer-linter/src/printerlinter.egg-info/
+/resources/qml/Dialogs/AboutDialogVersionsList.qml
/plugins/CuraEngineGradualFlow
/resources/bundled_packages/bundled_*.json
curaengine_plugin_gradual_flow
curaengine_plugin_gradual_flow.exe

View file

@ -2,6 +2,7 @@ checks:
  diagnostic-mesh-file-extension: true
  diagnostic-mesh-file-size: true
  diagnostic-definition-redundant-override: true
+  diagnostic-resources-macos-app-directory-name: true
fixes:
  diagnostic-definition-redundant-override: true
format:

View file

@ -1,61 +0,0 @@
import QtQuick 2.2
import QtQuick.Controls 2.9
import UM 1.6 as UM
import Cura 1.5 as Cura
ListView
{
id: projectBuildInfoList
visible: false
anchors.top: creditsNotes.bottom
anchors.topMargin: UM.Theme.getSize("default_margin").height
width: parent.width
height: base.height - y - (2 * UM.Theme.getSize("default_margin").height + closeButton.height)
ScrollBar.vertical: UM.ScrollBar
{
id: projectBuildInfoListScrollBar
}
delegate: Row
{
spacing: UM.Theme.getSize("narrow_margin").width
UM.Label
{
text: (model.name)
width: (projectBuildInfoList.width* 0.4) | 0
elide: Text.ElideRight
}
UM.Label
{
text: (model.version)
width: (projectBuildInfoList.width *0.6) | 0
elide: Text.ElideRight
}
}
model: ListModel
{
id: developerInfo
}
Component.onCompleted:
{
var conan_installs = {{ conan_installs }};
var python_installs = {{ python_installs }};
developerInfo.append({ name : "<H1>Conan Installs</H1>", version : '' });
for (var n in conan_installs)
{
developerInfo.append({ name : conan_installs[n][0], version : conan_installs[n][1] });
}
developerInfo.append({ name : '', version : '' });
developerInfo.append({ name : "<H1>Python Installs</H1>", version : '' });
for (var n in python_installs)
{
developerInfo.append({ name : python_installs[n][0], version : python_installs[n][1] });
}
}
}

View file

@ -1,4 +1,4 @@
-# Copyright (c) 2022 UltiMaker
+# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
CuraAppName = "{{ cura_app_name }}"
@ -12,3 +12,6 @@ CuraCloudAccountAPIRoot = "{{ cura_cloud_account_api_root }}"
CuraMarketplaceRoot = "{{ cura_marketplace_root }}"
CuraDigitalFactoryURL = "{{ cura_digital_factory_url }}"
CuraLatestURL = "{{ cura_latest_url }}"
ConanInstalls = {{ conan_installs }}
PythonInstalls = {{ python_installs }}
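The recipe renders this template with Jinja; the two new ConanInstalls/PythonInstalls entries are filled from the dependency scans added in the conanfile below. A minimal rendering sketch with made-up values, passing only a subset of the template's variables (missing ones simply render empty):

from jinja2 import Template

with open("CuraVersion.py.jinja") as f:
    template = Template(f.read())

# Illustrative values only; the recipe fills these from conandata.yml and its dependency scans.
rendered = template.render(
    cura_app_name="cura",
    cura_marketplace_root="https://example.invalid/marketplace",
    cura_digital_factory_url="https://example.invalid/digital-factory",
    cura_latest_url="https://github.com/Ultimaker/Cura/releases/latest",
    conan_installs={"curaengine": {"version": "5.7.0", "revision": "abc123"}},
    python_installs={"numpy": {"version": "1.26.4"}},
)

with open("CuraVersion.py", "w") as f:
    f.write(rendered)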

View file

@ -12,7 +12,7 @@
[![Badge Test]][Test]
[![Badge Conan]][Conan]
-![Badge Downloads]
+[![Badge Downloads]][Downloads]
<br>
<br>
@ -59,14 +59,15 @@
[Contributors]: https://github.com/Ultimaker/Cura/graphs/contributors
[PullRequests]: https://github.com/Ultimaker/Cura/pulls
[Machines]: https://github.com/Ultimaker/Cura/wiki/Adding-new-machine-profiles-to-Cura
-[Building]: https://github.com/Ultimaker/Cura/wiki/Running-Cura-from-Source
+[Building]: https://github.com/Ultimaker/Cura/wiki/Getting-Started
[Localize]: https://github.com/Ultimaker/Cura/wiki/Translating-Cura
-[Settings]: https://github.com/Ultimaker/Cura/wiki/Cura-Settings
+[Settings]: https://github.com/Ultimaker/Cura/wiki/Profiles-&-Settings
-[Plugins]: https://github.com/Ultimaker/Cura/wiki/Plugin-Directory
+[Plugins]: https://github.com/Ultimaker/Cura/wiki/Plugins-And-Packages
[Closed]: https://github.com/Ultimaker/Cura/issues?q=is%3Aissue+is%3Aclosed
[Issues]: https://github.com/Ultimaker/Cura/issues
[Conan]: https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml
[Test]: https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml
+[Downloads]: https://github.com/Ultimaker/Cura/releases/latest
[License]: LICENSE
[Report]: docs/Report.md
@ -81,8 +82,8 @@
[Badge License]: https://img.shields.io/badge/License-LGPL3-336887.svg?style=for-the-badge&labelColor=458cb5&logoColor=white&logo=GNU
[Badge Closed]: https://img.shields.io/github/issues-closed/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=629944&color=446a30&logo=AddThis
[Badge Issues]: https://img.shields.io/github/issues/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=c34360&color=933349&logo=AdBlock
-[Badge Conan]: https://img.shields.io/github/workflow/status/Ultimaker/Cura/conan-package?style=for-the-badge&logoColor=white&labelColor=6185aa&color=4c6987&logo=Conan&label=Conan%20Package
+[Badge Conan]: https://img.shields.io/github/actions/workflow/status/Ultimaker/Cura/conan-package.yml?branch=main&style=for-the-badge&logoColor=white&labelColor=6185aa&color=4c6987&logo=Conan&label=Conan%20Package
-[Badge Test]: https://img.shields.io/github/workflow/status/Ultimaker/Cura/unit-test?style=for-the-badge&logoColor=white&labelColor=4a999d&color=346c6e&logo=Codacy&label=Unit%20Test
+[Badge Test]: https://img.shields.io/github/actions/workflow/status/Ultimaker/Cura/unit-test.yml?branch=main&style=for-the-badge&logoColor=white&labelColor=4a999d&color=346c6e&logo=Codacy&label=Unit%20Test
[Badge Size]: https://img.shields.io/github/repo-size/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=715a97&color=584674&logo=GoogleAnalytics
[Badge Downloads]: https://img.shields.io/github/downloads-pre/Ultimaker/Cura/latest/total?style=for-the-badge
@ -90,7 +91,7 @@
<!---------------------------------[ Buttons ]--------------------------------->
[Button Localize]: https://img.shields.io/badge/Help_Localize-e2467d?style=for-the-badge&logoColor=white&logo=GoogleTranslate
-[Button Machines]: https://img.shields.io/badge/Adding_Machines-yellow?style=for-the-badge&logoColor=white&logo=CloudFoundry
+[Button Machines]: https://img.shields.io/badge/Adding_Printers-yellow?style=for-the-badge&logoColor=white&logo=CloudFoundry
[Button Settings]: https://img.shields.io/badge/Configuration-00979D?style=for-the-badge&logoColor=white&logo=CodeReview
[Button Building]: https://img.shields.io/badge/Building_Cura-blue?style=for-the-badge&logoColor=white&logo=GitBook
[Button Plugins]: https://img.shields.io/badge/Plugin_Usage-569A31?style=for-the-badge&logoColor=white&logo=ROS

View file

@ -55,7 +55,8 @@ exe = EXE(
    target_arch={{ target_arch }},
    codesign_identity=os.getenv('CODESIGN_IDENTITY', None),
    entitlements_file={{ entitlements_file }},
-    icon={{ icon }}
+    icon={{ icon }},
+    contents_directory='.'
)
coll = COLLECT(
@ -70,188 +71,7 @@ coll = COLLECT(
)
{% if macos == true %}
-# PyInstaller seems to copy everything in the resource folder for the MacOS, this causes issues with codesigning and notarizing
+app = BUNDLE(
# The folder structure should adhere to the one specified in Table 2-5
# https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html#//apple_ref/doc/uid/10000123i-CH101-SW1
# The class below is basically ducktyping the BUNDLE class of PyInstaller and using our own `assemble` method for more fine-grain and specific
# control. Some code of the method below is copied from:
# https://github.com/pyinstaller/pyinstaller/blob/22d1d2a5378228744cc95f14904dae1664df32c4/PyInstaller/building/osx.py#L115
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2022, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
import plistlib
import shutil
import PyInstaller.utils.osx as osxutils
from pathlib import Path
from PyInstaller.building.osx import BUNDLE
from PyInstaller.building.utils import (_check_path_overlap, _rmtree, add_suffix_to_extension, checkCache)
from PyInstaller.building.datastruct import logger
from PyInstaller.building.icon import normalize_icon_type
class UMBUNDLE(BUNDLE):
def assemble(self):
from PyInstaller.config import CONF
if _check_path_overlap(self.name) and os.path.isdir(self.name):
_rmtree(self.name)
logger.info("Building BUNDLE %s", self.tocbasename)
# Create a minimal Mac bundle structure.
macos_path = Path(self.name, "Contents", "MacOS")
resources_path = Path(self.name, "Contents", "Resources")
frameworks_path = Path(self.name, "Contents", "Frameworks")
os.makedirs(macos_path)
os.makedirs(resources_path)
os.makedirs(frameworks_path)
# Makes sure the icon exists and attempts to convert to the proper format if applicable
self.icon = normalize_icon_type(self.icon, ("icns",), "icns", CONF["workpath"])
# Ensure icon path is absolute
self.icon = os.path.abspath(self.icon)
# Copy icns icon to Resources directory.
shutil.copy(self.icon, os.path.join(self.name, 'Contents', 'Resources'))
# Key/values for a minimal Info.plist file
info_plist_dict = {
"CFBundleDisplayName": self.appname,
"CFBundleName": self.appname,
# Required by 'codesign' utility.
# The value for CFBundleIdentifier is used as the default unique name of your program for Code Signing
# purposes. It even identifies the APP for access to restricted OS X areas like Keychain.
#
# The identifier used for signing must be globally unique. The usual form for this identifier is a
# hierarchical name in reverse DNS notation, starting with the toplevel domain, followed by the company
# name, followed by the department within the company, and ending with the product name. Usually in the
# form: com.mycompany.department.appname
# CLI option --osx-bundle-identifier sets this value.
"CFBundleIdentifier": self.bundle_identifier,
"CFBundleExecutable": os.path.basename(self.exename),
"CFBundleIconFile": os.path.basename(self.icon),
"CFBundleInfoDictionaryVersion": "6.0",
"CFBundlePackageType": "APPL",
"CFBundleVersionString": self.version,
"CFBundleShortVersionString": self.version,
}
# Set some default values. But they still can be overwritten by the user.
if self.console:
# Setting EXE console=True implies LSBackgroundOnly=True.
info_plist_dict['LSBackgroundOnly'] = True
else:
# Let's use high resolution by default.
info_plist_dict['NSHighResolutionCapable'] = True
# Merge info_plist settings from spec file
if isinstance(self.info_plist, dict) and self.info_plist:
info_plist_dict.update(self.info_plist)
plist_filename = os.path.join(self.name, "Contents", "Info.plist")
with open(plist_filename, "wb") as plist_fh:
plistlib.dump(info_plist_dict, plist_fh)
links = []
_QT_BASE_PATH = {'PySide2', 'PySide6', 'PyQt5', 'PyQt6', 'PySide6'}
for inm, fnm, typ in self.toc:
# Adjust name for extensions, if applicable
inm, fnm, typ = add_suffix_to_extension(inm, fnm, typ)
inm = Path(inm)
fnm = Path(fnm)
# Copy files from cache. This ensures that are used files with relative paths to dynamic library
# dependencies (@executable_path)
if typ in ('EXTENSION', 'BINARY') or (typ == 'DATA' and inm.suffix == '.so'):
if any(['.' in p for p in inm.parent.parts]):
inm = Path(inm.name)
fnm = Path(checkCache(
str(fnm),
strip = self.strip,
upx = self.upx,
upx_exclude = self.upx_exclude,
dist_nm = str(inm),
target_arch = self.target_arch,
codesign_identity = self.codesign_identity,
entitlements_file = self.entitlements_file,
strict_arch_validation = (typ == 'EXTENSION'),
))
frame_dst = frameworks_path.joinpath(inm)
if not frame_dst.exists():
if frame_dst.is_dir():
os.makedirs(frame_dst, exist_ok = True)
else:
os.makedirs(frame_dst.parent, exist_ok = True)
shutil.copy(fnm, frame_dst, follow_symlinks = True)
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
# Create relative symlink to the framework
symlink_to = Path(*[".." for p in macos_dst.relative_to(macos_path).parts], "Frameworks").joinpath(
frame_dst.relative_to(frameworks_path))
try:
macos_dst.symlink_to(symlink_to)
except FileExistsError:
pass
else:
if typ == 'DATA':
if any(['.' in p for p in inm.parent.parts]) or inm.suffix == '.so':
# Skip info dist egg and some not needed folders in tcl and tk, since they all contain dots in their files
logger.warning(f"Skipping DATA file {inm}")
continue
res_dst = resources_path.joinpath(inm)
if not res_dst.exists():
if res_dst.is_dir():
os.makedirs(res_dst, exist_ok = True)
else:
os.makedirs(res_dst.parent, exist_ok = True)
shutil.copy(fnm, res_dst, follow_symlinks = True)
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
# Create relative symlink to the resource
symlink_to = Path(*[".." for p in macos_dst.relative_to(macos_path).parts], "Resources").joinpath(
res_dst.relative_to(resources_path))
try:
macos_dst.symlink_to(symlink_to)
except FileExistsError:
pass
else:
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
shutil.copy(fnm, macos_dst, follow_symlinks = True)
# Sign the bundle
logger.info('Signing the BUNDLE...')
try:
osxutils.sign_binary(self.name, self.codesign_identity, self.entitlements_file, deep = True)
except Exception as e:
logger.warning(f"Error while signing the bundle: {e}")
logger.warning("You will need to sign the bundle manually!")
logger.info(f"Building BUNDLE {self.tocbasename} completed successfully.")
app = UMBUNDLE(
coll, coll,
name='{{ display_name }}.app', name='{{ display_name }}.app',
icon={{ icon }}, icon={{ icon }},
@ -266,10 +86,15 @@ app = UMBUNDLE(
        'CFBundlePackageType': 'APPL',
        'CFBundleVersionString': {{ version }},
        'CFBundleShortVersionString': {{ short_version }},
+        'CFBundleURLTypes': [{
+            'CFBundleURLName': '{{ display_name }}',
+            'CFBundleURLSchemes': ['cura', 'slicer'],
+        }],
        'CFBundleDocumentTypes': [{
            'CFBundleTypeRole': 'Viewer',
-            'CFBundleTypeExtensions': ['*'],
+            'CFBundleTypeExtensions': ['stl', 'obj', '3mf', 'gcode', 'ufp'],
            'CFBundleTypeName': 'Model Files',
        }]
    },
-){% endif %}
+)
{% endif %}
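The BUNDLE keyword arguments above end up in the app's Info.plist. A small sketch of the resulting plist section written with plistlib, using placeholder values where the spec file substitutes Jinja variables:

import plistlib

# Trimmed-down sketch of the Info.plist entries produced by the BUNDLE section; real values
# come from the Jinja variables (display_name, version, short_version, ...).
info_plist = {
    "CFBundleDisplayName": "UltiMaker Cura",
    "CFBundlePackageType": "APPL",
    "CFBundleVersionString": "5.7.0",
    "CFBundleShortVersionString": "5.7.0",
    "CFBundleURLTypes": [{
        "CFBundleURLName": "UltiMaker Cura",
        "CFBundleURLSchemes": ["cura", "slicer"],  # registers cura:// and slicer:// deep links
    }],
    "CFBundleDocumentTypes": [{
        "CFBundleTypeRole": "Viewer",
        "CFBundleTypeExtensions": ["stl", "obj", "3mf", "gcode", "ufp"],
        "CFBundleTypeName": "Model Files",
    }],
}

with open("Info.plist", "wb") as f:
    plistlib.dump(info_plist, f)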

View file

@ -1,3 +1,17 @@
version: "5.7.0-alpha.1"
requirements:
- "uranium/(latest)@ultimaker/testing"
- "curaengine/(latest)@ultimaker/testing"
- "cura_binary_data/(latest)@ultimaker/testing"
- "fdm_materials/(latest)@ultimaker/testing"
- "curaengine_plugin_gradual_flow/0.1.0-beta.2"
- "dulcificum/latest@ultimaker/testing"
- "pysavitar/5.3.0"
- "pynest2d/5.3.0"
- "curaengine_grpc_definitions/(latest)@ultimaker/testing"
requirements_internal:
- "fdm_materials/(latest)@internal/testing"
- "cura_private_data/(latest)@internal/testing"
urls:
  default:
    cloud_api_root: "https://api.ultimaker.com"
@ -78,9 +92,15 @@ pyinstaller:
      src: "bin"
      dst: "."
      binary: "CuraEngine"
curaengine_gradual_flow_plugin_service:
package: "curaengine_plugin_gradual_flow"
src: "bin"
dst: "."
binary: "curaengine_plugin_gradual_flow"
  hiddenimports:
    - "pySavitar"
    - "pyArcus"
+    - "pyDulcificum"
    - "pynest2d"
    - "PyQt6"
    - "PyQt6.QtNetwork"
@ -98,7 +118,6 @@ pyinstaller:
    - "sqlite3"
    - "trimesh"
    - "win32ctypes"
-    - "PyQt6"
    - "PyQt6.QtNetwork"
    - "PyQt6.sip"
    - "stl"
@ -140,6 +159,10 @@ pycharm_targets:
    module_name: Cura
    name: pytest in TestGCodeListDecorator.py
    script_name: tests/TestGCodeListDecorator.py
+  - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
+    module_name: Cura
+    name: pytest in TestHitChecker.py
+    script_name: tests/TestHitChecker.py
  - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
    module_name: Cura
    name: pytest in TestIntentManager.py
@ -168,6 +191,10 @@ pycharm_targets:
    module_name: Cura
    name: pytest in TestPrintInformation.py
    script_name: tests/TestPrintInformation.py
+  - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
+    module_name: Cura
+    name: pytest in TestPrintOrderManager.py
+    script_name: tests/TestPrintOrderManager.py
  - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
    module_name: Cura
    name: pytest in TestProfileRequirements.py
    script_name: tests/TestProfileRequirements.py

View file

@ -1,10 +1,11 @@
import os
+from io import StringIO
from pathlib import Path
from jinja2 import Template
from conan import ConanFile
-from conan.tools.files import copy, rmdir, save, mkdir, rm
+from conan.tools.files import copy, rmdir, save, mkdir, rm, update_conandata
from conan.tools.microsoft import unix_path
from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version
@ -25,7 +26,7 @@ class CuraConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    # FIXME: Remove specific branch once merged to main
-    python_requires = "translationextractor/[>=2.1.1]@ultimaker/stable"
+    python_requires = "translationextractor/[>=2.2.0]@ultimaker/stable"
    options = {
        "enterprise": ["True", "False", "true", "false"],  # Workaround for GH Action passing boolean as lowercase string
@ -34,7 +35,8 @@ class CuraConan(ConanFile):
        "cloud_api_version": "ANY",
        "display_name": "ANY",  # TODO: should this be an option??
        "cura_debug_mode": [True, False],  # FIXME: Use profiles
-        "internal": [True, False]
+        "internal": ["True", "False", "true", "false"],  # Workaround for GH Action passing boolean as lowercase string
+        "enable_i18n": [True, False],
    }
    default_options = {
        "enterprise": "False",
@ -43,12 +45,17 @@ class CuraConan(ConanFile):
        "cloud_api_version": "1",
        "display_name": "UltiMaker Cura",
        "cura_debug_mode": False,  # Not yet implemented
-        "internal": False,
+        "internal": "False",
+        "enable_i18n": False,
    }
    def set_version(self):
        if not self.version:
-            self.version = "5.5.0-alpha"
+            self.version = self.conan_data["version"]
@property
def _i18n_options(self):
return self.conf.get("user.i18n:options", default = {"extract": True, "build": True}, check_type = dict)
    @property
    def _pycharm_targets(self):
@ -65,6 +72,8 @@ class CuraConan(ConanFile):
        self._cura_env = Environment()
        self._cura_env.define("QML2_IMPORT_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "qml")))
        self._cura_env.define("QT_PLUGIN_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "plugins")))
+        if not self.in_local_cache:
+            self._cura_env.define("CURA_DATA_ROOT", str(self._share_dir.joinpath("cura")))
        if self.settings.os == "Linux":
            self._cura_env.define("QT_QPA_FONTDIR", "/usr/share/fonts")
@ -76,6 +85,10 @@ class CuraConan(ConanFile):
    def _enterprise(self):
        return self.options.enterprise in ["True", 'true']
+    @property
+    def _internal(self):
+        return self.options.internal in ["True", 'true']
    @property
    def _app_name(self):
        if self._enterprise:
@ -137,36 +150,43 @@ class CuraConan(ConanFile):
            return "'x86_64'"
        return "None"
-    def _generate_about_versions(self, location):
-        with open(os.path.join(self.recipe_folder, "AboutDialogVersionsList.qml.jinja"), "r") as f:
-            cura_version_py = Template(f.read())
-        conan_installs = []
-        python_installs = []
-
-        # list of conan installs
-        for _, dependency in self.dependencies.host.items():
-            conan_installs.append([dependency.ref.name,dependency.ref.version])
+    def _conan_installs(self):
+        self.output.info("Collecting conan installs")
+        conan_installs = {}
+
+        # list of conan installs
+        for dependency in self.dependencies.host.values():
+            conan_installs[dependency.ref.name] = {
+                "version": dependency.ref.version,
+                "revision": dependency.ref.revision
+            }
+        return conan_installs

-        #list of python installs
+    def _python_installs(self):
+        self.output.info("Collecting python installs")
+        python_installs = {}
+
+        # list of python installs
+        run_env = VirtualRunEnv(self)
+        env = run_env.environment()
+        env.prepend_path("PYTHONPATH", str(self._site_packages.as_posix()))
+        venv_vars = env.vars(self, scope = "run")
+
        outer = '"' if self.settings.os == "Windows" else "'"
        inner = "'" if self.settings.os == "Windows" else '"'
-        python_ins_cmd = f"python -c {outer}import pkg_resources; print({inner};{inner}.join([(s.key+{inner},{inner}+ s.version) for s in pkg_resources.working_set])){outer}"
-        from six import StringIO
        buffer = StringIO()
-        self.run(python_ins_cmd, run_environment= True, env = "conanrun", output=buffer)
+        with venv_vars.apply():
+            self.run(f"""python -c {outer}import pkg_resources; print({inner};{inner}.join([(s.key+{inner},{inner}+ s.version) for s in pkg_resources.working_set])){outer}""",
+                     env = "conanrun",
+                     output = buffer)
        packages = str(buffer.getvalue()).split("-----------------\n")
-        package = packages[1].strip('\r\n').split(";")
-        for pack in package:
-            python_installs.append(pack.split(","))
-
-        with open(os.path.join(location, "AboutDialogVersionsList.qml"), "w") as f:
-            f.write(cura_version_py.render(
-                conan_installs = conan_installs,
-                python_installs = python_installs
-            ))
+        packages = packages[1].strip('\r\n').split(";")
+        for package in packages:
+            name, version = package.split(",")
+            python_installs[name] = {"version": version}
+
+        return python_installs
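# Illustrative aside, not part of the recipe: the command string assembled above is just a
# one-liner over pkg_resources, so outside Conan the same listing can be produced directly.
# pkg_resources ships with setuptools; importlib.metadata is the modern equivalent.
import pkg_resources

python_installs_example = {
    dist.key: {"version": dist.version}   # e.g. {"numpy": {"version": "1.26.4"}}
    for dist in pkg_resources.working_set
}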
    def _generate_cura_version(self, location):
        with open(os.path.join(self.recipe_folder, "CuraVersion.py.jinja"), "r") as f:
@ -177,7 +197,7 @@ class CuraConan(ConanFile):
        cura_version = Version(self.conf.get("user.cura:version", default = self.version, check_type = str))
        pre_tag = f"-{cura_version.pre}" if cura_version.pre else ""
        build_tag = f"+{cura_version.build}" if cura_version.build else ""
-        internal_tag = f"+internal" if self.options.internal else ""
+        internal_tag = f"+internal" if self._internal else ""
        cura_version = f"{cura_version.major}.{cura_version.minor}.{cura_version.patch}{pre_tag}{build_tag}{internal_tag}"
        with open(os.path.join(location, "CuraVersion.py"), "w") as f:
@ -192,13 +212,16 @@ class CuraConan(ConanFile):
            cura_cloud_account_api_root = self.conan_data["urls"][self._urls]["cloud_account_api_root"],
            cura_marketplace_root = self.conan_data["urls"][self._urls]["marketplace_root"],
            cura_digital_factory_url = self.conan_data["urls"][self._urls]["digital_factory_url"],
-            cura_latest_url = self.conan_data["urls"][self._urls]["cura_latest_url"]))
+            cura_latest_url=self.conan_data["urls"][self._urls]["cura_latest_url"],
+            conan_installs=self._conan_installs(),
+            python_installs=self._python_installs(),
+        ))
    def _generate_pyinstaller_spec(self, location, entrypoint_location, icon_path, entitlements_file):
        pyinstaller_metadata = self.conan_data["pyinstaller"]
-        datas = [(str(self._base_dir.joinpath("conan_install_info.json")), ".")]
+        datas = []
        for data in pyinstaller_metadata["datas"].values():
-            if not self.options.internal and data.get("internal", False):
+            if not self._internal and data.get("internal", False):
                continue
            if "package" in data:  # get the paths from conan package
@ -253,7 +276,7 @@ class CuraConan(ConanFile):
        with open(os.path.join(self.recipe_folder, "UltiMaker-Cura.spec.jinja"), "r") as f:
            pyinstaller = Template(f.read())
-        version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
+        version = self.conf.get("user.cura:version", default = self.version, check_type = str)
        cura_version = Version(version)
        with open(os.path.join(location, "UltiMaker-Cura.spec"), "w") as f:
@ -277,6 +300,9 @@ class CuraConan(ConanFile):
            short_version = f"'{cura_version.major}.{cura_version.minor}.{cura_version.patch}'",
        ))
def export(self):
update_conandata(self, {"version": self.version})
    def export_sources(self):
        copy(self, "*", os.path.join(self.recipe_folder, "plugins"), os.path.join(self.export_sources_folder, "plugins"))
        copy(self, "*", os.path.join(self.recipe_folder, "resources"), os.path.join(self.export_sources_folder, "resources"), excludes = "*.mo")
@ -289,43 +315,50 @@ class CuraConan(ConanFile):
        copy(self, "requirements-ultimaker.txt", self.recipe_folder, self.export_sources_folder)
        copy(self, "cura_app.py", self.recipe_folder, self.export_sources_folder)
def config_options(self):
if self.settings.os == "Windows" and not self.conf.get("tools.microsoft.bash:path", check_type=str):
del self.options.enable_i18n
    def configure(self):
        self.options["pyarcus"].shared = True
        self.options["pysavitar"].shared = True
        self.options["pynest2d"].shared = True
+        self.options["dulcificum"].shared = self.settings.os != "Windows"
        self.options["cpython"].shared = True
        self.options["boost"].header_only = True
        if self.settings.os == "Linux":
            self.options["curaengine_grpc_definitions"].shared = True
self.options["openssl"].shared = True
if self.conf.get("user.curaengine:sentry_url", "", check_type=str) != "":
self.options["curaengine"].enable_sentry = True
self.options["arcus"].enable_sentry = True
self.options["clipper"].enable_sentry = True
    def validate(self):
-        version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
+        version = self.conf.get("user.cura:version", default = self.version, check_type = str)
        if version and Version(version) <= Version("4"):
            raise ConanInvalidConfiguration("Only versions 5+ are support")
    def requirements(self):
for req in self.conan_data["requirements"]:
if self._internal and "fdm_materials" in req:
continue
self.requires(req)
if self._internal:
for req in self.conan_data["requirements_internal"]:
self.requires(req)
self.requires("cpython/3.10.4@ultimaker/stable")
self.requires("clipper/6.4.2@ultimaker/stable")
self.requires("openssl/3.2.0")
self.requires("protobuf/3.21.12")
self.requires("boost/1.82.0") self.requires("boost/1.82.0")
self.requires("curaengine_grpc_definitions/latest@ultimaker/testing") self.requires("spdlog/1.12.0")
self.requires("fmt/10.1.1")
self.requires("zlib/1.2.13") self.requires("zlib/1.2.13")
self.requires("pyarcus/5.3.0")
self.requires("curaengine/(latest)@ultimaker/testing")
self.requires("pysavitar/5.3.0")
self.requires("pynest2d/5.3.0")
self.requires("curaengine_plugin_gradual_flow/(latest)@ultimaker/testing")
self.requires("uranium/(latest)@ultimaker/testing")
self.requires("cura_binary_data/(latest)@ultimaker/testing")
self.requires("cpython/3.10.4")
if self.options.internal:
self.requires("cura_private_data/(latest)@ultimaker/testing")
self.requires("fdm_materials/(latest)@internal/testing")
else:
self.requires("fdm_materials/(latest)@ultimaker/testing")
    def build_requirements(self):
-        if self.options.devtools:
-            if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str):
-                # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
-                self.tool_requires("gettext/0.21@ultimaker/testing", force_host_context = True)
+        if self.options.get_safe("enable_i18n", False):
+            self.tool_requires("gettext/0.21", force_host_context = True)
    def layout(self):
        self.folders.source = "."
@ -346,7 +379,6 @@ class CuraConan(ConanFile):
vr.generate() vr.generate()
self._generate_cura_version(os.path.join(self.source_folder, "cura")) self._generate_cura_version(os.path.join(self.source_folder, "cura"))
self._generate_about_versions(os.path.join(self.source_folder, "resources","qml", "Dialogs"))
if not self.in_local_cache: if not self.in_local_cache:
# Copy CuraEngine.exe to bindirs of Virtual Python Environment # Copy CuraEngine.exe to bindirs of Virtual Python Environment
@ -357,10 +389,8 @@ class CuraConan(ConanFile):
# Copy the external plugins that we want to bundle with Cura # Copy the external plugins that we want to bundle with Cura
rmdir(self,str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow"))) rmdir(self,str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")))
curaengine_plugin_gradual_flow = self.dependencies["curaengine_plugin_gradual_flow"].cpp_info curaengine_plugin_gradual_flow = self.dependencies["curaengine_plugin_gradual_flow"].cpp_info
copy(self, "*.py", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True) copy(self, "*", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
ext = ".exe" if self.settings.os == "Windows" else "" copy(self, "*", curaengine_plugin_gradual_flow.bindirs[0], self.source_folder, keep_path = False)
copy(self, f"curaengine_plugin_gradual_flow{ext}", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
copy(self, "*.json", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False) copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
# Copy resources of cura_binary_data # Copy resources of cura_binary_data
@ -384,37 +414,40 @@ class CuraConan(ConanFile):
copy(self, "*", fdm_materials.resdirs[0], self.source_folder) copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
# Copy internal resources # Copy internal resources
if self.options.internal: if self._internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura"))) copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
if self.options.devtools: if self.options.devtools:
entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements")) entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self.generators_folder, self._generate_pyinstaller_spec(
entrypoint_location = "'{}'".format(os.path.join(self.source_folder, self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace("\\", "\\\\"), location=self.generators_folder,
icon_path = "'{}'".format(os.path.join(self.source_folder, "packaging", self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"), entrypoint_location="'{}'".format(
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None") os.path.join(self.source_folder, self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace(
"\\", "\\\\"),
icon_path="'{}'".format(os.path.join(self.source_folder, "packaging",
self.conan_data["pyinstaller"]["icon"][
str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file=entitlements_file if self.settings.os == "Macos" else "None"
)
-       # Update the po and pot files
-       if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type=str):
+       if self.options.get_safe("enable_i18n", False) and self._i18n_options["extract"]:
            vb = VirtualBuildEnv(self)
            vb.generate()

-           # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
+           # # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
            cpp_info = self.dependencies["gettext"].cpp_info
            pot = self.python_requires["translationextractor"].module.ExtractTranslations(self, cpp_info.bindirs[0])
            pot.generate()

    def build(self):
-       if self.options.devtools:
-           if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str):
-               # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
-               for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"):
-                   mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_path))
-                   mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
-                   mkdir(self, str(unix_path(self, Path(mo_file).parent)))
-                   cpp_info = self.dependencies["gettext"].cpp_info
-                   self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
+       if self.options.get_safe("enable_i18n", False) and self._i18n_options["build"]:
+           for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"):
+               mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_path))
+               mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
+               mkdir(self, str(unix_path(self, Path(mo_file).parent)))
+               cpp_info = self.dependencies["gettext"].cpp_info
+               self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
def deploy(self): def deploy(self):
copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.install_folder, "packaging"), keep_path = True) copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.install_folder, "packaging"), keep_path = True)
@ -432,7 +465,7 @@ class CuraConan(ConanFile):
copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True) copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True)
# Generate the GitHub Action version info Environment # Generate the GitHub Action version info Environment
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str) version = self.conf.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version) cura_version = Version(version)
env_prefix = "Env:" if self.settings.os == "Windows" else "" env_prefix = "Env:" if self.settings.os == "Windows" else ""
activate_github_actions_version_env = Template(r"""echo "CURA_VERSION_MAJOR={{ cura_version_major }}" >> ${{ env_prefix }}GITHUB_ENV activate_github_actions_version_env = Template(r"""echo "CURA_VERSION_MAJOR={{ cura_version_major }}" >> ${{ env_prefix }}GITHUB_ENV
@ -453,7 +486,6 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
save(self, os.path.join(self._script_dir, f"activate_github_actions_version_env{ext}"), activate_github_actions_version_env) save(self, os.path.join(self._script_dir, f"activate_github_actions_version_env{ext}"), activate_github_actions_version_env)
self._generate_cura_version(os.path.join(self._site_packages, "cura")) self._generate_cura_version(os.path.join(self._site_packages, "cura"))
self._generate_about_versions(str(self._share_dir.joinpath("cura", "resources", "qml", "Dialogs")))
entitlements_file = "'{}'".format(Path(self.cpp_info.res_paths[2], "MacOS", "cura.entitlements")) entitlements_file = "'{}'".format(Path(self.cpp_info.res_paths[2], "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self._base_dir, self._generate_pyinstaller_spec(location = self._base_dir,
@@ -484,10 +516,14 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
        if self.in_local_cache:
            self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "site-packages"))
+           self.env_info.PYTHONPATH.append(os.path.join(self.package_folder, "site-packages"))
            self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "plugins"))
+           self.env_info.PYTHONPATH.append(os.path.join(self.package_folder, "plugins"))
        else:
            self.runenv_info.append_path("PYTHONPATH", self.source_folder)
+           self.env_info.PYTHONPATH.append(self.source_folder)
            self.runenv_info.append_path("PYTHONPATH", os.path.join(self.source_folder, "plugins"))
+           self.env_info.PYTHONPATH.append(os.path.join(self.source_folder, "plugins"))
    def package_id(self):
        self.info.clear()

@@ -500,6 +536,8 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
        del self.info.options.cloud_api_version
        del self.info.options.display_name
        del self.info.options.cura_debug_mode
+       if self.options.get_safe("enable_i18n", False):
+           del self.info.options.enable_i18n

        # TODO: Use the hash of requirements.txt and requirements-ultimaker.txt, because changing these will actually result in a different
        #  Cura. This is needed because the requirements.txt aren't managed by Conan and therefore not resolved in the package_id. This isn't
View file
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 UltiMaker
+# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

# ---------
@@ -14,7 +14,7 @@ DEFAULT_CURA_LATEST_URL = "https://software.ultimaker.com/latest.json"
# Each release has a fixed SDK version coupled with it. It doesn't make sense to make it configurable because, for
# example Cura 3.2 with SDK version 6.1 will not work. So the SDK version is hard-coded here and left out of the
# CuraVersion.py.in template.
-CuraSDKVersion = "8.4.0"
+CuraSDKVersion = "8.6.0"

try:
    from cura.CuraVersion import CuraLatestURL
@@ -69,13 +69,25 @@ try:
except ImportError:
    CuraAppDisplayName = DEFAULT_CURA_DISPLAY_NAME

-DEPENDENCY_INFO = {}
try:
-   from pathlib import Path
-   conan_install_info = Path(__file__).parent.parent.joinpath("conan_install_info.json")
-   if conan_install_info.exists():
-       import json
-       with open(conan_install_info, "r") as f:
-           DEPENDENCY_INFO = json.loads(f.read())
-except:
-   pass
+   from cura.CuraVersion import ConanInstalls
+
+   if type(ConanInstalls) == dict:
+       CONAN_INSTALLS = ConanInstalls
+   else:
+       CONAN_INSTALLS = {}
+
+except ImportError:
+   CONAN_INSTALLS = {}
+
+try:
+   from cura.CuraVersion import PythonInstalls
+
+   if type(PythonInstalls) == dict:
+       PYTHON_INSTALLS = PythonInstalls
+   else:
+       PYTHON_INSTALLS = {}
+
+except ImportError:
+   PYTHON_INSTALLS = {}
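The new code favours metadata baked into `CuraVersion.py` at package time over parsing `conan_install_info.json` at runtime, falling back to an empty dict when the attribute is absent. A hedged sketch of that defensive lookup; `buildinfo` and its `ConanInstalls` attribute are stand-ins for whatever the packaging step actually generates.

```python
# Sketch of the "optional build-time metadata" pattern used above.
# If the generated module or its attribute is missing, fall back to {}.
from typing import Dict


def load_installs_metadata() -> Dict[str, dict]:
    try:
        from buildinfo import ConanInstalls  # generated at package time (assumption)
    except ImportError:
        return {}
    return ConanInstalls if isinstance(ConanInstalls, dict) else {}


print(load_installs_metadata())  # {} when no generated metadata is present
```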
View file
@@ -5,7 +5,7 @@ if TYPE_CHECKING:

class Arranger:
-   def createGroupOperationForArrange(self, *, add_new_nodes_in_scene: bool = False) -> Tuple["GroupedOperation", int]:
+   def createGroupOperationForArrange(self, add_new_nodes_in_scene: bool = False) -> Tuple["GroupedOperation", int]:
        """
        Find placement for a set of scene nodes, but don't actually move them just yet.
        :param add_new_nodes_in_scene: Whether to create new scene nodes before applying the transformations and rotations
@@ -16,13 +16,12 @@ class Arranger:
        """
        raise NotImplementedError

-   def arrange(self, *, add_new_nodes_in_scene: bool = False) -> bool:
+   def arrange(self, add_new_nodes_in_scene: bool = False) -> bool:
        """
        Find placement for a set of scene nodes, and move them by using a single grouped operation.
        :param add_new_nodes_in_scene: Whether to create new scene nodes before applying the transformations and rotations
        :return: found_solution_for_all: Whether the algorithm found a place on the buildplate for all the objects
        """
-       grouped_operation, not_fit_count = self.createGroupOperationForArrange(
-           add_new_nodes_in_scene=add_new_nodes_in_scene)
+       grouped_operation, not_fit_count = self.createGroupOperationForArrange(add_new_nodes_in_scene)
        grouped_operation.push()
        return not_fit_count == 0
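The base class is a small template-method interface: subclasses compute a grouped operation, and `arrange()` applies it with a single `push()`. A self-contained sketch of that shape, using simplified stand-in types (`Operation`, `ListArranger`) rather than Cura's `GroupedOperation` and scene nodes:

```python
# Minimal template-method sketch of the Arranger contract shown above.
from typing import List, Tuple


class Operation:
    """Groups a batch of moves so they can be applied (or undone) as one step."""
    def __init__(self) -> None:
        self.moves: List[str] = []

    def push(self) -> None:
        for move in self.moves:
            print("apply:", move)


class Arranger:
    def createGroupOperationForArrange(self, add_new_nodes_in_scene: bool = False) -> Tuple[Operation, int]:
        raise NotImplementedError

    def arrange(self, add_new_nodes_in_scene: bool = False) -> bool:
        operation, not_fit_count = self.createGroupOperationForArrange(add_new_nodes_in_scene)
        operation.push()
        return not_fit_count == 0


class ListArranger(Arranger):
    """Places names on a fixed number of slots; anything that doesn't fit counts as a failure."""
    def __init__(self, names: List[str], slots: int) -> None:
        self.names, self.slots = names, slots

    def createGroupOperationForArrange(self, add_new_nodes_in_scene: bool = False) -> Tuple[Operation, int]:
        op = Operation()
        for i, name in enumerate(self.names[:self.slots]):
            op.moves.append(f"{name} -> slot {i}")
        return op, max(0, len(self.names) - self.slots)


print(ListArranger(["a", "b", "c"], slots=2).arrange())  # False: one object did not fit
```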
View file
@ -80,7 +80,7 @@ class GridArrange(Arranger):
self._allowed_grid_idx = self._build_plate_grid_ids.difference(self._fixed_nodes_grid_ids) self._allowed_grid_idx = self._build_plate_grid_ids.difference(self._fixed_nodes_grid_ids)
def createGroupOperationForArrange(self, *, add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]: def createGroupOperationForArrange(self, add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]:
# Find the sequence in which items are placed # Find the sequence in which items are placed
coord_build_plate_center_x = self._build_volume_bounding_box.width * 0.5 + self._build_volume_bounding_box.left coord_build_plate_center_x = self._build_volume_bounding_box.width * 0.5 + self._build_volume_bounding_box.left
coord_build_plate_center_y = self._build_volume_bounding_box.depth * 0.5 + self._build_volume_bounding_box.back coord_build_plate_center_y = self._build_volume_bounding_box.depth * 0.5 + self._build_volume_bounding_box.back
@ -118,8 +118,9 @@ class GridArrange(Arranger):
def _findOptimalGridOffset(self): def _findOptimalGridOffset(self):
if len(self._fixed_nodes) == 0: if len(self._fixed_nodes) == 0:
self._offset_x = 0 edge_disallowed_size = self._build_volume.getEdgeDisallowedSize()
self._offset_y = 0 self._offset_x = edge_disallowed_size
self._offset_y = edge_disallowed_size
return return
if len(self._fixed_nodes) == 1: if len(self._fixed_nodes) == 1:
@ -240,14 +241,8 @@ class GridArrange(Arranger):
center_grid_x = coord_grid_x + (0.5 * self._grid_width) center_grid_x = coord_grid_x + (0.5 * self._grid_width)
center_grid_y = coord_grid_y + (0.5 * self._grid_height) center_grid_y = coord_grid_y + (0.5 * self._grid_height)
bounding_box = node.getBoundingBox() return TranslateOperation(node, Vector(center_grid_x, node.getWorldPosition().y, center_grid_y),
center_node_x = (bounding_box.left + bounding_box.right) * 0.5 set_position=True)
center_node_y = (bounding_box.back + bounding_box.front) * 0.5
delta_x = center_grid_x - center_node_x
delta_y = center_grid_y - center_node_y
return TranslateOperation(node, Vector(delta_x, 0, delta_y))
def _getGridCornerPoints( def _getGridCornerPoints(
self, self,
View file
@ -6,6 +6,7 @@ from pynest2d import Point, Box, Item, NfpConfig, nest
from typing import List, TYPE_CHECKING, Optional, Tuple from typing import List, TYPE_CHECKING, Optional, Tuple
from UM.Application import Application from UM.Application import Application
from UM.Decorators import deprecated
from UM.Logger import Logger from UM.Logger import Logger
from UM.Math.Matrix import Matrix from UM.Math.Matrix import Matrix
from UM.Math.Polygon import Polygon from UM.Math.Polygon import Polygon
@@ -48,8 +49,9 @@ class Nest2DArrange(Arranger):
    def findNodePlacement(self) -> Tuple[bool, List[Item]]:
        spacing = int(1.5 * self._factor)  # 1.5mm spacing.

-       machine_width = self._build_volume.getWidth()
-       machine_depth = self._build_volume.getDepth()
+       edge_disallowed_size = self._build_volume.getEdgeDisallowedSize()
+       machine_width = self._build_volume.getWidth() - (edge_disallowed_size * 2)
+       machine_depth = self._build_volume.getDepth() - (edge_disallowed_size * 2)
        build_plate_bounding_box = Box(int(machine_width * self._factor), int(machine_depth * self._factor))

        if self._fixed_nodes is None:
@ -79,7 +81,6 @@ class Nest2DArrange(Arranger):
], numpy.float32)) ], numpy.float32))
disallowed_areas = self._build_volume.getDisallowedAreas() disallowed_areas = self._build_volume.getDisallowedAreas()
num_disallowed_areas_added = 0
for area in disallowed_areas: for area in disallowed_areas:
converted_points = [] converted_points = []
@ -94,7 +95,6 @@ class Nest2DArrange(Arranger):
disallowed_area = Item(converted_points) disallowed_area = Item(converted_points)
disallowed_area.markAsDisallowedAreaInBin(0) disallowed_area.markAsDisallowedAreaInBin(0)
node_items.append(disallowed_area) node_items.append(disallowed_area)
num_disallowed_areas_added += 1
for node in self._fixed_nodes: for node in self._fixed_nodes:
converted_points = [] converted_points = []
@@ -107,24 +107,29 @@ class Nest2DArrange(Arranger):
            item = Item(converted_points)
            item.markAsFixedInBin(0)
            node_items.append(item)
-           num_disallowed_areas_added += 1

-       config = NfpConfig()
-       config.accuracy = 1.0
-       config.alignment = NfpConfig.Alignment.DONT_ALIGN
-       if self._lock_rotation:
-           config.rotations = [0.0]
-
-       num_bins = nest(node_items, build_plate_bounding_box, spacing, config)
-
-       # Strip the fixed items (previously placed) and the disallowed areas from the results again.
-       node_items = list(filter(lambda item: not item.isFixed(), node_items))
-
-       found_solution_for_all = num_bins == 1
+       strategies = [NfpConfig.Alignment.CENTER] * 3 + [NfpConfig.Alignment.BOTTOM_LEFT] * 3
+       found_solution_for_all = False
+       while not found_solution_for_all and len(strategies) > 0:
+           config = NfpConfig()
+           config.accuracy = 1.0
+           config.alignment = NfpConfig.Alignment.CENTER
+           config.starting_point = strategies[0]
+           strategies = strategies[1:]
+
+           if self._lock_rotation:
+               config.rotations = [0.0]
+
+           num_bins = nest(node_items, build_plate_bounding_box, spacing, config)
+
+           # Strip the fixed items (previously placed) and the disallowed areas from the results again.
+           node_items = list(filter(lambda item: not item.isFixed(), node_items))
+
+           found_solution_for_all = num_bins == 1

        return found_solution_for_all, node_items
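The change above replaces a single nesting attempt with a small fallback loop: try a few alignment/starting-point strategies until everything fits in one bin or the list runs out. A self-contained sketch of that retry pattern; `try_pack` and the strategy names are illustrative, not the pynest2d API.

```python
# Sketch: retry a placement routine with progressively different strategies
# until it reports success or we run out of options.
from typing import Callable, List, Tuple


def place_with_fallback(try_pack: Callable[[str], bool], strategies: List[str]) -> Tuple[bool, str]:
    """Return (success, strategy_used). Strategies are consumed front to back."""
    found_solution_for_all = False
    used = ""
    while not found_solution_for_all and strategies:
        used = strategies[0]
        strategies = strategies[1:]
        found_solution_for_all = try_pack(used)
    return found_solution_for_all, used


# Example: pretend only the "bottom_left" strategy manages to fit everything.
ok, strategy = place_with_fallback(lambda s: s == "bottom_left",
                                   ["center"] * 3 + ["bottom_left"] * 3)
print(ok, strategy)  # True bottom_left
```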
def createGroupOperationForArrange(self, *, add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]: def createGroupOperationForArrange(self, add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]:
scene_root = Application.getInstance().getController().getScene().getRoot() scene_root = Application.getInstance().getController().getScene().getRoot()
found_solution_for_all, node_items = self.findNodePlacement() found_solution_for_all, node_items = self.findNodePlacement()
@ -149,3 +154,30 @@ class Nest2DArrange(Arranger):
not_fit_count += 1 not_fit_count += 1
return grouped_operation, not_fit_count return grouped_operation, not_fit_count
@deprecated("Use the Nest2DArrange class instead")
def findNodePlacement(nodes_to_arrange: List["SceneNode"], build_volume: "BuildVolume",
fixed_nodes: Optional[List["SceneNode"]] = None, factor=10000) -> Tuple[bool, List[Item]]:
arranger = Nest2DArrange(nodes_to_arrange, build_volume, fixed_nodes, factor=factor)
return arranger.findNodePlacement()
@deprecated("Use the Nest2DArrange class instead")
def createGroupOperationForArrange(nodes_to_arrange: List["SceneNode"],
build_volume: "BuildVolume",
fixed_nodes: Optional[List["SceneNode"]] = None,
factor=10000,
add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]:
arranger = Nest2DArrange(nodes_to_arrange, build_volume, fixed_nodes, factor=factor)
return arranger.createGroupOperationForArrange(add_new_nodes_in_scene=add_new_nodes_in_scene)
@deprecated("Use the Nest2DArrange class instead")
def arrange(nodes_to_arrange: List["SceneNode"],
build_volume: "BuildVolume",
fixed_nodes: Optional[List["SceneNode"]] = None,
factor=10000,
add_new_nodes_in_scene: bool = False) -> bool:
arranger = Nest2DArrange(nodes_to_arrange, build_volume, fixed_nodes, factor=factor)
return arranger.arrange(add_new_nodes_in_scene=add_new_nodes_in_scene)
View file
@@ -1,27 +1,35 @@
# Copyright (c) 2023 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
+import socket
+import os
import subprocess
from typing import Optional, List

from UM.Logger import Logger
from UM.Message import Message
-from UM.Settings.AdditionalSettingDefinitionAppender import AdditionalSettingDefinitionsAppender
+from UM.Settings.AdditionalSettingDefinitionsAppender import AdditionalSettingDefinitionsAppender
from UM.PluginObject import PluginObject
from UM.i18n import i18nCatalog
from UM.Platform import Platform
+from UM.Resources import Resources


class BackendPlugin(AdditionalSettingDefinitionsAppender, PluginObject):
    catalog = i18nCatalog("cura")
+   settings_catalog = i18nCatalog("fdmprinter.def.json")

    def __init__(self) -> None:
-       super().__init__()
+       super().__init__(self.settings_catalog)
        self.__port: int = 0
        self._plugin_address: str = "127.0.0.1"
        self._plugin_command: Optional[List[str]] = None
        self._process = None
        self._is_running = False
        self._supported_slots: List[int] = []
+       self._use_plugin = True
+
+   def usePlugin(self) -> bool:
+       return self._use_plugin

    def getSupportedSlots(self) -> List[int]:
        return self._supported_slots

@@ -38,6 +46,15 @@ class BackendPlugin(AdditionalSettingDefinitionsAppender, PluginObject):
    def getAddress(self) -> str:
        return self._plugin_address

+   def setAvailablePort(self) -> None:
+       """
+       Sets the port to a random available port.
+       """
+       sock = socket.socket()
+       sock.bind((self.getAddress(), 0))
+       port = sock.getsockname()[1]
+       self.setPort(port)
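`setAvailablePort()` relies on a standard trick: binding a socket to port 0 lets the operating system pick a free port, which can then be read back from `getsockname()`. A minimal standalone version:

```python
# Sketch: ask the OS for a free TCP port by binding to port 0.
import socket


def pick_free_port(host: str = "127.0.0.1") -> int:
    sock = socket.socket()
    try:
        sock.bind((host, 0))          # port 0 = "any free port"
        return sock.getsockname()[1]  # the port the OS actually assigned
    finally:
        sock.close()                  # the port is only *likely* still free afterwards


print(pick_free_port())
```

Note the small race window: between closing the socket and the plugin process binding the port, another process could in principle grab it.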
    def _validatePluginCommand(self) -> list[str]:
        """
        Validate the plugin command and add the port parameter if it is missing.

@@ -55,14 +72,28 @@ class BackendPlugin(AdditionalSettingDefinitionsAppender, PluginObject):
        :return: True if the plugin process started successfully, False otherwise.
        """
+       if not self.usePlugin():
+           return False
+       Logger.info(f"Starting backend_plugin [{self._plugin_id}] with command: {self._validatePluginCommand()}")
+       plugin_log_path = os.path.join(Resources.getDataStoragePath(), f"{self.getPluginId()}.log")
+       if os.path.exists(plugin_log_path):
+           try:
+               os.remove(plugin_log_path)
+           except:
+               pass  # removing is only done such that it doesn't grow out of proportions, if it fails once or twice that is okay
+       Logger.info(f"Logging plugin output to: {plugin_log_path}")
        try:
            # STDIN needs to be None because we provide no input, but communicate via a local socket instead.
            # The NUL device sometimes doesn't exist on some computers.
-           Logger.info(f"Starting backend_plugin [{self._plugin_id}] with command: {self._validatePluginCommand()}")
-           popen_kwargs = {"stdin": None}
-           if Platform.isWindows():
-               popen_kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW
-           self._process = subprocess.Popen(self._validatePluginCommand(), **popen_kwargs)
+           with open(plugin_log_path, 'a') as f:
+               popen_kwargs = {
+                   "stdin": None,
+                   "stdout": f,  # Redirect output to file
+                   "stderr": subprocess.STDOUT,  # Combine stderr and stdout
+               }
+               if Platform.isWindows():
+                   popen_kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW
+               self._process = subprocess.Popen(self._validatePluginCommand(), **popen_kwargs)
            self._is_running = True
            return True
        except PermissionError:
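Redirecting a child process's output into a per-plugin log file is a standard `subprocess` pattern: pass an open file handle as `stdout` and merge `stderr` into it. A minimal standalone sketch; the command and log path are placeholders.

```python
# Sketch: start a helper process with its combined output appended to a log file.
import subprocess
import sys
from pathlib import Path

log_path = Path("plugin_output.log")           # placeholder location
command = [sys.executable, "-c", "print('hello from the helper process')"]

with log_path.open("a") as log_file:
    process = subprocess.Popen(
        command,
        stdin=None,                 # no input; the real plugin talks over a local socket instead
        stdout=log_file,            # child writes straight into the log file
        stderr=subprocess.STDOUT,   # merge stderr into the same stream
    )

process.wait()
print(log_path.read_text())
```

The file handle is inherited by the child, so it keeps writing to the log even after the parent's `with` block closes its own copy.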
View file
@@ -120,6 +120,8 @@ class BuildVolume(SceneNode):
        # Objects loaded at the moment. We are connected to the property changed events of these objects.
        self._scene_objects = set()  # type: Set[SceneNode]
+       # Number of toplevel printable meshes. If there is more than one, the build volume needs to take account of the gantry height in One at a Time printing.
+       self._root_printable_object_count = 0

        self._scene_change_timer = QTimer()
        self._scene_change_timer.setInterval(200)
@@ -151,6 +153,7 @@ class BuildVolume(SceneNode):
    def _onSceneChangeTimerFinished(self):
        root = self._application.getController().getScene().getRoot()
        new_scene_objects = set(node for node in BreadthFirstIterator(root) if node.callDecoration("isSliceable"))

        if new_scene_objects != self._scene_objects:
            for node in new_scene_objects - self._scene_objects:  # Nodes that were added to the scene.
                self._updateNodeListeners(node)
@@ -166,6 +169,26 @@ class BuildVolume(SceneNode):
            self.rebuild()

            self._scene_objects = new_scene_objects

+       # This also needs to be called when objects are grouped/ungrouped,
+       # which is not reflected in a change in self._scene_objects
+       self._updateRootPrintableObjectCount()
+
+   def _updateRootPrintableObjectCount(self):
+       # Get the number of models in the scene root, excluding modifier meshes and counting grouped models as 1
+       root = self._application.getController().getScene().getRoot()
+       scene_objects = set(node for node in BreadthFirstIterator(root) if node.callDecoration("isSliceable") or node.callDecoration("isGroup"))
+
+       new_root_printable_object_count = len(list(node for node in scene_objects if node.getParent() == root and not (
+           node_stack := node.callDecoration("getStack") and (
+               node.callDecoration("getStack").getProperty("anti_overhang_mesh", "value") or
+               node.callDecoration("getStack").getProperty("support_mesh", "value") or
+               node.callDecoration("getStack").getProperty("cutting_mesh", "value") or
+               node.callDecoration("getStack").getProperty("infill_mesh", "value")
+           ))
+       ))
+       if new_root_printable_object_count != self._root_printable_object_count:
+           self._root_printable_object_count = new_root_printable_object_count
            self._onSettingPropertyChanged("print_sequence", "value")  # Create fake event, so right settings are triggered.
def _updateNodeListeners(self, node: SceneNode): def _updateNodeListeners(self, node: SceneNode):
@ -489,20 +512,20 @@ class BuildVolume(SceneNode):
if not self._disallowed_areas: if not self._disallowed_areas:
return None return None
bounding_box = Polygon(numpy.array([[min_w, min_d], [min_w, max_d], [max_w, max_d], [max_w, min_d]], numpy.float32))
mb = MeshBuilder() mb = MeshBuilder()
color = self._disallowed_area_color color = self._disallowed_area_color
for polygon in self._disallowed_areas: for polygon in self._disallowed_areas:
points = polygon.getPoints() intersection = polygon.intersectionConvexHulls(bounding_box)
if len(points) == 0: points = numpy.flipud(intersection.getPoints())
if len(points) < 3:
continue continue
first = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height, first = Vector(points[0][0], disallowed_area_height, points[0][1])
self._clamp(points[0][1], min_d, max_d)) previous_point = Vector(points[1][0], disallowed_area_height, points[1][1])
previous_point = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height, for point in points[2:]:
self._clamp(points[0][1], min_d, max_d)) new_point = Vector(point[0], disallowed_area_height, point[1])
for point in points:
new_point = Vector(self._clamp(point[0], min_w, max_w), disallowed_area_height,
self._clamp(point[1], min_d, max_d))
mb.addFace(first, previous_point, new_point, color=color) mb.addFace(first, previous_point, new_point, color=color)
previous_point = new_point previous_point = new_point
@ -650,7 +673,7 @@ class BuildVolume(SceneNode):
self._width = self._global_container_stack.getProperty("machine_width", "value") self._width = self._global_container_stack.getProperty("machine_width", "value")
machine_height = self._global_container_stack.getProperty("machine_height", "value") machine_height = self._global_container_stack.getProperty("machine_height", "value")
if self._global_container_stack.getProperty("print_sequence", "value") == "one_at_a_time" and len(self._scene_objects) > 1: if self._global_container_stack.getProperty("print_sequence", "value") == "one_at_a_time" and self._root_printable_object_count > 1:
new_height = min(self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z, machine_height) new_height = min(self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z, machine_height)
if self._height > new_height: if self._height > new_height:
@ -692,9 +715,12 @@ class BuildVolume(SceneNode):
update_extra_z_clearance = True update_extra_z_clearance = True
for setting_key in self._changed_settings_since_last_rebuild: for setting_key in self._changed_settings_since_last_rebuild:
if setting_key in ["print_sequence", "support_mesh", "infill_mesh", "cutting_mesh", "anti_overhang_mesh"]:
self._updateRootPrintableObjectCount()
if setting_key == "print_sequence": if setting_key == "print_sequence":
machine_height = self._global_container_stack.getProperty("machine_height", "value") machine_height = self._global_container_stack.getProperty("machine_height", "value")
if self._application.getGlobalContainerStack().getProperty("print_sequence", "value") == "one_at_a_time" and len(self._scene_objects) > 1: if self._application.getGlobalContainerStack().getProperty("print_sequence", "value") == "one_at_a_time" and self._root_printable_object_count > 1:
new_height = min( new_height = min(
self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z, self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z,
machine_height) machine_height)
@ -813,7 +839,7 @@ class BuildVolume(SceneNode):
prime_tower_areas = self._computeDisallowedAreasPrinted(used_extruders) prime_tower_areas = self._computeDisallowedAreasPrinted(used_extruders)
for extruder_id in prime_tower_areas: for extruder_id in prime_tower_areas:
for area_index, prime_tower_area in enumerate(prime_tower_areas[extruder_id]): for area_index, prime_tower_area in enumerate(prime_tower_areas[extruder_id]):
for area in result_areas[extruder_id]: for area in result_areas_no_brim[extruder_id]:
if prime_tower_area.intersectsPolygon(area) is not None: if prime_tower_area.intersectsPolygon(area) is not None:
prime_tower_collision = True prime_tower_collision = True
break break
@@ -860,13 +886,24 @@ class BuildVolume(SceneNode):
            machine_depth = self._global_container_stack.getProperty("machine_depth", "value")
            prime_tower_x = self._global_container_stack.getProperty("prime_tower_position_x", "value")
            prime_tower_y = - self._global_container_stack.getProperty("prime_tower_position_y", "value")
+           prime_tower_brim_enable = self._global_container_stack.getProperty("prime_tower_brim_enable", "value")
+           prime_tower_base_size = self._global_container_stack.getProperty("prime_tower_base_size", "value")
+           prime_tower_base_height = self._global_container_stack.getProperty("prime_tower_base_height", "value")
+           adhesion_type = self._global_container_stack.getProperty("adhesion_type", "value")

            if not self._global_container_stack.getProperty("machine_center_is_zero", "value"):
                prime_tower_x = prime_tower_x - machine_width / 2  # Offset by half machine_width and _depth to put the origin in the front-left.
                prime_tower_y = prime_tower_y + machine_depth / 2

            radius = prime_tower_size / 2
-           prime_tower_area = Polygon.approximatedCircle(radius, num_segments = 24)
-           prime_tower_area = prime_tower_area.translate(prime_tower_x - radius, prime_tower_y - radius)
+           delta_x = -radius
+           delta_y = -radius
+
+           if prime_tower_base_size > 0 and ((prime_tower_brim_enable and prime_tower_base_height > 0) or adhesion_type == "raft"):
+               radius += prime_tower_base_size
+
+           prime_tower_area = Polygon.approximatedCircle(radius, num_segments = 32)
+           prime_tower_area = prime_tower_area.translate(prime_tower_x + delta_x, prime_tower_y + delta_y)

            prime_tower_area = prime_tower_area.getMinkowskiHull(Polygon.approximatedCircle(0))
            for extruder in used_extruders:
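The new logic keeps the translation anchored to the original tower radius (`delta = -prime_tower_size / 2`) but enlarges the keep-out circle when a base or raft will be printed around the tower. A small worked example of that radius arithmetic; the numbers are illustrative.

```python
# Worked example of the keep-out radius above (values in mm, purely illustrative).
prime_tower_size = 20.0
prime_tower_base_size = 4.0
prime_tower_brim_enable = True
prime_tower_base_height = 0.6
adhesion_type = "brim"

radius = prime_tower_size / 2          # 10.0: half the tower footprint
delta_x = delta_y = -radius            # the circle stays anchored relative to the tower position

if prime_tower_base_size > 0 and ((prime_tower_brim_enable and prime_tower_base_height > 0)
                                  or adhesion_type == "raft"):
    radius += prime_tower_base_size    # 14.0: the base/raft widens the disallowed area

print(radius, delta_x, delta_y)        # 14.0 -10.0 -10.0
```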
@ -1171,7 +1208,7 @@ class BuildVolume(SceneNode):
_raft_settings = ["adhesion_type", "raft_base_thickness", "raft_interface_layers", "raft_interface_thickness", "raft_surface_layers", "raft_surface_thickness", "raft_airgap", "layer_0_z_overlap"] _raft_settings = ["adhesion_type", "raft_base_thickness", "raft_interface_layers", "raft_interface_thickness", "raft_surface_layers", "raft_surface_thickness", "raft_airgap", "layer_0_z_overlap"]
_extra_z_settings = ["retraction_hop_enabled", "retraction_hop"] _extra_z_settings = ["retraction_hop_enabled", "retraction_hop"]
_prime_settings = ["extruder_prime_pos_x", "extruder_prime_pos_y", "prime_blob_enable"] _prime_settings = ["extruder_prime_pos_x", "extruder_prime_pos_y", "prime_blob_enable"]
_tower_settings = ["prime_tower_enable", "prime_tower_size", "prime_tower_position_x", "prime_tower_position_y", "prime_tower_brim_enable"] _tower_settings = ["prime_tower_enable", "prime_tower_size", "prime_tower_position_x", "prime_tower_position_y", "prime_tower_brim_enable", "prime_tower_base_size", "prime_tower_base_height"]
_ooze_shield_settings = ["ooze_shield_enabled", "ooze_shield_dist"] _ooze_shield_settings = ["ooze_shield_enabled", "ooze_shield_dist"]
_distance_settings = ["infill_wipe_dist", "travel_avoid_distance", "support_offset", "support_enable", "travel_avoid_other_parts", "travel_avoid_supports", "wall_line_count", "wall_line_width_0", "wall_line_width_x"] _distance_settings = ["infill_wipe_dist", "travel_avoid_distance", "support_offset", "support_enable", "travel_avoid_other_parts", "travel_avoid_supports", "wall_line_count", "wall_line_width_0", "wall_line_width_x"]
_extruder_settings = ["support_enable", "support_bottom_enable", "support_roof_enable", "support_infill_extruder_nr", "support_extruder_nr_layer_0", "support_bottom_extruder_nr", "support_roof_extruder_nr", "brim_line_count", "skirt_brim_extruder_nr", "raft_base_extruder_nr", "raft_interface_extruder_nr", "raft_surface_extruder_nr", "adhesion_type"] #Settings that can affect which extruders are used. _extruder_settings = ["support_enable", "support_bottom_enable", "support_roof_enable", "support_infill_extruder_nr", "support_extruder_nr_layer_0", "support_bottom_extruder_nr", "support_roof_extruder_nr", "brim_line_count", "skirt_brim_extruder_nr", "raft_base_extruder_nr", "raft_interface_extruder_nr", "raft_surface_extruder_nr", "adhesion_type"] #Settings that can affect which extruders are used.
View file
@ -3,10 +3,11 @@
from typing import List, cast from typing import List, cast
from PyQt6.QtCore import QObject, QUrl, QMimeData from PyQt6.QtCore import QObject, QUrl, pyqtSignal, pyqtProperty
from PyQt6.QtGui import QDesktopServices from PyQt6.QtGui import QDesktopServices
from PyQt6.QtWidgets import QApplication from PyQt6.QtWidgets import QApplication
from UM.Application import Application
from UM.Event import CallFunctionEvent from UM.Event import CallFunctionEvent
from UM.FlameProfiler import pyqtSlot from UM.FlameProfiler import pyqtSlot
from UM.Math.Vector import Vector from UM.Math.Vector import Vector
@ -37,6 +38,10 @@ class CuraActions(QObject):
def __init__(self, parent: QObject = None) -> None: def __init__(self, parent: QObject = None) -> None:
super().__init__(parent) super().__init__(parent)
self._operation_stack = Application.getInstance().getOperationStack()
self._operation_stack.changed.connect(self._onUndoStackChanged)
undoStackChanged = pyqtSignal()
@pyqtSlot() @pyqtSlot()
def openDocumentation(self) -> None: def openDocumentation(self) -> None:
# Starting a web browser from a signal handler connected to a menu will crash on windows. # Starting a web browser from a signal handler connected to a menu will crash on windows.
@ -45,6 +50,25 @@ class CuraActions(QObject):
event = CallFunctionEvent(self._openUrl, [QUrl("https://ultimaker.com/en/resources/manuals/software?utm_source=cura&utm_medium=software&utm_campaign=dropdown-documentation")], {}) event = CallFunctionEvent(self._openUrl, [QUrl("https://ultimaker.com/en/resources/manuals/software?utm_source=cura&utm_medium=software&utm_campaign=dropdown-documentation")], {})
cura.CuraApplication.CuraApplication.getInstance().functionEvent(event) cura.CuraApplication.CuraApplication.getInstance().functionEvent(event)
@pyqtProperty(bool, notify=undoStackChanged)
def canUndo(self):
return self._operation_stack.canUndo()
@pyqtProperty(bool, notify=undoStackChanged)
def canRedo(self):
return self._operation_stack.canRedo()
@pyqtSlot()
def undo(self):
self._operation_stack.undo()
@pyqtSlot()
def redo(self):
self._operation_stack.redo()
def _onUndoStackChanged(self):
self.undoStackChanged.emit()
@pyqtSlot() @pyqtSlot()
def openBugReportPage(self) -> None: def openBugReportPage(self) -> None:
event = CallFunctionEvent(self._openUrl, [QUrl("https://github.com/Ultimaker/Cura/issues/new/choose")], {}) event = CallFunctionEvent(self._openUrl, [QUrl("https://github.com/Ultimaker/Cura/issues/new/choose")], {})
@ -249,7 +273,11 @@ class CuraActions(QObject):
# deselect currently selected nodes, and select the new nodes # deselect currently selected nodes, and select the new nodes
for node in Selection.getAllSelectedObjects(): for node in Selection.getAllSelectedObjects():
Selection.remove(node) Selection.remove(node)
numberOfFixedNodes = len(fixed_nodes)
for node in nodes: for node in nodes:
numberOfFixedNodes += 1
node.printOrder = numberOfFixedNodes
Selection.add(node) Selection.add(node)
def _openUrl(self, url: QUrl) -> None: def _openUrl(self, url: QUrl) -> None:
View file
@ -2,22 +2,26 @@
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import enum import enum
import os import os
import re
import sys import sys
import tempfile import tempfile
import time import time
import platform import platform
from pathlib import Path
from typing import cast, TYPE_CHECKING, Optional, Callable, List, Any, Dict from typing import cast, TYPE_CHECKING, Optional, Callable, List, Any, Dict
import requests
import numpy import numpy
from PyQt6.QtCore import QObject, QTimer, QUrl, pyqtSignal, pyqtProperty, QEvent, pyqtEnum, QCoreApplication from PyQt6.QtCore import QObject, QTimer, QUrl, QUrlQuery, pyqtSignal, pyqtProperty, QEvent, pyqtEnum, QCoreApplication, \
QByteArray
from PyQt6.QtGui import QColor, QIcon from PyQt6.QtGui import QColor, QIcon
from PyQt6.QtQml import qmlRegisterUncreatableType, qmlRegisterUncreatableMetaObject, qmlRegisterSingletonType, qmlRegisterType from PyQt6.QtQml import qmlRegisterUncreatableMetaObject, qmlRegisterSingletonType, qmlRegisterType
from PyQt6.QtWidgets import QMessageBox from PyQt6.QtWidgets import QMessageBox
import UM.Util import UM.Util
import cura.Settings.cura_empty_instance_containers import cura.Settings.cura_empty_instance_containers
from UM.Application import Application from UM.Application import Application
from UM.Decorators import override from UM.Decorators import override, deprecated
from UM.FlameProfiler import pyqtSlot from UM.FlameProfiler import pyqtSlot
from UM.Logger import Logger from UM.Logger import Logger
from UM.Math.AxisAlignedBox import AxisAlignedBox from UM.Math.AxisAlignedBox import AxisAlignedBox
@ -100,7 +104,8 @@ from cura.Settings.SettingInheritanceManager import SettingInheritanceManager
from cura.Settings.SidebarCustomMenuItemsModel import SidebarCustomMenuItemsModel from cura.Settings.SidebarCustomMenuItemsModel import SidebarCustomMenuItemsModel
from cura.Settings.SimpleModeSettingsManager import SimpleModeSettingsManager from cura.Settings.SimpleModeSettingsManager import SimpleModeSettingsManager
from cura.TaskManagement.OnExitCallbackManager import OnExitCallbackManager from cura.TaskManagement.OnExitCallbackManager import OnExitCallbackManager
from cura.UI import CuraSplashScreen, MachineActionManager, PrintInformation from cura.UI import CuraSplashScreen, PrintInformation
from cura.UI.MachineActionManager import MachineActionManager
from cura.UI.AddPrinterPagesModel import AddPrinterPagesModel from cura.UI.AddPrinterPagesModel import AddPrinterPagesModel
from cura.UI.MachineSettingsManager import MachineSettingsManager from cura.UI.MachineSettingsManager import MachineSettingsManager
from cura.UI.ObjectsModel import ObjectsModel from cura.UI.ObjectsModel import ObjectsModel
@ -121,6 +126,7 @@ from .Machines.Models.CompatibleMachineModel import CompatibleMachineModel
from .Machines.Models.MachineListModel import MachineListModel from .Machines.Models.MachineListModel import MachineListModel
from .Machines.Models.ActiveIntentQualitiesModel import ActiveIntentQualitiesModel from .Machines.Models.ActiveIntentQualitiesModel import ActiveIntentQualitiesModel
from .Machines.Models.IntentSelectionModel import IntentSelectionModel from .Machines.Models.IntentSelectionModel import IntentSelectionModel
from .PrintOrderManager import PrintOrderManager
from .SingleInstance import SingleInstance from .SingleInstance import SingleInstance
if TYPE_CHECKING: if TYPE_CHECKING:
@ -175,18 +181,20 @@ class CuraApplication(QtApplication):
# Variables set from CLI # Variables set from CLI
self._files_to_open = [] self._files_to_open = []
self._urls_to_open = []
self._use_single_instance = False self._use_single_instance = False
self._single_instance = None self._single_instance = None
self._open_project_mode: Optional[str] = None
self._cura_formula_functions = None # type: Optional[CuraFormulaFunctions] self._cura_formula_functions = None # type: Optional[CuraFormulaFunctions]
self._machine_action_manager = None # type: Optional[MachineActionManager.MachineActionManager] self._machine_action_manager: Optional[MachineActionManager] = None
self.empty_container = None # type: EmptyInstanceContainer self.empty_container = None # type: EmptyInstanceContainer
self.empty_definition_changes_container = None # type: EmptyInstanceContainer self.empty_definition_changes_container = None # type: EmptyInstanceContainer
self.empty_variant_container = None # type: EmptyInstanceContainer self.empty_variant_container = None # type: EmptyInstanceContainer
self.empty_intent_container = None # type: EmptyInstanceContainer self.empty_intent_container = None # type: EmptyInstanceContainer
self.empty_material_container = None # type: EmptyInstanceContainer self.empty_material_container = None # type: EmptyInstanceContainer
self.empty_quality_container = None # type: EmptyInstanceContainer self.empty_quality_container = None # type: EmptyInstanceContainer
self.empty_quality_changes_container = None # type: EmptyInstanceContainer self.empty_quality_changes_container = None # type: EmptyInstanceContainer
@ -197,6 +205,7 @@ class CuraApplication(QtApplication):
self._container_manager = None self._container_manager = None
self._object_manager = None self._object_manager = None
self._print_order_manager = None
self._extruders_model = None self._extruders_model = None
self._extruders_model_with_optional = None self._extruders_model_with_optional = None
self._build_plate_model = None self._build_plate_model = None
@ -248,7 +257,7 @@ class CuraApplication(QtApplication):
self._additional_components = {} # Components to add to certain areas in the interface self._additional_components = {} # Components to add to certain areas in the interface
self._open_file_queue = [] # A list of files to open (after the application has started) self._open_file_queue = [] # A list of files to open (after the application has started)
self._open_url_queue = [] # A list of urls to open (after the application has started)
self._update_platform_activity_timer = None self._update_platform_activity_timer = None
self._sidebar_custom_menu_items = [] # type: list # Keeps list of custom menu items for the side bar self._sidebar_custom_menu_items = [] # type: list # Keeps list of custom menu items for the side bar
@ -269,6 +278,11 @@ class CuraApplication(QtApplication):
CentralFileStorage.setIsEnterprise(ApplicationMetadata.IsEnterpriseVersion) CentralFileStorage.setIsEnterprise(ApplicationMetadata.IsEnterpriseVersion)
Resources.setIsEnterprise(ApplicationMetadata.IsEnterpriseVersion) Resources.setIsEnterprise(ApplicationMetadata.IsEnterpriseVersion)
self._conan_installs = ApplicationMetadata.CONAN_INSTALLS
self._python_installs = ApplicationMetadata.PYTHON_INSTALLS
self._supported_url_schemes: List[str] = ["cura", "slicer"]
@pyqtProperty(str, constant=True) @pyqtProperty(str, constant=True)
def ultimakerCloudApiRootUrl(self) -> str: def ultimakerCloudApiRootUrl(self) -> str:
return UltimakerCloudConstants.CuraCloudAPIRoot return UltimakerCloudConstants.CuraCloudAPIRoot
@@ -321,7 +335,11 @@ class CuraApplication(QtApplication):
            assert not "This crash is triggered by the trigger_early_crash command line argument."

        for filename in self._cli_args.file:
-           self._files_to_open.append(os.path.abspath(filename))
+           url = QUrl(filename)
+           if url.scheme() in self._supported_url_schemes:
+               self._urls_to_open.append(url)
+           else:
+               self._files_to_open.append(os.path.abspath(filename))
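Command-line arguments are now routed either to the URL queue (for `cura://` / `slicer://` deep links) or to the file queue, based on the parsed scheme. A standalone sketch of that dispatch; `urllib.parse` stands in for `QUrl` here so the example runs without PyQt6, and the scheme list mirrors the one above.

```python
# Sketch: split CLI arguments into deep-link URLs vs. local file paths.
import os
from urllib.parse import urlparse
from typing import List, Tuple

SUPPORTED_URL_SCHEMES = ("cura", "slicer")


def sort_cli_args(args: List[str]) -> Tuple[List[str], List[str]]:
    urls, files = [], []
    for arg in args:
        if urlparse(arg).scheme in SUPPORTED_URL_SCHEMES:
            urls.append(arg)
        else:
            files.append(os.path.abspath(arg))
    return urls, files


print(sort_cli_args(["cura://open?file=benchy.stl", "model.3mf"]))
```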
def initialize(self) -> None: def initialize(self) -> None:
self.__addExpectedResourceDirsAndSearchPaths() # Must be added before init of super self.__addExpectedResourceDirsAndSearchPaths() # Must be added before init of super
@ -338,11 +356,11 @@ class CuraApplication(QtApplication):
self.__addAllEmptyContainers() self.__addAllEmptyContainers()
self.__setLatestResouceVersionsForVersionUpgrade() self.__setLatestResouceVersionsForVersionUpgrade()
self._machine_action_manager = MachineActionManager.MachineActionManager(self) self._machine_action_manager = MachineActionManager(self)
self._machine_action_manager.initialize() self._machine_action_manager.initialize()
def __sendCommandToSingleInstance(self): def __sendCommandToSingleInstance(self):
self._single_instance = SingleInstance(self, self._files_to_open) self._single_instance = SingleInstance(self, self._files_to_open, self._urls_to_open)
# If we use single instance, try to connect to the single instance server, send commands, and then exit. # If we use single instance, try to connect to the single instance server, send commands, and then exit.
# If we cannot find an existing single instance server, this is the only instance, so just keep going. # If we cannot find an existing single instance server, this is the only instance, so just keep going.
@ -359,10 +377,20 @@ class CuraApplication(QtApplication):
Resources.addExpectedDirNameInData(dir_name) Resources.addExpectedDirNameInData(dir_name)
app_root = os.path.abspath(os.path.join(os.path.dirname(sys.executable))) app_root = os.path.abspath(os.path.join(os.path.dirname(sys.executable)))
Resources.addSecureSearchPath(os.path.join(app_root, "share", "cura", "resources"))
Resources.addSecureSearchPath(os.path.join(self._app_install_dir, "share", "cura", "resources")) if platform.system() == "Darwin":
Resources.addSecureSearchPath(os.path.join(app_root, "Resources", "share", "cura", "resources"))
Resources.addSecureSearchPath(
os.path.join(self._app_install_dir, "Resources", "share", "cura", "resources"))
else:
Resources.addSecureSearchPath(os.path.join(app_root, "share", "cura", "resources"))
Resources.addSecureSearchPath(os.path.join(self._app_install_dir, "share", "cura", "resources"))
if not hasattr(sys, "frozen"): if not hasattr(sys, "frozen"):
cura_data_root = os.environ.get('CURA_DATA_ROOT', None)
if cura_data_root:
Resources.addSearchPath(str(Path(cura_data_root).joinpath("resources")))
Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources")) Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources"))
# local Conan cache # local Conan cache
@ -573,7 +601,9 @@ class CuraApplication(QtApplication):
preferences.addPreference("mesh/scale_to_fit", False) preferences.addPreference("mesh/scale_to_fit", False)
preferences.addPreference("mesh/scale_tiny_meshes", True) preferences.addPreference("mesh/scale_tiny_meshes", True)
preferences.addPreference("cura/dialog_on_project_save", True) preferences.addPreference("cura/dialog_on_project_save", True)
preferences.addPreference("cura/dialog_on_ucp_project_save", True)
preferences.addPreference("cura/asked_dialog_on_project_save", False) preferences.addPreference("cura/asked_dialog_on_project_save", False)
preferences.addPreference("cura/asked_dialog_on_ucp_project_save", False)
preferences.addPreference("cura/choice_on_profile_override", "always_ask") preferences.addPreference("cura/choice_on_profile_override", "always_ask")
preferences.addPreference("cura/choice_on_open_project", "always_ask") preferences.addPreference("cura/choice_on_open_project", "always_ask")
preferences.addPreference("cura/use_multi_build_plate", False) preferences.addPreference("cura/use_multi_build_plate", False)
@ -589,6 +619,7 @@ class CuraApplication(QtApplication):
preferences.addPreference("view/invert_zoom", False) preferences.addPreference("view/invert_zoom", False)
preferences.addPreference("view/filter_current_build_plate", False) preferences.addPreference("view/filter_current_build_plate", False)
preferences.addPreference("view/navigation_style", "cura")
preferences.addPreference("cura/sidebar_collapsed", False) preferences.addPreference("cura/sidebar_collapsed", False)
preferences.addPreference("cura/favorite_materials", "") preferences.addPreference("cura/favorite_materials", "")
@ -851,11 +882,8 @@ class CuraApplication(QtApplication):
self._log_hardware_info() self._log_hardware_info()
if len(ApplicationMetadata.DEPENDENCY_INFO) > 0: Logger.debug("Using conan dependencies: {}", str(self.conanInstalls))
Logger.debug("Using Conan managed dependencies: " + ", ".join( Logger.debug("Using python dependencies: {}", str(self.pythonInstalls))
[dep["recipe"]["id"] for dep in ApplicationMetadata.DEPENDENCY_INFO["installed"] if dep["recipe"]["version"] != "latest"]))
else:
Logger.warning("Could not find conan_install_info.json")
Logger.log("i", "Initializing machine error checker") Logger.log("i", "Initializing machine error checker")
self._machine_error_checker = MachineErrorChecker(self) self._machine_error_checker = MachineErrorChecker(self)
@ -884,6 +912,7 @@ class CuraApplication(QtApplication):
# initialize info objects # initialize info objects
self._print_information = PrintInformation.PrintInformation(self) self._print_information = PrintInformation.PrintInformation(self)
self._cura_actions = CuraActions.CuraActions(self) self._cura_actions = CuraActions.CuraActions(self)
self._print_order_manager = PrintOrderManager(self.getObjectsModel().getNodes)
self.processEvents() self.processEvents()
# Initialize setting visibility presets model. # Initialize setting visibility presets model.
self._setting_visibility_presets_model = SettingVisibilityPresetsModel(self.getPreferences(), parent = self) self._setting_visibility_presets_model = SettingVisibilityPresetsModel(self.getPreferences(), parent = self)
@ -941,6 +970,10 @@ class CuraApplication(QtApplication):
self.callLater(self._openFile, file_name) self.callLater(self._openFile, file_name)
for file_name in self._open_file_queue: # Open all the files that were queued up while plug-ins were loading. for file_name in self._open_file_queue: # Open all the files that were queued up while plug-ins were loading.
self.callLater(self._openFile, file_name) self.callLater(self._openFile, file_name)
for url in self._urls_to_open:
self.callLater(self._openUrl, url)
for url in self._open_url_queue:
self.callLater(self._openUrl, url)
initializationFinished = pyqtSignal() initializationFinished = pyqtSignal()
showAddPrintersUncancellableDialog = pyqtSignal() # Used to show the add printers dialog with a greyed background showAddPrintersUncancellableDialog = pyqtSignal() # Used to show the add printers dialog with a greyed background
@ -962,6 +995,7 @@ class CuraApplication(QtApplication):
t.setEnabledAxis([ToolHandle.XAxis, ToolHandle.YAxis, ToolHandle.ZAxis]) t.setEnabledAxis([ToolHandle.XAxis, ToolHandle.YAxis, ToolHandle.ZAxis])
Selection.selectionChanged.connect(self.onSelectionChanged) Selection.selectionChanged.connect(self.onSelectionChanged)
self._print_order_manager.printOrderChanged.connect(self._onPrintOrderChanged)
# Set default background color for scene # Set default background color for scene
self.getRenderer().setBackgroundColor(QColor(245, 245, 245)) self.getRenderer().setBackgroundColor(QColor(245, 245, 245))
@ -1051,6 +1085,10 @@ class CuraApplication(QtApplication):
def getTextManager(self, *args) -> "TextManager": def getTextManager(self, *args) -> "TextManager":
return self._text_manager return self._text_manager
@pyqtSlot()
def setWorkplaceDropToBuildplate(self):
return self._physics.setAppAllModelDropDown()
def getCuraFormulaFunctions(self, *args) -> "CuraFormulaFunctions": def getCuraFormulaFunctions(self, *args) -> "CuraFormulaFunctions":
if self._cura_formula_functions is None: if self._cura_formula_functions is None:
self._cura_formula_functions = CuraFormulaFunctions(self) self._cura_formula_functions = CuraFormulaFunctions(self)
@ -1077,6 +1115,10 @@ class CuraApplication(QtApplication):
self._object_manager = ObjectsModel(self) self._object_manager = ObjectsModel(self)
return self._object_manager return self._object_manager
@pyqtSlot(str, result = "QVariantList")
def getSupportedActionMachineList(self, definition_id: str) -> List["MachineAction"]:
return self._machine_action_manager.getSupportedActions(self._machine_manager.getDefinitionByMachineId(definition_id))
@pyqtSlot(result = QObject) @pyqtSlot(result = QObject)
def getExtrudersModel(self, *args) -> "ExtrudersModel": def getExtrudersModel(self, *args) -> "ExtrudersModel":
if self._extruders_model is None: if self._extruders_model is None:
@ -1102,6 +1144,16 @@ class CuraApplication(QtApplication):
self._build_plate_model = BuildPlateModel(self) self._build_plate_model = BuildPlateModel(self)
return self._build_plate_model return self._build_plate_model
@pyqtSlot()
def exportUcp(self):
writer = self.getMeshFileHandler().getWriter("3MFWriter")
if writer is None:
Logger.warning("3mf writer is not enabled")
return
writer.exportUcp()
def getCuraSceneController(self, *args) -> CuraSceneController: def getCuraSceneController(self, *args) -> CuraSceneController:
if self._cura_scene_controller is None: if self._cura_scene_controller is None:
self._cura_scene_controller = CuraSceneController.createCuraSceneController() self._cura_scene_controller = CuraSceneController.createCuraSceneController()
@@ -1112,14 +1164,16 @@ class CuraApplication(QtApplication):
            self._setting_inheritance_manager = SettingInheritanceManager.createSettingInheritanceManager()
        return self._setting_inheritance_manager

-    def getMachineActionManager(self, *args: Any) -> MachineActionManager.MachineActionManager:
+    @pyqtSlot(result = QObject)
+    def getMachineActionManager(self, *args: Any) -> MachineActionManager:
        """Get the machine action manager

        We ignore any *args given to this, as we also register the machine manager as qml singleton.
        It wants to give this function an engine and script engine, but we don't care about that.
        """
-        return cast(MachineActionManager.MachineActionManager, self._machine_action_manager)
+        return self._machine_action_manager

    @pyqtSlot(result = QObject)
    def getMaterialManagementModel(self) -> MaterialManagementModel:
@@ -1133,7 +1187,8 @@ class CuraApplication(QtApplication):
            self._quality_management_model = QualityManagementModel(parent = self)
        return self._quality_management_model

-    def getSimpleModeSettingsManager(self, *args):
+    @pyqtSlot(result=QObject)
+    def getSimpleModeSettingsManager(self)-> SimpleModeSettingsManager:
        if self._simple_mode_settings_manager is None:
            self._simple_mode_settings_manager = SimpleModeSettingsManager()
        return self._simple_mode_settings_manager
@@ -1150,9 +1205,15 @@ class CuraApplication(QtApplication):
        if event.type() == QEvent.Type.FileOpen:
            if self._plugins_loaded:
-                self._openFile(event.file())
+                if event.file():
+                    self._openFile(event.file())
+                if event.url():
+                    self._openUrl(event.url())
            else:
-                self._open_file_queue.append(event.file())
+                if event.file():
+                    self._open_file_queue.append(event.file())
+                if event.url():
+                    self._open_url_queue.append(event.url())

        if int(event.type()) == 20:  # 'QEvent.Type.Quit' enum isn't there, even though it should be according to docs.
            # Once we're at this point, everything should have been flushed already (past OnExitCallbackManager).
@@ -1170,16 +1231,43 @@ class CuraApplication(QtApplication):
        return self._print_information

-    def getQualityProfilesDropDownMenuModel(self, *args, **kwargs):
+    @pyqtSlot(result=QObject)
+    def getQualityProfilesDropDownMenuModel(self, *args, **kwargs)-> QualityProfilesDropDownMenuModel:
        if self._quality_profile_drop_down_menu_model is None:
            self._quality_profile_drop_down_menu_model = QualityProfilesDropDownMenuModel(self)
        return self._quality_profile_drop_down_menu_model

-    def getCustomQualityProfilesDropDownMenuModel(self, *args, **kwargs):
+    @pyqtSlot(result=QObject)
+    def getCustomQualityProfilesDropDownMenuModel(self, *args, **kwargs)->CustomQualityProfilesDropDownMenuModel:
        if self._custom_quality_profile_drop_down_menu_model is None:
            self._custom_quality_profile_drop_down_menu_model = CustomQualityProfilesDropDownMenuModel(self)
        return self._custom_quality_profile_drop_down_menu_model
@deprecated("SimpleModeSettingsManager is deprecated and will be removed in major SDK release, Use getSimpleModeSettingsManager() instead", since = "5.7.0")
def getSimpleModeSettingsManagerWrapper(self, *args, **kwargs):
return self.getSimpleModeSettingsManager()
@deprecated("MachineActionManager is deprecated and will be removed in major SDK release, Use getMachineActionManager() instead", since="5.7.0")
def getMachineActionManagerWrapper(self, *args, **kwargs):
return self.getMachineActionManager()
@deprecated("QualityManagementModel is deprecated and will be removed in major SDK release, Use getQualityManagementModel() instead", since="5.7.0")
def getQualityManagementModelWrapper(self, *args, **kwargs):
return self.getQualityManagementModel()
@deprecated("MaterialManagementModel is deprecated and will be removed in major SDK release, Use getMaterialManagementModel() instead", since = "5.7.0")
def getMaterialManagementModelWrapper(self, *args, **kwargs):
return self.getMaterialManagementModel()
@deprecated("QualityProfilesDropDownMenuModel is deprecated and will be removed in major SDK release, Use getQualityProfilesDropDownMenuModel() instead", since = "5.7.0")
def getQualityProfilesDropDownMenuModelWrapper(self, *args, **kwargs):
return self.getQualityProfilesDropDownMenuModel()
@deprecated("CustomQualityProfilesDropDownMenuModel is deprecated and will be removed in major SDK release, Use getCustomQualityProfilesDropDownMenuModel() instead", since = "5.7.0")
def getCustomQualityProfilesDropDownMenuModelWrapper(self, *args, **kwargs):
return self.getCustomQualityProfilesDropDownMenuModel()
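For context on the deprecated wrappers above: the sketch below is a hypothetical stand-in, not the actual UM.Decorators implementation, showing the general shape of a decorator that emits a warning and then delegates to the replacement getter.

import functools
import warnings

# Hypothetical @deprecated decorator: warn on every call, then forward to the wrapped function.
def deprecated(message: str, since: str = ""):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(f"{func.__name__} is deprecated since {since}: {message}",
                          DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
    return decorator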
def getCuraAPI(self, *args, **kwargs) -> "CuraAPI": def getCuraAPI(self, *args, **kwargs) -> "CuraAPI":
return self._cura_API return self._cura_API
@ -1195,6 +1283,7 @@ class CuraApplication(QtApplication):
self.processEvents() self.processEvents()
engine.rootContext().setContextProperty("Printer", self) engine.rootContext().setContextProperty("Printer", self)
engine.rootContext().setContextProperty("CuraApplication", self) engine.rootContext().setContextProperty("CuraApplication", self)
engine.rootContext().setContextProperty("PrintOrderManager", self._print_order_manager)
engine.rootContext().setContextProperty("PrintInformation", self._print_information) engine.rootContext().setContextProperty("PrintInformation", self._print_information)
engine.rootContext().setContextProperty("CuraActions", self._cura_actions) engine.rootContext().setContextProperty("CuraActions", self._cura_actions)
engine.rootContext().setContextProperty("CuraSDKVersion", ApplicationMetadata.CuraSDKVersion) engine.rootContext().setContextProperty("CuraSDKVersion", ApplicationMetadata.CuraSDKVersion)
@@ -1208,8 +1297,8 @@ class CuraApplication(QtApplication):
        qmlRegisterSingletonType(MachineManager, "Cura", 1, 0, self.getMachineManager, "MachineManager")
        qmlRegisterSingletonType(IntentManager, "Cura", 1, 6, self.getIntentManager, "IntentManager")
        qmlRegisterSingletonType(SettingInheritanceManager, "Cura", 1, 0, self.getSettingInheritanceManager, "SettingInheritanceManager")
-        qmlRegisterSingletonType(SimpleModeSettingsManager, "Cura", 1, 0, self.getSimpleModeSettingsManager, "SimpleModeSettingsManager")
-        qmlRegisterSingletonType(MachineActionManager.MachineActionManager, "Cura", 1, 0, self.getMachineActionManager, "MachineActionManager")
+        qmlRegisterSingletonType(SimpleModeSettingsManager, "Cura", 1, 0, self.getSimpleModeSettingsManagerWrapper, "SimpleModeSettingsManager")
+        qmlRegisterSingletonType(MachineActionManager, "Cura", 1, 0, self.getMachineActionManagerWrapper, "MachineActionManager")

        self.processEvents()
        qmlRegisterType(NetworkingUtil, "Cura", 1, 5, "NetworkingUtil")
@@ -1234,16 +1323,14 @@ class CuraApplication(QtApplication):
        qmlRegisterType(FavoriteMaterialsModel, "Cura", 1, 0, "FavoriteMaterialsModel")
        qmlRegisterType(GenericMaterialsModel, "Cura", 1, 0, "GenericMaterialsModel")
        qmlRegisterType(MaterialBrandsModel, "Cura", 1, 0, "MaterialBrandsModel")
-        qmlRegisterSingletonType(QualityManagementModel, "Cura", 1, 0, self.getQualityManagementModel, "QualityManagementModel")
-        qmlRegisterSingletonType(MaterialManagementModel, "Cura", 1, 5, self.getMaterialManagementModel, "MaterialManagementModel")
+        qmlRegisterSingletonType(QualityManagementModel, "Cura", 1, 0, self.getQualityManagementModelWrapper,"QualityManagementModel")
+        qmlRegisterSingletonType(MaterialManagementModel, "Cura", 1, 5, self.getMaterialManagementModelWrapper,"MaterialManagementModel")

        self.processEvents()
        qmlRegisterType(DiscoveredPrintersModel, "Cura", 1, 0, "DiscoveredPrintersModel")
        qmlRegisterType(DiscoveredCloudPrintersModel, "Cura", 1, 7, "DiscoveredCloudPrintersModel")
-        qmlRegisterSingletonType(QualityProfilesDropDownMenuModel, "Cura", 1, 0,
-                                 self.getQualityProfilesDropDownMenuModel, "QualityProfilesDropDownMenuModel")
-        qmlRegisterSingletonType(CustomQualityProfilesDropDownMenuModel, "Cura", 1, 0,
-                                 self.getCustomQualityProfilesDropDownMenuModel, "CustomQualityProfilesDropDownMenuModel")
+        qmlRegisterSingletonType(QualityProfilesDropDownMenuModel, "Cura", 1, 0, self.getQualityProfilesDropDownMenuModelWrapper, "QualityProfilesDropDownMenuModel")
+        qmlRegisterSingletonType(CustomQualityProfilesDropDownMenuModel, "Cura", 1, 0, self.getCustomQualityProfilesDropDownMenuModelWrapper, "CustomQualityProfilesDropDownMenuModel")
        qmlRegisterType(NozzleModel, "Cura", 1, 0, "NozzleModel")
        qmlRegisterType(IntentModel, "Cura", 1, 6, "IntentModel")
        qmlRegisterType(IntentCategoryModel, "Cura", 1, 6, "IntentCategoryModel")
@@ -1536,7 +1623,7 @@ class CuraApplication(QtApplication):
        if not nodes:
            return

-        objects_in_filename = {}  # type: Dict[str, List[CuraSceneNode]]
+        objects_in_filename: Dict[str, List[CuraSceneNode]] = {}
        for node in nodes:
            mesh_data = node.getMeshData()
            if mesh_data:
@@ -1550,15 +1637,14 @@ class CuraApplication(QtApplication):
                Logger.log("w", "Unable to reload data because we don't have a filename.")

        for file_name, nodes in objects_in_filename.items():
-            for node in nodes:
-                file_path = os.path.normpath(os.path.dirname(file_name))
-                job = ReadMeshJob(file_name, add_to_recent_files = file_path != tempfile.gettempdir())  # Don't add temp files to the recent files list
-                job._node = node  # type: ignore
-                job.finished.connect(self._reloadMeshFinished)
-                if has_merged_nodes:
-                    job.finished.connect(self.updateOriginOfMergedMeshes)
-                job.start()
+            file_path = os.path.normpath(os.path.dirname(file_name))
+            job = ReadMeshJob(file_name,
+                              add_to_recent_files=file_path != tempfile.gettempdir())  # Don't add temp files to the recent files list
+            job._nodes = nodes  # type: ignore
+            job.finished.connect(self._reloadMeshFinished)
+            if has_merged_nodes:
+                job.finished.connect(self.updateOriginOfMergedMeshes)
+            job.start()

    @pyqtSlot("QStringList")
    def setExpandedCategories(self, categories: List[str]) -> None:
@ -1693,8 +1779,12 @@ class CuraApplication(QtApplication):
Selection.remove(node) Selection.remove(node)
Selection.add(group_node) Selection.add(group_node)
all_nodes = self.getObjectsModel().getNodes()
PrintOrderManager.updatePrintOrdersAfterGroupOperation(all_nodes, group_node, selected_nodes)
@pyqtSlot() @pyqtSlot()
def ungroupSelected(self) -> None: def ungroupSelected(self) -> None:
all_nodes = self.getObjectsModel().getNodes()
selected_objects = Selection.getAllSelectedObjects().copy() selected_objects = Selection.getAllSelectedObjects().copy()
for node in selected_objects: for node in selected_objects:
if node.callDecoration("isGroup"): if node.callDecoration("isGroup"):
@ -1702,21 +1792,30 @@ class CuraApplication(QtApplication):
group_parent = node.getParent() group_parent = node.getParent()
children = node.getChildren().copy() children = node.getChildren().copy()
for child in children:
# Ungroup only 1 level deep
if child.getParent() != node:
continue
# Ungroup only 1 level deep
children_to_ungroup = list(filter(lambda child: child.getParent() == node, children))
for child in children_to_ungroup:
# Set the parent of the children to the parent of the group-node # Set the parent of the children to the parent of the group-node
op.addOperation(SetParentOperation(child, group_parent)) op.addOperation(SetParentOperation(child, group_parent))
# Add all individual nodes to the selection # Add all individual nodes to the selection
Selection.add(child) Selection.add(child)
PrintOrderManager.updatePrintOrdersAfterUngroupOperation(all_nodes, node, children_to_ungroup)
op.push() op.push()
# Note: The group removes itself from the scene once all its children have left it, # Note: The group removes itself from the scene once all its children have left it,
# see GroupDecorator._onChildrenChanged # see GroupDecorator._onChildrenChanged
def _onPrintOrderChanged(self) -> None:
# update object list
scene = self.getController().getScene()
scene.sceneChanged.emit(scene.getRoot())
# reset if already was sliced
Application.getInstance().getBackend().needsSlicing()
Application.getInstance().getBackend().tickle()
def _createSplashScreen(self) -> Optional[CuraSplashScreen.CuraSplashScreen]: def _createSplashScreen(self) -> Optional[CuraSplashScreen.CuraSplashScreen]:
if self._is_headless: if self._is_headless:
return None return None
@@ -1730,9 +1829,10 @@ class CuraApplication(QtApplication):
    def _reloadMeshFinished(self, job) -> None:
        """
-        Function called whenever a ReadMeshJob finishes in the background. It reloads a specific node object in the
+        Function called when ReadMeshJob finishes reloading a file in the background, then update node objects in the
        scene from its source file. The function gets all the nodes that exist in the file through the job result, and
-        then finds the scene node that it wants to refresh by its object id. Each job refreshes only one node.
+        then finds the scene nodes that need to be refreshed by their name. Each job refreshes all nodes of a file.
+        Nodes that are not present in the updated file are kept in the scene.

        :param job: The :py:class:`Uranium.UM.ReadMeshJob.ReadMeshJob` running in the background that reads all the
        meshes in a file
@@ -1742,25 +1842,93 @@ class CuraApplication(QtApplication):
        if len(job_result) == 0:
            Logger.log("e", "Reloading the mesh failed.")
            return

-        object_found = False
-        mesh_data = None
+        renamed_nodes = {}  # type: Dict[str, int]

        # Find the node to be refreshed based on its id
        for job_result_node in job_result:
-            if job_result_node.getId() == job._node.getId():
-                mesh_data = job_result_node.getMeshData()
-                object_found = True
-                break
-        if not object_found:
-            Logger.warning("The object with id {} no longer exists! Keeping the old version in the scene.".format(job_result_node.getId()))
-            return
-        if not mesh_data:
-            Logger.log("w", "Could not find a mesh in reloaded node.")
-            return
-        job._node.setMeshData(mesh_data)
+            mesh_data = job_result_node.getMeshData()
+            if not mesh_data:
+                Logger.log("w", "Could not find a mesh in reloaded node.")
+                continue
+
+            # Solves issues with object naming
+            result_node_name = job_result_node.getName()
+            if not result_node_name:
+                result_node_name = os.path.basename(mesh_data.getFileName())
+            if result_node_name in renamed_nodes:  # objects may get renamed by ObjectsModel._renameNodes() when loaded
+                renamed_nodes[result_node_name] += 1
+                result_node_name = "{0}({1})".format(result_node_name, renamed_nodes[result_node_name])
+            else:
+                renamed_nodes[job_result_node.getName()] = 0
+
+            # Find the matching scene node to replace
+            scene_node = None
+            for replaced_node in job._nodes:
+                if replaced_node.getName() == result_node_name:
+                    scene_node = replaced_node
+                    break
+
+            if scene_node:
+                scene_node.setMeshData(mesh_data)
+            else:
+                # Current node is a new one in the file, or its name has changed
+                # TODO: Load this mesh into the scene. Also alter the "_reloadJobFinished" action in UM.Scene
+                Logger.log("w", "Could not find matching node for object '{0}' in the scene.".format(result_node_name))
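A minimal, self-contained sketch of the duplicate-name bookkeeping used above, with plain strings standing in for scene nodes (the sample names are made up):

# Repeated object names get an "(n)" suffix, mirroring how reloaded nodes are matched by name.
renamed_nodes = {}
for name in ["part.stl", "part.stl", "lid.stl"]:
    if name in renamed_nodes:
        renamed_nodes[name] += 1
        name = "{0}({1})".format(name, renamed_nodes[name])
    else:
        renamed_nodes[name] = 0
    print(name)  # part.stl, part.stl(1), lid.stl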
def _openFile(self, filename): def _openFile(self, filename):
self.readLocalFile(QUrl.fromLocalFile(filename)) self.readLocalFile(QUrl.fromLocalFile(filename))
def _openUrl(self, url: QUrl) -> None:
if url.scheme() not in self._supported_url_schemes:
# only handle cura:// and slicer:// urls schemes
return
match url.host() + url.path():
case "open" | "open/":
query = QUrlQuery(url.query())
model_url = QUrl(query.queryItemValue("file", options=QUrl.ComponentFormattingOption.FullyDecoded))
def on_finish(response):
content_disposition_header_key = QByteArray("content-disposition".encode())
if not response.hasRawHeader(content_disposition_header_key):
Logger.log("w", "Could not find Content-Disposition header in response from {0}".format(
model_url.url()))
# Use the last part of the url as the filename, and assume it is an STL file
filename = model_url.path().split("/")[-1] + ".stl"
else:
# content_disposition is in the format
# ```
# content_disposition attachment; "filename=[FILENAME]"
# ```
# Use a regex to extract the filename
content_disposition = str(response.rawHeader(content_disposition_header_key).data(),
encoding='utf-8')
content_disposition_match = re.match(r'attachment; filename="(?P<filename>.*)"',
content_disposition)
assert content_disposition_match is not None
filename = content_disposition_match.group("filename")
tmp = tempfile.NamedTemporaryFile(suffix=filename, delete=False)
with open(tmp.name, "wb") as f:
f.write(response.readAll())
self.readLocalFile(QUrl.fromLocalFile(tmp.name), add_to_recent_files=False)
def on_error(*args, **kwargs):
Logger.log("w", "Could not download file from {0}".format(model_url.url()))
Message("Could not download file: " + str(model_url.url()),
title= "Loading Model failed",
message_type=Message.MessageType.ERROR).show()
return
self.getHttpRequestManager().get(
model_url.url(),
callback=on_finish,
error_callback=on_error,
)
case path:
Logger.log("w", "Unsupported url scheme path: {0}".format(path))
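For reference, a minimal sketch of the kind of deep link _openUrl() handles. The example URL and its "file" parameter are made up; only the QUrl/QUrlQuery parsing step is shown, not the download.

from PyQt6.QtCore import QUrl, QUrlQuery

url = QUrl("cura://open/?file=https%3A%2F%2Fexample.com%2Fmodel.stl")  # hypothetical deep link
query = QUrlQuery(url.query())
model_url = QUrl(query.queryItemValue("file", QUrl.ComponentFormattingOption.FullyDecoded))
print(url.host() + url.path())  # "open/" -> matches the "open" | "open/" case above
print(model_url.url())          # the decoded model URL to fetch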
def _addProfileReader(self, profile_reader): def _addProfileReader(self, profile_reader):
# TODO: Add the profile reader to the list of plug-ins that can be used when importing profiles. # TODO: Add the profile reader to the list of plug-ins that can be used when importing profiles.
pass pass
@ -1824,7 +1992,7 @@ class CuraApplication(QtApplication):
Logger.log("i", "Attempting to read file %s", file.toString()) Logger.log("i", "Attempting to read file %s", file.toString())
if not file.isValid(): if not file.isValid():
return return
self._open_project_mode = project_mode
scene = self.getController().getScene() scene = self.getController().getScene()
for node in DepthFirstIterator(scene.getRoot()): for node in DepthFirstIterator(scene.getRoot()):
@@ -1834,16 +2002,16 @@ class CuraApplication(QtApplication):
        is_project_file = self.checkIsValidProjectFile(file)

-        if project_mode is None:
-            project_mode = self.getPreferences().getValue("cura/choice_on_open_project")
+        if self._open_project_mode is None:
+            self._open_project_mode = self.getPreferences().getValue("cura/choice_on_open_project")

-        if is_project_file and project_mode == "open_as_project":
+        if is_project_file and self._open_project_mode == "open_as_project":
            # open as project immediately without presenting a dialog
            workspace_handler = self.getWorkspaceFileHandler()
            workspace_handler.readLocalFile(file, add_to_recent_files_hint = add_to_recent_files)
            return

-        if is_project_file and project_mode == "always_ask":
+        if is_project_file and self._open_project_mode == "always_ask":
            # present a dialog asking to open as project or import models
            self.callLater(self.openProjectFile.emit, file, add_to_recent_files)
            return
@@ -1943,7 +2111,8 @@ class CuraApplication(QtApplication):
            node.scale(original_node.getScale())

        node.setSelectable(True)
-        node.setName(os.path.basename(file_name))
+        if not node.getName():
+            node.setName(os.path.basename(file_name))
        self.getBuildVolume().checkBoundsAndUpdate(node)

        is_non_sliceable = "." + file_extension in self._non_sliceable_extensions
@ -1977,8 +2146,11 @@ class CuraApplication(QtApplication):
center_y = 0 center_y = 0
node.translate(Vector(0, center_y, 0)) node.translate(Vector(0, center_y, 0))
nodes_to_arrange.append(node) nodes_to_arrange.append(node)
# If the file is a project, and models are to be loaded from that project,
# models inside file should be arranged in buildplate.
elif self._open_project_mode == "open_as_model":
nodes_to_arrange.append(node)
# This node is deep copied from some other node which already has a BuildPlateDecorator, but the deepcopy # This node is deep copied from some other node which already has a BuildPlateDecorator, but the deepcopy
# of BuildPlateDecorator produces one that's associated with build plate -1. So, here we need to check if # of BuildPlateDecorator produces one that's associated with build plate -1. So, here we need to check if
@ -2130,3 +2302,11 @@ class CuraApplication(QtApplication):
@pyqtProperty(bool, constant=True) @pyqtProperty(bool, constant=True)
def isEnterprise(self) -> bool: def isEnterprise(self) -> bool:
return ApplicationMetadata.IsEnterpriseVersion return ApplicationMetadata.IsEnterpriseVersion
@pyqtProperty("QVariant", constant=True)
def conanInstalls(self) -> Dict[str, Dict[str, str]]:
return self._conan_installs
@pyqtProperty("QVariant", constant=True)
def pythonInstalls(self) -> Dict[str, Dict[str, str]]:
return self._python_installs

cura/HitChecker.py (new file, 88 lines added)
@@ -0,0 +1,88 @@
from typing import List, Dict
from cura.Scene.CuraSceneNode import CuraSceneNode
class HitChecker:
"""Checks if nodes can be printed without causing any collisions and interference"""
def __init__(self, nodes: List[CuraSceneNode]) -> None:
self._hit_map = self._buildHitMap(nodes)
def anyTwoNodesBlockEachOther(self, nodes: List[CuraSceneNode]) -> bool:
"""Returns True if any 2 nodes block each other"""
for a in nodes:
for b in nodes:
if self._hit_map[a][b] and self._hit_map[b][a]:
return True
return False
def canPrintBefore(self, node: CuraSceneNode, other_nodes: List[CuraSceneNode]) -> bool:
"""Returns True if node doesn't block other_nodes and can be printed before them"""
no_hits = all(not self._hit_map[node][other_node] for other_node in other_nodes)
return no_hits
def canPrintAfter(self, node: CuraSceneNode, other_nodes: List[CuraSceneNode]) -> bool:
"""Returns True if node doesn't hit other nodes and can be printed after them"""
no_hits = all(not self._hit_map[other_node][node] for other_node in other_nodes)
return no_hits
def calculateScore(self, a: CuraSceneNode, b: CuraSceneNode) -> int:
"""Calculate score simply sums the number of other objects it 'blocks'
:param a: node
:param b: node
:return: sum of the number of other objects
"""
score_a = sum(self._hit_map[a].values())
score_b = sum(self._hit_map[b].values())
return score_a - score_b
def canPrintNodesInProvidedOrder(self, ordered_nodes: List[CuraSceneNode]) -> bool:
"""Returns True If nodes don't have any hits in provided order"""
for node_index, node in enumerate(ordered_nodes):
nodes_before = ordered_nodes[:node_index - 1] if node_index - 1 >= 0 else []
nodes_after = ordered_nodes[node_index + 1:] if node_index + 1 < len(ordered_nodes) else []
if not self.canPrintBefore(node, nodes_after) or not self.canPrintAfter(node, nodes_before):
return False
return True
@staticmethod
def _buildHitMap(nodes: List[CuraSceneNode]) -> Dict[CuraSceneNode, CuraSceneNode]:
"""Pre-computes all hits between all objects
:nodes: nodes that need to be checked for collisions
:return: dictionary where hit_map[node1][node2] is False if node1 can be printed before node2
"""
hit_map = {j: {i: HitChecker._checkHit(j, i) for i in nodes} for j in nodes}
return hit_map
@staticmethod
def _checkHit(a: CuraSceneNode, b: CuraSceneNode) -> bool:
"""Checks if a can be printed before b
:param a: node
:param b: node
:return: False if a can be printed before b
"""
if a == b:
return False
a_hit_hull = a.callDecoration("getConvexHullBoundary")
b_hit_hull = b.callDecoration("getConvexHullHeadFull")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
# Adhesion areas must never overlap, regardless of printing order
# This would cause over-extrusion
a_hit_hull = a.callDecoration("getAdhesionArea")
b_hit_hull = b.callDecoration("getAdhesionArea")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
else:
return False
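As a toy illustration of the pre-computed hit map idea in HitChecker, with plain strings instead of CuraSceneNode and a made-up blocking relation instead of convex-hull checks:

nodes = ["a", "b", "c"]
blocks = {("a", "b")}  # assumption: printing "a" first would block access to "b"
hit_map = {x: {y: (x != y and (x, y) in blocks) for y in nodes} for x in nodes}

def can_print_before(node, others):
    return all(not hit_map[node][other] for other in others)

def can_print_after(node, others):
    return all(not hit_map[other][node] for other in others)

print(can_print_before("c", ["a", "b"]))  # True: "c" blocks nothing
print(can_print_before("a", ["b"]))       # False: "a" would block "b"
print(can_print_after("b", ["a"]))        # False: "b" cannot be printed after "a"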

@@ -1,5 +1,6 @@
# Copyright (c) 2019 Ultimaker B.V. # Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import math
import numpy import numpy
from typing import Optional, cast from typing import Optional, cast
@@ -66,7 +67,7 @@ class LayerPolygon:
        #  Buffering the colors shouldn't be necessary as it is not
        #  re-used and can save a lot of memory usage.
        self._color_map = LayerPolygon.getColorMap()
-        self._colors = self._color_map[self._types]  # type: numpy.ndarray
+        self._colors: numpy.ndarray = self._color_map[self._types]

        # When type is used as index returns true if type == LayerPolygon.InfillType
        # or type == LayerPolygon.SkinType
@@ -74,8 +75,8 @@ class LayerPolygon:
        # Should be generated in better way, not hardcoded.
        self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype=bool)

-        self._build_cache_line_mesh_mask = None  # type: Optional[numpy.ndarray]
-        self._build_cache_needed_points = None  # type: Optional[numpy.ndarray]
+        self._build_cache_line_mesh_mask: Optional[numpy.ndarray] = None
+        self._build_cache_needed_points: Optional[numpy.ndarray] = None

    def buildCache(self) -> None:
        # For the line mesh we do not draw Infill or Jumps. Therefore those lines are filtered out.
@@ -186,6 +187,11 @@ class LayerPolygon:
    def types(self):
        return self._types

+    @property
+    def lineLengths(self):
+        data_array = numpy.array(self._data)
+        return numpy.linalg.norm(data_array[1:] - data_array[:-1], axis=1)
+
    @property
    def data(self):
        return self._data
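A quick, self-contained check of what the new lineLengths property computes, using toy coordinates rather than real layer data: the Euclidean distance between each pair of consecutive points.

import numpy

data = numpy.array([[0.0, 0.0, 0.0], [3.0, 4.0, 0.0], [3.0, 4.0, 2.0]])
print(numpy.linalg.norm(data[1:] - data[:-1], axis=1))  # [5. 2.]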

@@ -31,6 +31,7 @@ class MaterialNode(ContainerNode):
my_metadata = container_registry.findContainersMetadata(id = container_id)[0] my_metadata = container_registry.findContainersMetadata(id = container_id)[0]
self.base_file = my_metadata["base_file"] self.base_file = my_metadata["base_file"]
self.material_type = my_metadata["material"] self.material_type = my_metadata["material"]
self.brand = my_metadata["brand"]
self.guid = my_metadata["GUID"] self.guid = my_metadata["GUID"]
self._loadAll() self._loadAll()
container_registry.containerRemoved.connect(self._onRemoved) container_registry.containerRemoved.connect(self._onRemoved)
@ -80,6 +81,7 @@ class MaterialNode(ContainerNode):
# such as "generic_pla_ultimaker_s5_AA_0.4". So we search with the "base_file" which is the material_root_id. # such as "generic_pla_ultimaker_s5_AA_0.4". So we search with the "base_file" which is the material_root_id.
else: else:
qualities = container_registry.findInstanceContainersMetadata(type = "quality", definition = self.variant.machine.quality_definition, material = self.base_file) qualities = container_registry.findInstanceContainersMetadata(type = "quality", definition = self.variant.machine.quality_definition, material = self.base_file)
if not qualities: if not qualities:
my_material_type = self.material_type my_material_type = self.material_type
if self.variant.machine.has_variants: if self.variant.machine.has_variants:
@@ -89,9 +91,22 @@ class MaterialNode(ContainerNode):
            else:
                qualities_any_material = container_registry.findInstanceContainersMetadata(type = "quality", definition = self.variant.machine.quality_definition)

-                all_material_base_files = {material_metadata["base_file"] for material_metadata in container_registry.findInstanceContainersMetadata(type = "material", material = my_material_type)}
-                qualities.extend((quality for quality in qualities_any_material if quality.get("material") in all_material_base_files))
+                # First we attempt to find materials that have the same brand but not the right color
+                all_material_base_files_right_brand = {material_metadata["base_file"] for material_metadata in container_registry.findInstanceContainersMetadata(type = "material", material = my_material_type, brand = self.brand)}
+                right_brand_no_color_qualities = [quality for quality in qualities_any_material if quality.get("material") in all_material_base_files_right_brand]
+                if right_brand_no_color_qualities:
+                    # We found qualities for materials with the right brand but not with the right color. Use those.
+                    qualities.extend(right_brand_no_color_qualities)
+                else:
+                    # Fall back to generic
+                    all_material_base_files = {material_metadata["base_file"] for material_metadata in
+                                               container_registry.findInstanceContainersMetadata(type="material",
+                                                                                                 material=my_material_type)}
+                    no_brand_no_color_qualities = (quality for quality in qualities_any_material if
+                                                   quality.get("material") in all_material_base_files)
+                    qualities.extend(no_brand_no_color_qualities)

            if not qualities:  # No quality profiles found. Go by GUID then.
                my_guid = self.guid
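A minimal sketch of the fallback order introduced above, with made-up metadata instead of container registry lookups: prefer qualities whose material matches the same brand, otherwise fall back to any material of the same type.

qualities_any_material = [{"id": "q_brand", "material": "brandx_pla"},
                          {"id": "q_generic", "material": "generic_pla"}]
right_brand_base_files = {"brandx_pla"}
any_base_files = {"brandx_pla", "generic_pla"}

qualities = [q for q in qualities_any_material if q["material"] in right_brand_base_files]
if not qualities:
    qualities = [q for q in qualities_any_material if q["material"] in any_base_files]
print([q["id"] for q in qualities])  # ['q_brand']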

@@ -51,6 +51,9 @@ class CompatibleMachineModel(ListModel):
for output_device in machine_manager.printerOutputDevices: for output_device in machine_manager.printerOutputDevices:
for printer in output_device.printers: for printer in output_device.printers:
extruder_configs = dict() extruder_configs = dict()
# If the printer name already exists in the queue, skip it
if printer.name in [item["name"] for item in self.items]:
continue
# initialize & add current active material: # initialize & add current active material:
for extruder in printer.extruders: for extruder in printer.extruders:

@@ -227,7 +227,7 @@ class ExtrudersModel(ListModel):
"material_brand": "", "material_brand": "",
"color_name": "", "color_name": "",
"material_type": "", "material_type": "",
"material_label": "" "material_name": ""
} }
items.append(item) items.append(item)
if self._items != items: if self._items != items:

@@ -39,7 +39,9 @@ class IntentCategoryModel(ListModel):
""" """
if len(cls._translations) == 0: if len(cls._translations) == 0:
cls._translations["default"] = { cls._translations["default"] = {
"name": catalog.i18nc("@label", "Default") "name": catalog.i18nc("@label", "Balanced"),
"description": catalog.i18nc("@text",
"The balanced profile is designed to strike a balance between productivity, surface quality, mechanical properties and dimensional accuracy.")
} }
cls._translations["visual"] = { cls._translations["visual"] = {
"name": catalog.i18nc("@label", "Visual"), "name": catalog.i18nc("@label", "Visual"),
@ -59,6 +61,11 @@ class IntentCategoryModel(ListModel):
"The annealing profile requires post-processing in an oven after the print is finished. This profile retains the dimensional accuracy of the printed part after annealing and improves strength, stiffness, and thermal resistance.") "The annealing profile requires post-processing in an oven after the print is finished. This profile retains the dimensional accuracy of the printed part after annealing and improves strength, stiffness, and thermal resistance.")
} }
cls._translations["solid"] = {
"name": catalog.i18nc("@label", "Solid"),
"description": catalog.i18nc("@text",
"A highly dense and strong part but at a slower print time. Great for functional parts.")
}
return cls._translations return cls._translations
def __init__(self, intent_category: str) -> None: def __init__(self, intent_category: str) -> None:

@@ -57,8 +57,9 @@ class IntentSelectionModel(ListModel):
        self._onChange()

-    _default_intent_categories = ["default", "visual", "engineering", "quick", "annealing"]
-    _icons = {"default": "GearCheck", "visual": "Visual", "engineering": "Nut", "quick": "SpeedOMeter", "annealing": "Anneal"}
+    _default_intent_categories = ["default", "visual", "engineering", "quick", "annealing", "solid"]
+    _icons = {"default": "GearCheck", "visual": "Visual", "engineering": "Nut", "quick": "SpeedOMeter",
+              "annealing": "Anneal", "solid": "Hammer"}

    def _onContainerChange(self, container: ContainerInterface) -> None:
        """Updates the list of intents if an intent profile was added or removed."""

@@ -8,7 +8,9 @@ catalog = i18nCatalog("cura")
intent_translations = collections.OrderedDict()  # type: collections.OrderedDict[str, Dict[str, Optional[str]]]
intent_translations["default"] = {
-    "name": catalog.i18nc("@label", "Default")
+    "name": catalog.i18nc("@label", "Balanced"),
+    "description": catalog.i18nc("@text",
+                                 "The balanced profile is designed to strike a balance between productivity, surface quality, mechanical properties and dimensional accuracy.")
}
intent_translations["visual"] = {
    "name": catalog.i18nc("@label", "Visual"),
@ -22,3 +24,8 @@ intent_translations["quick"] = {
"name": catalog.i18nc("@label", "Draft"), "name": catalog.i18nc("@label", "Draft"),
"description": catalog.i18nc("@text", "The draft profile is designed to print initial prototypes and concept validation with the intent of significant print time reduction.") "description": catalog.i18nc("@text", "The draft profile is designed to print initial prototypes and concept validation with the intent of significant print time reduction.")
} }
intent_translations["solid"] = {
"name": catalog.i18nc("@label", "Solid"),
"description": catalog.i18nc("@text",
"A highly dense and strong part but at a slower print time. Great for functional parts.")
}

@@ -44,6 +44,10 @@ class MaterialBrandsModel(BaseMaterialsModel):
if bool(container_node.getMetaDataEntry("removed", False)): if bool(container_node.getMetaDataEntry("removed", False)):
continue continue
# Ignore materials that are marked as not visible for whatever reason
if not bool(container_node.getMetaDataEntry("visible", True)):
continue
# Add brands we haven't seen yet to the dict, skipping generics # Add brands we haven't seen yet to the dict, skipping generics
brand = container_node.getMetaDataEntry("brand", "") brand = container_node.getMetaDataEntry("brand", "")
if brand.lower() == "generic": if brand.lower() == "generic":

@@ -344,7 +344,7 @@ class QualityManagementModel(ListModel):
"quality_type": quality_group.quality_type, "quality_type": quality_group.quality_type,
"quality_changes_group": None, "quality_changes_group": None,
"intent_category": "default", "intent_category": "default",
"section_name": catalog.i18nc("@label", "Default"), "section_name": catalog.i18nc("@label", "Balanced"),
"layer_height": layer_height, # layer_height is only used for sorting "layer_height": layer_height, # layer_height is only used for sorting
} }
item_list.append(item) item_list.append(item)

@@ -16,6 +16,7 @@ from UM.TaskManagement.HttpRequestManager import HttpRequestManager  # To download log-in tokens.
catalog = i18nCatalog("cura") catalog = i18nCatalog("cura")
TOKEN_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" TOKEN_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S"
REQUEST_TIMEOUT = 5 # Seconds
class AuthorizationHelpers: class AuthorizationHelpers:
@ -40,6 +41,7 @@ class AuthorizationHelpers:
""" """
data = { data = {
"client_id": self._settings.CLIENT_ID if self._settings.CLIENT_ID is not None else "", "client_id": self._settings.CLIENT_ID if self._settings.CLIENT_ID is not None else "",
"client_secret": self._settings.CLIENT_SECRET if self._settings.CLIENT_SECRET is not None else "",
"redirect_uri": self._settings.CALLBACK_URL if self._settings.CALLBACK_URL is not None else "", "redirect_uri": self._settings.CALLBACK_URL if self._settings.CALLBACK_URL is not None else "",
"grant_type": "authorization_code", "grant_type": "authorization_code",
"code": authorization_code, "code": authorization_code,
@@ -52,7 +54,8 @@ class AuthorizationHelpers:
            data = urllib.parse.urlencode(data).encode("UTF-8"),
            headers_dict = headers,
            callback = lambda response: self.parseTokenResponse(response, callback),
-            error_callback = lambda response, _: self.parseTokenResponse(response, callback)
+            error_callback = lambda response, _: self.parseTokenResponse(response, callback),
+            timeout = REQUEST_TIMEOUT
        )
def getAccessTokenUsingRefreshToken(self, refresh_token: str, callback: Callable[[AuthenticationResponse], None]) -> None: def getAccessTokenUsingRefreshToken(self, refresh_token: str, callback: Callable[[AuthenticationResponse], None]) -> None:
@ -64,6 +67,7 @@ class AuthorizationHelpers:
Logger.log("d", "Refreshing the access token for [%s]", self._settings.OAUTH_SERVER_URL) Logger.log("d", "Refreshing the access token for [%s]", self._settings.OAUTH_SERVER_URL)
data = { data = {
"client_id": self._settings.CLIENT_ID if self._settings.CLIENT_ID is not None else "", "client_id": self._settings.CLIENT_ID if self._settings.CLIENT_ID is not None else "",
"client_secret": self._settings.CLIENT_SECRET if self._settings.CLIENT_SECRET is not None else "",
"redirect_uri": self._settings.CALLBACK_URL if self._settings.CALLBACK_URL is not None else "", "redirect_uri": self._settings.CALLBACK_URL if self._settings.CALLBACK_URL is not None else "",
"grant_type": "refresh_token", "grant_type": "refresh_token",
"refresh_token": refresh_token, "refresh_token": refresh_token,
@@ -75,7 +79,9 @@ class AuthorizationHelpers:
            data = urllib.parse.urlencode(data).encode("UTF-8"),
            headers_dict = headers,
            callback = lambda response: self.parseTokenResponse(response, callback),
-            error_callback = lambda response, _: self.parseTokenResponse(response, callback)
+            error_callback = lambda response, _: self.parseTokenResponse(response, callback),
+            urgent = True,
+            timeout = REQUEST_TIMEOUT
        )
def parseTokenResponse(self, token_response: QNetworkReply, callback: Callable[[AuthenticationResponse], None]) -> None: def parseTokenResponse(self, token_response: QNetworkReply, callback: Callable[[AuthenticationResponse], None]) -> None:
@@ -120,7 +126,8 @@ class AuthorizationHelpers:
            check_token_url,
            headers_dict = headers,
            callback = lambda reply: self._parseUserProfile(reply, success_callback, failed_callback),
-            error_callback = lambda _, _2: failed_callback() if failed_callback is not None else None
+            error_callback = lambda _, _2: failed_callback() if failed_callback is not None else None,
+            timeout = REQUEST_TIMEOUT
        )
def _parseUserProfile(self, reply: QNetworkReply, success_callback: Optional[Callable[[UserProfile], None]], failed_callback: Optional[Callable[[], None]] = None) -> None: def _parseUserProfile(self, reply: QNetworkReply, success_callback: Optional[Callable[[UserProfile], None]], failed_callback: Optional[Callable[[], None]] = None) -> None:

@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Ultimaker B.V.
+# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.

import json
@@ -6,13 +6,14 @@ from datetime import datetime, timedelta
from typing import Callable, Dict, Optional, TYPE_CHECKING, Union
from urllib.parse import urlencode, quote_plus

-from PyQt6.QtCore import QUrl
+from PyQt6.QtCore import QUrl, QTimer
from PyQt6.QtGui import QDesktopServices

from UM.Logger import Logger
from UM.Message import Message
from UM.Signal import Signal
from UM.i18n import i18nCatalog
+from UM.TaskManagement.HttpRequestManager import HttpRequestManager  # To download log-in tokens.

from cura.OAuth2.AuthorizationHelpers import AuthorizationHelpers, TOKEN_TIMESTAMP_FORMAT
from cura.OAuth2.LocalAuthorizationServer import LocalAuthorizationServer
from cura.OAuth2.Models import AuthenticationResponse, BaseModel
@ -25,26 +26,32 @@ if TYPE_CHECKING:
MYCLOUD_LOGOFF_URL = "https://account.ultimaker.com/logoff?utm_source=cura&utm_medium=software&utm_campaign=change-account-before-adding-printers" MYCLOUD_LOGOFF_URL = "https://account.ultimaker.com/logoff?utm_source=cura&utm_medium=software&utm_campaign=change-account-before-adding-printers"
REFRESH_TOKEN_MAX_RETRIES = 15
REFRESH_TOKEN_RETRY_INTERVAL = 1000
class AuthorizationService:
    """The authorization service is responsible for handling the login flow, storing user credentials and providing
    account information.
    """

-    # Emit signal when authentication is completed.
-    onAuthStateChanged = Signal()
+    def __init__(self,
+                 settings: "OAuth2Settings",
+                 preferences: Optional["Preferences"] = None,
+                 get_user_profile: bool = True) -> None:
+        # Emit signal when authentication is completed.
+        self.onAuthStateChanged = Signal()

-    # Emit signal when authentication failed.
-    onAuthenticationError = Signal()
+        # Emit signal when authentication failed.
+        self.onAuthenticationError = Signal()

-    accessTokenChanged = Signal()
+        self.accessTokenChanged = Signal()

-    def __init__(self, settings: "OAuth2Settings", preferences: Optional["Preferences"] = None) -> None:
        self._settings = settings
        self._auth_helpers = AuthorizationHelpers(settings)
        self._auth_url = "{}/authorize".format(self._settings.OAUTH_SERVER_URL)
        self._auth_data: Optional[AuthenticationResponse] = None
        self._user_profile: Optional["UserProfile"] = None
+        self._get_user_profile: bool = get_user_profile
        self._preferences = preferences
        self._server = LocalAuthorizationServer(self._auth_helpers, self._onAuthStateChanged, daemon=True)
        self._currently_refreshing_token = False  # Whether we are currently in the process of refreshing auth. Don't make new requests while busy.
@ -53,6 +60,12 @@ class AuthorizationService:
self.onAuthStateChanged.connect(self._authChanged) self.onAuthStateChanged.connect(self._authChanged)
self._refresh_token_retries = 0
self._refresh_token_retry_timer = QTimer()
self._refresh_token_retry_timer.setInterval(REFRESH_TOKEN_RETRY_INTERVAL)
self._refresh_token_retry_timer.setSingleShot(True)
self._refresh_token_retry_timer.timeout.connect(self.refreshAccessToken)
def _authChanged(self, logged_in): def _authChanged(self, logged_in):
if logged_in and self._unable_to_get_data_message is not None: if logged_in and self._unable_to_get_data_message is not None:
self._unable_to_get_data_message.hide() self._unable_to_get_data_message.hide()
@@ -163,16 +176,29 @@ class AuthorizationService:
            return

        def process_auth_data(response: AuthenticationResponse) -> None:
+            self._currently_refreshing_token = False
            if response.success:
+                self._refresh_token_retries = 0
                self._storeAuthData(response)
+                HttpRequestManager.getInstance().setDelayRequests(False)
                self.onAuthStateChanged.emit(logged_in = True)
            else:
-                Logger.warning("Failed to get a new access token from the server.")
-                self.onAuthStateChanged.emit(logged_in = False)
+                if self._refresh_token_retries >= REFRESH_TOKEN_MAX_RETRIES:
+                    self._refresh_token_retries = 0
+                    Logger.warning("Failed to get a new access token from the server, giving up.")
+                    HttpRequestManager.getInstance().setDelayRequests(False)
+                    self.onAuthStateChanged.emit(logged_in = False)
+                else:
+                    # Retry a bit later, network may be offline right now and will hopefully be back soon
+                    Logger.warning("Failed to get a new access token from the server, retrying later.")
+                    self._refresh_token_retries += 1
+                    self._refresh_token_retry_timer.start()

        if self._currently_refreshing_token:
            Logger.debug("Was already busy refreshing token. Do not start a new request.")
            return
+        HttpRequestManager.getInstance().setDelayRequests(True)
        self._currently_refreshing_token = True
        self._auth_helpers.getAccessTokenUsingRefreshToken(self._auth_data.refresh_token, process_auth_data)
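A minimal sketch of the bounded retry behaviour added above; a plain loop with fake responses stands in for the single-shot QTimer and the real token requests, and the outcomes are made up.

REFRESH_TOKEN_MAX_RETRIES = 15

def refresh_with_retries(responses):
    """responses: iterable of booleans standing in for token-refresh outcomes."""
    retries = 0
    for success in responses:
        if success:
            return "logged_in"
        retries += 1
        if retries > REFRESH_TOKEN_MAX_RETRIES:
            return "logged_out"  # give up once the retry budget is spent
    return "logged_out"

print(refresh_with_retries([False, False, True]))  # logged_in on the third attempt
print(refresh_with_retries([False] * 20))          # logged_out after exhausting retries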
@@ -294,7 +320,8 @@ class AuthorizationService:
        self._auth_data = auth_data
        self._currently_refreshing_token = False

        if auth_data:
-            self.getUserProfile()
+            if self._get_user_profile:
+                self.getUserProfile()
            self._preferences.setValue(self._settings.AUTH_DATA_PREFERENCE_KEY, json.dumps(auth_data.dump()))
        else:
            Logger.log("d", "Clearing the user profile")

@@ -16,6 +16,7 @@ class OAuth2Settings(BaseModel):
CALLBACK_PORT = None # type: Optional[int] CALLBACK_PORT = None # type: Optional[int]
OAUTH_SERVER_URL = None # type: Optional[str] OAUTH_SERVER_URL = None # type: Optional[str]
CLIENT_ID = None # type: Optional[str] CLIENT_ID = None # type: Optional[str]
CLIENT_SECRET = None # type: Optional[str]
CLIENT_SCOPES = None # type: Optional[str] CLIENT_SCOPES = None # type: Optional[str]
CALLBACK_URL = None # type: Optional[str] CALLBACK_URL = None # type: Optional[str]
AUTH_DATA_PREFERENCE_KEY = "" # type: str AUTH_DATA_PREFERENCE_KEY = "" # type: str

@@ -7,6 +7,11 @@ from UM.Scene.Iterator import Iterator
from UM.Scene.SceneNode import SceneNode from UM.Scene.SceneNode import SceneNode
from functools import cmp_to_key from functools import cmp_to_key
from cura.HitChecker import HitChecker
from cura.PrintOrderManager import PrintOrderManager
from cura.Scene.CuraSceneNode import CuraSceneNode
class OneAtATimeIterator(Iterator.Iterator): class OneAtATimeIterator(Iterator.Iterator):
"""Iterator that returns a list of nodes in the order that they need to be printed """Iterator that returns a list of nodes in the order that they need to be printed
@ -16,8 +21,6 @@ class OneAtATimeIterator(Iterator.Iterator):
def __init__(self, scene_node) -> None: def __init__(self, scene_node) -> None:
super().__init__(scene_node) # Call super to make multiple inheritance work. super().__init__(scene_node) # Call super to make multiple inheritance work.
self._hit_map = [[]] # type: List[List[bool]] # For each node, which other nodes this hits. A grid of booleans on which nodes hit which.
self._original_node_list = [] # type: List[SceneNode] # The nodes that need to be checked for collisions.
def _fillStack(self) -> None: def _fillStack(self) -> None:
"""Fills the ``_node_stack`` with a list of scene nodes that need to be printed in order. """ """Fills the ``_node_stack`` with a list of scene nodes that need to be printed in order. """
@@ -38,104 +41,50 @@ class OneAtATimeIterator(Iterator.Iterator):
            self._node_stack = node_list[:]
            return

-        # Copy the list
-        self._original_node_list = node_list[:]
-
-        # Initialise the hit map (pre-compute all hits between all objects)
-        self._hit_map = [[self._checkHit(i, j) for i in node_list] for j in node_list]
-
-        # Check if we have to files that block each other. If this is the case, there is no solution!
-        for a in range(0, len(node_list)):
-            for b in range(0, len(node_list)):
-                if a != b and self._hit_map[a][b] and self._hit_map[b][a]:
-                    return
+        hit_checker = HitChecker(node_list)
+
+        if PrintOrderManager.isUserDefinedPrintOrderEnabled():
+            self._node_stack = self._getNodesOrderedByUser(hit_checker, node_list)
+        else:
+            self._node_stack = self._getNodesOrderedAutomatically(hit_checker, node_list)
+
+            # update print orders so that user can try to arrange the nodes automatically first
+            # and if result is not satisfactory he/she can switch to manual mode and change it
+            for index, node in enumerate(self._node_stack):
+                node.printOrder = index + 1
+
+    @staticmethod
+    def _getNodesOrderedByUser(hit_checker: HitChecker, node_list: List[CuraSceneNode]) -> List[CuraSceneNode]:
+        nodes_ordered_by_user = sorted(node_list, key=lambda n: n.printOrder)
+        if hit_checker.canPrintNodesInProvidedOrder(nodes_ordered_by_user):
+            return nodes_ordered_by_user
+        return []  # No solution
+
+    @staticmethod
+    def _getNodesOrderedAutomatically(hit_checker: HitChecker, node_list: List[CuraSceneNode]) -> List[CuraSceneNode]:
+        # Check if we have two files that block each other. If this is the case, there is no solution!
+        if hit_checker.anyTwoNodesBlockEachOther(node_list):
+            return []  # No solution

        # Sort the original list so that items that block the most other objects are at the beginning.
        # This does not decrease the worst case running time, but should improve it in most cases.
-        sorted(node_list, key = cmp_to_key(self._calculateScore))
+        node_list = sorted(node_list, key = cmp_to_key(hit_checker.calculateScore))

        todo_node_list = [_ObjectOrder([], node_list)]
        while len(todo_node_list) > 0:
            current = todo_node_list.pop()
            for node in current.todo:
                # Check if the object can be placed with what we have and still allows for a solution in the future
-                if not self._checkHitMultiple(node, current.order) and not self._checkBlockMultiple(node, current.todo):
+                if hit_checker.canPrintAfter(node, current.order) and hit_checker.canPrintBefore(node, current.todo):
                    # We found a possible result. Create new todo & order list.
                    new_todo_list = current.todo[:]
                    new_todo_list.remove(node)
                    new_order = current.order[:] + [node]
                    if len(new_todo_list) == 0:
                        # We have no more nodes to check, so quit looking.
-                        self._node_stack = new_order
-                        return
+                        return new_order  # Solution found!
                    todo_node_list.append(_ObjectOrder(new_order, new_todo_list))
-        self._node_stack = []  # No result found!
+        return []  # No result found!
# Check if first object can be printed before the provided list (using the hit map)
def _checkHitMultiple(self, node: SceneNode, other_nodes: List[SceneNode]) -> bool:
node_index = self._original_node_list.index(node)
for other_node in other_nodes:
other_node_index = self._original_node_list.index(other_node)
if self._hit_map[node_index][other_node_index]:
return True
return False
def _checkBlockMultiple(self, node: SceneNode, other_nodes: List[SceneNode]) -> bool:
"""Check for a node whether it hits any of the other nodes.
:param node: The node to check whether it collides with the other nodes.
:param other_nodes: The nodes to check for collisions.
:return: returns collision between nodes
"""
node_index = self._original_node_list.index(node)
for other_node in other_nodes:
other_node_index = self._original_node_list.index(other_node)
if self._hit_map[other_node_index][node_index] and node_index != other_node_index:
return True
return False
def _calculateScore(self, a: SceneNode, b: SceneNode) -> int:
"""Calculate score simply sums the number of other objects it 'blocks'
:param a: node
:param b: node
:return: sum of the number of other objects
"""
score_a = sum(self._hit_map[self._original_node_list.index(a)])
score_b = sum(self._hit_map[self._original_node_list.index(b)])
return score_a - score_b
def _checkHit(self, a: SceneNode, b: SceneNode) -> bool:
"""Checks if a can be printed before b
:param a: node
:param b: node
:return: true if a can be printed before b
"""
if a == b:
return False
a_hit_hull = a.callDecoration("getConvexHullBoundary")
b_hit_hull = b.callDecoration("getConvexHullHeadFull")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
# Adhesion areas must never overlap, regardless of printing order
# This would cause over-extrusion
a_hit_hull = a.callDecoration("getAdhesionArea")
b_hit_hull = b.callDecoration("getAdhesionArea")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
else:
return False
class _ObjectOrder: class _ObjectOrder:

@@ -39,6 +39,11 @@ class PlatformPhysics:
Application.getInstance().getPreferences().addPreference("physics/automatic_push_free", False) Application.getInstance().getPreferences().addPreference("physics/automatic_push_free", False)
Application.getInstance().getPreferences().addPreference("physics/automatic_drop_down", True) Application.getInstance().getPreferences().addPreference("physics/automatic_drop_down", True)
self._app_all_model_drop = False
def setAppAllModelDropDown(self):
self._app_all_model_drop = True
self._onChangeTimerFinished()
def _onSceneChanged(self, source): def _onSceneChanged(self, source):
if not source.callDecoration("isSliceable"): if not source.callDecoration("isSliceable"):
@ -80,12 +85,12 @@ class PlatformPhysics:
# Move it downwards if bottom is above platform # Move it downwards if bottom is above platform
move_vector = Vector() move_vector = Vector()
if node.getSetting(SceneNodeSettings.AutoDropDown, app_automatic_drop_down) and not (node.getParent() and node.getParent().callDecoration("isGroup") or node.getParent() != root) and node.isEnabled(): #If an object is grouped, don't move it down if (node.getSetting(SceneNodeSettings.AutoDropDown, app_automatic_drop_down) or self._app_all_model_drop) and not (node.getParent() and node.getParent().callDecoration("isGroup") or node.getParent() != root) and node.isEnabled():
z_offset = node.callDecoration("getZOffset") if node.getDecorator(ZOffsetDecorator.ZOffsetDecorator) else 0 z_offset = node.callDecoration("getZOffset") if node.getDecorator(ZOffsetDecorator.ZOffsetDecorator) else 0
move_vector = move_vector.set(y = -bbox.bottom + z_offset) move_vector = move_vector.set(y=-bbox.bottom + z_offset)
# If there is no convex hull for the node, start calculating it and continue. # If there is no convex hull for the node, start calculating it and continue.
if not node.getDecorator(ConvexHullDecorator) and not node.callDecoration("isNonPrintingMesh"): if not node.getDecorator(ConvexHullDecorator) and not node.callDecoration("isNonPrintingMesh") and node.callDecoration("getLayerData") is None:
node.addDecorator(ConvexHullDecorator()) node.addDecorator(ConvexHullDecorator())
# only push away objects if this node is a printing mesh # only push away objects if this node is a printing mesh
@ -168,6 +173,8 @@ class PlatformPhysics:
op = PlatformPhysicsOperation.PlatformPhysicsOperation(node, move_vector) op = PlatformPhysicsOperation.PlatformPhysicsOperation(node, move_vector)
op.push() op.push()
# setting this drop to model same as app_automatic_drop_down
self._app_all_model_drop = False
# After moving, we have to evaluate the boundary checks for nodes # After moving, we have to evaluate the boundary checks for nodes
build_volume.updateNodeBoundaryCheck() build_volume.updateNodeBoundaryCheck()


@ -45,17 +45,17 @@ class PreviewPass(RenderPass):
This is useful to get a preview image of a scene taken from a different location as the active camera. This is useful to get a preview image of a scene taken from a different location as the active camera.
""" """
def __init__(self, width: int, height: int) -> None: def __init__(self, width: int, height: int, *, root: CuraSceneNode = None) -> None:
super().__init__("preview", width, height, 0) super().__init__("preview", width, height, 0)
self._camera = None # type: Optional[Camera] self._camera: Optional[Camera] = None
self._renderer = Application.getInstance().getRenderer() self._renderer = Application.getInstance().getRenderer()
self._shader = None # type: Optional[ShaderProgram] self._shader: Optional[ShaderProgram] = None
self._non_printing_shader = None # type: Optional[ShaderProgram] self._non_printing_shader: Optional[ShaderProgram] = None
self._support_mesh_shader = None # type: Optional[ShaderProgram] self._support_mesh_shader: Optional[ShaderProgram] = None
self._scene = Application.getInstance().getController().getScene() self._root = Application.getInstance().getController().getScene().getRoot() if root is None else root
# Set the camera to be used by this render pass # Set the camera to be used by this render pass
# if it's None, the active camera is used # if it's None, the active camera is used
@ -96,7 +96,7 @@ class PreviewPass(RenderPass):
batch_support_mesh = RenderBatch(self._support_mesh_shader) batch_support_mesh = RenderBatch(self._support_mesh_shader)
# Fill up the batch with objects that can be sliced. # Fill up the batch with objects that can be sliced.
for node in DepthFirstIterator(self._scene.getRoot()): for node in DepthFirstIterator(self._root):
if hasattr(node, "_outside_buildarea") and not getattr(node, "_outside_buildarea"): if hasattr(node, "_outside_buildarea") and not getattr(node, "_outside_buildarea"):
if node.callDecoration("isSliceable") and node.getMeshData() and node.isVisible(): if node.callDecoration("isSliceable") and node.getMeshData() and node.isVisible():
per_mesh_stack = node.callDecoration("getStack") per_mesh_stack = node.callDecoration("getStack")

cura/PrintOrderManager.py (new file, 174 lines)

@ -0,0 +1,174 @@
from typing import List, Callable, Optional, Any
from PyQt6.QtCore import pyqtProperty, pyqtSignal, QObject, pyqtSlot
from UM.Application import Application
from UM.Scene.Selection import Selection
from cura.Scene.CuraSceneNode import CuraSceneNode
class PrintOrderManager(QObject):
"""Allows to order the object list to set the print sequence manually"""
def __init__(self, get_nodes: Callable[[], List[CuraSceneNode]]) -> None:
super().__init__()
self._get_nodes = get_nodes
self._configureEvents()
_settingsChanged = pyqtSignal()
_uiActionsOutdated = pyqtSignal()
printOrderChanged = pyqtSignal()
@pyqtSlot()
def swapSelectedAndPreviousNodes(self) -> None:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
self._swapPrintOrders(selected_node, previous_node)
@pyqtSlot()
def swapSelectedAndNextNodes(self) -> None:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
self._swapPrintOrders(selected_node, next_node)
@pyqtProperty(str, notify=_uiActionsOutdated)
def previousNodeName(self) -> str:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
return self._getNodeName(previous_node)
@pyqtProperty(str, notify=_uiActionsOutdated)
def nextNodeName(self) -> str:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
return self._getNodeName(next_node)
@pyqtProperty(bool, notify=_uiActionsOutdated)
def shouldEnablePrintBeforeAction(self) -> bool:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
can_swap_with_previous_node = selected_node is not None and previous_node is not None
return can_swap_with_previous_node
@pyqtProperty(bool, notify=_uiActionsOutdated)
def shouldEnablePrintAfterAction(self) -> bool:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
can_swap_with_next_node = selected_node is not None and next_node is not None
return can_swap_with_next_node
@pyqtProperty(bool, notify=_settingsChanged)
def shouldShowEditPrintOrderActions(self) -> bool:
return PrintOrderManager.isUserDefinedPrintOrderEnabled()
@staticmethod
def isUserDefinedPrintOrderEnabled() -> bool:
stack = Application.getInstance().getGlobalContainerStack()
is_enabled = stack and \
stack.getProperty("print_sequence", "value") == "one_at_a_time" and \
stack.getProperty("user_defined_print_order_enabled", "value")
return bool(is_enabled)
@staticmethod
def initializePrintOrders(nodes: List[CuraSceneNode]) -> None:
"""Just created (loaded from file) nodes have print order 0.
This method initializes print orders with max value to put nodes at the end of object list"""
max_print_order = max(map(lambda n: n.printOrder, nodes), default=0)
for node in nodes:
if node.printOrder == 0:
max_print_order += 1
node.printOrder = max_print_order
@staticmethod
def updatePrintOrdersAfterGroupOperation(
all_nodes: List[CuraSceneNode],
group_node: CuraSceneNode,
grouped_nodes: List[CuraSceneNode]
) -> None:
group_node.printOrder = min(map(lambda n: n.printOrder, grouped_nodes))
all_nodes.append(group_node)
for node in grouped_nodes:
all_nodes.remove(node)
# reassign print orders so there won't be gaps like 1 2 5 6 7
sorted_nodes = sorted(all_nodes, key=lambda n: n.printOrder)
for i, node in enumerate(sorted_nodes):
node.printOrder = i + 1
@staticmethod
def updatePrintOrdersAfterUngroupOperation(
all_nodes: List[CuraSceneNode],
group_node: CuraSceneNode,
ungrouped_nodes: List[CuraSceneNode]
) -> None:
all_nodes.remove(group_node)
nodes_to_update_print_order = filter(lambda n: n.printOrder > group_node.printOrder, all_nodes)
for node in nodes_to_update_print_order:
node.printOrder += len(ungrouped_nodes) - 1
for i, child in enumerate(ungrouped_nodes):
child.printOrder = group_node.printOrder + i
all_nodes.append(child)
def _swapPrintOrders(self, node1: CuraSceneNode, node2: CuraSceneNode) -> None:
if node1 and node2:
node1.printOrder, node2.printOrder = node2.printOrder, node1.printOrder # swap print orders
self.printOrderChanged.emit() # update object list first
self._uiActionsOutdated.emit() # then update UI actions
def _getSelectedAndNeighborNodes(self
) -> (Optional[CuraSceneNode], Optional[CuraSceneNode], Optional[CuraSceneNode]):
nodes = self._get_nodes()
ordered_nodes = sorted(nodes, key=lambda n: n.printOrder)
for i, node in enumerate(ordered_nodes, 1):
node.printOrder = i
selected_node = PrintOrderManager._getSingleSelectedNode()
if selected_node and selected_node in ordered_nodes:
selected_node_index = ordered_nodes.index(selected_node)
else:
selected_node_index = None
if selected_node_index is not None and selected_node_index - 1 >= 0:
previous_node = ordered_nodes[selected_node_index - 1]
else:
previous_node = None
if selected_node_index is not None and selected_node_index + 1 < len(ordered_nodes):
next_node = ordered_nodes[selected_node_index + 1]
else:
next_node = None
return selected_node, previous_node, next_node
@staticmethod
def _getNodeName(node: CuraSceneNode, max_length: int = 30) -> str:
node_name = node.getName() if node else ""
truncated_node_name = node_name[:max_length]
return truncated_node_name
@staticmethod
def _getSingleSelectedNode() -> Optional[CuraSceneNode]:
if len(Selection.getAllSelectedObjects()) == 1:
selected_node = Selection.getSelectedObject(0)
return selected_node
return None
def _configureEvents(self) -> None:
Selection.selectionChanged.connect(self._onSelectionChanged)
self._global_stack = None
Application.getInstance().globalContainerStackChanged.connect(self._onGlobalStackChanged)
self._onGlobalStackChanged()
def _onGlobalStackChanged(self) -> None:
if self._global_stack:
self._global_stack.propertyChanged.disconnect(self._onSettingsChanged)
self._global_stack.containersChanged.disconnect(self._onSettingsChanged)
self._global_stack = Application.getInstance().getGlobalContainerStack()
if self._global_stack:
self._global_stack.propertyChanged.connect(self._onSettingsChanged)
self._global_stack.containersChanged.connect(self._onSettingsChanged)
def _onSettingsChanged(self, *args: Any) -> None:
self._settingsChanged.emit()
def _onSelectionChanged(self) -> None:
self._uiActionsOutdated.emit()


@ -40,9 +40,22 @@ class ExtruderConfigurationModel(QObject):
def setHotendID(self, hotend_id: Optional[str]) -> None: def setHotendID(self, hotend_id: Optional[str]) -> None:
if self._hotend_id != hotend_id: if self._hotend_id != hotend_id:
self._hotend_id = hotend_id self._hotend_id = ExtruderConfigurationModel.applyNameMappingHotend(hotend_id)
self.extruderConfigurationChanged.emit() self.extruderConfigurationChanged.emit()
@staticmethod
def applyNameMappingHotend(hotendId) -> str:
_EXTRUDER_NAME_MAP = {
"mk14_hot":"1XA",
"mk14_hot_s":"2XA",
"mk14_c":"1C",
"mk14":"1A",
"mk14_s":"2A"
}
if hotendId in _EXTRUDER_NAME_MAP:
return _EXTRUDER_NAME_MAP[hotendId]
return hotendId
@pyqtProperty(str, fset = setHotendID, notify = extruderConfigurationChanged) @pyqtProperty(str, fset = setHotendID, notify = extruderConfigurationChanged)
def hotendID(self) -> Optional[str]: def hotendID(self) -> Optional[str]:
return self._hotend_id return self._hotend_id


@ -9,7 +9,9 @@ from PyQt6.QtCore import pyqtProperty, QObject
class MaterialOutputModel(QObject): class MaterialOutputModel(QObject):
def __init__(self, guid: Optional[str], type: str, color: str, brand: str, name: str, parent = None) -> None: def __init__(self, guid: Optional[str], type: str, color: str, brand: str, name: str, parent = None) -> None:
super().__init__(parent) super().__init__(parent)
self._guid = guid
name, guid = MaterialOutputModel.getMaterialFromDefinition(guid,type, brand, name)
self._guid =guid
self._type = type self._type = type
self._color = color self._color = color
self._brand = brand self._brand = brand
@ -19,6 +21,34 @@ class MaterialOutputModel(QObject):
def guid(self) -> str: def guid(self) -> str:
return self._guid if self._guid else "" return self._guid if self._guid else ""
@staticmethod
def getMaterialFromDefinition(guid, type, brand, name):
_MATERIAL_MAP = { "abs" :{"name" :"abs_175" ,"guid": "2780b345-577b-4a24-a2c5-12e6aad3e690"},
"abs-wss1" :{"name" :"absr_175" ,"guid": "88c8919c-6a09-471a-b7b6-e801263d862d"},
"asa" :{"name" :"asa_175" ,"guid": "416eead4-0d8e-4f0b-8bfc-a91a519befa5"},
"nylon-cf" :{"name" :"cffpa_175" ,"guid": "85bbae0e-938d-46fb-989f-c9b3689dc4f0"},
"nylon" :{"name" :"nylon_175" ,"guid": "283d439a-3490-4481-920c-c51d8cdecf9c"},
"pc" :{"name" :"pc_175" ,"guid": "62414577-94d1-490d-b1e4-7ef3ec40db02"},
"petg" :{"name" :"petg_175" ,"guid": "69386c85-5b6c-421a-bec5-aeb1fb33f060"},
"pla" :{"name" :"pla_175" ,"guid": "0ff92885-617b-4144-a03c-9989872454bc"},
"pva" :{"name" :"pva_175" ,"guid": "a4255da2-cb2a-4042-be49-4a83957a2f9a"},
"wss1" :{"name" :"rapidrinse_175","guid": "a140ef8f-4f26-4e73-abe0-cfc29d6d1024"},
"sr30" :{"name" :"sr30_175" ,"guid": "77873465-83a9-4283-bc44-4e542b8eb3eb"},
"im-pla" :{"name" :"tough_pla_175" ,"guid": "96fca5d9-0371-4516-9e96-8e8182677f3c"},
"bvoh" :{"name" :"bvoh_175" ,"guid": "923e604c-8432-4b09-96aa-9bbbd42207f4"},
"cpe" :{"name" :"cpe_175" ,"guid": "da1872c1-b991-4795-80ad-bdac0f131726"},
"hips" :{"name" :"hips_175" ,"guid": "a468d86a-220c-47eb-99a5-bbb47e514eb0"},
"tpu" :{"name" :"tpu_175" ,"guid": "19baa6a9-94ff-478b-b4a1-8157b74358d2"}
}
if guid is None and brand != "empty" and type in _MATERIAL_MAP:
name = _MATERIAL_MAP[type]["name"]
guid = _MATERIAL_MAP[type]["guid"]
return name, guid
@pyqtProperty(str, constant = True) @pyqtProperty(str, constant = True)
def type(self) -> str: def type(self) -> str:
return self._type return self._type


@ -1,6 +1,8 @@
# Copyright (c) 2018 Aldo Hoeben / fieldOfView # Copyright (c) 2018 Aldo Hoeben / fieldOfView
# NetworkMJPGImage is released under the terms of the LGPLv3 or higher. # NetworkMJPGImage is released under the terms of the LGPLv3 or higher.
from typing import Optional
from PyQt6.QtCore import QUrl, pyqtProperty, pyqtSignal, pyqtSlot, QRect, QByteArray from PyQt6.QtCore import QUrl, pyqtProperty, pyqtSignal, pyqtSlot, QRect, QByteArray
from PyQt6.QtGui import QImage, QPainter from PyQt6.QtGui import QImage, QPainter
from PyQt6.QtQuick import QQuickPaintedItem from PyQt6.QtQuick import QQuickPaintedItem
@ -19,9 +21,9 @@ class NetworkMJPGImage(QQuickPaintedItem):
self._stream_buffer = QByteArray() self._stream_buffer = QByteArray()
self._stream_buffer_start_index = -1 self._stream_buffer_start_index = -1
self._network_manager = None # type: QNetworkAccessManager self._network_manager: Optional[QNetworkAccessManager] = None
self._image_request = None # type: QNetworkRequest self._image_request: Optional[QNetworkRequest] = None
self._image_reply = None # type: QNetworkReply self._image_reply: Optional[QNetworkReply] = None
self._image = QImage() self._image = QImage()
self._image_rect = QRect() self._image_rect = QRect()


@ -415,7 +415,18 @@ class NetworkedPrinterOutputDevice(PrinterOutputDevice):
@pyqtProperty(str, constant = True) @pyqtProperty(str, constant = True)
def printerType(self) -> str: def printerType(self) -> str:
return self._properties.get(b"printer_type", b"Unknown").decode("utf-8") return NetworkedPrinterOutputDevice.applyPrinterTypeMapping(self._properties.get(b"printer_type", b"Unknown").decode("utf-8"))
@staticmethod
def applyPrinterTypeMapping(printer_type):
_PRINTER_TYPE_NAME = {
"fire_e": "ultimaker_method",
"lava_f": "ultimaker_methodx",
"magma_10": "ultimaker_methodxl"
}
if printer_type in _PRINTER_TYPE_NAME:
return _PRINTER_TYPE_NAME[printer_type]
return printer_type
@pyqtProperty(str, constant = True) @pyqtProperty(str, constant = True)
def ipAddress(self) -> str: def ipAddress(self) -> str:


@ -11,6 +11,7 @@ from UM.Scene.SceneNode import SceneNode
from UM.Scene.SceneNodeDecorator import SceneNodeDecorator # To cast the deepcopy of every decorator back to SceneNodeDecorator. from UM.Scene.SceneNodeDecorator import SceneNodeDecorator # To cast the deepcopy of every decorator back to SceneNodeDecorator.
import cura.CuraApplication # To get the build plate. import cura.CuraApplication # To get the build plate.
from UM.Scene.SceneNodeSettings import SceneNodeSettings
from cura.Settings.ExtruderStack import ExtruderStack # For typing. from cura.Settings.ExtruderStack import ExtruderStack # For typing.
from cura.Settings.SettingOverrideDecorator import SettingOverrideDecorator # For per-object settings. from cura.Settings.SettingOverrideDecorator import SettingOverrideDecorator # For per-object settings.
@ -25,13 +26,26 @@ class CuraSceneNode(SceneNode):
if not no_setting_override: if not no_setting_override:
self.addDecorator(SettingOverrideDecorator()) # Now we always have a getActiveExtruderPosition, unless explicitly disabled self.addDecorator(SettingOverrideDecorator()) # Now we always have a getActiveExtruderPosition, unless explicitly disabled
self._outside_buildarea = False self._outside_buildarea = False
self._print_order = 0
def setOutsideBuildArea(self, new_value: bool) -> None: def setOutsideBuildArea(self, new_value: bool) -> None:
self._outside_buildarea = new_value self._outside_buildarea = new_value
@property
def printOrder(self):
return self._print_order
@printOrder.setter
def printOrder(self, new_value):
self._print_order = new_value
def isOutsideBuildArea(self) -> bool: def isOutsideBuildArea(self) -> bool:
return self._outside_buildarea or self.callDecoration("getBuildPlateNumber") < 0 return self._outside_buildarea or self.callDecoration("getBuildPlateNumber") < 0
@property
def isDropDownEnabled(self) ->bool:
return self.getSetting(SceneNodeSettings.AutoDropDown, Application.getInstance().getPreferences().getValue("physics/automatic_drop_down"))
def isVisible(self) -> bool: def isVisible(self) -> bool:
return super().isVisible() and self.callDecoration("getBuildPlateNumber") == cura.CuraApplication.CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate return super().isVisible() and self.callDecoration("getBuildPlateNumber") == cura.CuraApplication.CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
@ -157,3 +171,6 @@ class CuraSceneNode(SceneNode):
def transformChanged(self) -> None: def transformChanged(self) -> None:
self._transformChanged() self._transformChanged()
def __repr__(self) -> str:
return "{print_order}. {name}".format(print_order = self._print_order, name = self.getName())


@ -56,6 +56,9 @@ class CuraFormulaFunctions:
if isinstance(value, SettingFunction): if isinstance(value, SettingFunction):
value = value(extruder_stack, context = context) value = value(extruder_stack, context = context)
if isinstance(value, str):
value = value.lower()
return value return value
def _getActiveExtruders(self, context: Optional["PropertyEvaluationContext"] = None) -> List[str]: def _getActiveExtruders(self, context: Optional["PropertyEvaluationContext"] = None) -> List[str]:


@ -284,16 +284,20 @@ class CuraStackBuilder:
abstract_machines = registry.findContainerStacks(id = abstract_machine_id) abstract_machines = registry.findContainerStacks(id = abstract_machine_id)
if abstract_machines: if abstract_machines:
return cast(GlobalStack, abstract_machines[0]) return cast(GlobalStack, abstract_machines[0])
definitions = registry.findDefinitionContainers(id=definition_id) definitions = registry.findDefinitionContainers(id=definition_id)
name = "" name = ""
if definitions: if definitions:
name = definitions[0].getName() name = definitions[0].getName()
stack = cls.createMachine(abstract_machine_id, definition_id, show_warning_message=False) stack = cls.createMachine(abstract_machine_id, definition_id, show_warning_message=False)
if not stack: if not stack:
return None return None
if not stack.getMetaDataEntry("visible", True):
return None
stack.setName(name) stack.setName(name)
stack.setMetaDataEntry("is_abstract_machine", True) stack.setMetaDataEntry("is_abstract_machine", True)


@ -10,13 +10,16 @@ class VariantDatabaseHandler(DatabaseMetadataContainerController):
"""The Database handler for Variant containers""" """The Database handler for Variant containers"""
def __init__(self): def __init__(self):
super().__init__(SQLQueryFactory(table = "variant", super().__init__(SQLQueryFactory(
fields = { table="variant",
"id": "text", fields={
"name": "text", "id": "text",
"hardware_type": "text", "name": "text",
"definition": "text", "hardware_type": "text",
"version": "text", "definition": "text",
"setting_version": "text" "version": "text",
})) "setting_version": "text",
"reference_extruder_id": "text",
},
))
self._container_type = InstanceContainer self._container_type = InstanceContainer


@ -316,7 +316,13 @@ class ExtruderManager(QObject):
# Starts with the adhesion extruder. # Starts with the adhesion extruder.
adhesion_type = global_stack.getProperty("adhesion_type", "value") adhesion_type = global_stack.getProperty("adhesion_type", "value")
if adhesion_type in {"skirt", "brim"}: if adhesion_type in {"skirt", "brim"}:
return max(0, int(global_stack.getProperty("skirt_brim_extruder_nr", "value"))) # optional skirt/brim extruder defaults to zero skirt_brim_extruder_nr = global_stack.getProperty("skirt_brim_extruder_nr", "value")
# if the skirt_brim_extruder_nr is -1, then we use the first used extruder
if skirt_brim_extruder_nr == -1:
used_extruders = self.getUsedExtruderStacks()
return used_extruders[0].position
else:
return skirt_brim_extruder_nr
if adhesion_type == "raft": if adhesion_type == "raft":
return global_stack.getProperty("raft_base_extruder_nr", "value") return global_stack.getProperty("raft_base_extruder_nr", "value")


@ -48,6 +48,8 @@ from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura") catalog = i18nCatalog("cura")
from cura.Settings.GlobalStack import GlobalStack from cura.Settings.GlobalStack import GlobalStack
if TYPE_CHECKING: if TYPE_CHECKING:
from PyQt6.QtCore import QVariantList
from cura.CuraApplication import CuraApplication from cura.CuraApplication import CuraApplication
from cura.Machines.MaterialNode import MaterialNode from cura.Machines.MaterialNode import MaterialNode
from cura.Machines.QualityChangesGroup import QualityChangesGroup from cura.Machines.QualityChangesGroup import QualityChangesGroup
@ -581,6 +583,10 @@ class MachineManager(QObject):
def activeMachine(self) -> Optional["GlobalStack"]: def activeMachine(self) -> Optional["GlobalStack"]:
return self._global_container_stack return self._global_container_stack
@pyqtProperty("QVariantList", notify=activeVariantChanged)
def activeMachineExtruders(self) -> Optional["QVariantList"]:
return self._global_container_stack.extruderList if self._global_container_stack else None
@pyqtProperty(str, notify = activeStackChanged) @pyqtProperty(str, notify = activeStackChanged)
def activeStackId(self) -> str: def activeStackId(self) -> str:
if self._active_container_stack: if self._active_container_stack:
@ -1700,6 +1706,16 @@ class MachineManager(QObject):
else: # No intent had the correct category. else: # No intent had the correct category.
extruder.intent = empty_intent_container extruder.intent = empty_intent_container
@pyqtSlot()
def resetIntents(self) -> None:
"""Reset the intent category of the current printer.
"""
global_stack = self._application.getGlobalContainerStack()
if global_stack is None:
return
for extruder in global_stack.extruderList:
extruder.intent = empty_intent_container
def activeQualityGroup(self) -> Optional["QualityGroup"]: def activeQualityGroup(self) -> Optional["QualityGroup"]:
"""Get the currently activated quality group. """Get the currently activated quality group.


@ -28,6 +28,7 @@ empty_material_container.setMetaDataEntry("type", "material")
empty_material_container.setMetaDataEntry("base_file", "empty_material") empty_material_container.setMetaDataEntry("base_file", "empty_material")
empty_material_container.setMetaDataEntry("GUID", "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF") empty_material_container.setMetaDataEntry("GUID", "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF")
empty_material_container.setMetaDataEntry("material", "empty") empty_material_container.setMetaDataEntry("material", "empty")
empty_material_container.setMetaDataEntry("brand", "empty_brand")
# Empty quality # Empty quality
EMPTY_QUALITY_CONTAINER_ID = "empty_quality" EMPTY_QUALITY_CONTAINER_ID = "empty_quality"


@ -5,16 +5,18 @@ import json
import os import os
from typing import List, Optional from typing import List, Optional
from PyQt6.QtCore import QUrl
from PyQt6.QtNetwork import QLocalServer, QLocalSocket from PyQt6.QtNetwork import QLocalServer, QLocalSocket
from UM.Qt.QtApplication import QtApplication #For typing. from UM.Qt.QtApplication import QtApplication # For typing.
from UM.Logger import Logger from UM.Logger import Logger
class SingleInstance: class SingleInstance:
def __init__(self, application: QtApplication, files_to_open: Optional[List[str]]) -> None: def __init__(self, application: QtApplication, files_to_open: Optional[List[str]], url_to_open: Optional[List[str]]) -> None:
self._application = application self._application = application
self._files_to_open = files_to_open self._files_to_open = files_to_open
self._url_to_open = url_to_open
self._single_instance_server = None self._single_instance_server = None
@ -33,7 +35,7 @@ class SingleInstance:
return False return False
# We only send the files that need to be opened. # We only send the files that need to be opened.
if not self._files_to_open: if not self._files_to_open and not self._url_to_open:
Logger.log("i", "No file need to be opened, do nothing.") Logger.log("i", "No file need to be opened, do nothing.")
return True return True
@ -55,8 +57,12 @@ class SingleInstance:
payload = {"command": "open", "filePath": os.path.abspath(filename)} payload = {"command": "open", "filePath": os.path.abspath(filename)}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding = "ascii")) single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding = "ascii"))
for url in self._url_to_open:
payload = {"command": "open-url", "urlPath": url.toString()}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ascii"))
payload = {"command": "close-connection"} payload = {"command": "close-connection"}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding = "ascii")) single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ascii"))
single_instance_socket.flush() single_instance_socket.flush()
single_instance_socket.waitForDisconnected() single_instance_socket.waitForDisconnected()
@ -72,7 +78,7 @@ class SingleInstance:
def _onClientConnected(self) -> None: def _onClientConnected(self) -> None:
Logger.log("i", "New connection received on our single-instance server") Logger.log("i", "New connection received on our single-instance server")
connection = None #type: Optional[QLocalSocket] connection = None # type: Optional[QLocalSocket]
if self._single_instance_server: if self._single_instance_server:
connection = self._single_instance_server.nextPendingConnection() connection = self._single_instance_server.nextPendingConnection()
@ -81,7 +87,7 @@ class SingleInstance:
def __readCommands(self, connection: QLocalSocket) -> None: def __readCommands(self, connection: QLocalSocket) -> None:
line = connection.readLine() line = connection.readLine()
while len(line) != 0: # There is also a .canReadLine() while len(line) != 0: # There is also a .canReadLine()
try: try:
payload = json.loads(str(line, encoding = "ascii").strip()) payload = json.loads(str(line, encoding = "ascii").strip())
command = payload["command"] command = payload["command"]
@ -94,13 +100,19 @@ class SingleInstance:
elif command == "open": elif command == "open":
self._application.callLater(lambda f = payload["filePath"]: self._application._openFile(f)) self._application.callLater(lambda f = payload["filePath"]: self._application._openFile(f))
#command: Load a url link in Cura
elif command == "open-url":
url = QUrl(payload["urlPath"])
self._application.callLater(lambda: self._application._openUrl(url))
# Command: Activate the window and bring it to the top. # Command: Activate the window and bring it to the top.
elif command == "focus": elif command == "focus":
# Operating systems these days prevent windows from moving around by themselves. # Operating systems these days prevent windows from moving around by themselves.
# 'alert' or flashing the icon in the taskbar is the best thing we do now. # 'alert' or flashing the icon in the taskbar is the best thing we do now.
main_window = self._application.getMainWindow() main_window = self._application.getMainWindow()
if main_window is not None: if main_window is not None:
self._application.callLater(lambda: main_window.alert(0)) # type: ignore # I don't know why MyPy complains here self._application.callLater(lambda: main_window.alert(0)) # type: ignore # I don't know why MyPy complains here
# Command: Close the socket connection. We're done. # Command: Close the socket connection. We're done.
elif command == "close-connection": elif command == "close-connection":


@ -1,7 +1,9 @@
# Copyright (c) 2021 Ultimaker B.V. # Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import numpy import numpy
from typing import Optional
from PyQt6 import QtCore from PyQt6 import QtCore
from PyQt6.QtCore import QCoreApplication from PyQt6.QtCore import QCoreApplication
from PyQt6.QtGui import QImage from PyQt6.QtGui import QImage
@ -10,11 +12,13 @@ from UM.Logger import Logger
from cura.PreviewPass import PreviewPass from cura.PreviewPass import PreviewPass
from UM.Application import Application from UM.Application import Application
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Matrix import Matrix from UM.Math.Matrix import Matrix
from UM.Math.Vector import Vector from UM.Math.Vector import Vector
from UM.Scene.Camera import Camera from UM.Scene.Camera import Camera
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Scene.SceneNode import SceneNode
from UM.Qt.QtRenderer import QtRenderer
class Snapshot: class Snapshot:
@staticmethod @staticmethod
@ -32,6 +36,96 @@ class Snapshot:
return min_x, max_x, min_y, max_y return min_x, max_x, min_y, max_y
@staticmethod
def isometricSnapshot(width: int = 300, height: int = 300, *, node: Optional[SceneNode] = None) -> Optional[QImage]:
"""
Create an isometric snapshot of the scene.
:param width: width of the aspect ratio default 300
:param height: height of the aspect ratio default 300
:param node: node of the scene default is the root of the scene
:return: None when there is no model on the build plate otherwise it will return an image
"""
if node is None:
node = Application.getInstance().getController().getScene().getRoot()
# the direction the camera is looking at to create the isometric view
iso_view_dir = Vector(-1, -1, -1).normalized()
bounds = Snapshot.nodeBounds(node)
if bounds is None:
Logger.log("w", "There appears to be nothing to render")
return None
camera = Camera("snapshot")
# find local x and y directional vectors of the camera
tangent_space_x_direction = iso_view_dir.cross(Vector.Unit_Y).normalized()
tangent_space_y_direction = tangent_space_x_direction.cross(iso_view_dir).normalized()
# find extreme screen space coords of the scene
x_points = [p.dot(tangent_space_x_direction) for p in bounds.points]
y_points = [p.dot(tangent_space_y_direction) for p in bounds.points]
min_x = min(x_points)
max_x = max(x_points)
min_y = min(y_points)
max_y = max(y_points)
camera_width = max_x - min_x
camera_height = max_y - min_y
if camera_width == 0 or camera_height == 0:
Logger.log("w", "There appears to be nothing to render")
return None
# increase either width or height to match the aspect ratio of the image
if camera_width / camera_height > width / height:
camera_height = camera_width * height / width
else:
camera_width = camera_height * width / height
# Configure camera for isometric view
ortho_matrix = Matrix()
ortho_matrix.setOrtho(
-camera_width / 2,
camera_width / 2,
-camera_height / 2,
camera_height / 2,
-10000,
10000
)
camera.setPerspective(False)
camera.setProjectionMatrix(ortho_matrix)
camera.setPosition(bounds.center)
camera.lookAt(bounds.center + iso_view_dir)
# Render the scene
renderer = QtRenderer()
render_pass = PreviewPass(width, height, root=node)
renderer.setViewportSize(width, height)
renderer.setWindowSize(width, height)
render_pass.setCamera(camera)
renderer.addRenderPass(render_pass)
renderer.beginRendering()
renderer.render()
return render_pass.getOutput()
@staticmethod
def nodeBounds(root_node: SceneNode) -> Optional[AxisAlignedBox]:
axis_aligned_box = None
for node in DepthFirstIterator(root_node):
if not getattr(node, "_outside_buildarea", False):
if node.callDecoration(
"isSliceable") and node.getMeshData() and node.isVisible() and not node.callDecoration(
"isNonThumbnailVisibleMesh"):
if axis_aligned_box is None:
axis_aligned_box = node.getBoundingBox()
else:
axis_aligned_box = axis_aligned_box + node.getBoundingBox()
return axis_aligned_box
@staticmethod @staticmethod
def snapshot(width = 300, height = 300): def snapshot(width = 300, height = 300):
"""Return a QImage of the scene """Return a QImage of the scene
@ -55,14 +149,7 @@ class Snapshot:
camera = Camera("snapshot", root) camera = Camera("snapshot", root)
# determine zoom and look at # determine zoom and look at
bbox = None bbox = Snapshot.nodeBounds(root)
for node in DepthFirstIterator(root):
if not getattr(node, "_outside_buildarea", False):
if node.callDecoration("isSliceable") and node.getMeshData() and node.isVisible() and not node.callDecoration("isNonThumbnailVisibleMesh"):
if bbox is None:
bbox = node.getBoundingBox()
else:
bbox = bbox + node.getBoundingBox()
# If there is no bounding box, it means that there is no model in the buildplate # If there is no bounding box, it means that there is no model in the buildplate
if bbox is None: if bbox is None:
Logger.log("w", "Unable to create snapshot as we seem to have an empty buildplate") Logger.log("w", "Unable to create snapshot as we seem to have an empty buildplate")


@ -14,6 +14,9 @@ from UM.Scene.SceneNode import SceneNode
from UM.Scene.Selection import Selection from UM.Scene.Selection import Selection
from UM.i18n import i18nCatalog from UM.i18n import i18nCatalog
from cura.PrintOrderManager import PrintOrderManager
from cura.Scene.CuraSceneNode import CuraSceneNode
catalog = i18nCatalog("cura") catalog = i18nCatalog("cura")
@ -69,13 +72,16 @@ class ObjectsModel(ListModel):
self._group_name_template = catalog.i18nc("@label", "Group #{group_nr}") self._group_name_template = catalog.i18nc("@label", "Group #{group_nr}")
self._group_name_prefix = self._group_name_template.split("#")[0] self._group_name_prefix = self._group_name_template.split("#")[0]
self._naming_regex = re.compile("^(.+)\(([0-9]+)\)$") self._naming_regex = re.compile(r"^(.+)\(([0-9]+)\)$")
def setActiveBuildPlate(self, nr: int) -> None: def setActiveBuildPlate(self, nr: int) -> None:
if self._build_plate_number != nr: if self._build_plate_number != nr:
self._build_plate_number = nr self._build_plate_number = nr
self._update() self._update()
def getNodes(self) -> List[CuraSceneNode]:
return list(map(lambda n: n["node"], self.items))
def _updateSceneDelayed(self, source) -> None: def _updateSceneDelayed(self, source) -> None:
if not isinstance(source, Camera): if not isinstance(source, Camera):
self._update_timer.start() self._update_timer.start()
@ -175,6 +181,10 @@ class ObjectsModel(ListModel):
all_nodes = self._renameNodes(name_to_node_info_dict) all_nodes = self._renameNodes(name_to_node_info_dict)
user_defined_print_order_enabled = PrintOrderManager.isUserDefinedPrintOrderEnabled()
if user_defined_print_order_enabled:
PrintOrderManager.initializePrintOrders(all_nodes)
for node in all_nodes: for node in all_nodes:
if hasattr(node, "isOutsideBuildArea"): if hasattr(node, "isOutsideBuildArea"):
is_outside_build_area = node.isOutsideBuildArea() # type: ignore is_outside_build_area = node.isOutsideBuildArea() # type: ignore
@ -223,8 +233,13 @@ class ObjectsModel(ListModel):
# for anti overhang meshes and groups the extruder nr is irrelevant # for anti overhang meshes and groups the extruder nr is irrelevant
extruder_number = -1 extruder_number = -1
if not user_defined_print_order_enabled:
name = node.getName()
else:
name = "{print_order}. {name}".format(print_order = node.printOrder, name = node.getName())
nodes.append({ nodes.append({
"name": node.getName(), "name": name,
"selected": Selection.isSelected(node), "selected": Selection.isSelected(node),
"outside_build_area": is_outside_build_area, "outside_build_area": is_outside_build_area,
"buildplate_number": node_build_plate_number, "buildplate_number": node_build_plate_number,
@ -234,5 +249,5 @@ class ObjectsModel(ListModel):
"node": node "node": node
}) })
nodes = sorted(nodes, key=lambda n: n["name"]) nodes = sorted(nodes, key=lambda n: n["name"] if not user_defined_print_order_enabled else n["node"].printOrder)
self.setItems(nodes) self.setItems(nodes)


@ -148,6 +148,9 @@ class CloudMaterialSync(QObject):
continue continue
if metadata["id"] == "empty_material": # Don't export the empty material. if metadata["id"] == "empty_material": # Don't export the empty material.
continue continue
# Ignore materials that are marked as not visible for whatever reason
if not bool(metadata.get("visible", True)):
continue
material = registry.findContainers(id = metadata["id"])[0] material = registry.findContainers(id = metadata["id"])[0]
suffix = registry.getMimeTypeForContainer(type(material)).preferredSuffix suffix = registry.getMimeTypeForContainer(type(material)).preferredSuffix
filename = metadata["id"] + "." + suffix filename = metadata["id"] + "." + suffix


@ -15,6 +15,10 @@ if "" in sys.path:
import argparse import argparse
import faulthandler import faulthandler
import os import os
# set the environment variable QT_QUICK_FLICKABLE_WHEEL_DECELERATION to 5000 as mentioned in qt6.6 update log to overcome scroll related issues
os.environ["QT_QUICK_FLICKABLE_WHEEL_DECELERATION"] = str(int(os.environ.get("QT_QUICK_FLICKABLE_WHEEL_DECELERATION", "5000")))
if sys.platform != "linux": # Turns out the Linux build _does_ use this, but we're not making an Enterprise release for that system anyway. if sys.platform != "linux": # Turns out the Linux build _does_ use this, but we're not making an Enterprise release for that system anyway.
os.environ["QT_PLUGIN_PATH"] = "" # Security workaround: Don't need it, and introduces an attack vector, so set to nul. os.environ["QT_PLUGIN_PATH"] = "" # Security workaround: Don't need it, and introduces an attack vector, so set to nul.
os.environ["QML2_IMPORT_PATH"] = "" # Security workaround: Don't need it, and introduces an attack vector, so set to nul. os.environ["QML2_IMPORT_PATH"] = "" # Security workaround: Don't need it, and introduces an attack vector, so set to nul.


@ -1,37 +0,0 @@
How to Profile Cura and See What It is Doing
============================================
Cura has a simple flame graph profiler available as a plugin which can be used to see what Cura is doing as it runs and how much time it takes. A flame graph profile shows its output as a timeline and stacks of "blocks" which represent parts of the code and are stacked up to show call depth. These often form little peaks which look like flames. It is a simple yet powerful way to visualise the activity of a program.
Setting up and installing the profiler
--------------------------------------
The profiler plugin is kept outside of the Cura source code here: https://github.com/sedwards2009/cura-big-flame-graph
To install it do:
* Use `git clone https://github.com/sedwards2009/cura-big-flame-graph.git` to grab a copy of the code.
* Copy the `BigFlameGraph` directory into the `plugins` directory in your local Cura.
* Set the `URANIUM_FLAME_PROFILER` environment variable to any value before starting Cura. This tells the profiler code in Cura to activate and insert the needed hooks into the code.
Using the profiler
------------------
To open the profiler go to the Extensions menu and select "Start BFG" from the "Big Flame Graph" menu. A page will open up in your default browser. This is the profiler UI. Click on "Record" to start recording, go to Cura and perform an action and then back in the profiler click on "Stop". The results should now load in.
The time scale is at the top of the window. The blocks should be read as meaning the blocks at the bottom call the blocks which are stacked on top of them. Hover the mouse to get more detailed information about a block such as the name of the code involved and its duration. Use the zoom buttons or mouse wheel to zoom in. The display can be panned by dragging with the left mouse button.
Note: The profiler front-end itself is quite "heavy" (ok, not optimised). It runs much better in Google Chrome or Chromium than Firefox. It is also a good idea to keep recording sessions short for the same reason.
What the Profiler Sees
----------------------
The profiler doesn't capture every function call in Cura. It hooks into a number of important systems which give a good picture of activity without too much runtime overhead. The most important systems are Uranium's signal mechanism and PyQt5 slots. Functions which are called via the signal mechanism are recorded and their names appear in the results. PyQt5 slots appear in the results with the prefix `[SLOT]`.
Note that not all slots are captured; only slots belonging to classes that use the `pyqtSlot` decorator from the `UM.FlameProfiler` module are recorded.
Manually adding profiling code for more detail
---------------------------------------------
It is also possible to manually add decorators to methods to make them appear in the profiler results. The `UM.FlameProfiler` module contains the `profile` decorator which can be applied to methods. There is also a `profileCall` context manager which can be used with Python's `with` statement to measure a block of code. `profileCall` takes one argument, a label to use in the results.
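For illustration, here is a minimal sketch of how these hooks could be applied, assuming the module layout described above (`profile` and `profileCall` living in `UM.FlameProfiler`); the class and method names are made up and the exact signatures are assumptions rather than verified API:
```python
# A minimal sketch, assuming the names described above; not verified against UM.FlameProfiler.
from UM.FlameProfiler import profile, profileCall

class MeshLoader:
    @profile  # this method will show up as its own block in the flame graph
    def loadMesh(self, file_name: str) -> None:
        with profileCall("parse file"):        # label shown in the results
            self._parse(file_name)
        with profileCall("build scene node"):  # another labelled block
            self._build_node()

    def _parse(self, file_name: str) -> None:
        ...

    def _build_node(self) -> None:
        ...
```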


@ -1 +0,0 @@
<mxfile host="www.draw.io" modified="2019-12-20T12:34:56.339Z" agent="Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0" etag="1NLsmsxIqXUmOJee4m9D" version="12.4.3" type="device" pages="1"><diagram id="K0t5C8WxT4tyKudoHXNk" name="Page-1">7VzbcqM4EP0aP+4WSFzsx8SZmd2tpCqTbO1kn1KKkW3VYOQBObHn61cyF0MLM9jhkmyo8gMSLXQ5R+rTDckIT1fbLyFZL2+4R/0RMrztCF+NEDIthEbqZ3i7uMZ13LhiETIvMTpU3LOfNKk0ktoN82hUMBSc+4Kti5UzHgR0Jgp1JAz5S9Fszv1ir2uyoFrF/Yz4eu035ollXDtG7qH+D8oWy7Rn05nEd1YkNU5mEi2Jx19yVfjTCE9DzkV8tdpOqa8WL12XuN3nI3ezgYU0EHUa/OXdPfz4exV+fRLWl92tG9388/m35CnPxN8kE/4zEOqB8ZDFLl2H6IWtfBLI0uWcB+I+uWPI8mzJfO+a7PhGjSMSZPY9LV0uech+Snviy1umrJC3Q5HAjFXrOfP9Kfd5KCsCvu/g0OhePSzpJqSRbHabztcEVTdkWzC8JpFIB8h9n6wj9rQfsmq4IuGCBZdcCL5KjJKFoKGg26MrbGa4ScJTvqIi3EmTtIGTQJ1w3URWXH45MMecJDbLPGvShiRh6yJ7dtbdnWQ3CRZyCll/2SZJ+7MMrT+npDvkFHsjvqBhQAS95JvAi/Iskhe5mR6q9tw6gWdY4xnb8+xxJrtdcDVTOSi8vciQyHFPIiL21An5dwq4UkIf4rNFIIs+natmClImN/RFUi34Wj1sTWYsWFzvba6sQ81dsk6qisu2c3+/aZfM82ig2MUFEeQpY/+ay5nsF9K+lD+53FPjd3tky4FPZdk8lOVPmYdiygM5F8L2rKKSpy9UcbUeBY9vY52XCS+wTotSGkJe5FlYIMSp6Fsa+vJ0pCGTp8KAdbNY207PWE80rD06ZwETjAcD2g2jPUY9o516oBzcz0RubKUghgO9LdhNY9w37rpw/LGROIndo9it6YB404jjmlKyNcRtDfCvMeCdhApyWv+vUMEtSndslmg0qyxUwBWhwqsAdgaR1txutit3MyoRaWVgt7aZ3eH07hJvu0SmdYr3eFBp3aPuloi0TlE39SM9H4sNyLeFvGmUqLVuoddPeA1lGngXKkMuSzOfRBGbKUElVqn+olsmHpJFV9f/qmu5snHpapu7dbVLC4Ec/UO+kGuliodm+1LaLh4c9bRkPBBUcgJ8E85o1dTT1wRSuNEqDN1yDHOY2SWQpXUh9Ylgz8XxVuRvbxVtcwJwXBSAaAIeEc8zaZVP64MHZe8XMl8DHhSvg/YgCT3Z5cySbXV8wBgM2DUrxwXtsWsDVscjaDTNbOqO7qiIHcKUX4cpFgDRrKtkshOw+ZNNTzA+kYg+ylUfhOsZaYdxpSfrPVJBeoJxyCe3h3fvkQrSE4tZqDKA3SzYvQcoqSh9lUo9U3Gm6jZVunXUbYMq1aopUmN315dKxSBNmanWU1WqBT5VQJZbS6U2JQyR/gr62LEy6MLTdSEa1wx40yOn+aNEfz8RkNWgCE93GvFWecOKsDqrNeDdLN79K0I9pv8oImFcUySgI+nIbkSCbYAMlHumSLBtmMoCD2oolQXFiGUYleOC9hhhwOoWUln4pMB3EC2/Fi0OyJRip+bRlh6BjR9tWA92pf3gwk51YdgoB/6tSBasRx+nu7AzXNFbeBvj1PRh+Mjm7caHWeDTffvcQNcGvsKG3+s05cMs4MOSv0g56sOgvVF4fdOSD9ODso/Ce/Q+eI9AIO+cy3voXG2rHd7bE+DErUn1fpz0wXs9RO2W92Z/vDfeBe8d+HYahhp1ee+CDKkFN1BDvHcBj5FZfd5D+2543+BXJ++N93W/Ouk3VncMSItzz3sQq2O7Hd7DASMLV4/LgBu7i1hd/ybhdN6fyeFzvtJqkPeTd3Hcm4AVDlTldWlvwhRVS8e9Cd+XmdUyB9pbdnVKS3MPp9p34U4sPT3SrTvpMWyuK6P6dSdaZnRy5r6C8TduKXzQ30Pb1ePqI/VroQ/L+7pRc7+fRcDX35ror0178BWw1VK2CI/h9qp2J9DenLzquJfFw/85ic0P/y0Gf/oP</diagram></mxfile>

Binary image file removed (22 KiB).


@ -1,81 +0,0 @@
# Reporting Issues
Please attach the following information when <br>
reporting crashes or similar issues.
<br>
## DxDiag
### ![Badge Windows]
The log as produced by **dxdiag**.
<kbd>start</kbd>  »  <kbd>run</kbd>  »  <kbd>dxdiag</kbd>  »  <kbd>save output</kbd>
<br>
<br>
## Cura GUI Log
If the Cura user interface still starts, you can also <br>
reach these directories from the application menu:
<kbd>Help</kbd>  »  <kbd>Show settings folder</kbd>
<br>
### ![Badge Windows]
```
%APPDATA%\cura\< >\cura.log
```
or
```
C:\Users\<your username>\AppData\Roaming\cura\< >\cura.log
```
<br>
### ![Badge Linux]
```
~/.local/share/cura/< >/cura.log
```
<br>
### ![Badge MacOS]
```
~/Library/Application Support/cura/< >/cura.log
```
<br>
<br>
## Alternative
An alternative is to install the **[ExtensiveSupportLogging]** <br>
plugin, which creates a zip archive of the relevant log files.
If you're experiencing performance issues, we might ask <br>
you to connect the CPU profiler in this plugin and attach <br>
the collected data to your support ticket.
<br>
<!----------------------------------------------------------------------------->
[ExtensiveSupportLogging]: https://marketplace.ultimaker.com/app/cura/plugins/UltimakerPackages/ExtensiveSupportLogging
<!---------------------------------[ Badges ]---------------------------------->
[Badge Windows]: https://img.shields.io/badge/Windows-0078D6?style=for-the-badge&logoColor=white&logo=Windows
[Badge Linux]: https://img.shields.io/badge/Linux-00A95C?style=for-the-badge&logoColor=white&logo=Linux
[Badge MacOS]: https://img.shields.io/badge/MacOS-403C3D?style=for-the-badge&logoColor=white&logo=MacOS


@ -1,22 +0,0 @@
Cura Documentation
====
Welcome to the Cura documentation pages.
Objective
----
The goal of this documentation is to give an overview of the architecture of Cura's source code. The purpose of this overview is to make programmers familiar with Cura's source code so that they may contribute more easily, write plug-ins more easily or get started within the Cura team more quickly.
There are some caveats though. These are *not* within the scope of this documentation:
* There is no documentation on individual functions or classes of the code here. For that, refer to the Doxygen documentation and Python Docstrings in the source code itself, or generate the documentation locally using Doxygen.
* It's virtually impossible and indeed not worth the effort or money to keep this 100% up to date.
* There are no example plug-ins here. There are a number of example plug-ins in the Ultimaker organisation on Github.com to draw from.
* The slicing process is not documented here. Refer to CuraEngine for that.
Due to the nature of the architecture, this documentation also touches on the inner workings of Uranium.
Index
----
The following chapters are available in this documentation:
* [Repositories](repositories.md): An overview of the repositories that together make up the Cura application.
* [Profiles](profiles/profiles.md): About the setting and profile system of Cura.
* [Scene](scene/scene.md): How Cura's 3D scene looks.


@ -1,33 +0,0 @@
Container Stacks
====
When the user selects the profiles and settings to print with, they can swap out a number of profiles. The profiles that are currently in use are stored in several container stacks. These container stacks always have a definition container at the bottom, which defines all available settings and all available properties for each setting. The profiles on top of that definition can then override the `value` property of some of those settings.
When deriving a setting value, a container stack starts looking at the top-most profile to see if it contains an override for that setting. If it does, it returns that override. Otherwise, it looks into the second profile. If that also doesn't have an override for this setting, it looks into the third profile, and so on. The last entry is always a definition container, which contains a value for every setting. This way, the profiles at the top always win over the profiles at the bottom: there is a clear precedence order for which profile wins over which other profile.
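As a rough illustration of that precedence order, here is a minimal sketch (not Cura's actual implementation; the container interface is heavily simplified):
```python
from typing import Any, Dict, List, Optional

class Container:
    """Simplified stand-in for a profile or definition container."""
    def __init__(self, overrides: Dict[str, Any]) -> None:
        self._overrides = overrides

    def get(self, key: str) -> Optional[Any]:
        return self._overrides.get(key)

def evaluate(stack: List[Container], key: str) -> Optional[Any]:
    # stack[0] is the top-most profile, stack[-1] the definition container.
    for container in stack:
        value = container.get(key)
        if value is not None:
            return value  # the first (highest) override wins
    return None  # only possible if even the definition lacks the setting

# The user profile overrides layer_height; everything else comes from the definition.
stack = [Container({"layer_height": 0.3}),
         Container({"layer_height": 0.2, "line_width": 0.4})]
assert evaluate(stack, "layer_height") == 0.3
assert evaluate(stack, "line_width") == 0.4
```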
A Machine Instance
----
A machine instance is a printer that the user has added to their configuration. It consists of multiple container stacks: one for global settings and one for each of the available extruders. This way, different extruders can contain different materials and quality profiles, for instance. The global stack contains a different set of profiles than the extruder stacks.
While Uranium defines no specific roles for the entries in a container stack, Cura defines rigid roles for each slot. These are the layouts of the container stacks for an example printer with 2 extruders.
![Three container stacks](../resources/machine_instance.svg)
To expand on this a bit further, each extruder stack contains the following profiles:
* A user profile, where extruder-specific setting changes are stored that are not (yet) saved to a custom profile. If the user changes a setting that can be adjusted per extruder (such as infill density), it gets stored here. If the user adjusts a setting that is global, it is immediately stored in the user profile of the global stack.
* A custom profile. If the user saves their setting changes to a custom profile, they are moved here from the user profile. A "custom profile" as the user sees it actually consists of multiple profiles: one for each extruder and one for the global settings.
* An intent profile. The user can select between several intents for their print, such as precision, strength, visual quality, etc. This may also be empty, which indicates the "default" intent.
* A quality profile. The user can select between several quality levels.
* A material profile, where the user selects which material is loaded in this extruder.
* A nozzle profile, where the user selects which nozzle is installed in this extruder.
* Definition changes, which stores the changes that the user made for this extruder in the Printer Settings dialogue.
* Extruder. The user is not able to swap this out. This is a definition that lists the extruder number for this extruder and optionally things that are fixed in the printer, such as the nozzle offset.
The global container stack contains the following profiles:
* A user profile, where global setting changes are stored that are not (yet) saved to a custom profile. If the user changes, for instance, the layer height, the new value for the layer height gets stored here.
* A custom profile. If the user saves their setting changes to a custom profile, the global settings that were in the global user profile are moved here.
* An intent profile. Currently this must ALWAYS be empty. There are no global intent profiles. This is there for historical reasons.
* A quality profile. This contains global settings that match with the quality level that the user selected. This global quality profile cannot be specific to a material or nozzle.
* A material profile. Currently this must ALWAYS be empty. There are no global material profiles. This is there for historical reasons.
* A variant profile. Currently this must ALWAYS be empty. There are no global variant profiles. This is there for historical reasons.
* Definition changes, which stores the changes that the user made to the printer in the Printer Settings dialogue.
* Printer. This specifies the currently used printer model, such as Ultimaker 3, Ultimaker S5, etc.


@ -1,70 +0,0 @@
Getting a Setting Value
====
How Cura gets a setting's value is a complex endeavour that requires some explanation. The `value` property gets special treatment for this because there are a few other properties that influence the value. In this page we explain the algorithm to getting a setting value.
This page explains all possible cases for a setting, but not all of them may apply. For instance, a global setting will not evaluate the per-object settings to get its value. Exceptions to the rules for other types of settings will be written down.
Per Object Settings
----
Per-object settings, which are added to an object using the per-object settings tool, always prevail over other setting values. They are not evaluated with the rest of the settings system, because Cura's front-end doesn't need to send all setting values for all objects to CuraEngine separately. It only sends over the per-object settings that get overridden. CuraEngine then evaluates the settings that can be changed per object using the list of settings for that object, falling back on the settings of the object's extruder if the object doesn't have the setting attached. Refer to the [CuraEngine](#CuraEngine) chapter to see how this works.
Settings where the `settable_per_mesh` property is false are not shown in the list of available settings in Cura's per-object settings panel, so they cannot be adjusted per object. CuraEngine will also not evaluate those settings for each object separately. There is (or should always be) a good reason why each of these settings is not evaluated per object: CuraEngine is simply not processing one particular mesh at that moment. For instance, when writing the move to change to the next layer, CuraEngine hasn't processed any of the meshes on that layer yet, so the layer change movement speed, or indeed the layer height, can't change for each object.
The per-object settings are stored in a separate container stack that is particular to the object. The container stack is added to the object via a scene decorator. It has just a single container in it, which contains all of the settings that the user changed.
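To make that concrete, here is a hedged sketch of how such a per-object override could be attached through the decorator described above; the import path matches how `SettingOverrideDecorator` is imported elsewhere in Cura, but the accessor calls are assumptions about the interface rather than a verified recipe:
```python
# Sketch under assumptions: one extra stack per object, holding only the user's overrides.
from cura.Settings.SettingOverrideDecorator import SettingOverrideDecorator

def override_per_object(node, key: str, value) -> None:
    if not node.getDecorator(SettingOverrideDecorator):
        node.addDecorator(SettingOverrideDecorator())            # attach the per-object stack
    per_object_stack = node.callDecoration("getStack")           # the object's own container stack
    per_object_stack.getTop().setProperty(key, "value", value)   # its single container of overrides

# e.g. override_per_object(selected_node, "infill_sparse_density", 40)
```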
Resolve
----
If the setting is not listed in the per-object settings, it needs to be evaluated from the main settings list. However, before evaluating it from a particular extruder, Cura checks whether the setting has a `resolve` property. If it does, it returns the output of the `resolve` property, and that is the end of the evaluation.
The `resolve` property is intended for settings which are global in nature, but still need to be influenced by extruder-specific settings. A good example is the Build Plate Temperature, which is very dependent on the material(s) used by the printer, but there can only be a single bed temperature at a time.
If present, Cura simply evaluates the `resolve` property, which is an arbitrary Python expression, and returns its result as the setting's value. Typically, however, the `resolve` property is a function that takes the values of this setting for all extruders in use and then computes a result based on those. There is a built-in function for that called `extruderValues()`, which returns a list of setting values, one for each extruder. The function can then, for instance, take the average of those. In the case of the build plate temperature it will take the highest of those. In the case of the adhesion type it will choose "raft" if any extruder uses a raft, "brim" as second choice, "skirt" as third choice and "none" only if all extruders use "none". Each setting with a `resolve` property has its own way of resolving the setting. The `extruderValues()` function continues with the algorithm as written below, but repeats it for each extruder.
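For example, `resolve` expressions in the style described above might look like this (illustrative formulas, not copied from Cura's definition files):
```python
# Illustrative `resolve` formulas; the setting names are real Cura settings,
# but these exact expressions are examples.
resolve_examples = {
    # One build plate, so take the hottest temperature any used extruder asks for.
    "material_bed_temperature": "max(extruderValues('material_bed_temperature'))",
    # 'raft' beats 'brim' beats 'skirt' beats 'none' across all extruders.
    "adhesion_type":
        "'raft' if 'raft' in extruderValues('adhesion_type') else "
        "('brim' if 'brim' in extruderValues('adhesion_type') else "
        "('skirt' if 'skirt' in extruderValues('adhesion_type') else 'none'))",
}
```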
Limit To Extruder
----
If a setting is evaluated from a particular extruder stack, it normally gets evaluated from the extruder that the object is assigned to. However there are some exceptions. Some groups of settings belong to a particular "extruder setting", like the Infill Extruder setting, or the Support Extruder setting. Which extruder a setting belongs to is stored in the `limit_to_extruder` property. Settings which have their `limit_to_extruder` property set to `adhesion_extruder_nr`, for instance, belong to the build plate adhesion settings.
If the `limit_to_extruder` property evaluates to a non-negative number, the setting will not be obtained from the object's extruder but from the extruder indicated by `limit_to_extruder`. So even if an object is set to be printed with extruder 0, if the infill extruder is set to extruder 1, any infill setting will be obtained from extruder 1. If `limit_to_extruder` is negative (in particular -1, which is the default), the setting will be obtained from the object's own extruder.
This property is communicated to CuraEngine separately. CuraEngine makes sure that the setting is evaluated from the correct extruder. Refer to the [CuraEngine](#CuraEngine) chapter to see how this works.
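As an illustration of the selection rule above, here is a minimal sketch in plain Python. The property lookup and the stack names are assumptions made for the example, not Cura's actual implementation.

```python
# Hedged sketch: decide which extruder stack a setting should be evaluated from.
def pick_extruder_stack(extruder_stacks, object_extruder_nr, global_stack, key):
    limit_to_extruder = int(global_stack.getProperty(key, "limit_to_extruder") or -1)
    if limit_to_extruder >= 0:
        # e.g. an infill setting is redirected to the Infill Extruder.
        return extruder_stacks[limit_to_extruder]
    return extruder_stacks[object_extruder_nr]  # default: the object's own extruder
```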
Evaluating a Stack
----
After the `resolve` and `limit_to_extruder` properties have been checked, the setting value needs to be evaluated from an extruder stack.
This is explained in more detail in the [Container Stacks](container_stacks.md) documentation. In brief, Cura first checks the highest container in the extruder stack to see whether that container overrides the setting. If it does, it returns that as the setting value. Otherwise, it checks the second container on the stack; if that one overrides it, it returns that value, and otherwise it checks the third container, and so on. If a setting is not overridden by any container in the extruder stack, evaluation continues downward in the global stack. If it is also not overridden there, it eventually arrives at the definition at the bottom of the global stack.
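A hedged sketch of that walk, assuming `getContainers()` returns the containers from top to bottom and that the global stack is checked after the extruder stack; names are illustrative, not the exact Uranium implementation.

```python
# Hedged sketch: find the topmost container that overrides the setting,
# first in the extruder stack, then in the global stack.
def evaluate_from_stacks(extruder_stack, global_stack, key):
    for container in list(extruder_stack.getContainers()) + list(global_stack.getContainers()):
        if container.hasProperty(key, "value"):
            return container.getProperty(key, "value")
    return None  # nothing overrides it: evaluation ends at the definition (next section)
```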
Evaluating a Definition
----
If the evaluation for a setting reaches the last entry of the global stack, its definition, a few more things can happen.
Definition containers have an inheritance structure. For instance, the `ultimaker3` definition container specifies in its metadata that it inherits from `ultimaker`, which in turn inherits from `fdmprinter`. So here too, when evaluating a property from the `ultimaker3` definition, Cura will first check whether the property is overridden by the `ultimaker3` definition itself, and otherwise defer to the `ultimaker` definition and finally to the `fdmprinter` definition. `fdmprinter` is the last line of defence; it contains *all* properties for *all* settings.
But even in `fdmprinter`, not all settings have a `value` property. It is not a required property. If the setting doesn't have a `value` property, the `default_value` property is returned, which is a required property. The distinction between `value` and `default_value` is made to allow CuraEngine to load a definition file on its own when running from the command line (a debugging technique for CuraEngine). It then won't have all of the correct setting values, but at least it doesn't need to evaluate any Python expressions, and you'll be able to make some debugging slices.
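A hedged sketch of this last step, using plain dictionaries instead of Uranium's definition containers. The inheritance chain matches the `ultimaker3` example above; everything else is illustrative.

```python
# Hedged sketch: walk the definition inheritance chain and fall back to
# default_value when no ancestor defines a "value".
def evaluate_from_definition(definitions, machine_id, key):
    current = definitions.get(machine_id)                  # e.g. "ultimaker3"
    while current is not None:
        setting = current["settings"].get(key, {})
        if "value" in setting:
            return setting["value"]                        # may itself be a Python expression
        parent_id = current["metadata"].get("inherits")    # "ultimaker" -> "fdmprinter" -> None
        current = definitions.get(parent_id)
    # fdmprinter defines every setting, so the worst case is its default_value:
    return definitions["fdmprinter"]["settings"][key]["default_value"]
```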
Evaluating a Value Property
----
The `value` property may contain a formula, which is an arbitrary Python expression that is executed by Cura to arrive at a setting value. All containers may set the `value` property: instance containers can only set the `value`, while definitions can set all properties.
While the value can be any sort of formula, some Python functions are restricted for security reasons. Since Cura 4.6, profiles are no longer a "trusted" resource and are therefore subject to heavy restrictions. A formula can use Python's built-in mathematical and list functions as well as a few other basic ones, but things like writing to a file are prohibited.
There are also a few extra things that can be used in these expressions (an illustrative example follows the list):
* Any setting key can be used as a variable that contains the setting's value.
* As explained before, `extruderValues(key)` is a function that returns a list of setting values for a particular setting for all used extruders.
* The function `extruderValue(extruder, key)` will evaluate a particular setting for a particular extruder.
* The function `resolveOrValue(key)` will perform the full setting evaluation as described in this document for the current context (so if this setting is being evaluated for the second extruder it would perform it as if coming from the second extruder).
* The function `defaultExtruderPosition()` will get the first extruder that is not disabled. For instance, if a printer has three extruders but the first is disabled, this would return `1` to indicate the second extruder (0-indexed).
* The function `anyExtruderNrWithOrDefault(key)` will filter the list of extruders on the key, and then give the first index for which it is true, or, if none of them are, the default one as specified by the 'default extruder position' function above.
* The function `anyExtruderWithMaterial(key)` will filter the list of extruders on the key of material quality, and then give the first index for which it is true, or, if none of them are, the default one as specified by the 'default extruder position' function above.
* The function `valueFromContainer(key, index)` will get a setting value from the global stack, but skip the first few containers in that stack. It will skip until it reaches a particular index in the container stack.
* The function `extruderValueFromContainer(key, index)` will get a setting value from the current extruder stack, but skip the first few containers in that stack. It will skip until it reaches a particular index in the container stack.
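To make this concrete, here are a couple of illustrative `value` expressions in the style used by the definition files. The setting keys are real Cura keys, but the formulas are simplified examples, not copied verbatim from `fdmprinter.def.json`.

```python
# Illustrative "value" expressions (simplified; not the exact formulas in Cura):
example_value_expressions = {
    # Another setting used directly as a variable:
    "wall_line_width_0": "wall_line_width",
    # A setting evaluated from a specific extruder:
    "skirt_brim_line_width": "extruderValue(adhesion_extruder_nr, 'line_width')",
    # A helper function used directly:
    "support_extruder_nr": "defaultExtruderPosition()",
}
```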
CuraEngine
----
When starting a slice, Cura sends the scene to CuraEngine, and with each model it sends over the per-object settings that belong to it. It also sends over all setting values, as evaluated from each extruder and from the global stack, along with the `limit_to_extruder` property. CuraEngine stores this and then starts its slicing process. CuraEngine also has a hierarchical structure for its settings with fallbacks. This is explained in detail in [the documentation of CuraEngine](https://github.com/Ultimaker/CuraEngine/blob/master/docs/settings.md) and briefly again here.
Each model gets a setting container assigned. The per-object settings are stored in those. The fallback for this container is set to be the extruder with which the object is printed. The extruder uses the current *mesh group* as fallback (which is a concept that Cura's front-end doesn't have). Each mesh group uses the global settings container as fallback.
During the slicing process CuraEngine will evaluate the settings from its current context as it goes. For instance, when processing the walls for a particular mesh, it will request the Outer Wall Line Width setting from the settings container of that mesh. When it's not processing a particular mesh but, for instance, the travel moves between two meshes, it uses the currently applicable extruder. So this business logic actually defines how a setting can be configured per mesh, per extruder or only globally. The `settable_per_extruder` and related properties of settings are only used in the front-end to determine how the settings are shown to the user.
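The actual implementation is C++, but the fallback structure described above can be sketched in a few lines of Python. The class and variable names here are illustrative, not CuraEngine's real types.

```python
# Hedged sketch of CuraEngine's fallback chain: mesh -> extruder -> mesh group -> global.
class SettingContainer:
    def __init__(self, settings=None, fallback=None):
        self.settings = settings or {}
        self.fallback = fallback

    def get(self, key):
        if key in self.settings:
            return self.settings[key]
        if self.fallback is not None:
            return self.fallback.get(key)
        raise KeyError(key)

global_settings = SettingContainer({"layer_height": 0.2})
mesh_group = SettingContainer(fallback=global_settings)
extruder_0 = SettingContainer({"wall_line_width_0": 0.4}, fallback=mesh_group)
mesh = SettingContainer({"infill_sparse_density": 40}, fallback=extruder_0)

assert mesh.get("layer_height") == 0.2  # falls back all the way to the global settings
```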

View file

@ -1,30 +0,0 @@
Profiles
====
Cura's profile system is very advanced and has gotten pretty complex. This chapter is an attempt to document how it is structured.
Index
----
The following pages describe the profile and setting system of Cura:
* [Container Stacks](container_stacks.md): Which profiles can be swapped out and how they are ordered when evaluating a setting.
* [Setting Properties](setting_properties.md): What properties can each setting have?
* [Getting a Setting Value](getting_a_setting_value.md): How Cura arrives at a value for a certain setting.
Glossary
----
The terminology for these profiles is not always obvious. Here is a glossary of the terms that we'll use in this chapter.
* **Profile:** Either an *instance container* or a *definition container*.
* **Definition container:** Profile that's stored as .def.json file, defining new settings and all of their properties. In Cura these represent printer models and extruder trains.
* **Instance container:** Profile that's stored as .inst.cfg file or .xml.fdm_material file, which override some setting values. In Cura these represent the other profiles.
* **[Container] stack:** A list of profiles, with one definition container at the bottom and instance containers for the rest. All settings are defined in the definition container. The rest of the profiles each specify a set of value overrides. The profiles at the top always override the profiles at the bottom.
* **Machine instance:** An instance of a printer that the user has added. The list of all machine instances is shown in a drop-down in Cura's interface.
* **Material:** A type of filament that's being sold by a vendor as a product.
* **Filament spool:** A single spool of material.
* **Quality profile:** A profile that is one of the options when the user selects which quality level they want to print with.
* **Intent profile:** A profile that is one of the options when the user selects what their intent is.
* **Custom profile:** A user-made profile that is stored when the user selects to "create a profile from the current settings/overrides".
* **Quality-changes profile:** Alternative name for *custom profile*. This name is used in the code more often, but it's a bit misleading so this documentation prefers the term "custom profile".
* **User profile:** A profile containing the settings that the user has changed, but not yet saved to a profile.
* **Variant profile:** A profile containing some overrides that allow the user to select variants of the definition. As of this writing this is only used for the nozzles.
* **Quality level:** A measure of quality where the user can select from, for instance "normal", "fast", "high". When selecting a quality level, Cura will select a matching quality profile for each extruder.
* **Quality type:** Alternative name for *quality level*. This name is used in the code more often, but this documentation prefers the term "quality level".
* **Inheritance function:** A function through which the `value` of a setting is calculated. This may depend on other settings.

View file

@ -1,78 +0,0 @@
Setting Properties
====
Each setting in Cura has a number of properties. It's not just a key and a value. This page lists the properties that a setting can define; a hedged example entry follows the list.
* `key` (string): __The identifier by which the setting is referenced.__
* This is not a human-readable name, but just a reference string, such as `layer_height_0`.
* This is not actually a real property but just an identifier; it can't be changed.
* Typically these are named with the most significant category first, in order to sort them better, such as `material_print_temperature`.
* `value` (optional): __The current value of the setting.__
* This can be a function (an arbitrary Python expression) that depends on the values of other settings.
* If it's not present, the `default_value` is used.
* `default_value`: __A default value for the setting if `value` is undefined.__
* This property is required.
* It can't be a Python expression, but it can be any JSON type.
* This is made separate so that CuraEngine can read it out for its debugging mode via the command line, without needing a complete Python interpreter.
* `label` (string): __The human-readable name for the setting.__
* This label is translated.
* `description` (string): __A longer description of what the setting does when you change it.__
* This description is translated.
* `type` (string): __The type of value that this setting contains.__
* Allowed types are: `bool`, `str`, `float`, `int`, `enum`, `category`, `[int]`, `vec3`, `polygon` and `polygons`.
* `unit` (optional string): __A unit that is displayed at the right-hand side of the text field where the user enters the setting value.__
* `resolve` (optional string): __A Python expression that resolves disagreements for global settings if multiple per-extruder profiles define different values for a setting.__
* Typically this takes the values for the setting from all stacks and computes one final value for it that will be used for the global setting. For instance, the `resolve` function for the build plate temperature is `max(extruderValues('material_bed_temperature'))`, meaning that it will use the hottest bed temperature of all materials of the extruders in use.
* `limit_to_extruder` (optional): __A Python expression that indicates which extruder a setting will be obtained from.__
* This is used for settings that may be extruder-specific but the extruder is not necessarily the current extruder. For instance, support settings need to be evaluated for the support extruder. Infill settings need to be evaluated for the infill extruder if the infill extruder is changed.
* `enabled` (optional string or boolean): __Whether the setting can currently be made visible for the user.__
* This can be a simple true/false, or a Python expression that depends on other settings.
* Typically used for settings that don't apply when another setting is disabled, such as to hide the support settings if support is disabled.
* `minimum_value` (optional): __The lowest acceptable value for this setting.__
* If it's any lower, Cura will not allow the user to slice.
* This property only applies to numerical settings.
* By convention this is used to prevent setting values that are technically or physically impossible, such as a layer height of 0mm.
* `maximum_value` (optional): __The highest acceptable value for this setting.__
* If it's any higher, Cura will not allow the user to slice.
* This property only applies to numerical settings.
* By convention this is used to prevent setting values that are technically or physically impossible, such as a support overhang angle of more than 90 degrees.
* `minimum_value_warning` (optional): __The threshold under which a warning is displayed to the user.__
* This property only applies to numerical settings.
* By convention this is used to indicate that it will probably not print very nicely with such a low setting value.
* `maximum_value_warning` (optional): __The threshold above which a warning is displayed to the user.__
* This property only applies to numerical settings.
* By convention this is used to indicate that it will probably not print very nicely with such a high setting value.
* `settable_globally` (optional boolean): __Whether the setting can be changed globally.__
* For some mesh-type settings such as `support_mesh` this doesn't make sense, so those can't be changed globally, and they are not displayed in the main settings list.
* `settable_per_meshgroup` (optional boolean): __Whether a setting can be changed per group of meshes.__
* *This is currently unused by Cura.*
* `settable_per_extruder` (optional boolean): __Whether a setting can be changed per extruder.__
* Some settings, like the build plate temperature, can't be adjusted separately for each extruder. An icon is shown in the interface to indicate this.
* If the user changes these settings they are stored in the global stack.
* `settable_per_mesh` (optional boolean): __Whether a setting can be changed per mesh.__
* The settings that can be changed per mesh are shown in the list of available settings in the per-object settings tool.
* `children` (optional list): __A list of child settings.__
* These are displayed with an indentation. If all child settings are overridden by the user, the parent setting gets greyed out to indicate that the parent setting has no effect any more. This is not strictly always the case though, because that would depend on the inheritance functions in the `value`.
* `icon` (optional string): __A path to an icon to be displayed.__
* Only applies to setting categories.
* `allow_empty` (optional bool): __Whether the setting is allowed to be empty.__
* If it's not, this will be treated as a setting error and Cura will not allow the user to slice.
* Only applies to string-type settings.
* `warning_description` (optional string): __A warning message to display when the setting has a warning value.__
* *This is currently unused by Cura.*
* `error_description` (optional string): __An error message to display when the setting has an error value.__
* *This is currently unused by Cura.*
* `options` (dictionary): __A list of values that the user can choose from.__
* The keys of this dictionary are the values by which CuraEngine identifies each option.
* The values are human-readable strings and will be translated.
* Only applies to (and only required for) enum-type settings.
* `comments` (optional string): __Comments to other programmers about the setting.__
* *This is currently unused by Cura.*
* `is_uuid` (optional boolean): __Whether or not this setting indicates a UUID-4.__
* If it is, the setting will indicate an error if it's not in the correct format.
* Only applies to string-type settings.
* `regex_blacklist_pattern` (optional string): __A regular expression, where if the setting value matches with this regular expression, it gets an error state.__
* Only applies to string-type settings.
* `error_value` (optional): __If the setting value is equal to this value, it will show a setting error.__
* This is used to display errors for non-numerical settings such as checkboxes.
* `warning_value` (optional): __If the setting value is equal to this value, it will show a setting warning.__
* This is used to display warnings for non-numerical settings such as checkboxes.
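A hedged, abbreviated example of what an entry with several of these properties could look like in a definition file, written here as a Python dict. Real entries live in `.def.json` files; the key and the values below are made up for illustration.

```python
# Illustrative setting entry; property names match the list above,
# the key and the values are hypothetical.
example_setting = {
    "example_line_width": {
        "label": "Example Line Width",
        "description": "Width of a single printed line.",
        "type": "float",
        "unit": "mm",
        "default_value": 0.4,
        "value": "machine_nozzle_size",            # illustrative inheritance expression
        "minimum_value": "0.001",
        "maximum_value_warning": "2 * machine_nozzle_size",
        "enabled": True,
        "settable_per_mesh": True,
    }
}
```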

View file

@ -1,33 +0,0 @@
Repositories
====
Cura uses a number of repositories where parts of our source code are separated, in order to get a cleaner architecture. Those repositories are:
* [Cura](https://github.com/Ultimaker/Cura) is the main repository for the front-end of Cura. This contains:
- all of the business logic for the front-end, including the specific types of profiles that are available
- the concept of 3D printers and materials
- specific tools for handling 3D printed models
- pretty much all of the GUI
- Ultimaker services such as the Marketplace and accounts.
* [Uranium](https://github.com/Ultimaker/Uranium) is the underlying framework the Cura repository is built on. [Uranium](https://github.com/Ultimaker/Uranium) is a framework for desktop applications that handle 3D models. It has a separate back-end. This provides Cura with:
- a basic GUI framework ([Qt](https://www.qt.io/))
- a 3D scene, a rendering system
- a plug-in system
- a system for stacked profiles that change settings.
* [CuraEngine](https://github.com/Ultimaker/CuraEngine) is the slicer used by Cura in the background. This does the actual process that converts 3D models into a toolpath for the printer.
* [libArcus](https://github.com/Ultimaker/libArcus) handles the communication with CuraEngine. [libArcus](https://github.com/Ultimaker/libArcus) is a small library that wraps around [Protobuf](https://developers.google.com/protocol-buffers/) in order to make it run over a local socket.
* [cura-build](https://github.com/Ultimaker/cura-build): Cura's build scripts.
* [cura-build-environment](https://github.com/Ultimaker/cura-build-environment): build scripts for building dependencies.
There are also a number of repositories under our control that are not integral parts of Cura's architecture, but more like separated side-gigs:
* [libSavitar](https://github.com/Ultimaker/libSavitar) is used for loading and writing 3MF files.
* [libCharon](https://github.com/Ultimaker/libCharon) is used for loading and writing UFP files.
* [cura-binary-data](https://github.com/Ultimaker/cura-binary-data): pre-compiled parts to make the build system a bit simpler. This holds things which would require considerable tooling to build automatically, like:
- the machine-readable translation files
- the Marlin builds for firmware updates
* [Cura-squish-tests](https://github.com/Ultimaker/Cura-squish-tests): automated GUI tests.
* [fdm_materials](https://github.com/Ultimaker/fdm_materials) stores Material profiles. This is separated out and combined in our build process, so that the firmware for Ultimaker's printers can use the same set of profiles too.
Interplay
----
At a very high level, Cura's repositories interconnect as follows:
![Overview of interplay between repositories](resources/repositories.svg)


View file

@ -1,54 +0,0 @@
digraph {
"cpython/3.10.4@ultimaker/testing" -> "zlib/1.2.12"
"cpython/3.10.4@ultimaker/testing" -> "openssl/1.1.1l"
"cpython/3.10.4@ultimaker/testing" -> "expat/2.4.1"
"cpython/3.10.4@ultimaker/testing" -> "libffi/3.2.1"
"cpython/3.10.4@ultimaker/testing" -> "mpdecimal/2.5.0@ultimaker/testing"
"cpython/3.10.4@ultimaker/testing" -> "libuuid/1.0.3"
"cpython/3.10.4@ultimaker/testing" -> "libxcrypt/4.4.25"
"cpython/3.10.4@ultimaker/testing" -> "bzip2/1.0.8"
"cpython/3.10.4@ultimaker/testing" -> "gdbm/1.19"
"cpython/3.10.4@ultimaker/testing" -> "sqlite3/3.36.0"
"cpython/3.10.4@ultimaker/testing" -> "tk/8.6.10"
"cpython/3.10.4@ultimaker/testing" -> "ncurses/6.2"
"cpython/3.10.4@ultimaker/testing" -> "xz_utils/5.2.5"
"pynest2d/5.1.0-beta+3@ultimaker/stable" -> "libnest2d/5.1.0-beta+3@ultimaker/stable"
"pynest2d/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"freetype/2.12.1" -> "libpng/1.6.37"
"freetype/2.12.1" -> "zlib/1.2.12"
"freetype/2.12.1" -> "bzip2/1.0.8"
"freetype/2.12.1" -> "brotli/1.0.9"
"savitar/5.1.0-beta+3@ultimaker/stable" -> "pugixml/1.12.1"
"savitar/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"arcus/5.1.0-beta+3@ultimaker/stable" -> "protobuf/3.17.1"
"arcus/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"arcus/5.1.0-beta+3@ultimaker/stable" -> "zlib/1.2.12"
"libpng/1.6.37" -> "zlib/1.2.12"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "clipper/6.4.2"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "boost/1.78.0"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "rapidjson/1.1.0"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "stb/20200203"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "protobuf/3.17.1"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "arcus/5.1.0-beta+3@ultimaker/stable"
"tcl/8.6.10" -> "zlib/1.2.12"
"uranium/5.1.0-beta+3@ultimaker/stable" -> "arcus/5.1.0-beta+3@ultimaker/stable"
"uranium/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"libnest2d/5.1.0-beta+3@ultimaker/stable" -> "boost/1.78.0"
"libnest2d/5.1.0-beta+3@ultimaker/stable" -> "clipper/6.4.2"
"libnest2d/5.1.0-beta+3@ultimaker/stable" -> "nlopt/2.7.0"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "arcus/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "curaengine/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "savitar/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "pynest2d/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "uranium/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "fdm_materials/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "cura_binary_data/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "cpython/3.10.4@ultimaker/testing"
"fontconfig/2.13.93" -> "freetype/2.12.1"
"fontconfig/2.13.93" -> "expat/2.4.1"
"fontconfig/2.13.93" -> "libuuid/1.0.3"
"tk/8.6.10" -> "tcl/8.6.10"
"tk/8.6.10" -> "fontconfig/2.13.93"
"tk/8.6.10" -> "xorg/system"
"protobuf/3.17.1" -> "zlib/1.2.12"
}

View file

@ -1,55 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<svg xmlns="http://www.w3.org/2000/svg" width="700" height="1010">
<defs>
<path id="stack-header" d="m0,50 v-30 a20,20 0 0 1 20,-20 h260 a20,20 0 0 1 20,20 v30 z" />
<marker id="arrow" refX="2" refY="1.5" markerWidth="3" markerHeight="3" orient="auto-start-reverse">
<polygon points="0,0 3,1.5 0,3" />
</marker>
</defs>
<g stroke="black" stroke-width="5" fill="silver"> <!-- Stack headers. -->
<use href="#stack-header" x="200" y="555" />
<use href="#stack-header" x="5" y="5" />
<use href="#stack-header" x="395" y="5" />
</g>
<g stroke="black" stroke-width="10" fill="none"> <!-- Stack outlines. -->
<rect x="200" y="555" width="300" height="450" rx="20" /> <!-- Global stack. -->
<rect x="5" y="5" width="300" height="450" rx="20" /> <!-- Left extruder. -->
<rect x="395" y="5" width="300" height="450" rx="20" /> <!-- Right extruder. -->
</g>
<g font-family="sans-serif" font-size="25" dominant-baseline="middle" text-anchor="middle">
<text x="350" y="582.5">Global stack</text> <!-- Slightly lowered since the top line is thicker than the bottom. -->
<text x="350" y="630">User</text>
<text x="350" y="680">Custom</text>
<text x="350" y="730">Intent</text>
<text x="350" y="780">Quality</text>
<text x="350" y="830">Material</text>
<text x="350" y="880">Variant</text>
<text x="350" y="930">Definition changes</text>
<text x="350" y="980">Printer</text>
<text x="155" y="32.5">Left extruder</text> <!-- Slightly lowered again. -->
<text x="155" y="80">User</text>
<text x="155" y="130">Custom</text>
<text x="155" y="180">Intent</text>
<text x="155" y="230">Quality</text>
<text x="155" y="280">Material</text>
<text x="155" y="330">Nozzle</text>
<text x="155" y="380">Definition changes</text>
<text x="155" y="430">Extruder</text>
<text x="545" y="32.5">Right extruder</text> <!-- Slightly lowered again. -->
<text x="545" y="80">User</text>
<text x="545" y="130">Custom</text>
<text x="545" y="180">Intent</text>
<text x="545" y="230">Quality</text>
<text x="545" y="280">Material</text>
<text x="545" y="330">Nozzle</text>
<text x="545" y="380">Definition changes</text>
<text x="545" y="430">Extruder</text>
</g>
<g stroke="black" stroke-width="5" marker-end="url(#arrow)"> <!-- Arrows. -->
<line x1="155" y1="455" x2="345" y2="545" />
<line x1="545" y1="455" x2="355" y2="545" />
</g>
</svg>


View file

@ -1,70 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<svg xmlns="http://www.w3.org/2000/svg" width="1000" height="1000">
<defs>
<marker id="arrow" refX="2" refY="1.5" markerWidth="3" markerHeight="3" orient="auto-start-reverse">
<polygon points="0,0 3,1.5 0,3" />
</marker>
</defs>
<g marker-end="url(#arrow)" stroke="black" stroke-width="5"> <!-- Arrows. -->
<!-- Towards CuraEngine and back. -->
<line x1="475" y1="400" x2="475" y2="307.5" />
<line x1="475" y1="250" x2="475" y2="210" />
<line x1="525" y1="200" x2="525" y2="242.5" />
<line x1="525" y1="300" x2="525" y2="390" />
<!-- From libSavitar. -->
<line x1="100" y1="425" x2="142.5" y2="425" />
<line x1="300" y1="425" x2="390" y2="425" />
<!-- From fdm_materials. -->
<line x1="350" y1="575" x2="390" y2="575" />
<!-- To libCharon. -->
<line x1="600" y1="500" x2="692.5" y2="500" />
<line x1="900" y1="500" x2="945" y2="500" />
<!-- To Uranium. -->
<line x1="500" y1="600" x2="500" y2="690" />
</g>
<g stroke="black" fill="none"> <!-- Boxes representing repositories. -->
<g stroke-width="10"> <!-- Major repositories. -->
<rect x="400" y="400" width="200" height="200" rx="20" /> <!-- Cura. -->
<rect x="350" y="700" width="300" height="200" rx="20" /> <!-- Uranium. -->
<rect x="300" y="5" width="400" height="195" rx="20" /> <!-- CuraEngine. -->
</g>
<g stroke-width="5"> <!-- Minor repositories. -->
<rect x="150" y="350" width="150" height="100" rx="20" /> <!-- libSavitar. -->
<rect x="100" y="550" width="250" height="100" rx="20" /> <!-- fdm_materials. -->
<rect x="430" y="250" width="140" height="50" rx="20" /> <!-- libArcus. -->
<rect x="700" y="450" width="200" height="100" rx="20" /> <!-- libCharon. -->
</g>
</g>
<g font-family="sans-serif" text-anchor="middle" dominant-baseline="middle"> <!-- Labels. -->
<g font-size="50"> <!-- Major repositories. -->
<text x="500" y="500">Cura</text>
<text x="500" y="800">Uranium</text>
<text x="500" y="102.5">CuraEngine</text>
</g>
<g font-size="25"> <!-- Minor repositories and arrows. -->
<text x="225" y="400">libSavitar</text>
<text x="225" y="600">fdm_materials</text>
<text x="500" y="275">libArcus</text>
<text x="800" y="500">libCharon</text>
<g text-anchor="start">
<text x="645" y="490" transform="rotate(-90, 645, 490)">G-code</text>
<text x="950" y="500">UFP</text>
<text x="535" y="345">G-code</text>
<text x="345" y="415" transform="rotate(-90, 345, 415)">Model</text>
<text x="510" y="645">Built upon</text>
</g>
<g text-anchor="end">
<text x="465" y="345">Scene</text>
<text x="90" y="425">3MF</text>
</g>
</g>
</g>
</svg>


View file

@ -1,27 +0,0 @@
Build Volume
====
The build volume is a scene node. This node gets placed somewhere in the scene. This is a specialised scene node that draws the build volume and all of its bits and pieces.
Volume bounds
----
The build volume draws a cube (for rectangular build plates) that represents the printable build volume. This outline is drawn with a blue line. To render this, the Build Volume scene node generates a cube and instructs OpenGL to draw a wireframe of this cube. This way the wireframe is always a single pixel wide regardless of view distance. This cube is automatically resized when the relevant settings change, like the width, height and depth of the printer, the shape of the build plate, the Print Sequence or the gantry height.
The build volume also draws a grid underneath the build volume. The grid features 1cm lines, which allow the user to roughly estimate how big their print is or the distance between prints. It also features a finer 1mm line pattern within that grid. The grid is drawn as a single quad. This quad is then sent to the graphics card with a specialised shader which draws the grid pattern.
For elliptical build plates, the volume bounds are drawn as two circles, one at the top and one at the bottom of the available height. The build plate grid is drawn as a tessellated circle, but with the same shader.
Disallowed areas
----
The build volume also calculates and draws the disallowed areas. These are drawn as a grey shadow. The point of these disallowed areas is to denote the areas where the user is not allowed to place any objects. There can be many reasons to forbid placing an object somewhere.
One disallowed area that is always present is the border around the build volume. This border is there to prevent the nozzle from going outside of the bounds of the build volume. For instance, if you were to print an object with a brim of 8mm, you won't be able to place that object closer than 8mm to the edge of the build volume. Doing so would draw part of the brim outside of the build volume. The width of these disallowed areas depends on a bunch of things. Most commonly the build plate adhesion setting or the Avoid Distance setting is the culprit. However this border is also affected by the draft shield, ooze shield and Support Horizontal Expansion, among others.
Another disallowed area stems from the distance between the nozzles for some multi-extrusion printers. The total build volume in Cura is normally the volume that can be reached by either nozzle. However for every extruder that your print uses, the build volume will be shrunk to the intersecting area that all used nozzles can reach. This is done by adding disallowed areas near the border. For instance, if you have two extruders with 18mm X distance between them, and your print uses only the left extruder, there will be an extra border of 18mm on the right hand side of the printer, because the left nozzle can't reach that far to the right. If you then use both extruders, there will be an 18mm border on both sides.
There are also disallowed areas for features that are printed. There are as of this writing two such disallowed areas: The prime tower and the prime blob. You can't print an object on those locations since they would intersect with the printed feature.
Then there are disallowed areas imposed by the current printer. Some printers have things in the way of your print, such as clips that hold the build plate down, or cameras, switching bays or wiping brushes. These are encoded in the `machine_disallowed_areas` and `nozzle_disallowed_areas` settings, as polygons. The difference between these two settings is that one is intended to describe where the print head is not allowed to move. The other is intended to describe where the currently active nozzle is not allowed to move. This distinction is meant to allow inactive nozzles to move over things like build plate clips or stickers, which can slide underneath an inactive nozzle.
Finally, there are disallowed areas imposed by other objects that you want to print. Each object and group has an associated Convex Hull Node, which denotes the volume that the object is going to take up while printing. This convex hull is projected down onto the build plate and determines the area that the object is going to occupy there.
Each type of disallowed area is affected by certain settings. The border around the build volume, for instance, is affected by the brim, but the disallowed areas for printed objects are not. This is because the brim could go outside of the build volume but the brim can't hit any other objects. If the brim comes too close to other objects, it merges with the brim of those objects. As such, generating each type of disallowed area requires specialised business logic to determine how the setting affects the disallowed area. It needs to take the highest of two settings sometimes, or it needs to sum them together, multiplying a certain line width by an accompanying line count setting, and so on. All this logic is implemented in the BuildVolume class.
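As a flavour of that business logic, here is a hedged sketch of how the adhesion-related part of the border width could be computed. The setting keys are real Cura keys, but the combination rules are heavily simplified compared to the real BuildVolume class.

```python
# Simplified sketch: compute the extra border needed for build plate adhesion.
# The real logic also handles the draft shield, ooze shield, support expansion,
# and more; treat this as an illustration only.
def adhesion_border_width(settings):
    adhesion_type = settings["adhesion_type"]
    if adhesion_type == "brim":
        return settings["brim_line_count"] * settings["skirt_brim_line_width"]
    if adhesion_type == "raft":
        return settings["raft_margin"]
    if adhesion_type == "skirt":
        return settings["skirt_gap"] + settings["skirt_line_count"] * settings["skirt_brim_line_width"]
    return 0.0

print(adhesion_border_width({"adhesion_type": "brim",
                             "brim_line_count": 20,
                             "skirt_brim_line_width": 0.4}))  # -> 8.0, matching the 8 mm brim example above
```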


View file

@ -1,113 +0,0 @@
# Operations and the OperationStack
Cura supports an operation stack. The `OperationStack` class maintains a history of the operations performed in Cura, which allows for undo and redo actions. Every operation registers itself in the stack. The `OperationStack` supports the following functions (a hedged usage sketch follows the notes below):
* `push(operation)`: Pushes an operation in the stack and applies the operation. This function is called when an operation pushes itself in the stack.
* `undo()`: Reverses the actions performed by the last operation and reduces the current index of the stack.
* `redo()`: Applies the actions performed by the next operation in the stack and increments the current index of the stack.
* `getOperations()`: Returns a list of all the operations that are currently inside the OperationStack
* `canUndo()`: Indicates whether the index of the operation stack has reached the bottom of the stack, which means that there are no more operations to be undone.
* `canRedo()`: Indicates whether the index of the operation stack has reached the top of the stack, which means that there are no more operations to be redone.
**Note 1:** When consecutive operations are performed very quickly after each other, they are merged together at the top of the stack. This ensures that these minor operations will be undone with one Undo keystroke (e.g. when you move the object around and press and release the left mouse button very quickly, it is considered one move operation).
**Note 2:** When an operation is pushed in the middle of the stack, all operations above it are removed from the stack. This ensures that there won't be any "history branches" created.
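A hedged usage sketch of the stack in combination with a grouped operation. The import paths and signatures follow Uranium's layout, but verify them against the Uranium version you are working with before relying on them.

```python
# Hedged sketch: move all selected nodes 10 mm along X as a single undo step.
from UM.Application import Application
from UM.Math.Vector import Vector
from UM.Operations.GroupedOperation import GroupedOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Scene.Selection import Selection

group = GroupedOperation()
for node in Selection.getAllSelectedObjects():
    group.addOperation(TranslateOperation(node, Vector(10, 0, 0)))
group.push()  # applies the grouped move and registers it on the OperationStack

Application.getInstance().getOperationStack().undo()  # one undo reverses the whole group
```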
### Operations
Every action that happens in the scene and affects one or multiple models is associated with a subclass of the `Operation` class and is added to the `OperationStack`. The subclassed operations that can be found in Cura (excluding the ones from downloadable plugins) are the following:
* [GroupedOperation](#groupedoperation)
* [AddSceneNodeOperation](#addscenenodeoperation)
* [RemoveSceneNodeOperation](#removescenenodeoperation)
* [SetParentOperation](#setparentoperation)
* [SetTransformOperation](#settransformoperation)
* [SetObjectExtruderOperation](#setobjectextruderoperation)
* [GravityOperation](#gravityoperation)
* [PlatformPhysicsOperation](#platformphysicsoperation)
* [TranslateOperation](#translateoperation)
* [ScaleOperation](#scaleoperation)
* [RotateOperation](#rotateoperation)
* [MirrorOperation](#mirroroperation)
* [LayFlatOperation](#layflatoperation)
* [SetBuildPlateNumberOperation](#setbuildplatenumberoperation)
### GroupedOperation
The `GroupedOperation` is an operation that groups several other operations together. The intent of this operation is to hide an underlying chain of operations from the user if they correspond to only one interaction with the user, such as an operation applied to multiple scene nodes or a re-arrangement of multiple items in the scene.
Once a `GroupedOperation` is pushed into the stack, it applies all of its children operations in one go. Similarly, when it is undone, it reverses all its children operations at once.
### AddSceneNodeOperation
The `AddSceneNodeOperation` is added to the stack whenever a mesh is loaded inside the `Scene`, either by a `FileReader` or by inserting a [Support Blocker](tools.md#supporteraser-tool) in an object.
### RemoveSceneNodeOperation
The `RemoveSceneNodeOperation` is added to the stack whenever a mesh is removed from the Scene by the user or when the user requests to clear the build plate (_Ctrl+D_).
### SetParentOperation
The `SetParentOperation` changes the parent of a node. It is primarily used when grouping (the group node is set as the nodes' parent) and ungrouping (the group's children's parent is set to the group's parent before the group node is deleted), or when a SupportEraser node is added to the scene (to set the selected object as the Eraser's parent).
### SetTransformOperation
The `SetTransformOperation` translates, rotates, and scales a node all at once. This operation accepts a transformation matrix, an orientation matrix, and a scale matrix, and it is used by the _"Reset All Model Positions"_ and _"Reset All Model Transformations"_ options in the right-click (context) menu.
### SetObjectExtruderOperation
This operation is used to set the extruder with which a certain object should be printed. It adds a [SettingOverrideDecorator](scene.md#settingoverridedecorator) to the object (if it doesn't have one already) and then sets the extruder number via the decoration function `node.callDecoration("setActiveExtruder", extruder_id)`.
### GravityOperation
The `GravityOperation` moves a scene node down to 0 on the y-axis. It is currently used by the _"Lay flat"_ and _"Select face to align to the build plate"_ actions of the `RotationTool` to ensure that the object will end up touching the build plate after the corresponding rotation operations have been done.
### PlatformPhysicsOperation
The `PlatformPhysicsOperation` is generated by the `PlatformPhysics` class and it is associated with the preferences _"Ensure models are kept apart"_ and _"Automatically drop models to the build plate"_. If any of these preferences is set to true, the `PlatformPhysics` class periodically checks to make sure that the two conditions are met and if not, it calculates the move vector for each of the nodes that will satisfy the conditions.
Once the move vectors have been computed, they are applied to the nodes through consecutive `PlatformPhysicsOperations`, whose job is to use the `translate` function on the nodes.
**Note:** When there are multiple nodes, multiple `PlatformPhysicsOperations` may be generated (all models may be moved to ensure they are kept apart). These operations eventually get merged together by the `OperationStack` due to the fact that the individual operations are applied very fast one after the other.
### TranslateOperation
The `TranslateOperation` applies a linear transformation on a node, moving the node in the scene. This operation is primarily linked to the [TranslateTool](tools.md#translatetool) but it is also used in other places around Cura, such as arranging objects on the build plate (Ctrl+R) and centering an object to the build plate (via the right-click context menu's _"Center Selected Model"_ option).
When an object is moved using the move tool handles, multiple translate operations are generated to make sure that the object is rendered properly while it is moved. These translate operations are merged together once the user releases the tool handle.
**Note:** Some functionalities may move (translate) nodes without generating a TranslateOperation (such as when a model is imported from a 3MF file into a certain position). This ensures that the move cannot be accidentally undone by the user.
### ScaleOperation
The `ScaleOperation` scales the selected scene node uniformly or non-uniformly. This operation is primarily generated by the [ScaleTool](tools.md#scaletool).
When an object is scaled using the scale tool handles, multiple scale operations are generated to make sure that the object is rendered properly while it is being resized. These scale operations are merged together once the user releases the tool handle.
**Note:** When the _"Scale extremely small models"_ or the _"Scale large models"_ preferences are enabled the model is scaled when it is inserted into the build plate but it **DOES NOT** generate a `ScaleOperation`. This ensures that Cura doesn't register the scaling as an action that can be undone and the user doesn't accidentally end up with a very big or very small model.
### RotateOperation
The `RotateOperation` rotates the selected scene node(s) according to a given rotation quaternion and, optionally, around a given point. This operation is primarily generated by the [RotationTool](tools.md#rotatetool). It is also used by the arrange algorithm, which may rotate some models to fit them in the build plate.
When an object is rotated using the rotate tool handles, multiple rotate operations are generated to make sure that the object is rendered properly while it is being rotated. These operations are merged together once the user releases the tool handle.
### MirrorOperation
The `MirrorOperation` mirrors the selected object. It is primarily associated with the [MirrorTool](tools.md#mirrortool) and allows for mirroring the object in a certain direction, using the `MirrorToolHandles`.
The `MirrorOperation` accepts a transformation matrix that should only define values on the diagonal of the matrix, and only the values 1 or -1. It allows for mirroring around the center of the object or around the axis origin. The latter isn't used that often.
### LayFlatOperation
The `LayFlatOperation` computes some orientation to hopefully lay the object flat on the build plate. It is generated by the `layFlat()` function of the [RotateTool](tools.md#rotatetool). Contrary to the other operations, the `LayFlatOperation` is computed in a separate thread through the `LayFlatJob` since it can be quite computationally expensive.
### SetBuildPlateNumberOperation
The `SetBuildPlateNumberOperation` is linked to a legacy feature which allowed the user to have multiple build plates open in Cura at the same time. With this operation it was possible to transfer a node to another build plate through the node's [BuildPlateDecorator](scene.md#buildplatedecorator) by calling the decoration `node.callDecoration("setBuildPlateNumber", new_build_plate_nr)`.
**Note:** Changing the active build plate is a disabled feature in Cura and it is intended to be completely removed (internal ticket: CURA-4975), along with the `SetBuildPlateNumberOperation`.

View file

@ -1,216 +0,0 @@
Scene
====
The 3D scene in Cura is designed as a [Scene Graph](https://en.wikipedia.org/wiki/Scene_graph), which is common in many 3D graphics applications. The scene graph of Cura is usually very flat, but has the possibility to have nested objects which inherit transformations from each other.
Scene Graph
----
Cura's scene graph is simply a tree data structure. This tree contains all scene nodes, which represent the objects in the 3D scene.
The main idea behind the scene tree is that each scene node has a transformation applied to it. The scene nodes can be nested beneath other scene nodes. The transformation of the parents is then also applied to the children. This way you can have scene nodes grouped together and transform the group as a whole. Since the transformations are all linear, this ensures that the elements of this group stay in the same relative position and orientation. It will look as if the whole group is a single object. This idea is very common for games where objects are often composed of multiple 3D models but need to move together as a whole. For Cura it is used to group objects together and to transform the collision area correctly.
Class Diagram
----
The following class diagram depicts the classes that interact with the Scene
![alt text](images/components_interacting_with_scene.jpg)
The scene lives in the Controller of the Application, and it is primarily interacting with SceneNode objects, which are the components of the Scene Graph.
A Typical Scene
----
Cura's scene has a few nodes that are always present, and a few nodes that are repeated for every object that the user loads onto their build plate. The root of the scene graph is a SceneNode that lives inside the Scene and contains all the other children SceneNodes of the scene. Typically, inside the root you can find the SceneNodes that are always loaded (the Cameras, the [BuildVolume](build_volume.md), and the Platform), the objects that are loaded on the platform, and finally a ConvexHullNode for each object and each group of objects in the Scene.
Let's take the following example Scene:
![scene_example.png](images/scene_example.jpg)
The scene graph in this case is the following:
![scene_example_scene_graph.png](images/scene_example_scene_graph.jpg)
**Note 1:** The Platform is actually a child of the BuildVolume.
**Note 2:** The ConvexHullNodes are not actually named after the object they decorate. Their names are used in the image to convey how the ConvexHullNodes are related to the objects in the scene.
**Note 3:** The CuraSceneNode that holds the layer data (inside the BuildVolume) is created and destroyed according to the availability of sliced layer data provided by the CuraEngine. See the [LayerDataDecorator](#layerdatadecorator) for more information.
Accessing SceneNodes in the Scene
----
SceneNodes can be accessed using a `BreadthFirstIterator` or a `DepthFirstIterator`. Each iterator traverses the scene graph and returns a Python iterator, which yields all the SceneNodes and their children.
```python
from UM.Scene.Iterator.BreadthFirstIterator import BreadthFirstIterator

for node in BreadthFirstIterator(scene.getRoot()):
    pass  # do stuff with the node
```
Example result when iterating the above scene graph:
```python
[i for i in BreadthFirstIterator(CuraApplication.getInstance().getController().getScene().getRoot())]
```
* 00 = {SceneNode} <SceneNode object: 'Root'>
* 01 = {BuildVolume} <BuildVolume object '0x2e35dbce108'>
* 02 = {Camera} <Camera object: '3d'>
* 03 = {CuraSceneNode} <CuraSceneNode object: 'Torus.stl'>
* 04 = {CuraSceneNode} <CuraSceneNode object: 'Group #1'>
* 05 = {Camera} <Camera object: 'snapshot'>
* 06 = {CuraSceneNode} <CuraSceneNode object: 'Star.stl'>
* 07 = {ConvexHullNode} <ConvexHullNode object: '0x2e3000def08'>
* 08 = {ConvexHullNode} <ConvexHullNode object: '0x2e36861bd88'>
* 09 = {ConvexHullNode} <ConvexHullNode object: '0x2e3000bd4c8'>
* 10 = {ConvexHullNode} <ConvexHullNode object: '0x2e35fbb62c8'>
* 11 = {ConvexHullNode} <ConvexHullNode object: '0x2e3000a0648'>
* 12 = {ConvexHullNode} <ConvexHullNode object: '0x2e30019d0c8'>
* 13 = {ConvexHullNode} <ConvexHullNode object: '0x2e3001a2dc8'>
* 14 = {Platform} <Platform object '0x2e35a001948'>
* 15 = {CuraSceneNode} <CuraSceneNode object: 'Group #2'>
* 16 = {CuraSceneNode} <CuraSceneNode object: 'Sphere.stl'>
* 17 = {CuraSceneNode} <CuraSceneNode object: 'Cylinder.stl'>
* 18 = {CuraSceneNode} <CuraSceneNode object: 'Cube.stl'>
SceneNodeDecorators
----
SceneNodeDecorators are decorators that can be added to the nodes of the scene to provide them with additional functions.
Cura provides the following classes derived from the SceneNodeDecorator class:
1. [GroupDecorator](#groupdecorator)
2. [ConvexHullDecorator](#convexhulldecorator)
3. [SettingOverrideDecorator](#settingoverridedecorator)
4. [SliceableObjectDecorator](#sliceableobjectdecorator)
5. [LayerDataDecorator](#layerdatadecorator)
6. [ZOffsetDecorator](#zoffsetdecorator)
7. [BlockSlicingDecorator](#blockslicingdecorator)
8. [GCodeListDecorator](#gcodelistdecorator)
9. [BuildPlateDecorator](#buildplatedecorator)
GroupDecorator
----
Whenever objects on the build plate are grouped together, a new node is added in the scene as the parent of the grouped objects. Group nodes can be identified when traversing the SceneGraph by running the following:
```python
node.callDecoration("isGroup") == True
```
Group nodes decorated by GroupDecorators are added in the scene either by reading project files which contain grouped objects, or when the user selects multiple objects and groups them together (Ctrl + G).
Group nodes that are left with only one child are removed from the scene, making their only child a child of the group's parent. In addition, group nodes without any remaining children are removed from the scene.
ConvexHullDecorator
----
As seen in the scene graph of the scene example, each CuraSceneNode that represents an object on the build plate is linked to a ConvexHullNode which is rendered as the object's shadow on the build plate. The ConvexHullDecorator is the link between these two nodes.
In essence, the CuraSceneNode has a ConvexHullDecorator which points to the ConvexHullNode of the object. The data of the object's convex hull can be accessed via
```python
convex_hull_polygon = object_node.callDecoration("getConvexHull")
```
The ConvexHullDecorator also provides convex hulls that include the head, the fans, and the adhesion of the object. These are primarily used and rendered when One-at-a-time mode is activated.
For more information on the functions added to the node by this decorator, visit the [ConvexHullDecorator.py](https://github.com/Ultimaker/Cura/blob/master/cura/Scene/ConvexHullDecorator.py).
SettingOverrideDecorator
----
SettingOverrideDecorators are primarily used for modifier meshes such as support meshes, cutting meshes, infill meshes, and anti-overhang meshes. When a user converts an object to a modifier mesh, the object's node is decorated by a SettingOverrideDecorator. This decorator adds a PerObjectContainerStack to the CuraSceneNode, which allows the user to modify the settings of the specific model.
For more information on the functions added to the node by this decorator, visit the [SettingOverrideDecorator.py](https://github.com/Ultimaker/Cura/blob/master/cura/Settings/SettingOverrideDecorator.py).
SliceableObjectDecorator
----
This is a convenience decorator that allows us to easily identify the nodes which can be sliced. All **individual** objects (meshes) added to the build plate receive this decorator, apart from the nodes loaded from GCode files (.gcode, .g, .gz, .ufp).
The SceneNodes that do not receive this decorator are:
- Cameras
- BuildVolume
- Platform
- ConvexHullNodes
- CuraSceneNodes that serve as group nodes (these have a GroupDecorator instead)
- The CuraSceneNode that serves as the layer data node
- ToolHandles
- NozzleNode
- Nodes that contain GCode data. See the [BlockSlicingDecorator](#blockslicingdecorator) for more information on that.
This decorator provides the following function to the node:
```python
node.callDecoration("isSliceable")
```
LayerDataDecorator
----
Once slicing has completed and CuraEngine has returned the slicing data, Cura creates a CuraSceneNode inside the BuildVolume which is decorated by a LayerDataDecorator. This decorator holds the layer data of the scene.
![Layer Data Scene Node](images/layer_data_scene_node.jpg)
The layer data can be accessed through the function given to the aforementioned CuraSceneNode by the LayerDataDecorator:
```python
node.callDecoration("getLayerData")
```
This CuraSceneNode is created once Cura has completed processing the layer data (after the user clicks on the Preview tab after slicing). The CuraSceneNode is then destroyed once any action occurs that changes the Scene (e.g. if the user moves/rotates/scales an object or changes a setting value), indicating that the layer data is no longer available. When that happens, the "Slice" button becomes available again.
ZOffsetDecorator
----
The ZOffsetDecorator is added to an object in the scene when that object is moved below the build plate. It is primarily used when the "Automatically drop models to the build plate" preference is enabled, in order to make sure that the GravityOperation, which drops the model onto the build plate, is not applied when the object is moved under the build plate.
The amount the object is moved under the build plate can be retrieved by calling the "getZOffset" decoration on the node:
```python
z_offset = node.callDecoration("getZOffset")
```
The ZOffsetDecorator is removed from the node when the node is moved above the build plate.
BlockSlicingDecorator
----
The BlockSlicingDecorator is the opposite of the SliceableObjectDecorator. It is added to objects loaded into the scene which should not be sliced. This decorator is primarily added to objects loaded from ".gcode", ".ufp", ".g", and ".gz" files. Such an object already contains all the slice information, so Cura should not slice it again.
If an object with a BlockSlicingDecorator appears in the scene, the backend (CuraEngine) and the print setup (changing print settings) become disabled, considering that G-code files cannot be modified.
The BlockSlicingDecorator adds the following decoration function to the node:
```python
node.callDecoration("isBlockSlicing")
```
GCodeListDecorator
----
The GCodeListDecorator is also added only when a file containing GCode is loaded in the scene. Its purpose is to hold a list of all the GCode data of the loaded object.
The GCode list data is stored in the scene's gcode_dict attribute, which is then used in other places in the Cura code, e.g. to provide the GCode to the GCodeWriter or to the PostProcessingPlugin.
The GCode data becomes available by calling the "getGCodeList" decoration of the node:
```python
gcode_list = node.callDecoration("getGCodeList")
```
The CuraSceneNode with the GCodeListDecorator is destroyed when another object or project file is loaded in the Scene.
BuildPlateDecorator
----
The BuildPlateDecorator is added to all the CuraSceneNodes. This decorator is linked to a legacy feature which allowed the user to have multiple build plates open in Cura at the same time. With this decorator it was possible to determine which nodes are present on each build plate, and therefore, which objects should be visible in the currently active build plate. It indicates the number of the build plate this scene node belongs to, which currently is always the build plate -1.
This decorator provides a function to the node that returns the number of the build plate it belongs to:
```python
node.callDecoration("getBuildPlateNumber")
```
**Note:** Changing the active build plate is a disabled feature in Cura and it is intended to be completely removed (internal ticket: CURA-4975).

View file

@ -1,86 +0,0 @@
# Tools
Tools are plugin objects which are used to manipulate or interact with the scene and the objects (nodes) in the scene.
![Class diagram of tools in the scene](images/tools_tool-handles_class_diagram.jpg)
Tools live inside the Controller of the Application and may be associated with ToolHandles. Some of them interact with the scene as a whole (such as the Camera), while others interact with the objects (nodes) in the scene (the selection tool, rotate tool, scale tool, etc.); a minimal sketch of activating a tool through the Controller follows the list below. The tools that are available in Cura (excluding the ones provided by downloadable plugins) are the following:
* [CameraTool](#cameratool)
* [SelectionTool](#selectiontool)
* [TranslateTool](#translatetool)
* [ScaleTool](#scaletool)
* [RotateTool](#rotatetool)
* [MirrorTool](#mirrortool)
* [PerObjectSettingsTool](#perobjectsettingstool)
* [SupportEraserTool](#supporteraser-tool)
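A minimal sketch of activating and querying a tool through the Controller (the tool id is assumed to match the tool's plugin name):
```python
from UM.Application import Application

controller = Application.getInstance().getController()
controller.setActiveTool("TranslateTool")  # activate a tool by its id
active_tool = controller.getActiveTool()   # the Tool object that is currently active
```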
*****
### CameraTool
The CameraTool is the tool that allows the user to manipulate the Camera. It provides the functions of moving, zooming, and rotating the Camera. This tool does not contain a handle.
### SelectionTool
This tool allows the user to select objects and groups of objects in the scene. The selected objects gain a blue outline and become available in the code through the Selection class.
![Selection Tool](images/selection_tool.jpg)
This tool does not contain a handle.
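A minimal sketch of reading the current selection through the Selection class:
```python
from UM.Scene.Selection import Selection

if Selection.hasSelection():
    for node in Selection.getAllSelectedObjects():
        print(node.getName())  # each selected scene node
```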
### TranslateTool
This tool allows the user to move the object around the build plate. The TranslateTool is activated once the user presses the Move icon in the tool sidebar or hits the shortcut (T) while an object is selected.
![Translate Tool](images/translate_tool.jpg)
The TranslateTool contains the TranslateToolHandle, which draws the arrow handles on the selected object(s). The TranslateTool generates TranslateOperations whenever the object is moved around the build plate.
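A hedged sketch of how such a move becomes an undoable operation (the node parameter stands for any selected scene node):
```python
from UM.Math.Vector import Vector
from UM.Operations.TranslateOperation import TranslateOperation

def move_node_along_x(node, distance_mm: float = 10.0):
    operation = TranslateOperation(node, Vector(distance_mm, 0, 0))  # move along the X axis
    operation.push()  # pushing puts it on the OperationStack, so the move can be undone
```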
### ScaleTool
This tool allows the user to scale the selected object(s). The ScaleTool is activated once the user presses the Scale icon in the tool sidebar or hits the shortcut (S) while an object is selected.
![Scale Tool](images/scale_tool.jpg)
The ScaleTool contains the ScaleToolHandle, which draws the box handles on the selected object(s). The ScaleTool generates ScaleOperations whenever the object is scaled.
### RotateTool
This tool allows the user to rotate the selected object(s). The RotateTool is activated once the user presses the Rotate icon in the tool sidebar or hits the shortcut (R) while an object is selected.
![Rotate Tool](images/rotate_tool.jpg)
The RotateTool contains the RotateToolHandle, which draws the donuts (tori) and arrow handles on the selected object(s). The RotateTool generates RotateOperations whenever the object is rotated or if a face is selected to be laid flat on the build plate. It also contains the `layFlat()` action, which generates the [LayFlatOperation](operations.md#layflatoperation).
### MirrorTool
This tool allows the user to mirror the selected object(s) in the required direction. The MirrorTool is activated once the user presses the Mirror icon in the tool sidebar or hits the shortcut (M) while an object is selected.
![Mirror Tool](images/mirror_tool.jpg)
The MirrorTool contains the MirrorToolHandle, which draws pyramid handles on the selected object(s). The MirrorTool generates MirrorOperations whenever the object is mirrored against an axis.
### PerObjectSettingsTool
This tool allows the user to change the mesh type of the object into one of the following:
* Normal Model
* Print as support
* Modify settings for overlaps
- Infill mesh only
- Cutting mesh
* Don't support overlaps
![Per object settings tool](images/per_objectsettings_tool.jpg)
Unlike the other tools, this tool does not have any handles and it does not generate any operations. This means that once an object's type is changed, the change cannot be undone/redone using the OperationStack. Instead, this tool adds a [SettingOverrideDecorator](scene.md#settingoverridedecorator) to the object's node, which allows the user to change certain settings for this mesh only.
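A minimal sketch of what such a per-object override could look like; the setting key and call chain here are assumptions for illustration, not Cura's exact implementation:
```python
def mark_as_infill_mesh(node):
    per_object_stack = node.callDecoration("getStack")  # added by the SettingOverrideDecorator
    if per_object_stack:
        # e.g. treat this mesh as an infill-only modifier mesh
        per_object_stack.getTop().setProperty("infill_mesh", "value", True)
```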
### SupportEraser tool
This tool allows the user to add support blockers to the selected model. The SupportEraserTool is activated once the user presses the Support Blocker icon in the tool sidebar or hits the shortcut (E) while an object is selected. With this tool active, the user can add support blockers (cubes) to the object by clicking on various places on the selected mesh.
![Support Eraser Tool](images/support_blocker_tool.jpg)
The SupportEraser uses a GroupOperation to add a new CuraSceneNode (the eraser) in the scene and set the selected model as the parent of the eraser. This means that the addition of Erasers in the scene can be undone/redone. The SupportEraser does not have any tool handles.
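A hedged sketch of such a grouped operation; the module paths and the eraser_node / selected_model / scene_root parameters are assumptions for illustration:
```python
from UM.Operations.GroupedOperation import GroupedOperation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from cura.Operations.SetParentOperation import SetParentOperation

def add_support_blocker(eraser_node, selected_model, scene_root):
    operation = GroupedOperation()
    operation.addOperation(AddSceneNodeOperation(eraser_node, scene_root))
    operation.addOperation(SetParentOperation(eraser_node, selected_model))
    operation.push()  # the whole group is undone/redone as a single step
```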
View file
@@ -14,106 +14,43 @@ AppDir:
- amd64 - amd64
allow_unauthenticated: true allow_unauthenticated: true
sources: sources:
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy main restricted - sourceline: deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates main restricted - sourceline: deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy universe - sourceline: deb http://security.ubuntu.com/ubuntu jammy-security main restricted universe multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates universe
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-backports main restricted
universe multiverse
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security main restricted
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security universe
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security multiverse
- sourceline: deb https://releases.jfrog.io/artifactory/jfrog-debs xenial contrib
- sourceline: deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-14 main
- sourceline: deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu/
jammy main
- sourceline: deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu/ jammy
main
- sourceline: deb [arch=amd64] https://packages.microsoft.com/repos/ms-teams stable
main
- sourceline: deb https://ppa.launchpadcontent.net/ppa-verse/cling/ubuntu/ jammy
main
- sourceline: deb [arch=amd64] https://dl.google.com/linux/chrome/deb/ stable
main
- sourceline: deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_14.x
jammy main
- sourceline: deb [arch=amd64 signed-by=/usr/share/keyrings/transip-stack.gpg]
https://mirror.transip.net/stack/software/deb/Ubuntu_22.04/ ./
- sourceline: deb http://repository.spotify.com stable non-free
- sourceline: deb [arch=amd64,arm64,armhf] http://packages.microsoft.com/repos/code
stable main
- sourceline: deb https://packagecloud.io/slacktechnologies/slack/debian/ jessie
main
include: include:
- libc6:amd64 - xdg-desktop-portal-kde
- xdg-desktop-portal-kde:amd64 - libgtk-3-0
- libcap2:amd64 - librsvg2-2
- libcom-err2:amd64 - librsvg2-common
- libdbus-1-3:amd64 - libgdk-pixbuf2.0-0
- libgpg-error0:amd64 - libgdk-pixbuf2.0-bin
- libgtk-3-common - libgdk-pixbuf2.0-common
- libkeyutils1:amd64 - imagemagick
- libllvm13 - shared-mime-info
- liblzma5:amd64 - gnome-icon-theme-symbolic
- libpcre3:amd64
- libqt6gui6
- libqt6qml6
- libqt6qmlworkerscript6
- libqt6quick6
- libselinux1:amd64
- libtinfo6:amd64
- qml6-module-qtqml-workerscript:amd64
- qml6-module-qtquick:amd64
- qt6-gtk-platformtheme:amd64
- qt6-qpa-plugins:amd64
# x11
- libx11-6
- libx11-xcb1
- libxcb1
- libxcb-render0
- libxcb-xfixes0
- libxcb-shape0
- libxcb-dri2-0
- libxcb-shm0
- libxcb-glx0
- libxcb-present0
- libxcb-dri3-0
# graphic libraries interface (safe graphics bundle including drivers, acceleration may not work in some systems)
- libglvnd0
- libglx0
- libglapi-mesa
- libgl1
- libegl1
- libgbm1
- libdrm2
- libglx-mesa0
- libgl1-amber-dri
- libgl1-mesa-dri
- mesa-utils
- libgl1-mesa-glx
- libdrm-amdgpu1
- libdrm-nouveau2
exclude:
- hicolor-icon-theme - hicolor-icon-theme
- adwaita-icon-theme exclude: []
- humanity-icon-theme
files: files:
include: [] include: []
exclude: exclude:
- usr/share/man - usr/share/man
- usr/share/doc/*/README.* - usr/share/doc/*/README.*
- usr/share/doc/*/changelog.* - usr/share/doc/*/changelog.*
- usr/share/doc/*/NEWS.* - usr/share/doc/*/NEWS.*
- usr/share/doc/*/TODO.* - usr/share/doc/*/TODO.*
- usr/lib/x86_64-linux-gnu/libssl.so*
runtime: runtime:
env: env:
APPDIR_LIBRARY_PATH: "$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders" APPDIR_LIBRARY_PATH: "$APPDIR:$APPDIR/runtime/compat/:$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders"
LD_LIBRARY_PATH: "$APPDIR:$APPDIR/runtime/compat/:$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders"
PYTHONPATH: "$APPDIR" PYTHONPATH: "$APPDIR"
QT_PLUGIN_PATH: "$APPDIR/qt/plugins" QT_PLUGIN_PATH: "$APPDIR/qt/plugins"
QML2_IMPORT_PATH: "$APPDIR/qt/qml" QML2_IMPORT_PATH: "$APPDIR/qt/qml"
QT_QPA_PLATFORMTHEME: xdgdesktopportal QT_QPA_PLATFORMTHEME: xdgdesktopportal
GDK_PIXBUF_MODULEDIR: $APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders
GDK_PIXBUF_MODULE_FILE: $APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders.cache
path_mappings:
- /usr/share:$APPDIR/usr/share
test: test:
fedora-30: fedora-30:
image: appimagecrafters/tests-env:fedora-30 image: appimagecrafters/tests-env:fedora-30
View file
@@ -38,7 +38,7 @@ def build_appimage(dist_path, version, appimage_filename):
""" """
generate_appimage_builder_config(dist_path, version, appimage_filename) generate_appimage_builder_config(dist_path, version, appimage_filename)
create_appimage() create_appimage()
sign_appimage(dist_path, appimage_filename) sign_appimage(appimage_filename)
def generate_appimage_builder_config(dist_path, version, appimage_filename): def generate_appimage_builder_config(dist_path, version, appimage_filename):
@@ -85,7 +85,7 @@ def create_appimage():
raise RuntimeError(f"The AppImageTool command returned non-zero: {result}") raise RuntimeError(f"The AppImageTool command returned non-zero: {result}")
def sign_appimage(dist_path, appimage_filename): def sign_appimage(appimage_filename):
command = ["gpg", "--yes", "--armor", "--detach-sig", appimage_filename] command = ["gpg", "--yes", "--armor", "--detach-sig", appimage_filename]
result = subprocess.call(command) result = subprocess.call(command)
if result != 0: if result != 0:
View file
@@ -87,15 +87,16 @@ def notarize_file(dist_path: str, filename: str) -> None:
""" Notarize a file. This takes 5+ minutes, there is indication that this step is successful.""" """ Notarize a file. This takes 5+ minutes, there is indication that this step is successful."""
notarize_user = os.environ.get("MAC_NOTARIZE_USER") notarize_user = os.environ.get("MAC_NOTARIZE_USER")
notarize_password = os.environ.get("MAC_NOTARIZE_PASS") notarize_password = os.environ.get("MAC_NOTARIZE_PASS")
altool_executable = os.environ.get("ALTOOL_EXECUTABLE", "altool") notarize_team = os.environ.get("MACOS_CERT_USER")
notary_executable = os.environ.get("NOTARY_TOOL_EXECUTABLE", "notarytool")
notarize_arguments = [ notarize_arguments = [
"xcrun", altool_executable, "xcrun", notary_executable,
"--notarize-app", "submit",
"--primary-bundle-id", ULTIMAKER_CURA_DOMAIN, "--apple-id", notarize_user,
"--username", notarize_user,
"--password", notarize_password, "--password", notarize_password,
"--file", Path(dist_path, filename) "--team-id", notarize_team,
Path(dist_path, filename)
] ]
subprocess.run(notarize_arguments) subprocess.run(notarize_arguments)
View file
@@ -144,6 +144,23 @@ SectionEnd
###################################################################### ######################################################################
Section UrlProtocol
WriteRegStr HKCR "cura" "" "URL:cura"
WriteRegStr HKCR "cura" "URL Protocol" ""
WriteRegStr HKCR "cura\DefaultIcon" "" "$INSTDIR\${MAIN_APP_EXE},1"
WriteRegStr HKCR "cura\shell" "" "open"
WriteRegStr HKCR "cura\shell\open\command" "" '"$INSTDIR\${MAIN_APP_EXE}" "%1"'
WriteRegStr HKCR "slicer" "" "URL:slicer"
WriteRegStr HKCR "slicer" "URL Protocol" ""
WriteRegStr HKCR "slicer\DefaultIcon" "" "$INSTDIR\${MAIN_APP_EXE},1"
WriteRegStr HKCR "slicer\shell" "" "open"
WriteRegStr HKCR "slicer\shell\open\command" "" '"$INSTDIR\${MAIN_APP_EXE}" "%1"'
SectionEnd
######################################################################
Section Uninstall Section Uninstall
${INSTALL_TYPE}{% for files in mapped_out_paths.values() %}{% for file in files %} ${INSTALL_TYPE}{% for files in mapped_out_paths.values() %}{% for file in files %}
Delete "{{ file[1] }}"{% endfor %}{% endfor %}{% for rem_dir in rmdir_paths %} Delete "{{ file[1] }}"{% endfor %}{% endfor %}{% for rem_dir in rmdir_paths %}
@@ -187,8 +204,13 @@ RmDir "$SMPROGRAMS\{{ app_name }}"
!insertmacro APP_UNASSOCIATE "stl" "Cura.model" !insertmacro APP_UNASSOCIATE "stl" "Cura.model"
!insertmacro APP_UNASSOCIATE "3mf" "Cura.project" !insertmacro APP_UNASSOCIATE "3mf" "Cura.project"
; Unassociate file associations for 'cura' protocol
DeleteRegKey HKCR "cura"
; Unassociate file associations for 'slicer' protocol
DeleteRegKey HKCR "slicer"
DeleteRegKey ${REG_ROOT} "${REG_APP_PATH}" DeleteRegKey ${REG_ROOT} "${REG_APP_PATH}"
DeleteRegKey ${REG_ROOT} "${UNINSTALL_PATH}" DeleteRegKey ${REG_ROOT} "${UNINSTALL_PATH}"
SectionEnd SectionEnd
######################################################################
Some files were not shown because too many files have changed in this diff.