Merge branch 'main' into fix_postprocessing_script_folders

Aldo Hoeben 2023-04-17 16:39:30 +02:00, committed by GitHub
commit 7ddd1037a6
7255 changed files with 322095 additions and 328534 deletions


@ -1,4 +0,0 @@
.git
.github
resources/materials
CuraEngine


@ -1,6 +1,6 @@
name: Feature Request
description: Suggest an idea for this project.
labels: "Type: New Feature"
labels: ["Type: New Feature", "Status: Triage"]
body:
- type: markdown
attributes:
@ -41,4 +41,4 @@ body:
- type: textarea
attributes:
label: Additional information & file uploads
description: You can add pictures or files to visualize your feature request in the comments below.

.github/PULL_REQUEST_TEMPLATE.md

@ -0,0 +1,33 @@
# Description
<!-- Please include a summary of which issue is fixed or feature was added. Please also include relevant motivation and context.
If this pull request adds settings definitions for machines/materials, list them here.
This fixes... OR This improves... -->
## Type of change
<!-- Please delete options that are not relevant. -->
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Printer definition file(s)
- [ ] Translations
# How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
**Test Configuration**:
* Operating System:
# Checklist:
<!-- Check if relevant -->
- [ ] My code follows the style guidelines of this project as described in [UltiMaker Meta](https://github.com/Ultimaker/Meta) and [Cura QML best practices](https://github.com/Ultimaker/Cura/wiki/QML-Best-Practices)
- [ ] I have read the [Contribution guide](https://github.com/Ultimaker/Cura/blob/main/contributing.md)
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have uploaded any files required to test this change


@ -1,24 +0,0 @@
---
name: CI
on:
push:
branches:
- master
- 'WIP**'
- '4.*'
- 'CURA-*'
pull_request:
permissions:
contents: read
jobs:
build:
runs-on: ubuntu-latest
container: ultimaker/cura-build-environment
steps:
- name: Checkout Cura
uses: actions/checkout@v2
- name: Build
run: docker/build.sh
- name: Test
run: docker/test.sh


@ -0,0 +1,158 @@
name: Create and Upload Conan package
on:
workflow_call:
inputs:
project_name:
required: true
type: string
recipe_id_full:
required: true
type: string
build_id:
required: true
type: number
build_info:
required: false
default: true
type: boolean
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_clean_local_cache:
required: false
type: boolean
default: false
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
conan-package-create:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Use Conan download cache (Powershell)
if: ${{ runner.os == 'Windows' }}
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
if: ${{ runner.os == 'Windows' }}
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Install MacOS system requirements
if: ${{ runner.os == 'Macos' }}
run: brew install autoconf automake ninja
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
- name: Install GCC-12 on ubuntu-22.04
if: ${{ startsWith(inputs.runs_on, 'ubuntu-22.04') }}
run: |
sudo apt install g++-12 gcc-12 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
- name: Use GCC-10 on ubuntu-20.04
if: ${{ startsWith(inputs.runs_on, 'ubuntu-20.04') }}
run: |
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10
- name: Create the default Conan profile
run: conan profile new default --detect
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
if: ${{ inputs.conan_config_branch == '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git
- name: Add Cura private Artifactory remote
run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
- name: Create the Packages
run: conan install ${{ inputs.recipe_id_full }} --build=missing --update
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c

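The workflow above is a reusable (workflow_call) building block; a caller passes the recipe and runner parameters as inputs. Below is a minimal caller sketch for orientation only: the path in uses: and the example values are assumptions (this diff does not show the workflow's own filename); only the input names come from the definition above.

jobs:
  create-cura-conan-package:
    uses: ultimaker/cura/.github/workflows/conan-package-create.yml@main  # assumed path, not shown in this diff
    with:
      project_name: cura
      recipe_id_full: cura/5.3.0-beta.1@ultimaker/testing  # illustrative value
      build_id: 1
      runs_on: ubuntu-22.04
      python_version: 3.11.x
    secrets: inherit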
.github/workflows/conan-package.yml

@ -0,0 +1,140 @@
---
name: conan-package
# Exports the recipe, sources and binaries for Mac, Windows and Linux and uploads these to the server such that these can
# be used downstream.
#
# It should run on pushes against main or CURA-* branches, but it will only create the binaries for main and release branches
on:
workflow_dispatch:
inputs:
create_binaries_windows:
required: true
default: false
description: 'create binaries Windows'
create_binaries_linux:
required: true
default: false
description: 'create binaries Linux'
create_binaries_macos:
required: true
default: false
description: 'create binaries Macos'
push:
paths:
- 'plugins/**'
- 'resources/**'
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- 'CURA-*'
- '[1-9].[0-9]'
- '[1-9].[0-9][0-9]'
tags:
- '[1-9].[0-9].[0-9]*'
- '[1-9].[0-9].[0-9]'
- '[1-9].[0-9][0-9].[0-9]*'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
permissions: {}
jobs:
conan-recipe-version:
permissions:
contents: read
uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
conan-package-create-linux:
needs: [ conan-recipe-version ]
runs-on: 'ubuntu-latest'
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Cache Conan data
id: cache-conan
uses: actions/cache@v3
with:
path: ~/.conan
key: ${{ runner.os }}-conan
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
sudo apt install g++-12 gcc-12 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: conan config install https://github.com/Ultimaker/conan-config.git
- name: Create the Packages
run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o ${{ needs.conan-recipe-version.outputs.project_name }}:devtools=True
- name: Create the latest alias
if: always()
run: conan alias ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
- name: Upload the Package(s)
if: always()
run: |
conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_full }} -r cura --all -c
conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} -r cura -c
notify-create:
if: ${{ always() && (github.event_name == 'push' && (github.ref_name == 'main' || github.ref_name == 'master' || needs.conan-recipe-version.outputs.is_release_branch == 'true')) }}
needs: [ conan-recipe-version, conan-package-create-linux ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "New binaries created in ${{ github.repository }}"
success_body: "Created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
failure_title: "Failed to create binaries in ${{ github.repository }}"
failure_body: "Failed to created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
secrets: inherit


@ -0,0 +1,106 @@
name: Export Conan Recipe to server
on:
workflow_call:
inputs:
recipe_id_full:
required: true
type: string
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_export_binaries:
required: false
type: boolean
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
package-export:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout project
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
conan profile new default --detect
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Cache Conan local repository packages
uses: actions/cache@v3
with:
path: $HOME/.conan/data
key: ${{ runner.os }}-conan-export-cache
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
if: ${{ inputs.conan_config_branch == '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git
- name: Add Cura private Artifactory remote
run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
- name: Export the Package (binaries)
if: ${{ inputs.conan_export_binaries }}
run: conan create . ${{ inputs.recipe_id_full }} --build=missing --update
- name: Export the Package
if: ${{ !inputs.conan_export_binaries }}
run: conan export . ${{ inputs.recipe_id_full }}
- name: Create the latest alias
if: always()
run: conan alias ${{ inputs.recipe_id_latest }} ${{ inputs.recipe_id_full }}
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura-private -c


@ -0,0 +1,223 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
project_name:
required: true
type: string
user:
required: false
default: ultimaker
type: string
additional_buildmetadata:
required: false
default: ""
type: string
outputs:
recipe_id_full:
description: "The full Conan recipe id: <name>/<version>@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_full }}
recipe_id_latest:
description: "The full Conan recipe aliased (latest) id: <name>/(latest)@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_latest }}
recipe_semver_full:
description: "The full semver <Major>.<Minor>.<Patch>-<PreReleaseTag>+<BuildMetaData>"
value: ${{ jobs.get-semver.outputs.semver_full }}
is_release_branch:
description: "is current branch a release branch?"
value: ${{ jobs.get-semver.outputs.release_branch }}
user:
description: "The conan user"
value: ${{ jobs.get-semver.outputs.user }}
channel:
description: "The conan channel"
value: ${{ jobs.get-semver.outputs.channel }}
project_name:
description: "The conan projectname"
value: ${{ inputs.project_name }}
jobs:
get-semver:
runs-on: ubuntu-latest
outputs:
recipe_id_full: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_full }}
recipe_id_latest: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_latest }}
semver_full: ${{ steps.get-conan-broadcast-data.outputs.semver_full }}
is_release_branch: ${{ steps.get-conan-broadcast-data.outputs.is_release_branch }}
user: ${{ steps.get-conan-broadcast-data.outputs.user }}
channel: ${{ steps.get-conan-broadcast-data.outputs.channel }}
steps:
- name: Checkout repo
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
with:
fetch-depth: 0
ref: ${{ github.head_ref }}
- name: Checkout repo PR
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
fetch-depth: 0
ref: ${{ github.base_ref }}
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: "3.10.x"
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r .github/workflows/requirements-conan-package.txt
pip install gitpython
- id: get-conan-broadcast-data
name: Get Conan broadcast data
run: |
import subprocess
import os
from conan.tools.scm import Version
from conan.errors import ConanException
from git import Repo
repo = Repo('.')
user = "${{ inputs.user }}".lower()
project_name = "${{ inputs.project_name }}"
event_name = "${{ github.event_name }}"
issue_number = "${{ github.ref }}".split('/')[2]
is_tag = "${{ github.ref_type }}" == "tag"
is_release_branch = False
ref_name = "${{ github.base_ref }}" if event_name == "pull_request" else "${{ github.ref_name }}"
buildmetadata = "" if "${{ inputs.additional_buildmetadata }}" == "" else "${{ inputs.additional_buildmetadata }}_"
# FIXME: for when we push a tag (such as a release)
channel = "testing"
if is_tag:
branch_version = Version(ref_name)
is_release_branch = True
channel = "_"
user = "_"
actual_version = f"{branch_version}"
else:
try:
branch_version = Version(repo.active_branch.name)
except ConanException:
branch_version = Version('0.0.0')
if ref_name == f"{branch_version.major}.{branch_version.minor}":
channel = 'stable'
is_release_branch = True
elif ref_name in ("main", "master"):
channel = 'testing'
else:
channel = "_".join(repo.active_branch.name.replace("-", "_").split("_")[:2]).lower()
if "pull_request" in event_name:
channel = f"pr_{issue_number}"
# %% Get the actual version
latest_branch_version = Version("0.0.0")
latest_branch_tag = None
for tag in repo.active_branch.repo.tags:
if str(tag).startswith("firmware") or str(tag).startswith("master"):
continue # Quick-fix for the versioning scheme name of the embedded team in fdm_materials(_private) repo
try:
version = Version(tag)
except ConanException:
continue
if version > latest_branch_version and version < Version("6.0.0"):
# FIXME: stupid old Cura tags 13.04 etc. keep popping up, also the fdm_material tags for firmware are messing with this
latest_branch_version = version
latest_branch_tag = repo.tag(tag)
if latest_branch_tag:
# %% Get the actual version
no_commits = 0
for commit in repo.iter_commits("HEAD"):
if commit == latest_branch_tag.commit:
break
no_commits += 1
latest_branch_version_prerelease = latest_branch_version.pre
if latest_branch_version.pre and not "." in str(latest_branch_version.pre):
# The prerelease did not contain a version number, default it to 1
latest_branch_version_prerelease = f"{latest_branch_version.pre}.1"
if event_name == "pull_request":
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version_prerelease).lower()}+{buildmetadata}pr_{issue_number}_{no_commits}"
channel_metadata = f"{channel}_{no_commits}"
else:
if channel in ("stable", "_", ""):
channel_metadata = f"{no_commits}"
else:
channel_metadata = f"{channel}_{no_commits}"
if is_release_branch:
if latest_branch_version.pre == "" and branch_version > latest_branch_version:
actual_version = f"{branch_version.major}.{branch_version.minor}.0-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre == "":
# An actual full release has been created, we are working on patch
bump_up_patch = int(str(latest_branch_version.patch)) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{bump_up_patch}-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre is None:
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{int(latest_branch_version.patch.value) + 1}-beta.1+{buildmetadata}{channel_metadata}"
else:
# A beta release has been created; we are working toward the next beta or full release
bump_up_release_tag = int(str(latest_branch_version.pre).split('.')[1]) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version.pre).split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}"
else:
max_branches_version = Version("0.0.0")
branches_no_commits = no_commits
for branch in repo.references:
try:
if "remotes/origin" in branch.abspath:
b_version = Version(branch.name.split("/")[-1])
if b_version < Version("6.0.0") and b_version > max_branches_version:
max_branches_version = b_version
branches_no_commits = repo.commit().count() - branch.commit.count()
except:
pass
if max_branches_version > latest_branch_version:
actual_version = f"{max_branches_version.major}.{int(str(max_branches_version.minor)) + 1}.0-alpha+{buildmetadata}{channel}_{branches_no_commits}"
else:
actual_version = f"{latest_branch_version.major}.{int(str(latest_branch_version.minor)) + 1}.0-alpha+{buildmetadata}{channel_metadata}"
# %% Set the environment output
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
with open(summary_env, "w") as f:
f.writelines(f"# {project_name}\n")
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
shell: python

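For orientation, downstream workflows in this commit call this reusable workflow and then read the computed recipe id through the job outputs declared above. A minimal sketch of that pattern (job names are illustrative; the uses: path and the conan create line mirror the conan-package workflow earlier in this commit):

jobs:
  conan-recipe-version:
    uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
    with:
      project_name: cura

  conan-package-create:
    needs: [ conan-recipe-version ]
    runs-on: ubuntu-latest
    steps:
      - name: Create the package
        # recipe_id_full is one of the key=value pairs the Python step writes to GITHUB_OUTPUT
        run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update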

@ -0,0 +1,151 @@
name: Cura All Installers
run-name: ${{ inputs.cura_conan_version }} for exe ${{ inputs.build_windows_exe }}, msi ${{ inputs.build_windows_msi }}, dmg ${{ inputs.build_macos }}, pkg ${{ inputs.build_macos_installer }}, appimage ${{ inputs.build_linux }} - enterprise ${{ inputs.enterprise }}
on:
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
conan_config:
description: 'Conan config branch to use'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
installer:
description: 'Create the installer'
default: true
required: true
type: boolean
build_windows_exe:
description: 'Build for Windows exe'
default: false
required: true
type: boolean
build_windows_msi:
description: 'Build for msi+pkg'
default: true
required: true
type: boolean
build_linux:
description: 'Build for Linux'
default: true
required: true
type: boolean
build_macos:
description: 'Build dmg for MacOS'
default: true
required: true
type: boolean
# Run the nightly at 3:25 UTC on working days
schedule:
- cron: '25 3 * * 1-5'
jobs:
windows-installer-create-exe:
if: ${{ inputs.build_windows_exe }}
uses: ./.github/workflows/cura-installer.yml
with:
platform: 'windows-2022'
os_name: 'win64'
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
conan_config: ${{ inputs.conan_config }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
installer: ${{ inputs.installer }}
msi_installer: false
secrets: inherit
windows-installer-create-msi:
if: ${{ inputs.build_windows_msi }}
uses: ./.github/workflows/cura-installer.yml
with:
platform: 'windows-2022'
os_name: 'win64'
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
conan_config: ${{ inputs.conan_config }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
installer: ${{ inputs.installer }}
msi_installer: true
secrets: inherit
linux-installer-create:
if: ${{ inputs.build_linux }}
uses: ./.github/workflows/cura-installer.yml
with:
platform: 'ubuntu-20.04'
os_name: 'linux'
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
conan_config: ${{ inputs.conan_config }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
installer: ${{ inputs.installer }}
msi_installer: false
secrets: inherit
linux-modern-installer-create:
if: ${{ inputs.build_linux }}
uses: ./.github/workflows/cura-installer.yml
with:
platform: 'ubuntu-22.04'
os_name: 'linux-modern'
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
conan_config: ${{ inputs.conan_config }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
installer: ${{ inputs.installer }}
msi_installer: false
secrets: inherit
macos-dmg-create:
if: ${{ inputs.build_macos }}
uses: ./.github/workflows/cura-installer.yml
with:
platform: 'macos-11'
os_name: 'mac'
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
conan_config: ${{ inputs.conan_config }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
installer: ${{ inputs.installer }}
msi_installer: false
secrets: inherit
macos-installer-create:
if: ${{ inputs.build_macos }}
uses: ./.github/workflows/cura-installer.yml
with:
platform: 'macos-11'
os_name: 'mac'
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
conan_config: ${{ inputs.conan_config }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
installer: ${{ inputs.installer }}
msi_installer: true
secrets: inherit


@ -1,36 +1,54 @@
name: Cura Installer
run-name: ${{ inputs.cura_conan_version }} for ${{ inputs.platform }} by @${{ github.actor }}
on:
workflow_dispatch:
workflow_call:
inputs:
platform:
description: 'Selected Installer OS'
default: 'ubuntu-20.04'
required: true
type: string
os_name:
description: 'OS Friendly Name'
default: 'linux'
required: true
type: string
cura_conan_version:
description: 'Cura Conan Version'
# Fixme: default to cura/latest@testing (which is main)
default: 'cura/latest@ultimaker/testing'
default: 'cura/latest@ultimaker/stable'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
conan_config:
description: 'Conan config branch to use'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
required: true
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
required: true
default: false
required: true
type: boolean
installer:
description: 'Create the installer'
default: true
required: true
type: boolean
msi_installer:
description: 'Create the msi'
default: false
required: true
default: false
type: boolean
# Run the nightly at 5:25 UTC on working days
schedule:
- cron: '25 3 * * 1-5'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
@ -44,19 +62,19 @@ env:
MAC_NOTARIZE_USER: ${{ secrets.MAC_NOTARIZE_USER }}
MAC_NOTARIZE_PASS: ${{ secrets.MAC_NOTARIZE_PASS }}
MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
MACOS_CERT_PASS: ${{ secrets.MACOS_CERT_PASS }}
MACOS_CERT_INSTALLER_P12: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
MACOS_CERT_USER: ${{ secrets.MACOS_CERT_USER }}
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
MACOS_CERT_PASSPHRASE: ${{ secrets.MACOS_CERT_PASSPHRASE }}
WIN_CERT_INSTALLER_CER: ${{ secrets.WIN_CERT_INSTALLER_CER }}
WIN_CERT_INSTALLER_CER_PASS: ${{ secrets.WIN_CERT_INSTALLER_CER_PASS }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ matrix.os }}
runs-on: ${{ inputs.platform }}
strategy:
fail-fast: false
matrix:
os: [ macos-10.15, windows-2022, ubuntu-20.04 ]
steps:
- name: Checkout
@ -69,10 +87,9 @@ jobs:
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r .github/workflows/requirements-conan-package.txt
conan profile new default --detect
- name: Install Python requirements for runner
run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
@ -89,7 +106,7 @@ jobs:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
@ -99,46 +116,96 @@ jobs:
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install MacOS system requirements
if: ${{ runner.os == 'Macos' }}
run: brew install autoconf automake ninja
run: brew install autoconf automake ninja create-dmg # Delete create-dmg when deprecating dmg
- name: Hack needed specifically for ubuntu-22.04 from mid-Feb 2023 onwards
if: ${{ runner.os == 'Linux' && startsWith(inputs.platform, 'ubuntu-22.04') }}
run: sudo apt remove libodbc2 libodbcinst2 unixodbc-common -y
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo apt install build-essential checkinstall zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev -y
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
chmod +x $GITHUB_WORKSPACE/appimagetool
echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
- name: Install GCC-12 on ubuntu-22.04
if: ${{ startsWith(inputs.platform, 'ubuntu-22.04') }}
run: |
sudo apt install g++-12 gcc-12 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
- name: Use GCC-10 on ubuntu-20.04
if: ${{ startsWith(inputs.platform, 'ubuntu-20.04') }}
run: |
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10
- name: Create the default Conan profile
run: conan profile new default --detect
- name: Configure GPG Key Linux (Bash)
if: ${{ runner.os == 'Linux' }}
run: echo -n "$GPG_PRIVATE_KEY" | base64 --decode | gpg --import
- name: Configure Macos keychain (Bash)
if: ${{ runner.os == 'Macos' }}
run: |
CERTIFICATE_PATH=$RUNNER_TEMP/um_keychain.p12
echo -n "$MACOS_CERT_P12" | base64 --decode --output $CERTIFICATE_PATH
security import $CERTIFICATE_PATH -p $MACOS_CERT_PASSPHRASE -A
security unlock -p $MACOS_CERT_USER $CERTIFICATE_PATH
- name: Clean Conan local cache
if: ${{ inputs.conan_clean_local_cache }}
run: conan remove "*" -f
- name: Configure Macos keychain Developer Cert(Bash)
id: macos-keychain-developer-cert
if: ${{ runner.os == 'Macos' }}
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
p12-file-base64: ${{ secrets.MACOS_CERT_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Configure Macos keychain Installer Cert (Bash)
id: macos-keychain-installer-cert
if: ${{ runner.os == 'Macos' }}
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
create-keychain: false # keychain is created in previous use of action.
p12-file-base64: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Create PFX certificate from BASE64_PFX_CONTENT secret
if: ${{ runner.os == 'Windows' }}
id: create-pfx
env:
PFX_CONTENT: ${{ secrets.WIN_CERT_INSTALLER_CER }}
run: |
$pfxPath = Join-Path -Path $env:RUNNER_TEMP -ChildPath "cert.pfx";
$encodedBytes = [System.Convert]::FromBase64String($env:PFX_CONTENT);
Set-Content $pfxPath -Value $encodedBytes -AsByteStream;
echo "PFX_PATH=$pfxPath" >> $env:GITHUB_OUTPUT;
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
if: ${{ inputs.conan_config != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config }}"
- name: Get Conan configuration
if: ${{ inputs.conan_config_branch == '' }}
if: ${{ inputs.conan_config == '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git
- name: Create the Packages
run: conan install ${{ inputs.cura_conan_version }} --build=missing --update -c tools.env.virtualenv:powershell=True -if cura_inst -g VirtualPythonEnv -o cura:enterprise=${{ inputs.enterprise }} -o cura:staging=${{ inputs.staging }}
- name: Create the Packages (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Create the Packages (Powershell)
if: ${{ runner.os == 'Windows' }}
run: conan install $Env:CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$Env:ENTERPRISE -o cura:staging=$Env:STAGING --json "cura_inst/conan_install_info.json"
- name: Set Environment variables for Cura (bash)
if: ${{ runner.os != 'Windows' }}
@ -149,67 +216,157 @@ jobs:
- name: Set Environment variables for Cura (Powershell)
if: ${{ runner.os == 'Windows' }}
run: |
echo "${Env:WIX}\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
.\cura_inst\Scripts\activate_github_actions_env.ps1
.\cura_inst\Scripts\activate_github_actions_version_env.ps1
- name: Unlock Macos keychain (Bash)
if: ${{ runner.os == 'Macos' }}
run: security unlock -p $TEMP_KEYCHAIN_PASSWORD signing_temp.keychain
env:
TEMP_KEYCHAIN_PASSWORD: ${{ steps.macos-keychain-developer-cert.outputs.keychain-password }}
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python dependencies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
if: ${{ runner.os != 'Windows' }}
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Copy OpenSSL shared (Powershell)
if: ${{ runner.os == 'Windows' }}
run: |
cp openssl/bin/*.dll ./cura_inst/Scripts/
cp openssl/lib/*.lib ./cura_inst/Lib/
- name: Create the Cura dist
run: pyinstaller ./cura_inst/Ultimaker-Cura.spec
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-${{ inputs.os_name }}"
if "${{ runner.os }}" == "Windows":
installer_ext = "msi" if "${{ inputs.msi_installer }}" == "true" else "exe"
elif "${{ runner.os }}" == "macOS":
installer_ext = "pkg" if "${{ inputs.msi_installer }}" == "true" else "dmg"
else:
installer_ext = "AppImage"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
f.writelines(f"INSTALLER_EXT={installer_ext}\n")
f.writelines(f"FULL_INSTALLER_FILENAME={installer_filename}.{installer_ext}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }} uses:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Archive the artifacts (bash)
if: ${{ github.event.inputs.installer == 'false' && runner.os != 'Windows' }}
run: tar -zcf "./Ultimaker-Cura-$CURA_VERSION_FULL-${{ runner.os }}-${{ runner.arch }}.tar.gz" "./Ultimaker-Cura/"
if: ${{ !inputs.installer && runner.os != 'Windows' }}
run: tar -zcf "./${{ steps.filename.outputs.INSTALLER_FILENAME }}.tar.gz" "./UltiMaker-Cura/"
working-directory: dist
- name: Archive the artifacts (Powershell)
if: ${{ github.event.inputs.installer == 'false' && runner.os == 'Windows' }}
run: Compress-Archive -Path ".\Ultimaker-Cura" -DestinationPath ".\Ultimaker-Cura-$Env:CURA_VERSION_FULL-${{ runner.os }}-${{ runner.arch }}.zip"
if: ${{ !inputs.installer && runner.os == 'Windows' }}
run: Compress-Archive -Path ".\UltiMaker-Cura" -DestinationPath ".\${{ steps.filename.outputs.INSTALLER_FILENAME }}.zip"
working-directory: dist
- name: Create the Windows exe installer (Powershell)
if: ${{ github.event.inputs.installer == 'true' && runner.os == 'Windows' }}
if: ${{ inputs.installer && runner.os == 'Windows' && !inputs.msi_installer }}
run: |
python ..\cura_inst\packaging\NSIS\nsis-configurator.py ".\Ultimaker-Cura" "..\cura_inst\packaging\NSIS\Ultimaker-Cura.nsi.jinja" "Ultimaker Cura" "Ultimaker-Cura.exe" "$Env:CURA_VERSION_MAJOR" "$Env:CURA_VERSION_MINOR" "$Env:CURA_VERSION_PATCH" "$Env:CURA_VERSION_BUILD" "Ultimaker B.V." "https://ultimaker.com" "..\cura_inst\packaging\cura_license.txt" "LZMA" "..\cura_inst\packaging\NSIS\cura_banner_nsis.bmp" "..\cura_inst\packaging\icons\Cura.ico" "Ultimaker-Cura-$Env:CURA_VERSION_FULL-${{ runner.os }}-${{ runner.arch }}.exe"
makensis /V2 /P4 Ultimaker-Cura.nsi
python ..\cura_inst\packaging\NSIS\create_windows_installer.py ../cura_inst . "${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}"
working-directory: dist
- name: Create the Windows msi installer (Powershell)
if: ${{ inputs.installer && runner.os == 'Windows' && inputs.msi_installer }}
run: |
python ..\cura_inst\packaging\msi\create_windows_msi.py ..\cura_inst .\UltiMaker-Cura "${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}" "$Env:CURA_APP_NAME"
working-directory: dist
- name: Sign the Windows exe installer (Powershell)
if: ${{ inputs.installer && runner.os == 'Windows' && !inputs.msi_installer }}
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}"
working-directory: dist
- name: Sign the Windows msi installer (Powershell)
if: ${{ inputs.installer && runner.os == 'Windows' && inputs.msi_installer }}
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}"
working-directory: dist
- name: Create the Linux AppImage (Bash)
if: ${{ github.event.inputs.installer == 'true' && runner.os == 'Linux' }}
run: python ../cura_inst/packaging/AppImage/create_appimage.py ./Ultimaker-Cura $CURA_VERSION_FULL
if: ${{ inputs.installer && runner.os == 'Linux' }}
run: python ../cura_inst/packaging/AppImage/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}"
working-directory: dist
- name: Create the MacOS dmg (Bash) alternative
- name: Create the MacOS dmg and/or pkg (Bash)
if: ${{ github.event.inputs.installer == 'true' && runner.os == 'Macos' }}
run: create-dmg --window-pos 640 360 --volicon "../cura_inst/packaging/icons/VolumeIcons_Cura.icns" --window-size 690 503 --icon-size 90 --icon "Ultimaker-Cura.app" 169 272 --app-drop-link 520 272 --eula "../cura_inst/packaging/cura_license.txt" --background "../cura_inst/packaging/icons/cura_background_dmg.png" --rez Rez "./Ultimaker-Cura.dmg" "./Ultimaker-Cura.app"
run: python ../cura_inst/packaging/MacOS/build_macos.py ../cura_inst . $CURA_CONAN_VERSION "${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}" "$CURA_APP_NAME"
working-directory: dist
- name: Sign the MacOS dmg (Bash) alternative
if: ${{ github.event.inputs.installer == 'true' && runner.os == 'Macos' }}
run: codesign -s "$CODESIGN_IDENTITY" --timestamp -i "nl.ultimaker.cura.dmg" "./Ultimaker-Cura.dmg"
working-directory: dist
- name: Notarize the MacOS dmg (Bash) alternative
if: ${{ github.event.inputs.installer == 'true' && runner.os == 'Macos' }}
run: xcrun altool --notarize-app --primary-bundle-id "nl.ultimaker.cura" --username "$MAC_NOTARIZE_USER" --password "$MAC_NOTARIZE_PASS" --file "./Ultimaker-Cura.dmg"
working-directory: dist
- name: Create the MacOS dmg (Bash)
if: ${{ github.event.inputs.installer == 'true' && runner.os == 'Macos' }}
run: python ../cura_inst/packaging/dmg/dmg_sign_notarize.py
working-directory: dist
env:
SOURCE_DIR: ${{ env.GITHUB_WORKSPACE }}/cura_inst
DIST_DIR: ${{ env.GITHUB_WORKSPACE }}/dist
- name: Upload the artifacts
uses: actions/upload-artifact@v3
with:
name: Ultimaker-Cura-${{ env.CURA_VERSION_FULL }}-${{ runner.os }}-${{ runner.arch }}
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-${{ steps.filename.outputs.INSTALLER_EXT }}
path: |
dist/*.tar.gz
dist/*.zip
dist/*.exe
dist/${{ steps.filename.outputs.FULL_INSTALLER_FILENAME }}
dist/*.msi
dist/*.dmg
dist/*.AppImage
dist/*.asc
retention-days: 2
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit


@ -7,7 +7,7 @@ on:
types: [created]
schedule:
# Schedule for ten minutes after the hour, every hour
- cron: '10 * * * *'
- cron: '* */12 * * *'
# By specifying the access of one of the scopes, all of those that are not
# specified are set to 'none'.

.github/workflows/notify.yml

@ -0,0 +1,54 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
success:
required: true
type: boolean
success_title:
required: true
type: string
success_body:
required: true
type: string
failure_title:
required: true
type: string
failure_body:
required: true
type: string
jobs:
slackNotification:
name: Slack Notification
runs-on: ubuntu-latest
steps:
- name: Slack notify on-success
if: ${{ inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: green
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.success_title }}
SLACK_MESSAGE: ${{ inputs.success_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
- name: Slack notify on-failure
if: ${{ !inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: red
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.failure_title }}
SLACK_MESSAGE: ${{ inputs.failure_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}


@ -0,0 +1,36 @@
name: notify_on_print_profile_change
on:
push:
branches: [ "main" ]
paths:
- 'resources/definitions/fdmprinter.def.json'
- 'resources/definitions/ultimaker**'
- 'resources/extruders/ultimaker**'
- 'resources/intent/ultimaker**'
- 'resources/quality/ultimaker**'
- 'resources/variants/ultimaker**'
pull_request:
branches: [ "main" ]
paths:
- 'resources/definitions/fdmprinter.def.json'
- 'resources/definitions/ultimaker**'
- 'resources/extruders/ultimaker**'
- 'resources/intent/ultimaker**'
- 'resources/quality/ultimaker**'
- 'resources/variants/ultimaker**'
permissions: {}
jobs:
slackNotification:
name: Slack Notification
runs-on: ubuntu-latest
steps:
- name: Ultimaker Print Profile Changed
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: profile-changes
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: '#00FF00'
SLACK_TITLE: Print profiles changed
MSG_MINIMAL: commit
SLACK_WEBHOOK: ${{ secrets.SLACK_CURA_PPM_HOOK }}


@ -0,0 +1,46 @@
name: printer-linter-format
on:
push:
paths:
- 'resources/definitions/**'
- 'resources/extruders/**'
- 'resources/intent/**'
- 'resources/quality/**'
- 'resources/variants/**'
jobs:
printer-linter-format:
name: Printer linter auto format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6
with:
PATTERNS: |
resources/+(definitions|extruders)/*.def.json
resources/+(intent|quality|variants)/**/*.inst.cfg
- name: Setup Python and pip
if: env.GIT_DIFF && !env.MATCHED_FILES # If nothing happens with python and/or pip after, the clean-up crashes.
uses: actions/setup-python@v4
with:
python-version: 3.11.x
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-printer-linter.txt
- name: Install Python requirements for runner
if: env.GIT_DIFF && !env.MATCHED_FILES
run: pip install -r .github/workflows/requirements-printer-linter.txt
- name: Format file
if: env.GIT_DIFF && !env.MATCHED_FILES
run: python printer-linter/src/terminal.py --format ${{ env.GIT_DIFF_FILTERED }}
- uses: stefanzweifel/git-auto-commit-action@v4
if: env.GIT_DIFF && !env.MATCHED_FILES
with:
commit_message: "Applied printer-linter format"


@ -0,0 +1,60 @@
name: printer-linter-pr-diagnose
on:
pull_request:
paths:
- "resources/**"
jobs:
printer-linter-diagnose:
name: Printer linter PR diagnose
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 2
- uses: technote-space/get-diff-action@v6
with:
PATTERNS: |
resources/+(extruders|definitions)/*.def.json
resources/+(intent|quality|variants)/**/*.inst.cfg
- name: Setup Python and pip
if: env.GIT_DIFF && !env.MATCHED_FILES # If nothing happens with python and/or pip after, the clean-up crashes.
uses: actions/setup-python@v4
with:
python-version: 3.11.x
cache: "pip"
cache-dependency-path: .github/workflows/requirements-printer-linter.txt
- name: Install Python requirements for runner
if: env.GIT_DIFF && !env.MATCHED_FILES
run: pip install -r .github/workflows/requirements-printer-linter.txt
- name: Create results directory
run: mkdir printer-linter-result
- name: Diagnose file(s)
if: env.GIT_DIFF && !env.MATCHED_FILES
run: python printer-linter/src/terminal.py --diagnose --report printer-linter-result/fixes.yml ${{ env.GIT_DIFF_FILTERED }}
- name: Save PR metadata
run: |
echo ${{ github.event.number }} > printer-linter-result/pr-id.txt
echo ${{ github.event.pull_request.head.repo.full_name }} > printer-linter-result/pr-head-repo.txt
echo ${{ github.event.pull_request.head.ref }} > printer-linter-result/pr-head-ref.txt
- uses: actions/upload-artifact@v2
with:
name: printer-linter-result
path: printer-linter-result/
- name: Run clang-tidy-pr-comments action
uses: platisd/clang-tidy-pr-comments@bc0bb7da034a8317d54e7fe1e819159002f4cc40
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
clang_tidy_fixes: result.yml
request_changes: true


@ -0,0 +1,81 @@
name: printer-linter-pr-post
on:
workflow_run:
workflows: ["printer-linter-pr-diagnose"]
types: [completed]
jobs:
clang-tidy-results:
# Trigger the job only if the previous (insecure) workflow completed successfully
if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- name: Download analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "printer-linter-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/printer-linter-result.zip", Buffer.from(download.data));
- name: Set environment variables
run: |
mkdir printer-linter-result
unzip printer-linter-result.zip -d printer-linter-result
echo "pr_id=$(cat printer-linter-result/pr-id.txt)" >> $GITHUB_ENV
echo "pr_head_repo=$(cat printer-linter-result/pr-head-repo.txt)" >> $GITHUB_ENV
echo "pr_head_ref=$(cat printer-linter-result/pr-head-ref.txt)" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: ${{ env.pr_head_repo }}
ref: ${{ env.pr_head_ref }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "printer-linter-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/printer-linter-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir printer-linter-result
unzip printer-linter-result.zip -d printer-linter-result
- name: Run clang-tidy-pr-comments action
uses: platisd/clang-tidy-pr-comments@bc0bb7da034a8317d54e7fe1e819159002f4cc40
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
clang_tidy_fixes: printer-linter-result/fixes.yml
pull_request_id: ${{ env.pr_id }}
request_changes: true

View file

@ -0,0 +1,15 @@
name: process-pull-request
on:
pull_request_target:
types: [opened, reopened, edited, synchronize, review_requested, ready_for_review, assigned]
jobs:
add_label:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-ecosystem/action-add-labels@v1
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
labels: 'PR: Community Contribution :crown:'

View file

@ -1,2 +1,2 @@
conan conan==1.56.0
sip==6.5.1 sip

View file

@ -0,0 +1 @@
pyyaml

71
.github/workflows/security_badge.yml vendored Normal file
View file

@ -0,0 +1,71 @@
# NOTE: Best to keep all of these remarks in; they might prove useful in the future.
# This is basically the standard workflow suggested on the 'new workflow' page.
name: Scorecard supply-chain security
on:
# For Branch-Protection check. Only the default branch is supported. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
branch_protection_rule:
# To guarantee Maintained check is occasionally updated. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
schedule:
- cron: '25 2 * * 5'
push:
branches: [ "main" ]
# Declare default permissions as read only.
permissions: read-all
jobs:
analysis:
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:
# Needed to upload the results to code-scanning dashboard.
security-events: write
# Needed to publish results and get a badge (see publish_results below).
id-token: write
# Uncomment the permissions below if installing in a private repository.
# contents: read
# actions: read
steps:
- name: "Checkout code"
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2
with:
results_file: results.sarif
results_format: sarif
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
# - you want to enable the Branch-Protection check on a *public* repository, or
# - you are installing Scorecard on a *private* repository
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
# Public repositories:
# - Publish results to OpenSSF REST API for easy access by consumers
# - Allows the repository to include the Scorecard badge.
# - See https://github.com/ossf/scorecard-action#publishing-results.
# For private repositories:
# - `publish_results` will always be set to `false`, regardless
# of the value entered here.
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
with:
name: SARIF file
path: results.sarif
retention-days: 5
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
with:
sarif_file: results.sarif

82
.github/workflows/unit-test-post.yml vendored Normal file
View file

@ -0,0 +1,82 @@
name: unit-test-post
on:
workflow_run:
workflows: [ "unit-test" ]
types: [ completed ]
jobs:
publish-test-results:
if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- name: Download analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Set environment variables
run: |
mkdir pr_env
unzip test-result.zip -d pr_env
echo "pr_id=$(cat pr_env/pr-id.txt)" >> $GITHUB_ENV
echo "pr_head_repo=$(cat pr_env/pr-head-repo.txt)" >> $GITHUB_ENV
echo "pr_head_ref=$(cat pr_env/pr-head-ref.txt)" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: ${{ env.pr_head_repo }}
ref: ${{ env.pr_head_ref }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir -p tests
unzip test-result.zip -d tests
- name: Publish Unit Test Results
id: test-results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
files: "tests/**/*.xml"
- name: Conclusion
run: echo "Conclusion is ${{ steps.test-results.outputs.json && fromJSON( steps.test-results.outputs.json ).conclusion }}"

164
.github/workflows/unit-test.yml vendored Normal file
View file

@ -0,0 +1,164 @@
---
name: unit-test
on:
push:
paths:
- 'plugins/**'
- 'resources/**'
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/unit-test.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- 'CURA-*'
- '[1-9]+.[0-9]+'
tags:
- '[0-9]+.[0-9]+.[0-9]+'
- '[0-9]+.[0-9]+-beta'
pull_request:
paths:
- 'plugins/**'
- 'resources/**'
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/unit-test.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- '[1-9]+.[0-9]+'
tags:
- '[0-9]+.[0-9]+.[0-9]+'
- '[0-9]+.[0-9]+-beta'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: info
CONAN_NON_INTERACTIVE: 1
permissions:
contents: read
jobs:
conan-recipe-version:
uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
testing:
runs-on: ubuntu-22.04
needs: [ conan-recipe-version ]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 2
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
architecture: 'x64'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: pip install -r requirements-conan-package.txt
working-directory: .github/workflows/
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-unit-cache
      # NOTE: Due to what are probably GitHub issues, we have to remove the debconf cache and reconfigure dpkg before the rest.
      # This may be because grub caches the disk it used last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
- name: Install GCC-12 on ubuntu-22.04
run: |
sudo apt install g++-12 gcc-12 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
- name: Get Conan configuration
run: conan config install https://github.com/Ultimaker/conan-config.git
- name: Get Conan profile
run: conan profile new default --detect --force
- name: Install dependencies
run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o cura:devtools=True -g VirtualPythonEnv -if venv
- name: Upload the Dependency package(s)
run: conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
if: ${{ runner.os != 'Windows' }}
run: |
. ./venv/bin/activate_github_actions_env.sh
- name: Run Unit Test
id: run-test
run: |
pytest --junitxml=junit_cura.xml
working-directory: tests
- name: Save PR metadata
if: always()
run: |
echo ${{ github.event.number }} > pr-id.txt
echo ${{ github.event.pull_request.head.repo.full_name }} > pr-head-repo.txt
echo ${{ github.event.pull_request.head.ref }} > pr-head-ref.txt
working-directory: tests
- name: Upload Test Results
if: always()
uses: actions/upload-artifact@v3
with:
name: test-result
path: |
tests/**/*.xml
tests/pr-id.txt
tests/pr-head-repo.txt
tests/pr-head-ref.txt

View file

@ -0,0 +1,75 @@
name: update-translations
on:
push:
paths:
- 'plugins/**'
- 'resources/**'
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
jobs:
update-translations:
name: Update translations
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Cache Conan data
id: cache-conan
uses: actions/cache@v3
with:
path: ~/.conan
key: ${{ runner.os }}-conan
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: 3.11.x
cache: pip
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
      # NOTE: Due to what are probably GitHub issues, we have to remove the debconf cache and reconfigure dpkg before the rest.
      # This may be because grub caches the disk it used last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
sudo apt install g++-12 gcc-12 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: conan config install https://github.com/Ultimaker/conan-config.git
      - name: Generate the files using Conan install
run: conan install . --build=missing --update -o cura:devtools=True
- uses: stefanzweifel/git-auto-commit-action@v4
with:
file_pattern: resources/i18n/*.po resources/i18n/*.pot
status_options: --untracked-files=no
commit_message: update translations

13
.gitignore vendored
View file

@ -31,6 +31,7 @@ LC_MESSAGES
.directory .directory
.idea .idea
cura.desktop cura.desktop
*.bak
# Eclipse+PyDev # Eclipse+PyDev
.project .project
@ -89,4 +90,14 @@ CuraEngine
#Prevents import failures when plugin running tests #Prevents import failures when plugin running tests
plugins/__init__.py plugins/__init__.py
/venv venv/
build/
dist/
conaninfo.txt
conan.lock
conan_imports_manifest.txt
conanbuildinfo.txt
graph_info.json
Ultimaker-Cura.spec
.run/
/printer-linter/src/printerlinter.egg-info/

17
.printer-linter Normal file
View file

@ -0,0 +1,17 @@
checks:
diagnostic-mesh-file-extension: true
diagnostic-mesh-file-size: true
diagnostic-definition-redundant-override: true
fixes:
diagnostic-definition-redundant-override: true
format:
format-definition-bracket-newline: true
format-definition-paired-coordinate-array: true
format-definition-sort-keys: true
format-definition-indent: 4
format-definition-single-value-single-line: true # Format dicts and lists with a single item on one line "dict": { "value": 10 }
format-profile-space-around-delimiters: true
format-profile-sort-keys: true
diagnostic-mesh-file-size: 1200000
diagnostic-definition-redundant-override-ignore:
- machine_.*
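# A minimal, hypothetical sketch of running these checks locally; the flags mirror the
# printer-linter-pr-diagnose workflow above, and the definition file path is only an example:
#   python printer-linter/src/terminal.py --diagnose --report fixes.yml resources/definitions/fdmprinter.def.json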

View file

@ -0,0 +1,25 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="{{ name }}" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
<module name="{{ module_name }}" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />{% for key, value in env_vars.items() %}
<env name="{{ key }}" value="{{ value }}" />{% endfor %}
</envs>
<option name="SDK_HOME" value="{{ sdk_path }}" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/{{ script_name }}" />
<option name="PARAMETERS" value="{{ parameters }}" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
</component>

View file

@ -0,0 +1,23 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="{{ name }}" type="tests" factoryName="py.test" nameIsGenerated="true">
<module name="{{ module_name }}" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />{% for key, value in env_vars.items() %}
<env name="{{ key }}" value="{{ value }}" />{% endfor %}
</envs>
<option name="SDK_HOME" value="{{ sdk_path }}" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/tests" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="_new_keywords" value="&quot;&quot;" />
<option name="_new_parameters" value="&quot;&quot;" />
<option name="_new_additionalArguments" value="&quot;&quot;" />
<option name="_new_target" value="&quot;$PROJECT_DIR$/{{ script_name }}&quot;" />
<option name="_new_targetType" value="&quot;PATH&quot;" />
<method v="2" />
</configuration>
</component>

View file

@ -14,17 +14,17 @@ authors:
contact: contact:
- email: info@ultimaker.com - email: info@ultimaker.com
name: "Ultimaker B.V." name: "Ultimaker B.V."
url: 'https://ultimaker.com/software/ultimaker-cura' url: "https://ultimaker.com/software/ultimaker-cura"
repository-code: 'https://github.com/Ultimaker/Cura' repository-code: "https://github.com/Ultimaker/Cura"
license: LGPL-3.0 license: LGPL-3.0
license-url: "https://github.com/Ultimaker/Cura/blob/main/LICENSE" license-url: "https://github.com/Ultimaker/Cura/blob/main/LICENSE"
version: 5.0.0 version: 5.2.1
date-released: '2022-05-17' date-released: "2022-10-19"
keywords: keywords:
- Ultimaker - Ultimaker
- Cura - Cura
- Slicer
- Uranium - Uranium
- Arachne - Arachne
- 3DPrinting - 3D Printing
- Slicer - Additive Manufacturing
...

View file

@ -1,6 +1,10 @@
# Copyright (c) 2022 Ultimaker B.V. # Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
# NOTE: This is only being used for translation scripts.
# This concerns MSVC runtime flags; it will be ignored on non-Windows OSes and by this project in general. Only needed for cura-build-environment.
cmake_policy(SET CMP0091 NEW)
project(cura) project(cura)
cmake_minimum_required(VERSION 3.18) cmake_minimum_required(VERSION 3.18)
@ -11,47 +15,8 @@ list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)
set(URANIUM_DIR "${CMAKE_SOURCE_DIR}/../Uranium" CACHE PATH "The location of the Uranium repository") set(URANIUM_DIR "${CMAKE_SOURCE_DIR}/../Uranium" CACHE PATH "The location of the Uranium repository")
set(URANIUM_SCRIPTS_DIR "${URANIUM_DIR}/scripts" CACHE PATH "The location of the scripts directory of the Uranium repository") set(URANIUM_SCRIPTS_DIR "${URANIUM_DIR}/scripts" CACHE PATH "The location of the scripts directory of the Uranium repository")
option(CURA_DEBUGMODE "Enable debug dialog and other debug features" OFF)
if(CURA_DEBUGMODE)
set(_cura_debugmode "ON")
endif()
option(GENERATE_TRANSLATIONS "Should the translations be generated?" ON) option(GENERATE_TRANSLATIONS "Should the translations be generated?" ON)
set(CURA_APP_NAME "cura" CACHE STRING "Short name of Cura, used for configuration folder")
set(CURA_APP_DISPLAY_NAME "Ultimaker Cura" CACHE STRING "Display name of Cura")
set(CURA_VERSION "master" CACHE STRING "Version name of Cura")
set(CURA_BUILDTYPE "" CACHE STRING "Build type of Cura, eg. 'PPA'")
set(CURA_CLOUD_API_ROOT "" CACHE STRING "Alternative Cura cloud API root")
set(CURA_CLOUD_API_VERSION "" CACHE STRING "Alternative Cura cloud API version")
set(CURA_CLOUD_ACCOUNT_API_ROOT "" CACHE STRING "Alternative Cura cloud account API version")
set(CURA_MARKETPLACE_ROOT "" CACHE STRING "Alternative Marketplace location")
set(CURA_DIGITAL_FACTORY_URL "" CACHE STRING "Alternative Digital Factory location")
configure_file(${CMAKE_SOURCE_DIR}/com.ultimaker.cura.desktop.in ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop @ONLY)
configure_file(cura/CuraVersion.py.in CuraVersion.py @ONLY)
if(NOT DEFINED Python_VERSION)
set(Python_VERSION
3.10
CACHE STRING "Python Version" FORCE)
message(STATUS "Setting Python version to ${Python_VERSION}. Set Python_VERSION if you want to compile against an other version.")
endif()
if(APPLE)
set(Python_FIND_FRAMEWORK NEVER)
endif()
find_package(Python ${Python_VERSION} EXACT REQUIRED COMPONENTS Interpreter)
message(STATUS "Linking and building ${project_name} against Python ${Python_VERSION}")
if(NOT DEFINED Python_SITELIB_LOCAL)
set(Python_SITELIB_LOCAL
"${Python_SITELIB}"
CACHE PATH "Local alternative site-package location to install Cura" FORCE)
endif()
# Tests
include(CuraTests)
if(NOT ${URANIUM_DIR} STREQUAL "") if(NOT ${URANIUM_DIR} STREQUAL "")
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${URANIUM_DIR}/cmake") set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${URANIUM_DIR}/cmake")
endif() endif()
@ -64,24 +29,4 @@ if(NOT ${URANIUM_SCRIPTS_DIR} STREQUAL "")
if(${GENERATE_TRANSLATIONS}) if(${GENERATE_TRANSLATIONS})
CREATE_TRANSLATION_TARGETS() CREATE_TRANSLATION_TARGETS()
endif() endif()
endif() endif()
install(DIRECTORY resources DESTINATION ${CMAKE_INSTALL_DATADIR}/cura)
include(CuraPluginInstall)
install(FILES cura_app.py DESTINATION ${CMAKE_INSTALL_BINDIR}
PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
install(DIRECTORY cura DESTINATION "${Python_SITELIB_LOCAL}")
install(FILES ${CMAKE_BINARY_DIR}/CuraVersion.py DESTINATION "${Python_SITELIB_LOCAL}/cura/")
if(NOT APPLE AND NOT WIN32)
install(FILES ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop
DESTINATION ${CMAKE_INSTALL_DATADIR}/applications)
install(FILES ${CMAKE_SOURCE_DIR}/resources/images/cura-icon.png
DESTINATION ${CMAKE_INSTALL_DATADIR}/icons/hicolor/128x128/apps/)
install(FILES com.ultimaker.cura.appdata.xml
DESTINATION ${CMAKE_INSTALL_DATADIR}/metainfo)
install(FILES cura.sharedmimeinfo
DESTINATION ${CMAKE_INSTALL_DATADIR}/mime/packages/
RENAME cura.xml )
endif()

14
CuraVersion.py.jinja Normal file
View file

@ -0,0 +1,14 @@
# Copyright (c) 2022 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
CuraAppName = "{{ cura_app_name }}"
CuraAppDisplayName = "{{ cura_app_display_name }}"
CuraVersion = "{{ cura_version }}"
CuraBuildType = "{{ cura_build_type }}"
CuraDebugMode = {{ cura_debug_mode }}
CuraCloudAPIRoot = "{{ cura_cloud_api_root }}"
CuraCloudAPIVersion = "{{ cura_cloud_api_version }}"
CuraCloudAccountAPIRoot = "{{ cura_cloud_account_api_root }}"
CuraMarketplaceRoot = "{{ cura_marketplace_root }}"
CuraDigitalFactoryURL = "{{ cura_digital_factory_url }}"
CuraLatestURL = "{{ cura_latest_url }}"
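# A hypothetical sketch (not part of the build) of rendering this template with Jinja2; the values
# shown are placeholders, the real ones are filled in by the build (presumably the Conan recipe):
#   from jinja2 import Template
#   with open("CuraVersion.py.jinja") as f:
#       print(Template(f.read()).render(cura_app_name = "cura", cura_version = "5.2.1", cura_debug_mode = False))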

View file

@ -1,45 +0,0 @@
FROM ultimaker/cura-build-environment:1
# Environment vars for easy configuration
ENV CURA_APP_DIR=/srv/cura
# Ensure our sources dir exists
RUN mkdir $CURA_APP_DIR
# Setup CuraEngine
ENV CURA_ENGINE_BRANCH=master
WORKDIR $CURA_APP_DIR
RUN git clone -b $CURA_ENGINE_BRANCH --depth 1 https://github.com/Ultimaker/CuraEngine
WORKDIR $CURA_APP_DIR/CuraEngine
RUN mkdir build
WORKDIR $CURA_APP_DIR/CuraEngine/build
RUN cmake3 ..
RUN make
RUN make install
# TODO: setup libCharon
# Setup Uranium
ENV URANIUM_BRANCH=master
WORKDIR $CURA_APP_DIR
RUN git clone -b $URANIUM_BRANCH --depth 1 https://github.com/Ultimaker/Uranium
# Setup materials
ENV MATERIALS_BRANCH=master
WORKDIR $CURA_APP_DIR
RUN git clone -b $MATERIALS_BRANCH --depth 1 https://github.com/Ultimaker/fdm_materials materials
# Setup Cura
WORKDIR $CURA_APP_DIR/Cura
ADD . .
RUN mv $CURA_APP_DIR/materials resources/materials
# Make sure Cura can find CuraEngine
RUN ln -s /usr/local/bin/CuraEngine $CURA_APP_DIR/Cura
# Run Cura
WORKDIR $CURA_APP_DIR/Cura
ENV PYTHONPATH=${PYTHONPATH}:$CURA_APP_DIR/Uranium
RUN chmod +x ./CuraEngine
RUN chmod +x ./run_in_docker.sh
CMD "./run_in_docker.sh"

74
Jenkinsfile vendored
View file

@ -1,74 +0,0 @@
parallel_nodes(['linux && cura', 'windows && cura'])
{
timeout(time: 2, unit: "HOURS")
{
// Prepare building
stage('Prepare')
{
// Ensure we start with a clean build directory.
step([$class: 'WsCleanup'])
// Checkout whatever sources are linked to this pipeline.
checkout scm
}
// If any error occurs during building, we want to catch it and continue with the "finale" stage.
catchError
{
// Building and testing should happen in a subdirectory.
dir('build')
{
// Perform the "build". Since Uranium is Python code, this basically only ensures CMake is setup.
stage('Build')
{
def branch = env.BRANCH_NAME
if(!fileExists("${env.CURA_ENVIRONMENT_PATH}/${branch}"))
{
branch = "master"
}
// Ensure CMake is setup. Note that since this is Python code we do not really "build" it.
def uranium_dir = get_workspace_dir("Ultimaker/Uranium/${branch}")
cmake("..", "-DCMAKE_PREFIX_PATH=\"${env.CURA_ENVIRONMENT_PATH}/${branch}\" -DCMAKE_BUILD_TYPE=Release -DURANIUM_DIR=\"${uranium_dir}\"")
}
// Try and run the unit tests. If this stage fails, we consider the build to be "unstable".
stage('Unit Test')
{
if (isUnix())
{
// For Linux
try {
sh 'make CTEST_OUTPUT_ON_FAILURE=TRUE test'
} catch(e)
{
currentBuild.result = "UNSTABLE"
}
}
else
{
// For Windows
try
{
// This also does code style checks.
bat 'ctest -V'
} catch(e)
{
currentBuild.result = "UNSTABLE"
}
}
}
}
}
// Perform any post-build actions like notification and publishing of unit tests.
stage('Finalize')
{
// Publish the test results to Jenkins.
junit allowEmptyResults: true, testResults: 'build/junit*.xml'
notify_build_result(env.CURA_EMAIL_RECIPIENTS, '#cura-dev', ['master', '2.'])
}
}
}

134
README.md
View file

@ -1,61 +1,105 @@
Cura
====
Ultimaker Cura is a state-of-the-art slicer application to prepare your 3D models for printing with a 3D printer. With hundreds of settings and hundreds of community-managed print profiles, Ultimaker Cura is sure to lead your next project to a success.
![Screenshot](cura-logo.PNG) > # Work with us!
> If you're interested in working with us on Cura and Thingiverse, please apply to one of the open positions below.
> - [Software Engineer C++ & Python](https://www.linkedin.com/jobs/view/3516545085) for [Cura](https://github.com/Ultimaker/Cura)
> - [DevOps Engineer Community Software](https://www.linkedin.com/jobs/view/3516542580) for [Cura](https://github.com/Ultimaker/Cura) and [Thingiverse](https://www.thingiverse.com/)
> - [QA / Test Engineer Cura (3D printing)](https://www.linkedin.com/jobs/view/3516538895) for [Cura](https://github.com/Ultimaker/Cura) and [Thingiverse](https://www.thingiverse.com/)
Logging Issues <br>
------------
For crashes and similar issues, please attach the following information:
* (On Windows) The log as produced by dxdiag (start -> run -> dxdiag -> save output) <div align = center>
* The Cura GUI log file, located at
* `%APPDATA%\cura\<Cura version>\cura.log` (Windows), or usually `C:\Users\<your username>\AppData\Roaming\cura\<Cura version>\cura.log`
* `$HOME/Library/Application Support/cura/<Cura version>/cura.log` (OSX)
* `$HOME/.local/share/cura/<Cura version>/cura.log` (Ubuntu/Linux)
If the Cura user interface still starts, you can also reach this directory from the application menu in Help -> Show settings folder [![Badge Issues]][Issues]
[![Badge PullRequests]][PullRequests]
[![Badge Closed]][Closed]
For additional support, you could also ask in the [#cura channel](https://web.libera.chat/#cura) on [libera.chat](https://libera.chat/). For help with development, there is also the [#cura-dev channel](https://web.libera.chat/#cura-dev). [![Badge Size]][#]
[![Badge License]][License]
[![Badge Contributors]][Contributors]
Dependencies [![Badge Test]][Test]
------------ [![Badge Conan]][Conan]
* [Uranium](https://github.com/Ultimaker/Uranium) Cura is built on top of the Uranium framework.
* [CuraEngine](https://github.com/Ultimaker/CuraEngine) This will be needed at runtime to perform the actual slicing.
* [fdm_materials](https://github.com/Ultimaker/fdm_materials) Required to load a printer that has swappable material profiles.
* [PySerial](https://github.com/pyserial/pyserial) Only required for USB printing support.
* [python-zeroconf](https://github.com/jstasiak/python-zeroconf) Only required to detect mDNS-enabled printers.
For a list of required Python packages, with their recommended version, see `requirements.txt`. <br>
<br>
This list is not exhaustive at the moment, please check the links in the next section for more details. ![Logo]
Build scripts # Ultimaker Cura
-------------
Please check out [cura-build](https://github.com/Ultimaker/cura-build) for detailed building instructions.
If you want to build the entire environment from scratch before building Cura as well, [cura-build-environment](https://github.com/Ultimaker/cura-build-environment) might be a starting point before cura-build. (Again, see cura-build for more details.) *State-of-the-art slicer app to prepare* <br>
*your 3D models for your 3D printer.*
Running from Source *With hundreds of settings & community-managed print profiles,* <br>
------------- *Ultimaker Cura is sure to lead your next project to a success.*
Please check our [Wiki page](https://github.com/Ultimaker/Cura/wiki/Running-Cura-from-Source) for details about running Cura from source.
Plugins <br>
------------- <br>
Please check our [Wiki page](https://github.com/Ultimaker/Cura/wiki/Plugin-Directory) for details about creating and using plugins.
Supported printers [![Button Building]][Building]
------------- [![Button Plugins]][Plugins]
Please check our [Wiki page](https://github.com/Ultimaker/Cura/wiki/Adding-new-machine-profiles-to-Cura) for guidelines about adding support for new machines. [![Button Machines]][Machines]
Configuring Cura [![Button Report]][Report]
---------------- [![Button Settings]][Settings]
Please check out [Wiki page](https://github.com/Ultimaker/Cura/wiki/Cura-Settings) about configuration options for developers. [![Button Localize]][Localize]
<br>
<br>
<picture>
<source media="(prefers-color-scheme: light)" srcset="./cura-logo.PNG">
<source media="(prefers-color-scheme: dark)" srcset="./cura-logo-dark.PNG">
<img alt="Shows cura open on the preview screen with a large benchy model in the center." src="./cura-logo.PNG">
</picture>
</div>
<br>
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/Ultimaker/Cura/badge)](https://api.securityscorecards.dev/projects/github.com/Ultimaker/Cura)
<br>
<!----------------------------------------------------------------------------->
[Contributors]: https://github.com/Ultimaker/Cura/graphs/contributors
[PullRequests]: https://github.com/Ultimaker/Cura/pulls
[Machines]: https://github.com/Ultimaker/Cura/wiki/Adding-new-machine-profiles-to-Cura
[Building]: https://github.com/Ultimaker/Cura/wiki/Running-Cura-from-Source
[Localize]: https://github.com/Ultimaker/Cura/wiki/Translating-Cura
[Settings]: https://github.com/Ultimaker/Cura/wiki/Cura-Settings
[Plugins]: https://github.com/Ultimaker/Cura/wiki/Plugin-Directory
[Closed]: https://github.com/Ultimaker/Cura/issues?q=is%3Aissue+is%3Aclosed
[Issues]: https://github.com/Ultimaker/Cura/issues
[Conan]: https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml
[Test]: https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml
[License]: LICENSE
[Report]: docs/Report.md
[Logo]: resources/images/cura-icon.png
[#]: #
<!---------------------------------[ Badges ]---------------------------------->
[Badge Contributors]: https://img.shields.io/github/contributors/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=db5e8a&color=ab4a6c&logo=GitHub
[Badge PullRequests]: https://img.shields.io/github/issues-pr/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=bb9f3e&color=937d31&logo=GitExtensions
[Badge License]: https://img.shields.io/badge/License-LGPL3-336887.svg?style=for-the-badge&labelColor=458cb5&logoColor=white&logo=GNU
[Badge Closed]: https://img.shields.io/github/issues-closed/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=629944&color=446a30&logo=AddThis
[Badge Issues]: https://img.shields.io/github/issues/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=c34360&color=933349&logo=AdBlock
[Badge Conan]: https://img.shields.io/github/workflow/status/Ultimaker/Cura/conan-package?style=for-the-badge&logoColor=white&labelColor=6185aa&color=4c6987&logo=Conan&label=Conan%20Package
[Badge Test]: https://img.shields.io/github/workflow/status/Ultimaker/Cura/unit-test?style=for-the-badge&logoColor=white&labelColor=4a999d&color=346c6e&logo=Codacy&label=Unit%20Test
[Badge Size]: https://img.shields.io/github/repo-size/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=715a97&color=584674&logo=GoogleAnalytics
<!---------------------------------[ Buttons ]--------------------------------->
[Button Localize]: https://img.shields.io/badge/Help_Localize-e2467d?style=for-the-badge&logoColor=white&logo=GoogleTranslate
[Button Machines]: https://img.shields.io/badge/Adding_Machines-yellow?style=for-the-badge&logoColor=white&logo=CloudFoundry
[Button Settings]: https://img.shields.io/badge/Configuration-00979D?style=for-the-badge&logoColor=white&logo=CodeReview
[Button Building]: https://img.shields.io/badge/Building_Cura-blue?style=for-the-badge&logoColor=white&logo=GitBook
[Button Plugins]: https://img.shields.io/badge/Plugin_Usage-569A31?style=for-the-badge&logoColor=white&logo=ROS
[Button Report]: https://img.shields.io/badge/Report_Issues-C9284D?style=for-the-badge&logoColor=white&logo=Cliqz
Translating Cura
----------------
Please check out [Wiki page](https://github.com/Ultimaker/Cura/wiki/Translating-Cura) about how to translate Cura into other languages.
License
----------------
Cura is released under the terms of the LGPLv3 or higher. A copy of this license should be included with the software.

275
UltiMaker-Cura.spec.jinja Normal file
View file

@ -0,0 +1,275 @@
# -*- mode: python ; coding: utf-8 -*-
import os
from pathlib import Path
from PyInstaller.utils.hooks import collect_all
datas = {{ datas }}
binaries = {{ binaries }}
hiddenimports = {{ hiddenimports }}
{% for value in collect_all %}tmp_ret = collect_all('{{ value }}')
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
{% endfor %}
# Add dynamic libs in the venv bin/Script Path. This is needed because we might copy some additional libs
# e.g.: OpenSSL 1.1.1l in that directory with a separate:
# `conan install openssl@1.1.1l -g deploy && cp openssl/bin/*.so cura_inst/bin`
binaries.extend([(str(bin), ".") for bin in Path(r"{{ venv_script_path }}").glob("*.so*")])
binaries.extend([(str(bin), ".") for bin in Path(r"{{ venv_script_path }}").glob("*.dll")])
binaries.extend([(str(bin), ".") for bin in Path(r"{{ venv_script_path }}").glob("*.dylib")])
block_cipher = None
a = Analysis(
[{{ entrypoint }}],
pathex=[],
binaries=binaries,
datas=datas,
hiddenimports=hiddenimports,
hookspath=[],
hooksconfig={},
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False
)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(
pyz,
a.scripts,
[],
exclude_binaries=True,
name=r'{{ name }}',
debug=False,
bootloader_ignore_signals=False,
strip={{ strip }},
upx={{ upx }},
console=False,
disable_windowed_traceback=False,
argv_emulation=False,
target_arch={{ target_arch }},
codesign_identity=os.getenv('CODESIGN_IDENTITY', None),
entitlements_file={{ entitlements_file }},
icon={{ icon }}
)
coll = COLLECT(
exe,
a.binaries,
a.zipfiles,
a.datas,
strip=False,
upx=True,
upx_exclude=[],
name=r'{{ name }}'
)
{% if macos == true %}
# PyInstaller seems to copy everything in the resource folder for macOS; this causes issues with codesigning and notarizing.
# The folder structure should adhere to the one specified in Table 2-5
# https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html#//apple_ref/doc/uid/10000123i-CH101-SW1
# The class below is basically duck-typing the BUNDLE class of PyInstaller and using our own `assemble` method for more fine-grained and specific
# control. Some code of the method below is copied from:
# https://github.com/pyinstaller/pyinstaller/blob/22d1d2a5378228744cc95f14904dae1664df32c4/PyInstaller/building/osx.py#L115
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2022, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
import plistlib
import shutil
import PyInstaller.utils.osx as osxutils
from pathlib import Path
from PyInstaller.building.osx import BUNDLE
from PyInstaller.building.utils import (_check_path_overlap, _rmtree, add_suffix_to_extension, checkCache)
from PyInstaller.building.datastruct import logger
from PyInstaller.building.icon import normalize_icon_type
class UMBUNDLE(BUNDLE):
def assemble(self):
from PyInstaller.config import CONF
if _check_path_overlap(self.name) and os.path.isdir(self.name):
_rmtree(self.name)
logger.info("Building BUNDLE %s", self.tocbasename)
# Create a minimal Mac bundle structure.
macos_path = Path(self.name, "Contents", "MacOS")
resources_path = Path(self.name, "Contents", "Resources")
frameworks_path = Path(self.name, "Contents", "Frameworks")
os.makedirs(macos_path)
os.makedirs(resources_path)
os.makedirs(frameworks_path)
# Makes sure the icon exists and attempts to convert to the proper format if applicable
self.icon = normalize_icon_type(self.icon, ("icns",), "icns", CONF["workpath"])
# Ensure icon path is absolute
self.icon = os.path.abspath(self.icon)
# Copy icns icon to Resources directory.
shutil.copy(self.icon, os.path.join(self.name, 'Contents', 'Resources'))
# Key/values for a minimal Info.plist file
info_plist_dict = {
"CFBundleDisplayName": self.appname,
"CFBundleName": self.appname,
# Required by 'codesign' utility.
# The value for CFBundleIdentifier is used as the default unique name of your program for Code Signing
# purposes. It even identifies the APP for access to restricted OS X areas like Keychain.
#
# The identifier used for signing must be globally unique. The usual form for this identifier is a
# hierarchical name in reverse DNS notation, starting with the toplevel domain, followed by the company
# name, followed by the department within the company, and ending with the product name. Usually in the
# form: com.mycompany.department.appname
# CLI option --osx-bundle-identifier sets this value.
"CFBundleIdentifier": self.bundle_identifier,
"CFBundleExecutable": os.path.basename(self.exename),
"CFBundleIconFile": os.path.basename(self.icon),
"CFBundleInfoDictionaryVersion": "6.0",
"CFBundlePackageType": "APPL",
"CFBundleVersionString": self.version,
"CFBundleShortVersionString": self.version,
}
# Set some default values. But they still can be overwritten by the user.
if self.console:
# Setting EXE console=True implies LSBackgroundOnly=True.
info_plist_dict['LSBackgroundOnly'] = True
else:
# Let's use high resolution by default.
info_plist_dict['NSHighResolutionCapable'] = True
# Merge info_plist settings from spec file
if isinstance(self.info_plist, dict) and self.info_plist:
info_plist_dict.update(self.info_plist)
plist_filename = os.path.join(self.name, "Contents", "Info.plist")
with open(plist_filename, "wb") as plist_fh:
plistlib.dump(info_plist_dict, plist_fh)
links = []
_QT_BASE_PATH = {'PySide2', 'PySide6', 'PyQt5', 'PyQt6', 'PySide6'}
for inm, fnm, typ in self.toc:
# Adjust name for extensions, if applicable
inm, fnm, typ = add_suffix_to_extension(inm, fnm, typ)
inm = Path(inm)
fnm = Path(fnm)
            # Copy files from cache. This ensures that files with relative paths to dynamic library
            # dependencies (@executable_path) are used.
if typ in ('EXTENSION', 'BINARY') or (typ == 'DATA' and inm.suffix == '.so'):
if any(['.' in p for p in inm.parent.parts]):
inm = Path(inm.name)
fnm = Path(checkCache(
str(fnm),
strip = self.strip,
upx = self.upx,
upx_exclude = self.upx_exclude,
dist_nm = str(inm),
target_arch = self.target_arch,
codesign_identity = self.codesign_identity,
entitlements_file = self.entitlements_file,
strict_arch_validation = (typ == 'EXTENSION'),
))
frame_dst = frameworks_path.joinpath(inm)
if not frame_dst.exists():
if frame_dst.is_dir():
os.makedirs(frame_dst, exist_ok = True)
else:
os.makedirs(frame_dst.parent, exist_ok = True)
shutil.copy(fnm, frame_dst, follow_symlinks = True)
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
# Create relative symlink to the framework
symlink_to = Path(*[".." for p in macos_dst.relative_to(macos_path).parts], "Frameworks").joinpath(
frame_dst.relative_to(frameworks_path))
try:
macos_dst.symlink_to(symlink_to)
except FileExistsError:
pass
else:
if typ == 'DATA':
if any(['.' in p for p in inm.parent.parts]) or inm.suffix == '.so':
# Skip info dist egg and some not needed folders in tcl and tk, since they all contain dots in their files
logger.warning(f"Skipping DATA file {inm}")
continue
res_dst = resources_path.joinpath(inm)
if not res_dst.exists():
if res_dst.is_dir():
os.makedirs(res_dst, exist_ok = True)
else:
os.makedirs(res_dst.parent, exist_ok = True)
shutil.copy(fnm, res_dst, follow_symlinks = True)
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
# Create relative symlink to the resource
symlink_to = Path(*[".." for p in macos_dst.relative_to(macos_path).parts], "Resources").joinpath(
res_dst.relative_to(resources_path))
try:
macos_dst.symlink_to(symlink_to)
except FileExistsError:
pass
else:
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
shutil.copy(fnm, macos_dst, follow_symlinks = True)
# Sign the bundle
logger.info('Signing the BUNDLE...')
try:
osxutils.sign_binary(self.name, self.codesign_identity, self.entitlements_file, deep = True)
except Exception as e:
logger.warning(f"Error while signing the bundle: {e}")
logger.warning("You will need to sign the bundle manually!")
logger.info(f"Building BUNDLE {self.tocbasename} completed successfully.")
app = UMBUNDLE(
coll,
name='{{ display_name }}.app',
icon={{ icon }},
bundle_identifier={{ osx_bundle_identifier }} + "_" + '{{ display_name }}'.replace(" ", "_") + "_" {{ short_version }},
version={{ version }},
info_plist={
'CFBundleDisplayName': '{{ display_name }}',
'NSPrincipalClass': 'NSApplication',
'CFBundleDevelopmentRegion': 'English',
'CFBundleExecutable': '{{ name }}',
'CFBundleInfoDictionaryVersion': '6.0',
'CFBundlePackageType': 'APPL',
'CFBundleVersionString': {{ version }},
'CFBundleShortVersionString': {{ short_version }},
'CFBundleDocumentTypes': [{
'CFBundleTypeRole': 'Viewer',
'CFBundleTypeExtensions': ['*'],
'CFBundleTypeName': 'Model Files',
}]
},
){% endif %}

View file

@ -1,34 +0,0 @@
set -e
set -u
export OLD_PWD=`pwd`
export CMAKE=/c/software/PCL/cmake-3.0.1-win32-x86/bin/cmake.exe
export MAKE=mingw32-make.exe
export PATH=/c/mingw-w64/i686-4.9.2-posix-dwarf-rt_v3-rev1/mingw32/bin:$PATH
mkdir -p /c/software/protobuf/_build
cd /c/software/protobuf/_build
$CMAKE ../
$MAKE install
mkdir -p /c/software/libArcus/_build
cd /c/software/libArcus/_build
$CMAKE ../
$MAKE install
mkdir -p /c/software/PinkUnicornEngine/_build
cd /c/software/PinkUnicornEngine/_build
$CMAKE ../
$MAKE
cd $OLD_PWD
export PYTHONPATH=`pwd`/../libArcus/python:/c/Software/Uranium/
/c/python34/python setup.py py2exe
cp /c/software/PinkUnicornEngine/_build/CuraEngine.exe dist/
cp /c/software/libArcus/_install/bin/libArcus.dll dist/
cp /c/mingw-w64/i686-4.9.2-posix-dwarf-rt_v3-rev1/mingw32/bin/libgcc_s_dw2-1.dll dist/
cp /c/mingw-w64/i686-4.9.2-posix-dwarf-rt_v3-rev1/mingw32/bin/libwinpthread-1.dll dist/
cp /c/mingw-w64/i686-4.9.2-posix-dwarf-rt_v3-rev1/mingw32/bin/libstdc++-6.dll dist/
/c/program\ files\ \(x86\)/NSIS/makensis.exe installer.nsi

View file

@ -1,92 +0,0 @@
# Copyright (c) 2022 Ultimaker B.V.
# CuraPluginInstall.cmake is released under the terms of the LGPLv3 or higher.
#
# This module detects all plugins that need to be installed and adds them using the CMake install() command.
# It detects all plugin folders under the path "plugins/*" that contain a "plugin.json" file.
#
# Plugins can be configured to NOT BE INSTALLED via the variable "CURA_NO_INSTALL_PLUGINS" as a list of strings in the
# form of "a;b;c" or "a,b,c". By default all plugins will be installed.
#
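# For example (a hypothetical invocation; the plugin names are illustrative only):
#   cmake .. -DCURA_NO_INSTALL_PLUGINS="UM3NetworkPrinting;MonitorStage"
#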
option(PRINT_PLUGIN_LIST "Should the list of plugins that are installed be printed?" ON)
# Options or configuration variables
set(CURA_NO_INSTALL_PLUGINS "" CACHE STRING "A list of plugins that should not be installed, separated with ';' or ','.")
file(GLOB_RECURSE _plugin_json_list ${CMAKE_SOURCE_DIR}/plugins/*/plugin.json)
list(LENGTH _plugin_json_list _plugin_json_list_len)
# Sort the lists alphabetically so we can handle cases like this:
# - plugins/my_plugin/plugin.json
# - plugins/my_plugin/my_module/plugin.json
# In this case, only "plugins/my_plugin" should be added via install().
set(_no_install_plugin_list ${CURA_NO_INSTALL_PLUGINS})
# Sanitize the string so the comparison will be case-insensitive.
string(STRIP "${_no_install_plugin_list}" _no_install_plugin_list)
string(TOLOWER "${_no_install_plugin_list}" _no_install_plugin_list)
# WORKAROUND counterpart of what's in cura-build.
string(REPLACE "," ";" _no_install_plugin_list "${_no_install_plugin_list}")
list(LENGTH _no_install_plugin_list _no_install_plugin_list_len)
if(_no_install_plugin_list_len GREATER 0)
list(SORT _no_install_plugin_list)
endif()
if(_plugin_json_list_len GREATER 0)
list(SORT _plugin_json_list)
endif()
# Check all plugin directories and add them via install() if needed.
set(_install_plugin_list "")
foreach(_plugin_json_path ${_plugin_json_list})
get_filename_component(_plugin_dir ${_plugin_json_path} DIRECTORY)
file(RELATIVE_PATH _rel_plugin_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_plugin_dir})
get_filename_component(_plugin_dir_name ${_plugin_dir} NAME)
# Make plugin name comparison case-insensitive
string(TOLOWER "${_plugin_dir_name}" _plugin_dir_name_lowercase)
# Check if this plugin needs to be skipped for installation
set(_add_plugin ON) # Indicates if this plugin should be added to the build or not.
set(_is_no_install_plugin OFF) # If this plugin will not be added, this indicates if it's because the plugin is
# specified in the NO_INSTALL_PLUGINS list.
if(_no_install_plugin_list)
if("${_plugin_dir_name_lowercase}" IN_LIST _no_install_plugin_list)
set(_add_plugin OFF)
set(_is_no_install_plugin ON)
endif()
endif()
# Make sure this is not a subdirectory in a plugin that's already in the install list
if(_add_plugin)
foreach(_known_install_plugin_dir ${_install_plugin_list})
if(_plugin_dir MATCHES "${_known_install_plugin_dir}.+")
set(_add_plugin OFF)
break()
endif()
endforeach()
endif()
if(_add_plugin)
if(${PRINT_PLUGIN_LIST})
message(STATUS "[+] PLUGIN TO INSTALL: ${_rel_plugin_dir}")
endif()
get_filename_component(_rel_plugin_parent_dir ${_rel_plugin_dir} DIRECTORY)
install(DIRECTORY ${_rel_plugin_dir}
DESTINATION lib${LIB_SUFFIX}/cura/${_rel_plugin_parent_dir}
PATTERN "__pycache__" EXCLUDE
PATTERN "*.qmlc" EXCLUDE
)
list(APPEND _install_plugin_list ${_plugin_dir})
elseif(_is_no_install_plugin)
if(${PRINT_PLUGIN_LIST})
message(STATUS "[-] PLUGIN TO REMOVE : ${_rel_plugin_dir}")
endif()
execute_process(COMMAND ${Python_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/mod_bundled_packages_json.py
-d ${CMAKE_CURRENT_SOURCE_DIR}/resources/bundled_packages
${_plugin_dir_name}
RESULT_VARIABLE _mod_json_result)
endif()
endforeach()

View file

@ -1,77 +0,0 @@
# Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
include(CTest)
include(CMakeParseArguments)
add_custom_target(test-verbose COMMAND ${CMAKE_CTEST_COMMAND} --verbose)
function(cura_add_test)
set(_single_args NAME DIRECTORY PYTHONPATH)
cmake_parse_arguments("" "" "${_single_args}" "" ${ARGN})
if(NOT _NAME)
message(FATAL_ERROR "cura_add_test requires a test name argument")
endif()
if(NOT _DIRECTORY)
message(FATAL_ERROR "cura_add_test requires a directory to test")
endif()
if(NOT _PYTHONPATH)
set(_PYTHONPATH ${_DIRECTORY})
endif()
if(WIN32)
string(REPLACE "|" "\\;" _PYTHONPATH ${_PYTHONPATH})
set(_PYTHONPATH "${_PYTHONPATH}\\;$ENV{PYTHONPATH}")
else()
string(REPLACE "|" ":" _PYTHONPATH ${_PYTHONPATH})
set(_PYTHONPATH "${_PYTHONPATH}:$ENV{PYTHONPATH}")
endif()
get_test_property(${_NAME} ENVIRONMENT test_exists) #Find out if the test exists by getting a property from it that always exists (such as ENVIRONMENT because we set that ourselves).
if (NOT ${test_exists})
add_test(
NAME ${_NAME}
COMMAND ${Python_EXECUTABLE} -m pytest --junitxml=${CMAKE_BINARY_DIR}/junit-${_NAME}.xml ${_DIRECTORY}
)
set_tests_properties(${_NAME} PROPERTIES ENVIRONMENT LANG=C)
set_tests_properties(${_NAME} PROPERTIES ENVIRONMENT "PYTHONPATH=${_PYTHONPATH}")
else()
message(WARNING "Duplicate test ${_NAME}!")
endif()
endfunction()
#Add code style test.
add_test(
NAME "code-style"
COMMAND ${Python_EXECUTABLE} run_mypy.py
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
)
#Add test for import statements which are not compatible with all builds
add_test(
NAME "invalid-imports"
COMMAND ${Python_EXECUTABLE} scripts/check_invalid_imports.py
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
)
cura_add_test(NAME pytest-main DIRECTORY ${CMAKE_SOURCE_DIR}/tests PYTHONPATH "${CMAKE_SOURCE_DIR}|${URANIUM_DIR}")
file(GLOB_RECURSE _plugins plugins/*/__init__.py)
foreach(_plugin ${_plugins})
get_filename_component(_plugin_directory ${_plugin} DIRECTORY)
if(EXISTS ${_plugin_directory}/tests)
get_filename_component(_plugin_name ${_plugin_directory} NAME)
cura_add_test(NAME pytest-${_plugin_name} DIRECTORY ${_plugin_directory} PYTHONPATH "${_plugin_directory}|${CMAKE_SOURCE_DIR}|${URANIUM_DIR}")
endif()
endforeach()
#Add test for whether the shortcut alt-keys are unique in every translation.
add_test(
NAME "shortcut-keys"
COMMAND ${Python_EXECUTABLE} scripts/check_shortcut_keys.py
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
)

View file

@ -1,73 +0,0 @@
#!/usr/bin/env python3
#
# This script removes the given package entries in the bundled_packages JSON files. This is used by the PluginInstall
# CMake module.
#
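# For example (a hypothetical invocation; the directory and plugin name are illustrative only,
# CuraPluginInstall.cmake passes the real values):
#   python3 mod_bundled_packages_json.py -d resources/bundled_packages UM3NetworkPrinting
#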
import argparse
import collections
import json
import os
import sys
def find_json_files(work_dir: str) -> list:
"""Finds all JSON files in the given directory recursively and returns a list of those files in absolute paths.
:param work_dir: The directory to look for JSON files recursively.
:return: A list of JSON files in absolute paths that are found in the given directory.
"""
json_file_list = []
for root, dir_names, file_names in os.walk(work_dir):
for file_name in file_names:
abs_path = os.path.abspath(os.path.join(root, file_name))
json_file_list.append(abs_path)
return json_file_list
def remove_entries_from_json_file(file_path: str, entries: list) -> None:
"""Removes the given entries from the given JSON file. The file will modified in-place.
:param file_path: The JSON file to modify.
:param entries: A list of strings as entries to remove.
:return: None
"""
try:
with open(file_path, "r", encoding = "utf-8") as f:
package_dict = json.load(f, object_hook = collections.OrderedDict)
except Exception as e:
msg = "Failed to load '{file_path}' as a JSON file. This file will be ignored Exception: {e}"\
.format(file_path = file_path, e = e)
sys.stderr.write(msg + os.linesep)
return
for entry in entries:
if entry in package_dict:
del package_dict[entry]
print("[INFO] Remove entry [{entry}] from [{file_path}]".format(file_path = file_path, entry = entry))
try:
with open(file_path, "w", encoding = "utf-8", newline = "\n") as f:
json.dump(package_dict, f, indent = 4)
except Exception as e:
msg = "Failed to write '{file_path}' as a JSON file. Exception: {e}".format(file_path = file_path, e = e)
raise IOError(msg)
def main() -> None:
parser = argparse.ArgumentParser("mod_bundled_packages_json")
parser.add_argument("-d", "--dir", dest = "work_dir",
help = "The directory to look for bundled packages JSON files, recursively.")
parser.add_argument("entries", metavar = "ENTRIES", type = str, nargs = "+")
args = parser.parse_args()
json_file_list = find_json_files(args.work_dir)
for json_file_path in json_file_list:
remove_entries_from_json_file(json_file_path, args.entries)
if __name__ == "__main__":
main()

View file

@ -1,34 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Copyright 2016 Richard Hughes <richard@hughsie.com> -->
<component type="desktop">
<id>com.ultimaker.cura.desktop</id>
<metadata_license>CC0-1.0</metadata_license>
<project_license>LGPL-3.0 and CC-BY-SA-4.0</project_license>
<name>Cura</name>
<summary>The world's most advanced 3d printer software</summary>
<description>
<p>
Cura creates a seamless integration between hardware, software and
materials for the best 3D printing experience around.
Cura supports the 3MF, OBJ and STL file formats and is available on
Windows, Mac and Linux.
</p>
<ul>
<li>Novices can start printing right away</li>
<li>Experts are able to customize 300 settings to achieve the best results</li>
<li>Optimized profiles for Ultimaker materials</li>
<li>Supported by a global network of Ultimaker certified service partners</li>
<li>Print multiple objects at once with different settings for each object</li>
<li>Cura supports STL, 3MF and OBJ file formats</li>
<li>Open source and completely free</li>
</ul>
</description>
<screenshots>
<screenshot type="default">
<image>https://raw.githubusercontent.com/Ultimaker/Cura/main/cura-logo.PNG</image>
</screenshot>
</screenshots>
<url type="homepage">https://ultimaker.com/software/ultimaker-cura?utm_source=cura&amp;utm_medium=software&amp;utm_campaign=cura-update-linux</url>
<translation type="gettext">Cura</translation>
<content_rating type="oars-1.1" />
</component>

View file

@ -1,19 +0,0 @@
[Desktop Entry]
Name=Ultimaker Cura
Name[de]=Ultimaker Cura
Name[nl]=Ultimaker Cura
GenericName=3D Printing Software
GenericName[de]=3D-Druck-Software
GenericName[nl]=3D-printsoftware
Comment=Cura converts 3D models into paths for a 3D printer. It prepares your print for maximum accuracy, minimum printing time and good reliability with many extra features that make your print come out great.
Comment[de]=Cura wandelt 3D-Modelle in Pfade für einen 3D-Drucker um. Es bereitet Ihren Druck für maximale Genauigkeit, minimale Druckzeit und guter Zuverlässigkeit mit vielen zusätzlichen Funktionen vor, damit Ihr Druck großartig wird.
Comment[nl]=Cura converteert 3D-modellen naar paden voor een 3D printer. Het bereidt je print voor om zeer precies, snel en betrouwbaar te kunnen printen, met veel extra functionaliteit om je print er goed uit te laten komen.
Exec=@CMAKE_INSTALL_FULL_BINDIR@/cura %F
TryExec=@CMAKE_INSTALL_FULL_BINDIR@/cura
Icon=cura-icon
Terminal=false
Type=Application
MimeType=model/stl;application/vnd.ms-3mfdocument;application/prs.wavefront-obj;image/bmp;image/gif;image/jpeg;image/png;text/x-gcode;application/x-amf;application/x-ply;application/x-ctm;model/vnd.collada+xml;model/gltf-binary;model/gltf+json;model/vnd.collada+xml+zip;
Categories=Graphics;
Keywords=3D;Printing;Slicer;
StartupWMClass=cura.real

210
conandata.yml Normal file
View file

@ -0,0 +1,210 @@
urls:
default:
cloud_api_root: "https://api.ultimaker.com"
cloud_account_api_root: "https://account.ultimaker.com"
marketplace_root: "https://marketplace.ultimaker.com"
digital_factory_url: "https://digitalfactory.ultimaker.com"
cura_latest_url: "https://software.ultimaker.com/latest.json"
staging:
cloud_api_root: "https://api-staging.ultimaker.com"
cloud_account_api_root: "https://account-staging.ultimaker.com"
marketplace_root: "https://marketplace-staging.ultimaker.com"
digital_factory_url: "https://digitalfactory-staging.ultimaker.com"
cura_latest_url: "https://software.ultimaker.com/latest.json"
pyinstaller:
runinfo:
entrypoint: "cura_app.py"
datas:
cura_plugins:
package: "cura"
src: "plugins"
dst: "share/cura/plugins"
cura_resources:
package: "cura"
src: "resources"
dst: "share/cura/resources"
cura_private_data:
package: "cura_private_data"
src: "res"
dst: "share/cura"
internal: true
uranium_plugins:
package: "uranium"
src: "plugins"
dst: "share/uranium/plugins"
uranium_resources:
package: "uranium"
src: "resources"
dst: "share/uranium/resources"
uranium_um_qt_qml_um:
package: "uranium"
src: "site-packages/UM/Qt/qml/UM"
dst: "PyQt6/Qt6/qml/UM"
cura_binary_data:
package: "cura_binary_data"
src: "resources/cura/resources"
dst: "share/cura/resources"
uranium_binary_data:
package: "cura_binary_data"
src: "resources/uranium/resources"
dst: "share/uranium/resources"
windows_binary_data:
package: "cura_binary_data"
src: "windows"
dst: "share/windows"
fdm_materials:
package: "fdm_materials"
src: "res/resources/materials"
dst: "share/cura/resources/materials"
tcl:
package: "tcl"
src: "lib/tcl8.6"
dst: "tcl"
tk:
package: "tk"
src: "lib/tk8.6"
dst: "tk"
binaries:
curaengine:
package: "curaengine"
src: "bin"
dst: "."
binary: "CuraEngine"
hiddenimports:
- "pySavitar"
- "pyArcus"
- "pynest2d"
- "PyQt6"
- "PyQt6.QtNetwork"
- "PyQt6.sip"
- "logging.handlers"
- "zeroconf"
- "fcntl"
- "stl"
- "serial"
collect_all:
- "cura"
- "UM"
- "serial"
- "Charon"
- "sqlite3"
- "trimesh"
- "win32ctypes"
- "PyQt6"
- "PyQt6.QtNetwork"
- "PyQt6.sip"
- "stl"
icon:
Windows: "./icons/Cura.ico"
Macos: "./icons/cura.icns"
Linux: "./icons/cura-128.png"
pycharm_targets:
- jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja
module_name: Cura
name: cura
script_name: cura_app.py
- jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja
module_name: Cura
name: cura_external_engine
parameters: --external-backend
script_name: cura_app.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in tests
script_name: tests/
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestBuildVolume.py
script_name: tests/TestBuildVolume.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestConvexHullDecorator.py
script_name: tests/TestConvexHullDecorator.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestCuraSceneNode.py
script_name: tests/TestCuraSceneNode.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestCuraSceneNode.py
script_name: tests/TestExtruderManager.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestGCodeListDecorator.py
script_name: tests/TestGCodeListDecorator.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestIntentManager.py
script_name: tests/TestIntentManager.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestLayer.py
script_name: tests/TestLayer.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestMachineAction.py
script_name: tests/TestMachineAction.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestMachineManager.py
script_name: tests/TestMachineManager.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestOAuth2.py
script_name: tests/TestOAuth2.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestObjectsModel.py
script_name: tests/TestObjectsModel.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestPrintInformation.py
script_name: tests/TestPrintInformation.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestProfileRequirements.py
script_name: tests/TestProfileRequirements.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestThemes.py
script_name: tests/TestThemes.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestContainerManager.py
script_name: tests/Settings/TestContainerManager.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestCuraContainerRegistry.py
script_name: tests/Settings/TestCuraContainerRegistry.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestCuraStackBuilder.py
script_name: tests/Settings/TestCuraStackBuilder.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestDefinitionContainer.py
script_name: tests/Settings/TestDefinitionContainer.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestExtruderStack.py
script_name: tests/Settings/TestExtruderStack.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestGlobalStack.py
script_name: tests/Settings/TestGlobalStack.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestProfiles.py
script_name: tests/Settings/TestProfiles.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestSettingInheritanceManager.py
script_name: tests/Settings/TestSettingInheritanceManager.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestSettingOverrideDecorator.py
script_name: tests/Settings/TestSettingOverrideDecorator.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestSettingVisibilityPresets.py
script_name: tests/Settings/TestSettingVisibilityPresets.py
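The `pyinstaller` section above is consumed by the Conan recipe below, which resolves every `datas` and `binaries` entry through either a `package` or a `root` key. A quick, standalone consistency check can catch malformed entries before a build; the following is a minimal sketch (not part of this commit), assuming PyYAML is installed and `conandata.yml` sits in the working directory:
import yaml
from pathlib import Path

def check_pyinstaller_metadata(conandata_path: Path = Path("conandata.yml")) -> None:
    data = yaml.safe_load(conandata_path.read_text(encoding = "utf-8"))
    pyinstaller = data["pyinstaller"]
    # Every data entry needs a destination plus either a Conan package or a source-tree root.
    for name, entry in pyinstaller["datas"].items():
        assert "dst" in entry, f"data entry '{name}' is missing 'dst'"
        assert "package" in entry or "root" in entry, f"data entry '{name}' needs 'package' or 'root'"
    # Binary entries additionally carry the executable name that the recipe globs for.
    for name, entry in pyinstaller["binaries"].items():
        assert {"src", "dst", "binary"} <= entry.keys(), f"binary entry '{name}' is incomplete"
    print(f"{len(pyinstaller['datas'])} data entries and {len(pyinstaller['binaries'])} binary entries look well-formed.")

if __name__ == "__main__":
    check_pyinstaller_metadata()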

477
conanfile.py Normal file
View file

@ -0,0 +1,477 @@
import os
from pathlib import Path
from jinja2 import Template
from conan import ConanFile
from conan.tools.files import copy, rmdir, save, mkdir
from conan.tools.microsoft import unix_path
from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration, ConanException
required_conan_version = "<=1.56.0"
class CuraConan(ConanFile):
name = "cura"
license = "LGPL-3.0"
author = "UltiMaker"
url = "https://github.com/Ultimaker/cura"
description = "3D printer / slicing GUI built on top of the Uranium framework"
topics = ("conan", "python", "pyqt6", "qt", "qml", "3d-printing", "slicer")
build_policy = "missing"
exports = "LICENSE*", "UltiMaker-Cura.spec.jinja", "CuraVersion.py.jinja"
settings = "os", "compiler", "build_type", "arch"
# FIXME: Remove specific branch once merged to main
python_requires = "umbase/[>=0.1.7]@ultimaker/stable", "translationextractor/[>=2.1.1]@ultimaker/stable"
python_requires_extend = "umbase.UMBaseConanfile"
options = {
"enterprise": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"staging": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"devtools": [True, False], # FIXME: Split this up in testing and (development / build (pyinstaller) / system installer) tools
"cloud_api_version": "ANY",
"display_name": "ANY", # TODO: should this be an option??
"cura_debug_mode": [True, False], # FIXME: Use profiles
"internal": [True, False]
}
default_options = {
"enterprise": "False",
"staging": "False",
"devtools": False,
"cloud_api_version": "1",
"display_name": "UltiMaker Cura",
"cura_debug_mode": False, # Not yet implemented
"internal": False,
}
def set_version(self):
if not self.version:
self.version = "5.4.0-alpha"
@property
def _pycharm_targets(self):
return self.conan_data["pycharm_targets"]
# FIXME: These env vars should be defined in the runenv.
_cura_env = None
@property
def _cura_run_env(self):
if self._cura_env:
return self._cura_env
self._cura_env = Environment()
self._cura_env.define("QML2_IMPORT_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "qml")))
self._cura_env.define("QT_PLUGIN_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "plugins")))
if self.settings.os == "Linux":
self._cura_env.define("QT_QPA_FONTDIR", "/usr/share/fonts")
self._cura_env.define("QT_QPA_PLATFORMTHEME", "xdgdesktopportal")
self._cura_env.define("QT_XKB_CONFIG_ROOT", "/usr/share/X11/xkb")
return self._cura_env
@property
def _enterprise(self):
return self.options.enterprise in ["True", 'true']
@property
def _app_name(self):
if self._enterprise:
return str(self.options.display_name) + " Enterprise"
return str(self.options.display_name)
@property
def _urls(self):
if self.options.staging in ["True", 'true']:
return "staging"
return "default"
@property
def requirements_txts(self):
if self.options.devtools:
return ["requirements.txt", "requirements-ultimaker.txt", "requirements-dev.txt"]
return ["requirements.txt", "requirements-ultimaker.txt"]
@property
def _base_dir(self):
if self.install_folder is None:
if self.build_folder is not None:
return Path(self.build_folder)
return Path(os.getcwd(), "venv")
if self.in_local_cache:
return Path(self.install_folder)
else:
return Path(self.source_folder, "venv")
@property
def _share_dir(self):
return self._base_dir.joinpath("share")
@property
def _script_dir(self):
if self.settings.os == "Windows":
return self._base_dir.joinpath("Scripts")
return self._base_dir.joinpath("bin")
@property
def _site_packages(self):
if self.settings.os == "Windows":
return self._base_dir.joinpath("Lib", "site-packages")
py_version = Version(self.deps_cpp_info["cpython"].version)
return self._base_dir.joinpath("lib", f"python{py_version.major}.{py_version.minor}", "site-packages")
@property
def _py_interp(self):
py_interp = self._script_dir.joinpath(Path(self.deps_user_info["cpython"].python).name)
if self.settings.os == "Windows":
py_interp = Path(*[f'"{p}"' if " " in p else p for p in py_interp.parts])
return py_interp
@property
def _pyinstaller_spec_arch(self):
if self.settings.os == "Macos":
if self.settings.arch == "armv8":
return "'arm64'"
return "'x86_64'"
return "None"
def _generate_cura_version(self, location):
with open(os.path.join(self.recipe_folder, "CuraVersion.py.jinja"), "r") as f:
cura_version_py = Template(f.read())
# If you want a specific Cura version to show up on the splash screen add the user configuration `user.cura:version=VERSION`
# to the global.conf, profile, package_info (of a dependency) or via the cmd line `-c user.cura:version=VERSION`
cura_version = Version(self.conf.get("user.cura:version", default = self.version, check_type = str))
pre_tag = f"-{cura_version.pre}" if cura_version.pre else ""
build_tag = f"+{cura_version.build}" if cura_version.build else ""
internal_tag = f"+internal" if self.options.internal else ""
cura_version = f"{cura_version.major}.{cura_version.minor}.{cura_version.patch}{pre_tag}{build_tag}{internal_tag}"
with open(os.path.join(location, "CuraVersion.py"), "w") as f:
f.write(cura_version_py.render(
cura_app_name = self.name,
cura_app_display_name = self._app_name,
cura_version = cura_version,
cura_build_type = "Enterprise" if self._enterprise else "",
cura_debug_mode = self.options.cura_debug_mode,
cura_cloud_api_root = self.conan_data["urls"][self._urls]["cloud_api_root"],
cura_cloud_api_version = self.options.cloud_api_version,
cura_cloud_account_api_root = self.conan_data["urls"][self._urls]["cloud_account_api_root"],
cura_marketplace_root = self.conan_data["urls"][self._urls]["marketplace_root"],
cura_digital_factory_url = self.conan_data["urls"][self._urls]["digital_factory_url"],
cura_latest_url = self.conan_data["urls"][self._urls]["cura_latest_url"]))
def _generate_pyinstaller_spec(self, location, entrypoint_location, icon_path, entitlements_file):
pyinstaller_metadata = self.conan_data["pyinstaller"]
datas = [(str(self._base_dir.joinpath("conan_install_info.json")), ".")]
for data in pyinstaller_metadata["datas"].values():
if not self.options.internal and data.get("internal", False):
continue
if "package" in data: # get the paths from conan package
if data["package"] == self.name:
if self.in_local_cache:
src_path = os.path.join(self.package_folder, data["src"])
else:
src_path = os.path.join(self.source_folder, data["src"])
else:
src_path = os.path.join(self.deps_cpp_info[data["package"]].rootpath, data["src"])
elif "root" in data: # get the paths relative from the sourcefolder
src_path = os.path.join(self.source_folder, data["root"], data["src"])
else:
continue
if Path(src_path).exists():
datas.append((str(src_path), data["dst"]))
binaries = []
for binary in pyinstaller_metadata["binaries"].values():
if "package" in binary: # get the paths from conan package
src_path = os.path.join(self.deps_cpp_info[binary["package"]].rootpath, binary["src"])
elif "root" in binary: # get the paths relative from the sourcefolder
src_path = os.path.join(self.source_folder, binary["root"], binary["src"])
else:
continue
if not Path(src_path).exists():
self.output.warning(f"Source path for binary {binary['binary']} does not exist")
continue
for bin in Path(src_path).glob(binary["binary"] + "*[.exe|.dll|.so|.dylib|.so.]*"):
binaries.append((str(bin), binary["dst"]))
for bin in Path(src_path).glob(binary["binary"]):
binaries.append((str(bin), binary["dst"]))
# Make sure all Conan dependencies which are shared are added to the binary list for pyinstaller
for _, dependency in self.dependencies.host.items():
for bin_paths in dependency.cpp_info.bindirs:
binaries.extend([(f"{p}", ".") for p in Path(bin_paths).glob("**/*.dll")])
for lib_paths in dependency.cpp_info.libdirs:
binaries.extend([(f"{p}", ".") for p in Path(lib_paths).glob("**/*.so*")])
binaries.extend([(f"{p}", ".") for p in Path(lib_paths).glob("**/*.dylib*")])
# Copy dynamic libs from lib path
binaries.extend([(f"{p}", ".") for p in Path(self._base_dir.joinpath("lib")).glob("**/*.dylib*")])
binaries.extend([(f"{p}", ".") for p in Path(self._base_dir.joinpath("lib")).glob("**/*.so*")])
# Collect all dll's from PyQt6 and place them in the root
binaries.extend([(f"{p}", ".") for p in Path(self._site_packages, "PyQt6", "Qt6").glob("**/*.dll")])
with open(os.path.join(self.recipe_folder, "UltiMaker-Cura.spec.jinja"), "r") as f:
pyinstaller = Template(f.read())
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
with open(os.path.join(location, "UltiMaker-Cura.spec"), "w") as f:
f.write(pyinstaller.render(
name = str(self.options.display_name).replace(" ", "-"),
display_name = self._app_name,
entrypoint = entrypoint_location,
datas = datas,
binaries = binaries,
venv_script_path = str(self._script_dir),
hiddenimports = pyinstaller_metadata["hiddenimports"],
collect_all = pyinstaller_metadata["collect_all"],
icon = icon_path,
entitlements_file = entitlements_file,
osx_bundle_identifier = "'nl.ultimaker.cura'" if self.settings.os == "Macos" else "None",
upx = str(self.settings.os == "Windows"),
strip = False, # This should be possible on Linux and MacOS but, it can also cause issues on some distributions. Safest is to disable it for now
target_arch = self._pyinstaller_spec_arch,
macos = self.settings.os == "Macos",
version = f"'{version}'",
short_version = f"'{cura_version.major}.{cura_version.minor}.{cura_version.patch}'",
))
def export_sources(self):
copy(self, "*", os.path.join(self.recipe_folder, "plugins"), os.path.join(self.export_sources_folder, "plugins"))
copy(self, "*", os.path.join(self.recipe_folder, "resources"), os.path.join(self.export_sources_folder, "resources"), excludes = "*.mo")
copy(self, "*", os.path.join(self.recipe_folder, "tests"), os.path.join(self.export_sources_folder, "tests"))
copy(self, "*", os.path.join(self.recipe_folder, "cura"), os.path.join(self.export_sources_folder, "cura"), excludes="CuraVersion.py")
copy(self, "*", os.path.join(self.recipe_folder, "packaging"), os.path.join(self.export_sources_folder, "packaging"))
copy(self, "*", os.path.join(self.recipe_folder, ".run_templates"), os.path.join(self.export_sources_folder, ".run_templates"))
copy(self, "requirements.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "requirements-dev.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "requirements-ultimaker.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "cura_app.py", self.recipe_folder, self.export_sources_folder)
def configure(self):
self.options["pyarcus"].shared = True
self.options["pysavitar"].shared = True
self.options["pynest2d"].shared = True
self.options["cpython"].shared = True
def validate(self):
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
if version and Version(version) <= Version("4"):
raise ConanInvalidConfiguration("Only versions 5+ are supported")
def requirements(self):
self.requires("pyarcus/5.2.2")
self.requires("curaengine/(latest)@ultimaker/testing")
self.requires("pysavitar/5.2.2")
self.requires("pynest2d/5.2.2")
self.requires("uranium/(latest)@ultimaker/testing")
self.requires("cura_binary_data/(latest)@ultimaker/testing")
self.requires("cpython/3.10.4")
if self.options.internal:
self.requires("cura_private_data/(latest)@ultimaker/testing")
self.requires("fdm_materials/(latest)@internal/testing")
else:
self.requires("fdm_materials/(latest)@ultimaker/testing")
def build_requirements(self):
if self.options.devtools:
if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str):
# FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
self.tool_requires("gettext/0.21@ultimaker/testing", force_host_context = True)
def layout(self):
self.folders.source = "."
self.folders.build = "venv"
self.folders.generators = os.path.join(self.folders.build, "conan")
self.cpp.package.libdirs = [os.path.join("site-packages", "cura")]
self.cpp.package.bindirs = ["bin"]
self.cpp.package.resdirs = ["resources", "plugins", "packaging", "pip_requirements"] # pip_requirements should be the last item in the list
def generate(self):
copy(self, "cura_app.py", self.source_folder, str(self._script_dir))
cura_run_envvars = self._cura_run_env.vars(self, scope = "run")
ext = ".ps1" if self.settings.os == "Windows" else ".sh"
cura_run_envvars.save_script(os.path.join(self.folders.generators, f"cura_run_environment{ext}"))
vr = VirtualRunEnv(self)
vr.generate()
self._generate_cura_version(os.path.join(self.source_folder, "cura"))
if self.options.devtools:
entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self.generators_folder,
entrypoint_location = "'{}'".format(os.path.join(self.source_folder, self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace("\\", "\\\\"),
icon_path = "'{}'".format(os.path.join(self.source_folder, "packaging", self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None")
# Update the po and pot files
if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type=str):
vb = VirtualBuildEnv(self)
vb.generate()
# FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
cpp_info = self.dependencies["gettext"].cpp_info
pot = self.python_requires["translationextractor"].module.ExtractTranslations(self, cpp_info.bindirs[0])
pot.generate()
def build(self):
if self.options.devtools:
if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str):
# FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"):
mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_path))
mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
mkdir(self, str(unix_path(self, Path(mo_file).parent)))
cpp_info = self.dependencies["gettext"].cpp_info
self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
def imports(self):
self.copy("CuraEngine.exe", root_package = "curaengine", src = "@bindirs", dst = "", keep_path = False)
self.copy("CuraEngine", root_package = "curaengine", src = "@bindirs", dst = "", keep_path = False)
rmdir(self, os.path.join(self.source_folder, "resources", "materials"))
self.copy("*.fdm_material", root_package = "fdm_materials", src = "@resdirs", dst = "resources/materials", keep_path = False)
self.copy("*.sig", root_package = "fdm_materials", src = "@resdirs", dst = "resources/materials", keep_path = False)
if self.options.internal:
self.copy("*", root_package = "cura_private_data", src = self.deps_cpp_info["cura_private_data"].resdirs[0],
dst = self._share_dir.joinpath("cura", "resources"), keep_path = True)
# Copy resources of cura_binary_data
self.copy("*", root_package = "cura_binary_data", src = self.deps_cpp_info["cura_binary_data"].resdirs[0],
dst = self._share_dir.joinpath("cura", "resources"), keep_path = True)
self.copy("*", root_package = "cura_binary_data", src = self.deps_cpp_info["cura_binary_data"].resdirs[1],
dst =self._share_dir.joinpath("uranium", "resources"), keep_path = True)
self.copy("*.dll", src = "@bindirs", dst = self._site_packages)
self.copy("*.pyd", src = "@libdirs", dst = self._site_packages)
self.copy("*.pyi", src = "@libdirs", dst = self._site_packages)
self.copy("*.dylib", src = "@libdirs", dst = self._script_dir)
def deploy(self):
# Copy CuraEngine.exe to bindirs of Virtual Python Environment
curaengine = self.dependencies["curaengine"].cpp_info
copy(self, "CuraEngine.exe", curaengine.bindirs[0], str(self._base_dir), keep_path = False)
copy(self, "CuraEngine", curaengine.bindirs[0], str(self._base_dir), keep_path = False)
# Copy resources of Cura (keep folder structure)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.bindirs[0]), str(self._base_dir), keep_path = False)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.libdirs[0]), str(self._site_packages.joinpath("cura")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[0]), str(self._share_dir.joinpath("cura", "resources")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[1]), str(self._share_dir.joinpath("cura", "plugins")), keep_path = True)
# Copy materials (flat)
fdm_materials = self.dependencies["fdm_materials"].cpp_info
copy(self, "*", fdm_materials.resdirs[0], str(self._share_dir.joinpath("cura")))
# Copy internal resources
if self.options.internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
# Copy resources of Uranium (keep folder structure)
uranium = self.dependencies["uranium"].cpp_info
copy(self, "*", uranium.resdirs[0], str(self._share_dir.joinpath("uranium", "resources")), keep_path = True)
copy(self, "*", uranium.resdirs[1], str(self._share_dir.joinpath("uranium", "plugins")), keep_path = True)
copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True)
# TODO: figure out if this is still needed
copy(self, "*", os.path.join(uranium.libdirs[0], "Qt", "qml", "UM"), str(self._site_packages.joinpath("PyQt6", "Qt6", "qml", "UM")), keep_path = True)
# Copy resources of cura_binary_data
cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
if self.settings.os == "Windows":
copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
for dependency in self.dependencies.host.values():
for bindir in dependency.cpp_info.bindirs:
copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
for libdir in dependency.cpp_info.libdirs:
copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
# Copy packaging scripts
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[2]), str(self._base_dir.joinpath("packaging")), keep_path = True)
# Copy requirements.txt's
copy(self, "*.txt", os.path.join(self.package_folder, self.cpp_info.resdirs[-1]), str(self._base_dir.joinpath("pip_requirements")), keep_path = False)
# Generate the GitHub Action version info Environment
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
env_prefix = "Env:" if self.settings.os == "Windows" else ""
activate_github_actions_version_env = Template(r"""echo "CURA_VERSION_MAJOR={{ cura_version_major }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_MINOR={{ cura_version_minor }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_PATCH={{ cura_version_patch }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_BUILD={{ cura_version_build }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_FULL={{ cura_version_full }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
""").render(cura_version_major = cura_version.major,
cura_version_minor = cura_version.minor,
cura_version_patch = cura_version.patch,
cura_version_build = cura_version.build if cura_version.build != "" else "0",
cura_version_full = self.version,
cura_app_name = self._app_name,
env_prefix = env_prefix)
ext = ".sh" if self.settings.os != "Windows" else ".ps1"
save(self, os.path.join(self._script_dir, f"activate_github_actions_version_env{ext}"), activate_github_actions_version_env)
self._generate_cura_version(os.path.join(self._site_packages, "cura"))
entitlements_file = "'{}'".format(Path(self.cpp_info.res_paths[2], "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self._base_dir,
entrypoint_location = "'{}'".format(os.path.join(self.package_folder, self.cpp_info.bindirs[0], self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace("\\", "\\\\"),
icon_path = "'{}'".format(os.path.join(self.package_folder, self.cpp_info.resdirs[2], self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None")
def package(self):
copy(self, "cura_app.py", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.bindirs[0]))
copy(self, "*", src = os.path.join(self.source_folder, "cura"), dst = os.path.join(self.package_folder, self.cpp.package.libdirs[0]))
copy(self, "*", src = os.path.join(self.source_folder, "resources"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[0]))
copy(self, "*.mo", os.path.join(self.build_folder, "resources"), os.path.join(self.package_folder, "resources"))
copy(self, "*", src = os.path.join(self.source_folder, "plugins"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[1]))
copy(self, "requirement*.txt", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.resdirs[-1]))
copy(self, "*", src = os.path.join(self.source_folder, "packaging"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[2]))
def package_info(self):
self.user_info.pip_requirements = "requirements.txt"
self.user_info.pip_requirements_git = "requirements-ultimaker.txt"
self.user_info.pip_requirements_build = "requirements-dev.txt"
if self.in_local_cache:
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "site-packages"))
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "plugins"))
else:
self.runenv_info.append_path("PYTHONPATH", self.source_folder)
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.source_folder, "plugins"))
def package_id(self):
self.info.clear()
# The following options shouldn't be used to determine the hash, since these are only used to set the CuraVersion.py
# which will also be generated by the deploy method during the `conan install cura/5.1.0@_/_`
del self.info.options.enterprise
del self.info.options.staging
del self.info.options.devtools
del self.info.options.cloud_api_version
del self.info.options.display_name
del self.info.options.cura_debug_mode
# TODO: Use the hash of requirements.txt and requirements-ultimaker.txt, Because changing these will actually result in a different
# Cura. This is needed because the requirements.txt aren't managed by Conan and therefore not resolved in the package_id. This isn't
# ideal but an acceptable solution for now.
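As the comment in `_generate_cura_version` notes, the version shown on the splash screen can be overridden with the `user.cura:version` conf (for example `-c user.cura:version=VERSION` on the command line); the recipe then reassembles it with optional pre-release, build and `+internal` tags. A standalone sketch of that composition, using plain string parsing instead of Conan's Version class and assuming a semver-like input:
import re

def compose_cura_version(version: str, internal: bool = False) -> str:
    # Split e.g. "5.4.0-alpha+8245" into release, pre-release and build parts.
    match = re.fullmatch(r"(\d+)\.(\d+)\.(\d+)(?:-([0-9A-Za-z.\-]+))?(?:\+([0-9A-Za-z.\-]+))?", version)
    if match is None:
        raise ValueError(f"Not a semver-like version: {version!r}")
    major, minor, patch, pre, build = match.groups()
    pre_tag = f"-{pre}" if pre else ""
    build_tag = f"+{build}" if build else ""
    internal_tag = "+internal" if internal else ""
    return f"{major}.{minor}.{patch}{pre_tag}{build_tag}{internal_tag}"

if __name__ == "__main__":
    print(compose_cura_version("5.4.0-alpha"))                         # 5.4.0-alpha
    print(compose_cura_version("5.4.0-beta.1+8245", internal = True))  # 5.4.0-beta.1+8245+internal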

View file

@ -16,4 +16,6 @@ Making pull requests
-------------------- --------------------
If you want to propose a change to Cura's source code, please create a pull request in the appropriate repository (being [Cura](https://github.com/Ultimaker/Cura), [Uranium](https://github.com/Ultimaker/Uranium), [CuraEngine](https://github.com/Ultimaker/CuraEngine), [fdm_materials](https://github.com/Ultimaker/fdm_materials), [libArcus](https://github.com/Ultimaker/libArcus), [cura-build](https://github.com/Ultimaker/cura-build), [cura-build-environment](https://github.com/Ultimaker/cura-build-environment), [libSavitar](https://github.com/Ultimaker/libSavitar), [libCharon](https://github.com/Ultimaker/libCharon) or [cura-binary-data](https://github.com/Ultimaker/cura-binary-data)) and if your change requires changes on multiple of these repositories, please link them together so that we know to merge them together. If you want to propose a change to Cura's source code, please create a pull request in the appropriate repository (being [Cura](https://github.com/Ultimaker/Cura), [Uranium](https://github.com/Ultimaker/Uranium), [CuraEngine](https://github.com/Ultimaker/CuraEngine), [fdm_materials](https://github.com/Ultimaker/fdm_materials), [libArcus](https://github.com/Ultimaker/libArcus), [cura-build](https://github.com/Ultimaker/cura-build), [cura-build-environment](https://github.com/Ultimaker/cura-build-environment), [libSavitar](https://github.com/Ultimaker/libSavitar), [libCharon](https://github.com/Ultimaker/libCharon) or [cura-binary-data](https://github.com/Ultimaker/cura-binary-data)) and if your change requires changes on multiple of these repositories, please link them together so that we know to merge them together.
Some of these repositories will have automated tests running when you create a pull request, indicated by green check marks or red crosses in the Github web page. If you see a red cross, that means that a test has failed. If the test doesn't fail on the Master branch but does fail on your branch, that indicates that you've probably made a mistake and you need to do that. Click on the cross for more details, or run the test locally by running `cmake . && ctest --verbose`. The style guide for code contributions to Cura and other Ultimaker projects can be found [here](https://github.com/Ultimaker/Meta/blob/master/general/generic_code_conventions.md).
Some of these repositories will have automated tests running when you create a pull request, indicated by green check marks or red crosses in the Github web page. If you see a red cross, that means that a test has failed. If the test doesn't fail on the Master branch but does fail on your branch, that indicates that you've probably made a mistake and you need to do that. Click on the cross for more details, or run the test locally by running `cmake . && ctest --verbose`.

BIN
cura-logo-dark.PNG Normal file
Binary file not shown. (new image; 1.2 MiB)
Binary file not shown. (existing image changed; 520 KiB before, 1 MiB after)

View file

@ -1,19 +1,26 @@
# Copyright (c) 2021 Ultimaker B.V. # Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import enum import enum
from datetime import datetime from datetime import datetime
import json
from PyQt6.QtCore import QObject, pyqtSignal, pyqtSlot, pyqtProperty, QTimer, pyqtEnum from PyQt6.QtCore import QObject, pyqtSignal, pyqtSlot, pyqtProperty, QTimer, pyqtEnum
from typing import Any, Optional, Dict, TYPE_CHECKING, Callable from PyQt6.QtNetwork import QNetworkRequest
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING
from UM.Decorators import deprecated
from UM.Logger import Logger from UM.Logger import Logger
from UM.Message import Message from UM.Message import Message
from UM.i18n import i18nCatalog from UM.i18n import i18nCatalog
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
from cura.OAuth2.AuthorizationService import AuthorizationService from cura.OAuth2.AuthorizationService import AuthorizationService
from cura.OAuth2.Models import OAuth2Settings, UserProfile from cura.OAuth2.Models import OAuth2Settings, UserProfile
from cura.UltimakerCloud import UltimakerCloudConstants from cura.UltimakerCloud import UltimakerCloudConstants
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
if TYPE_CHECKING: if TYPE_CHECKING:
from cura.CuraApplication import CuraApplication from cura.CuraApplication import CuraApplication
from PyQt6.QtNetwork import QNetworkReply
i18n_catalog = i18nCatalog("cura") i18n_catalog = i18nCatalog("cura")
@ -78,6 +85,7 @@ class Account(QObject):
self._logged_in = False self._logged_in = False
self._user_profile: Optional[UserProfile] = None self._user_profile: Optional[UserProfile] = None
self._additional_rights: Dict[str, Any] = {} self._additional_rights: Dict[str, Any] = {}
self._permissions: List[str] = [] # List of account permission keys, e.g. ["digital-factory.print-job.write"]
self._sync_state = SyncState.IDLE self._sync_state = SyncState.IDLE
self._manual_sync_enabled = False self._manual_sync_enabled = False
self._update_packages_enabled = False self._update_packages_enabled = False
@ -109,6 +117,7 @@ class Account(QObject):
self._sync_services: Dict[str, int] = {} self._sync_services: Dict[str, int] = {}
"""contains entries "service_name" : SyncState""" """contains entries "service_name" : SyncState"""
self.syncRequested.connect(self._updatePermissions)
def initialize(self) -> None: def initialize(self) -> None:
self._authorization_service.initialize(self._application.getPreferences()) self._authorization_service.initialize(self._application.getPreferences())
@ -311,13 +320,63 @@ class Account(QObject):
self._authorization_service.deleteAuthData() self._authorization_service.deleteAuthData()
@deprecated("Get permissions from the 'permissions' property", since = "5.2.0")
def updateAdditionalRight(self, **kwargs) -> None: def updateAdditionalRight(self, **kwargs) -> None:
"""Update the additional rights of the account. """Update the additional rights of the account.
The argument(s) are the rights that need to be set""" The argument(s) are the rights that need to be set"""
self._additional_rights.update(kwargs) self._additional_rights.update(kwargs)
self.additionalRightsChanged.emit(self._additional_rights) self.additionalRightsChanged.emit(self._additional_rights)
@deprecated("Get permissions from the 'permissions' property", since = "5.2.0")
@pyqtProperty("QVariantMap", notify = additionalRightsChanged) @pyqtProperty("QVariantMap", notify = additionalRightsChanged)
def additionalRights(self) -> Dict[str, Any]: def additionalRights(self) -> Dict[str, Any]:
"""A dictionary which can be queried for additional account rights.""" """A dictionary which can be queried for additional account rights."""
return self._additional_rights return self._additional_rights
permissionsChanged = pyqtSignal()
@pyqtProperty("QVariantList", notify = permissionsChanged)
def permissions(self) -> List[str]:
"""
The permission keys that the user has in his account.
"""
return self._permissions
def _updatePermissions(self) -> None:
"""
Update the list of permissions that the user has.
"""
def callback(reply: "QNetworkReply"):
status_code = reply.attribute(QNetworkRequest.Attribute.HttpStatusCodeAttribute)
if status_code is None:
Logger.error("Server did not respond to request to get list of permissions.")
return
if status_code >= 300:
Logger.error(f"Request to get list of permission resulted in HTTP error {status_code}")
return
try:
reply_data = json.loads(bytes(reply.readAll()).decode("UTF-8"))
except (UnicodeDecodeError, json.JSONDecodeError, ValueError) as e:
Logger.logException("e", f"Could not parse response to permission list request: {e}")
return
if "errors" in reply_data:
Logger.error(f"Request to get list of permission resulted in error response: {reply_data['errors']}")
return
if "data" in reply_data and "permissions" in reply_data["data"]:
permissions = sorted(reply_data["data"]["permissions"])
if permissions != self._permissions:
self._permissions = permissions
self.permissionsChanged.emit()
def error_callback(reply: "QNetworkReply", error: "QNetworkReply.NetworkError"):
Logger.error(f"Request for user permissions list failed. Network error: {error}")
HttpRequestManager.getInstance().get(
url = f"{self._oauth_root}/users/permissions",
scope = JsonDecoratorScope(UltimakerCloudScope(self._application)),
callback = callback,
error_callback = error_callback,
timeout = 10
)
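The callback above tolerates three failure modes (a non-JSON body, an `errors` key, or a missing `data.permissions` list) and only emits `permissionsChanged` when the sorted list actually differs from the cached one. A framework-free sketch of just the parsing step, with the response shape taken from this diff and everything else illustrative:
import json
from typing import List, Optional

def parse_permissions_reply(payload: bytes) -> Optional[List[str]]:
    try:
        reply_data = json.loads(payload.decode("utf-8"))
    except (UnicodeDecodeError, json.JSONDecodeError, ValueError):
        return None  # Malformed reply; the real callback logs an exception here.
    if "errors" in reply_data:
        return None  # Server-side error; the real callback logs the error list.
    permissions = reply_data.get("data", {}).get("permissions")
    if permissions is None:
        return None
    return sorted(permissions)  # Sorted so comparisons against the cached list are stable.

if __name__ == "__main__":
    ok = b'{"data": {"permissions": ["digital-factory.print-job.write"]}}'
    print(parse_permissions_reply(ok))                                       # ['digital-factory.print-job.write']
    print(parse_permissions_reply(b'{"errors": [{"title": "forbidden"}]}'))  # None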

View file

@ -1,19 +1,27 @@
# Copyright (c) 2022 Ultimaker B.V. # Copyright (c) 2022 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
# --------- # ---------
# General constants used in Cura # General constants used in Cura
# --------- # ---------
DEFAULT_CURA_APP_NAME = "cura" DEFAULT_CURA_APP_NAME = "cura"
DEFAULT_CURA_DISPLAY_NAME = "Ultimaker Cura" DEFAULT_CURA_DISPLAY_NAME = "UltiMaker Cura"
DEFAULT_CURA_VERSION = "master" DEFAULT_CURA_VERSION = "dev"
DEFAULT_CURA_BUILD_TYPE = "" DEFAULT_CURA_BUILD_TYPE = ""
DEFAULT_CURA_DEBUG_MODE = False DEFAULT_CURA_DEBUG_MODE = False
DEFAULT_CURA_LATEST_URL = "https://software.ultimaker.com/latest.json"
# Each release has a fixed SDK version coupled with it. It doesn't make sense to make it configurable because, for # Each release has a fixed SDK version coupled with it. It doesn't make sense to make it configurable because, for
# example Cura 3.2 with SDK version 6.1 will not work. So the SDK version is hard-coded here and left out of the # example Cura 3.2 with SDK version 6.1 will not work. So the SDK version is hard-coded here and left out of the
# CuraVersion.py.in template. # CuraVersion.py.in template.
CuraSDKVersion = "8.0.0" CuraSDKVersion = "8.3.0"
try:
from cura.CuraVersion import CuraLatestURL
if CuraLatestURL == "":
CuraLatestURL = DEFAULT_CURA_LATEST_URL
except ImportError:
CuraLatestURL = DEFAULT_CURA_LATEST_URL
try: try:
from cura.CuraVersion import CuraAppName # type: ignore from cura.CuraVersion import CuraAppName # type: ignore
@ -56,7 +64,18 @@ try:
if CuraAppDisplayName == "": if CuraAppDisplayName == "":
CuraAppDisplayName = DEFAULT_CURA_DISPLAY_NAME CuraAppDisplayName = DEFAULT_CURA_DISPLAY_NAME
if IsEnterpriseVersion: if IsEnterpriseVersion:
CuraAppDisplayName = CuraAppDisplayName + " Enterprise" CuraAppDisplayName = CuraAppDisplayName
except ImportError: except ImportError:
CuraAppDisplayName = DEFAULT_CURA_DISPLAY_NAME CuraAppDisplayName = DEFAULT_CURA_DISPLAY_NAME
DEPENDENCY_INFO = {}
try:
from pathlib import Path
conan_install_info = Path(__file__).parent.parent.joinpath("conan_install_info.json")
if conan_install_info.exists():
import json
with open(conan_install_info, "r") as f:
DEPENDENCY_INFO = json.loads(f.read())
except:
pass
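The change above extends the existing pattern of this module: try to import a value from the generated `cura/CuraVersion.py` and fall back to a hard-coded default when the module or the value is missing. A compact sketch of that fallback for `CuraLatestURL` (the helper name is illustrative; the constant and default come from this diff):
DEFAULT_CURA_LATEST_URL = "https://software.ultimaker.com/latest.json"

def latest_url_with_fallback() -> str:
    try:
        from cura.CuraVersion import CuraLatestURL  # Generated at build time by the Conan recipe.
    except ImportError:
        return DEFAULT_CURA_LATEST_URL
    return CuraLatestURL or DEFAULT_CURA_LATEST_URL  # An empty string also falls back.

if __name__ == "__main__":
    print(latest_url_with_fallback())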

View file

@ -21,6 +21,7 @@ class ArrangeObjectsJob(Job):
self._min_offset = min_offset self._min_offset = min_offset
def run(self): def run(self):
found_solution_for_all = False
status_message = Message(i18n_catalog.i18nc("@info:status", "Finding new location for objects"), status_message = Message(i18n_catalog.i18nc("@info:status", "Finding new location for objects"),
lifetime = 0, lifetime = 0,
dismissable = False, dismissable = False,
@ -28,18 +29,19 @@ class ArrangeObjectsJob(Job):
title = i18n_catalog.i18nc("@info:title", "Finding Location")) title = i18n_catalog.i18nc("@info:title", "Finding Location"))
status_message.show() status_message.show()
found_solution_for_all = None
try: try:
found_solution_for_all = arrange(self._nodes, Application.getInstance().getBuildVolume(), self._fixed_nodes) found_solution_for_all = arrange(self._nodes, Application.getInstance().getBuildVolume(), self._fixed_nodes)
except: # If the thread crashes, the message should still close except: # If the thread crashes, the message should still close
Logger.logException("e", "Unable to arrange the objects on the buildplate. The arrange algorithm has crashed.") Logger.logException("e", "Unable to arrange the objects on the buildplate. The arrange algorithm has crashed.")
status_message.hide() status_message.hide()
if found_solution_for_all is not None and not found_solution_for_all:
if not found_solution_for_all:
no_full_solution_message = Message( no_full_solution_message = Message(
i18n_catalog.i18nc("@info:status", i18n_catalog.i18nc("@info:status",
"Unable to find a location within the build volume for all objects"), "Unable to find a location within the build volume for all objects"),
title = i18n_catalog.i18nc("@info:title", "Can't Find Location"), title = i18n_catalog.i18nc("@info:title", "Can't Find Location"),
message_type = Message.MessageType.ERROR) message_type = Message.MessageType.ERROR)
no_full_solution_message.show() no_full_solution_message.show()
self.finished.emit(self) self.finished.emit(self)
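The fix above initialises `found_solution_for_all` to False before the try block, so the flag is always defined (and the error message still shows) even when `arrange()` raises. A minimal sketch of that pattern with a stand-in work function:
def run_job(work) -> bool:
    found_solution_for_all = False  # Defined up front; an exception simply leaves it False.
    try:
        found_solution_for_all = work()
    except Exception:
        pass  # The real job logs the exception and hides its status message here.
    if not found_solution_for_all:
        print("Unable to find a location within the build volume for all objects")
    return found_solution_for_all

def crashing_arrange():
    raise RuntimeError("arrange crashed")

if __name__ == "__main__":
    run_job(crashing_arrange)   # Prints the warning instead of failing on an undefined name.
    run_job(lambda: True)       # Succeeds silently.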

View file

@ -74,14 +74,14 @@ class ShapeArray:
# If the child-nodes are included, adjust convex hulls as well: # If the child-nodes are included, adjust convex hulls as well:
if include_children: if include_children:
children = node.getAllChildren() children = node.getAllChildren()
if not children is None: if children is not None:
for child in children: for child in children:
# 'Inefficient' combination of convex hulls through known code rather than mess it up: # 'Inefficient' combination of convex hulls through known code rather than mess it up:
child_hull = child.callDecoration("getConvexHull") child_hull = child.callDecoration("getConvexHull")
if not child_hull is None: if child_hull is not None:
hull_verts = hull_verts.unionConvexHulls(child_hull) hull_verts = hull_verts.unionConvexHulls(child_hull)
child_hull_head = child.callDecoration("getConvexHullHead") or child_hull child_hull_head = child.callDecoration("getConvexHullHead") or child_hull
if not child_hull_head is None: if child_hull_head is not None:
hull_head_verts = hull_head_verts.unionConvexHulls(child_hull_head) hull_head_verts = hull_head_verts.unionConvexHulls(child_hull_head)
offset_verts = hull_head_verts.getMinkowskiHull(Polygon.approximatedCircle(min_offset)) offset_verts = hull_head_verts.getMinkowskiHull(Polygon.approximatedCircle(min_offset))
@ -159,4 +159,4 @@ class ShapeArray:
max_col_idx = (idxs[0] - p1[0]) / (p2[0] - p1[0]) * (p2[1] - p1[1]) + p1[1] max_col_idx = (idxs[0] - p1[0]) / (p2[0] - p1[0]) * (p2[1] - p1[1]) + p1[1]
sign = numpy.sign(p2[0] - p1[0]) sign = numpy.sign(p2[0] - p1[0])
return idxs[1] * sign <= max_col_idx * sign return idxs[1] * sign <= max_col_idx * sign

View file

@ -136,7 +136,7 @@ class Backup:
return False return False
current_version = Version(self._application.getVersion()) current_version = Version(self._application.getVersion())
version_to_restore = Version(self.meta_data.get("cura_release", "master")) version_to_restore = Version(self.meta_data.get("cura_release", "dev"))
if current_version < version_to_restore: if current_version < version_to_restore:
# Cannot restore version newer than current because settings might have changed. # Cannot restore version newer than current because settings might have changed.

View file

@ -810,11 +810,6 @@ class BuildVolume(SceneNode):
break break
if prime_tower_collision: # Already found a collision. if prime_tower_collision: # Already found a collision.
break break
if self._global_container_stack.getProperty("prime_tower_brim_enable", "value") and self._global_container_stack.getProperty("adhesion_type", "value") != "raft":
brim_size = self._calculateBedAdhesionSize(used_extruders, "brim")
# Use 2x the brim size, since we need 1x brim size distance due to the object brim and another
# times the brim due to the brim of the prime tower
prime_tower_areas[extruder_id][area_index] = prime_tower_area.getMinkowskiHull(Polygon.approximatedCircle(2 * brim_size, num_segments = 24))
if not prime_tower_collision: if not prime_tower_collision:
result_areas[extruder_id].extend(prime_tower_areas[extruder_id]) result_areas[extruder_id].extend(prime_tower_areas[extruder_id])
result_areas_no_brim[extruder_id].extend(prime_tower_areas[extruder_id]) result_areas_no_brim[extruder_id].extend(prime_tower_areas[extruder_id])
@ -840,9 +835,13 @@ class BuildVolume(SceneNode):
result = {} result = {}
skirt_brim_extruder: ExtruderStack = None skirt_brim_extruder: ExtruderStack = None
skirt_brim_extruder_nr = self._global_container_stack.getProperty("skirt_brim_extruder_nr", "value")
for extruder in used_extruders: for extruder in used_extruders:
if int(extruder.getProperty("extruder_nr", "value")) == int(self._global_container_stack.getProperty("skirt_brim_extruder_nr", "value")): if skirt_brim_extruder_nr == -1:
skirt_brim_extruder = extruder skirt_brim_extruder = used_extruders[0] # The prime tower brim is always printed with the first extruder
elif int(extruder.getProperty("extruder_nr", "value")) == int(skirt_brim_extruder_nr):
skirt_brim_extruder = extruder
result[extruder.getId()] = [] result[extruder.getId()] = []
# Currently, the only normally printed object is the prime tower. # Currently, the only normally printed object is the prime tower.
@ -856,15 +855,6 @@ class BuildVolume(SceneNode):
prime_tower_x = prime_tower_x - machine_width / 2 #Offset by half machine_width and _depth to put the origin in the front-left. prime_tower_x = prime_tower_x - machine_width / 2 #Offset by half machine_width and _depth to put the origin in the front-left.
prime_tower_y = prime_tower_y + machine_depth / 2 prime_tower_y = prime_tower_y + machine_depth / 2
if skirt_brim_extruder is not None and self._global_container_stack.getProperty("prime_tower_brim_enable", "value") and self._global_container_stack.getProperty("adhesion_type", "value") != "raft":
brim_size = (
skirt_brim_extruder.getProperty("brim_line_count", "value") *
skirt_brim_extruder.getProperty("skirt_brim_line_width", "value") / 100.0 *
skirt_brim_extruder.getProperty("initial_layer_line_width_factor", "value")
)
prime_tower_x -= brim_size
prime_tower_y += brim_size
radius = prime_tower_size / 2 radius = prime_tower_size / 2
prime_tower_area = Polygon.approximatedCircle(radius, num_segments = 24) prime_tower_area = Polygon.approximatedCircle(radius, num_segments = 24)
prime_tower_area = prime_tower_area.translate(prime_tower_x - radius, prime_tower_y - radius) prime_tower_area = prime_tower_area.translate(prime_tower_x - radius, prime_tower_y - radius)
@ -1076,7 +1066,7 @@ class BuildVolume(SceneNode):
all_values[i] = 0 all_values[i] = 0
return all_values return all_values
def _calculateBedAdhesionSize(self, used_extruders, adhesion_override = None): def _calculateBedAdhesionSize(self, used_extruders):
"""Get the bed adhesion size for the global container stack and used extruders """Get the bed adhesion size for the global container stack and used extruders
:param adhesion_override: override adhesion type. :param adhesion_override: override adhesion type.
@ -1086,52 +1076,12 @@ class BuildVolume(SceneNode):
return None return None
container_stack = self._global_container_stack container_stack = self._global_container_stack
adhesion_type = adhesion_override adhesion_type = container_stack.getProperty("adhesion_type", "value")
if adhesion_type is None:
adhesion_type = container_stack.getProperty("adhesion_type", "value")
# Skirt_brim_line_width is a bit of an odd one out. The primary bit of the skirt/brim is printed if adhesion_type == "raft":
# with the adhesion extruder, but it also prints one extra line by all other extruders. As such, the
# setting does *not* have a limit_to_extruder setting (which means that we can't ask the global extruder what
# the value is.
skirt_brim_extruder_nr = self._global_container_stack.getProperty("skirt_brim_extruder_nr", "value")
try:
skirt_brim_stack = self._global_container_stack.extruderList[int(skirt_brim_extruder_nr)]
except IndexError:
Logger.warning(f"Couldn't find extruder with index '{skirt_brim_extruder_nr}', defaulting to 0 instead.")
skirt_brim_stack = self._global_container_stack.extruderList[0]
skirt_brim_line_width = skirt_brim_stack.getProperty("skirt_brim_line_width", "value")
initial_layer_line_width_factor = skirt_brim_stack.getProperty("initial_layer_line_width_factor", "value")
# Use brim width if brim is enabled OR the prime tower has a brim.
if adhesion_type == "brim":
brim_line_count = skirt_brim_stack.getProperty("brim_line_count", "value")
brim_gap = skirt_brim_stack.getProperty("brim_gap", "value")
bed_adhesion_size = brim_gap + skirt_brim_line_width * brim_line_count * initial_layer_line_width_factor / 100.0
for extruder_stack in used_extruders:
bed_adhesion_size += extruder_stack.getProperty("skirt_brim_line_width", "value") * extruder_stack.getProperty("initial_layer_line_width_factor", "value") / 100.0
# We don't create an additional line for the extruder we're printing the brim with.
bed_adhesion_size -= skirt_brim_line_width * initial_layer_line_width_factor / 100.0
elif adhesion_type == "skirt":
skirt_distance = skirt_brim_stack.getProperty("skirt_gap", "value")
skirt_line_count = skirt_brim_stack.getProperty("skirt_line_count", "value")
bed_adhesion_size = skirt_distance + (
skirt_brim_line_width * skirt_line_count) * initial_layer_line_width_factor / 100.0
for extruder_stack in used_extruders:
bed_adhesion_size += extruder_stack.getProperty("skirt_brim_line_width", "value") * extruder_stack.getProperty("initial_layer_line_width_factor", "value") / 100.0
# We don't create an additional line for the extruder we're printing the skirt with.
bed_adhesion_size -= skirt_brim_line_width * initial_layer_line_width_factor / 100.0
elif adhesion_type == "raft":
bed_adhesion_size = self._global_container_stack.getProperty("raft_margin", "value") # Should refer to the raft extruder if set. bed_adhesion_size = self._global_container_stack.getProperty("raft_margin", "value") # Should refer to the raft extruder if set.
elif adhesion_type == "none": else: # raft, brim or skirt. Those last two are handled by CuraEngine.
bed_adhesion_size = 0 bed_adhesion_size = 0
else:
raise Exception("Unknown bed adhesion type. Did you forget to update the build volume calculations for your new bed adhesion type?")
max_length_available = 0.5 * min( max_length_available = 0.5 * min(
self._global_container_stack.getProperty("machine_width", "value"), self._global_container_stack.getProperty("machine_width", "value"),

View file

@ -1,5 +1,5 @@
# Copyright (c) 2019 Ultimaker B.V. # Copyright (c) 2022 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import platform import platform
import traceback import traceback
@ -110,7 +110,7 @@ class CrashHandler:
layout = QVBoxLayout(dialog) layout = QVBoxLayout(dialog)
label = QLabel() label = QLabel()
label.setText(catalog.i18nc("@label crash message", """<p><b>Oops, Ultimaker Cura has encountered something that doesn't seem right.</p></b> label.setText(catalog.i18nc("@label crash message", """<p><b>Oops, UltiMaker Cura has encountered something that doesn't seem right.</p></b>
<p>We encountered an unrecoverable error during start up. It was possibly caused by some incorrect configuration files. We suggest to backup and reset your configuration.</p> <p>We encountered an unrecoverable error during start up. It was possibly caused by some incorrect configuration files. We suggest to backup and reset your configuration.</p>
<p>Backups can be found in the configuration folder.</p> <p>Backups can be found in the configuration folder.</p>
<p>Please send us this Crash Report to fix the problem.</p> <p>Please send us this Crash Report to fix the problem.</p>
@ -119,7 +119,7 @@ class CrashHandler:
layout.addWidget(label) layout.addWidget(label)
# "send report" check box and show details # "send report" check box and show details
self._send_report_checkbox = QCheckBox(catalog.i18nc("@action:button", "Send crash report to Ultimaker"), dialog) self._send_report_checkbox = QCheckBox(catalog.i18nc("@action:button", "Send crash report to UltiMaker"), dialog)
self._send_report_checkbox.setChecked(True) self._send_report_checkbox.setChecked(True)
show_details_button = QPushButton(catalog.i18nc("@action:button", "Show detailed crash report"), dialog) show_details_button = QPushButton(catalog.i18nc("@action:button", "Show detailed crash report"), dialog)

View file

@ -115,6 +115,8 @@ from . import CuraActions
from . import PlatformPhysics from . import PlatformPhysics
from . import PrintJobPreviewImageProvider from . import PrintJobPreviewImageProvider
from .AutoSave import AutoSave from .AutoSave import AutoSave
from .Machines.Models.CompatibleMachineModel import CompatibleMachineModel
from .Machines.Models.MachineListModel import MachineListModel
from .Machines.Models.ActiveIntentQualitiesModel import ActiveIntentQualitiesModel from .Machines.Models.ActiveIntentQualitiesModel import ActiveIntentQualitiesModel
from .Machines.Models.IntentSelectionModel import IntentSelectionModel from .Machines.Models.IntentSelectionModel import IntentSelectionModel
from .SingleInstance import SingleInstance from .SingleInstance import SingleInstance
@ -128,7 +130,7 @@ class CuraApplication(QtApplication):
# SettingVersion represents the set of settings available in the machine/extruder definitions. # SettingVersion represents the set of settings available in the machine/extruder definitions.
# You need to make sure that this version number needs to be increased if there is any non-backwards-compatible # You need to make sure that this version number needs to be increased if there is any non-backwards-compatible
# changes of the settings. # changes of the settings.
SettingVersion = 20 SettingVersion = 22
Created = False Created = False
@ -152,6 +154,7 @@ class CuraApplication(QtApplication):
super().__init__(name = ApplicationMetadata.CuraAppName, super().__init__(name = ApplicationMetadata.CuraAppName,
app_display_name = ApplicationMetadata.CuraAppDisplayName, app_display_name = ApplicationMetadata.CuraAppDisplayName,
version = ApplicationMetadata.CuraVersion if not ApplicationMetadata.IsAlternateVersion else ApplicationMetadata.CuraBuildType, version = ApplicationMetadata.CuraVersion if not ApplicationMetadata.IsAlternateVersion else ApplicationMetadata.CuraBuildType,
latest_url = ApplicationMetadata.CuraLatestURL,
api_version = ApplicationMetadata.CuraSDKVersion, api_version = ApplicationMetadata.CuraSDKVersion,
build_type = ApplicationMetadata.CuraBuildType, build_type = ApplicationMetadata.CuraBuildType,
is_debug_mode = ApplicationMetadata.CuraDebugMode, is_debug_mode = ApplicationMetadata.CuraDebugMode,
@ -355,8 +358,14 @@ class CuraApplication(QtApplication):
Resources.addSecureSearchPath(os.path.join(self._app_install_dir, "share", "cura", "resources")) Resources.addSecureSearchPath(os.path.join(self._app_install_dir, "share", "cura", "resources"))
if not hasattr(sys, "frozen"): if not hasattr(sys, "frozen"):
resource_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources") Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources"))
Resources.addSecureSearchPath(resource_path)
# local Conan cache
Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..", "resources"))
Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..", "plugins"))
# venv site-packages
Resources.addSearchPath(os.path.join(app_root, "..", "share", "cura", "resources"))
@classmethod @classmethod
def _initializeSettingDefinitions(cls): def _initializeSettingDefinitions(cls):
@ -700,6 +709,7 @@ class CuraApplication(QtApplication):
self.showMessageBox.emit(title, text, informativeText, detailedText, buttons, icon) self.showMessageBox.emit(title, text, informativeText, detailedText, buttons, icon)
showDiscardOrKeepProfileChanges = pyqtSignal() showDiscardOrKeepProfileChanges = pyqtSignal()
showCompareAndSaveProfileChanges = pyqtSignal(int)
def discardOrKeepProfileChanges(self) -> bool: def discardOrKeepProfileChanges(self) -> bool:
has_user_interaction = False has_user_interaction = False
@ -814,6 +824,12 @@ class CuraApplication(QtApplication):
def run(self): def run(self):
super().run() super().run()
if len(ApplicationMetadata.DEPENDENCY_INFO) > 0:
Logger.debug("Using Conan managed dependencies: " + ", ".join(
[dep["recipe"]["id"] for dep in ApplicationMetadata.DEPENDENCY_INFO["installed"] if dep["recipe"]["version"] != "latest"]))
else:
Logger.warning("Could not find conan_install_info.json")
Logger.log("i", "Initializing machine error checker") Logger.log("i", "Initializing machine error checker")
self._machine_error_checker = MachineErrorChecker(self) self._machine_error_checker = MachineErrorChecker(self)
self._machine_error_checker.initialize() self._machine_error_checker.initialize()
@ -1176,6 +1192,8 @@ class CuraApplication(QtApplication):
qmlRegisterType(InstanceContainer, "Cura", 1, 0, "InstanceContainer") qmlRegisterType(InstanceContainer, "Cura", 1, 0, "InstanceContainer")
qmlRegisterType(ExtrudersModel, "Cura", 1, 0, "ExtrudersModel") qmlRegisterType(ExtrudersModel, "Cura", 1, 0, "ExtrudersModel")
qmlRegisterType(GlobalStacksModel, "Cura", 1, 0, "GlobalStacksModel") qmlRegisterType(GlobalStacksModel, "Cura", 1, 0, "GlobalStacksModel")
qmlRegisterType(MachineListModel, "Cura", 1, 0, "MachineListModel")
qmlRegisterType(CompatibleMachineModel, "Cura", 1, 0, "CompatibleMachineModel")
self.processEvents() self.processEvents()
qmlRegisterType(FavoriteMaterialsModel, "Cura", 1, 0, "FavoriteMaterialsModel") qmlRegisterType(FavoriteMaterialsModel, "Cura", 1, 0, "FavoriteMaterialsModel")
@ -1430,7 +1448,7 @@ class CuraApplication(QtApplication):
bounding_box = node.getBoundingBox() bounding_box = node.getBoundingBox()
if bounding_box is None or bounding_box.width < self._volume.getBoundingBox().width or bounding_box.depth < self._volume.getBoundingBox().depth: if bounding_box is None or bounding_box.width < self._volume.getBoundingBox().width or bounding_box.depth < self._volume.getBoundingBox().depth:
# Arrange only the unlocked nodes and keep the locked ones in place # Arrange only the unlocked nodes and keep the locked ones in place
if UM.Util.parseBool(node.getSetting(SceneNodeSettings.LockPosition)): if node.getSetting(SceneNodeSettings.LockPosition):
locked_nodes.append(node) locked_nodes.append(node)
else: else:
nodes_to_arrange.append(node) nodes_to_arrange.append(node)
@ -2058,3 +2076,7 @@ class CuraApplication(QtApplication):
@classmethod @classmethod
def getInstance(cls, *args, **kwargs) -> "CuraApplication": def getInstance(cls, *args, **kwargs) -> "CuraApplication":
return cast(CuraApplication, super().getInstance(**kwargs)) return cast(CuraApplication, super().getInstance(**kwargs))
@pyqtProperty(bool, constant=True)
def isEnterprise(self) -> bool:
return ApplicationMetadata.IsEnterpriseVersion
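The dependency logging added to run() above implies a particular layout for conan_install_info.json. As a minimal, illustrative sketch only (the file path and exact key layout are assumptions, not something this commit confirms), reading that file from plain Python could look like this:

import json
from pathlib import Path
from typing import List

def read_pinned_dependencies(info_path: Path) -> List[str]:
    # Return the recipe ids of installed dependencies pinned to a fixed version,
    # mirroring the list that the run() logging above prints.
    if not info_path.exists():
        return []  # corresponds to the "Could not find conan_install_info.json" warning
    data = json.loads(info_path.read_text(encoding="utf-8"))
    return [dep["recipe"]["id"]
            for dep in data.get("installed", [])
            if dep["recipe"].get("version") != "latest"]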

View file

@ -14,6 +14,7 @@ from cura.Settings.GlobalStack import GlobalStack
from UM.PackageManager import PackageManager # The class we're extending. from UM.PackageManager import PackageManager # The class we're extending.
from UM.Resources import Resources # To find storage paths for some resource types. from UM.Resources import Resources # To find storage paths for some resource types.
from UM.i18n import i18nCatalog from UM.i18n import i18nCatalog
from urllib.parse import unquote_plus
catalog = i18nCatalog("cura") catalog = i18nCatalog("cura")
@ -54,6 +55,14 @@ class CuraPackageManager(PackageManager):
def initialize(self) -> None: def initialize(self) -> None:
self._installation_dirs_dict["materials"] = Resources.getStoragePath(CuraApplication.ResourceTypes.MaterialInstanceContainer) self._installation_dirs_dict["materials"] = Resources.getStoragePath(CuraApplication.ResourceTypes.MaterialInstanceContainer)
self._installation_dirs_dict["qualities"] = Resources.getStoragePath(CuraApplication.ResourceTypes.QualityInstanceContainer) self._installation_dirs_dict["qualities"] = Resources.getStoragePath(CuraApplication.ResourceTypes.QualityInstanceContainer)
self._installation_dirs_dict["variants"] = Resources.getStoragePath(CuraApplication.ResourceTypes.VariantInstanceContainer)
# Due to a bug in Cura 5.1.0 we needed to change the directory structure of the curapackage on the server side (See SD-3871).
# Although the material intent profiles will be installed in the `intent` folder, the curapackage from the server side will
# have an `intents` folder. For completeness, we will look in both locations in the curapackage and map them both to the
# `intent` folder.
self._installation_dirs_dict["intents"] = Resources.getStoragePath(CuraApplication.ResourceTypes.IntentInstanceContainer)
self._installation_dirs_dict["intent"] = Resources.getStoragePath(CuraApplication.ResourceTypes.IntentInstanceContainer)
super().initialize() super().initialize()
@ -80,6 +89,7 @@ class CuraPackageManager(PackageManager):
def getMaterialFilePackageId(self, file_name: str, guid: str) -> str: def getMaterialFilePackageId(self, file_name: str, guid: str) -> str:
"""Get the id of the installed material package that contains file_name""" """Get the id of the installed material package that contains file_name"""
file_name = unquote_plus(file_name)
for material_package in [f for f in os.scandir(self._installation_dirs_dict["materials"]) if f.is_dir()]: for material_package in [f for f in os.scandir(self._installation_dirs_dict["materials"]) if f.is_dir()]:
package_id = material_package.name package_id = material_package.name
@ -98,6 +108,7 @@ class CuraPackageManager(PackageManager):
return package_id return package_id
Logger.error("Could not find package_id for file: {} with GUID: {} ".format(file_name, guid)) Logger.error("Could not find package_id for file: {} with GUID: {} ".format(file_name, guid))
Logger.error(f"Bundled paths searched: {list(Resources.getSecureSearchPaths())}")
return "" return ""
def getMachinesUsingPackage(self, package_id: str) -> Tuple[List[Tuple[GlobalStack, str, str]], List[Tuple[GlobalStack, str, str]]]: def getMachinesUsingPackage(self, package_id: str) -> Tuple[List[Tuple[GlobalStack, str, str]], List[Tuple[GlobalStack, str, str]]]:
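The new unquote_plus call in getMaterialFilePackageId decodes URL-escaped material file names before they are compared against the files on disk. A small illustration of what the call does (the file name below is made up):

from urllib.parse import unquote_plus

# Percent-escapes are decoded and '+' becomes a space, so an encoded name
# matches the file name actually stored in the materials folder.
encoded_name = "generic_pla_%231_red+edition.xml.fdm_material"
print(unquote_plus(encoded_name))  # generic_pla_#1_red edition.xml.fdm_material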

View file

@ -1,13 +0,0 @@
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
CuraAppName = "@CURA_APP_NAME@"
CuraAppDisplayName = "@CURA_APP_DISPLAY_NAME@"
CuraVersion = "@CURA_VERSION@"
CuraBuildType = "@CURA_BUILDTYPE@"
CuraDebugMode = True if "@_cura_debugmode@" == "ON" else False
CuraCloudAPIRoot = "@CURA_CLOUD_API_ROOT@"
CuraCloudAPIVersion = "@CURA_CLOUD_API_VERSION@"
CuraCloudAccountAPIRoot = "@CURA_CLOUD_ACCOUNT_API_ROOT@"
CuraMarketplaceRoot = "@CURA_MARKETPLACE_ROOT@"
CuraDigitalFactoryURL = "@CURA_DIGITAL_FACTORY_URL@"

View file

@ -24,9 +24,12 @@ class LayerPolygon:
PrimeTowerType = 11 PrimeTowerType = 11
__number_of_types = 12 __number_of_types = 12
__jump_map = numpy.logical_or(numpy.logical_or(numpy.arange(__number_of_types) == NoneType, numpy.arange(__number_of_types) == MoveCombingType), numpy.arange(__number_of_types) == MoveRetractionType) __jump_map = numpy.logical_or(numpy.logical_or(numpy.arange(__number_of_types) == NoneType,
numpy.arange(__number_of_types) == MoveCombingType),
numpy.arange(__number_of_types) == MoveRetractionType)
def __init__(self, extruder: int, line_types: numpy.ndarray, data: numpy.ndarray, line_widths: numpy.ndarray, line_thicknesses: numpy.ndarray, line_feedrates: numpy.ndarray) -> None: def __init__(self, extruder: int, line_types: numpy.ndarray, data: numpy.ndarray,
line_widths: numpy.ndarray, line_thicknesses: numpy.ndarray, line_feedrates: numpy.ndarray) -> None:
"""LayerPolygon, used in ProcessSlicedLayersJob """LayerPolygon, used in ProcessSlicedLayersJob
:param extruder: The position of the extruder :param extruder: The position of the extruder
@ -39,10 +42,12 @@ class LayerPolygon:
self._extruder = extruder self._extruder = extruder
self._types = line_types self._types = line_types
for i in range(len(self._types)): unknown_types = numpy.where(self._types >= self.__number_of_types, self._types, None)
if self._types[i] >= self.__number_of_types: # Got faulty line data from the engine. if unknown_types.any():
Logger.log("w", "Found an unknown line type: %s", i) # Got faulty line data from the engine.
self._types[i] = self.NoneType for idx in unknown_types:
Logger.warning(f"Found an unknown line type at: {idx}")
self._types[idx] = self.NoneType
self._data = data self._data = data
self._line_widths = line_widths self._line_widths = line_widths
self._line_thicknesses = line_thicknesses self._line_thicknesses = line_thicknesses
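The replacement loop above iterates over the result of numpy.where to reset unknown line types. Shown purely as an illustrative sketch (the constants are stand-ins and this is not the code in the commit), an index-based variant of the same clean-up would be:

import numpy

NONE_TYPE = 0          # stand-in for LayerPolygon.NoneType
NUMBER_OF_TYPES = 12   # stand-in for LayerPolygon.__number_of_types

def sanitize_line_types(types: numpy.ndarray) -> numpy.ndarray:
    # Indices of line types the engine should never produce.
    bad_indices = numpy.where(types >= NUMBER_OF_TYPES)[0]
    for idx in bad_indices:
        print(f"Found an unknown line type at index {idx}")
    types[bad_indices] = NONE_TYPE  # reset them all in one vectorised assignment
    return types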
@ -58,14 +63,16 @@ class LayerPolygon:
self._mesh_line_count = len(self._types) - self._jump_count self._mesh_line_count = len(self._types) - self._jump_count
self._vertex_count = self._mesh_line_count + numpy.sum(self._types[1:] == self._types[:-1]) self._vertex_count = self._mesh_line_count + numpy.sum(self._types[1:] == self._types[:-1])
# Buffering the colors shouldn't be necessary as it is not # Buffering the colors shouldn't be necessary as it is not
# re-used and can save a lot of memory usage. # re-used and can save a lot of memory usage.
self._color_map = LayerPolygon.getColorMap() self._color_map = LayerPolygon.getColorMap()
self._colors = self._color_map[self._types] # type: numpy.ndarray self._colors = self._color_map[self._types] # type: numpy.ndarray
# When type is used as index returns true if type == LayerPolygon.InfillType or type == LayerPolygon.SkinType or type == LayerPolygon.SupportInfillType # When type is used as index returns true if type == LayerPolygon.InfillType
# or type == LayerPolygon.SkinType
# or type == LayerPolygon.SupportInfillType
# Should be generated in better way, not hardcoded. # Should be generated in better way, not hardcoded.
self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype = bool) self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype=bool)
self._build_cache_line_mesh_mask = None # type: Optional[numpy.ndarray] self._build_cache_line_mesh_mask = None # type: Optional[numpy.ndarray]
self._build_cache_needed_points = None # type: Optional[numpy.ndarray] self._build_cache_needed_points = None # type: Optional[numpy.ndarray]
@ -80,12 +87,14 @@ class LayerPolygon:
# Only if the type of line segment changes do we need to add an extra vertex to change colors # Only if the type of line segment changes do we need to add an extra vertex to change colors
self._build_cache_needed_points[1:, 0][:, numpy.newaxis] = self._types[1:] != self._types[:-1] self._build_cache_needed_points[1:, 0][:, numpy.newaxis] = self._types[1:] != self._types[:-1]
# Mark points as unneeded if they are of types we don't want in the line mesh according to the calculated mask # Mark points as unneeded if they are of types we don't want in the line mesh according to the calculated mask
numpy.logical_and(self._build_cache_needed_points, self._build_cache_line_mesh_mask, self._build_cache_needed_points ) numpy.logical_and(self._build_cache_needed_points, self._build_cache_line_mesh_mask, self._build_cache_needed_points)
self._vertex_begin = 0 self._vertex_begin = 0
self._vertex_end = cast(int, numpy.sum(self._build_cache_needed_points)) self._vertex_end = cast(int, numpy.sum(self._build_cache_needed_points))
def build(self, vertex_offset: int, index_offset: int, vertices: numpy.ndarray, colors: numpy.ndarray, line_dimensions: numpy.ndarray, feedrates: numpy.ndarray, extruders: numpy.ndarray, line_types: numpy.ndarray, indices: numpy.ndarray) -> None: def build(self, vertex_offset: int, index_offset: int, vertices: numpy.ndarray,
colors: numpy.ndarray, line_dimensions: numpy.ndarray, feedrates: numpy.ndarray,
extruders: numpy.ndarray, line_types: numpy.ndarray, indices: numpy.ndarray) -> None:
"""Set all the arrays provided by the function caller, representing the LayerPolygon """Set all the arrays provided by the function caller, representing the LayerPolygon
The arrays are either by vertex or by indices. The arrays are either by vertex or by indices.
@ -111,19 +120,20 @@ class LayerPolygon:
line_mesh_mask = self._build_cache_line_mesh_mask line_mesh_mask = self._build_cache_line_mesh_mask
needed_points_list = self._build_cache_needed_points needed_points_list = self._build_cache_needed_points
# Index to the points we need to represent the line mesh. This is constructed by generating simple # Index to the points we need to represent the line mesh.
# start and end points for each line. For line segment n these are points n and n+1. Row n reads [n n+1] # This is constructed by generating simple start and end points for each line.
# Then then the indices for the points we don't need are thrown away based on the pre-calculated list. # For line segment n, these are points n and n+1. Row n reads [n n+1]
index_list = ( numpy.arange(len(self._types)).reshape((-1, 1)) + numpy.array([[0, 1]]) ).reshape((-1, 1))[needed_points_list.reshape((-1, 1))] # Then the indices for the points we don't need are thrown away based on the pre-calculated list.
index_list = (numpy.arange(len(self._types)).reshape((-1, 1)) + numpy.array([[0, 1]])).reshape((-1, 1))[needed_points_list.reshape((-1, 1))]
# The relative values of begin and end indices have already been set in buildCache, so we only need to offset them to the parents offset. # The relative values of begin and end indices have already been set in buildCache, so we only need to offset them to the parents offset.
self._vertex_begin += vertex_offset self._vertex_begin += vertex_offset
self._vertex_end += vertex_offset self._vertex_end += vertex_offset
# Points are picked based on the index list to get the vertices needed. # Points are picked based on the index list to get the vertices needed.
vertices[self._vertex_begin:self._vertex_end, :] = self._data[index_list, :] vertices[self._vertex_begin:self._vertex_end, :] = self._data[index_list, :]
# Create an array with colors for each vertex and remove the color data for the points that has been thrown away. # Create an array with colors for each vertex and remove the color data for the points that has been thrown away.
colors[self._vertex_begin:self._vertex_end, :] = numpy.tile(self._colors, (1, 2)).reshape((-1, 4))[needed_points_list.ravel()] colors[self._vertex_begin:self._vertex_end, :] = numpy.tile(self._colors, (1, 2)).reshape((-1, 4))[needed_points_list.ravel()]
# Create an array with line widths and thicknesses for each vertex. # Create an array with line widths and thicknesses for each vertex.
@ -138,14 +148,15 @@ class LayerPolygon:
# Convert type per vertex to type per line # Convert type per vertex to type per line
line_types[self._vertex_begin:self._vertex_end] = numpy.tile(self._types, (1, 2)).reshape((-1, 1))[needed_points_list.ravel()][:, 0] line_types[self._vertex_begin:self._vertex_end] = numpy.tile(self._types, (1, 2)).reshape((-1, 1))[needed_points_list.ravel()][:, 0]
# The relative values of begin and end indices have already been set in buildCache, so we only need to offset them to the parents offset. # The relative values of begin and end indices have already been set in buildCache,
# so we only need to offset them to the parents offset.
self._index_begin += index_offset self._index_begin += index_offset
self._index_end += index_offset self._index_end += index_offset
indices[self._index_begin:self._index_end, :] = numpy.arange(self._index_end-self._index_begin, dtype = numpy.int32).reshape((-1, 1)) indices[self._index_begin:self._index_end, :] = numpy.arange(self._index_end-self._index_begin, dtype=numpy.int32).reshape((-1, 1))
# When the line type changes the index needs to be increased by 2. # When the line type changes the index needs to be increased by 2.
indices[self._index_begin:self._index_end, :] += numpy.cumsum(needed_points_list[line_mesh_mask.ravel(), 0], dtype = numpy.int32).reshape((-1, 1)) indices[self._index_begin:self._index_end, :] += numpy.cumsum(needed_points_list[line_mesh_mask.ravel(), 0], dtype = numpy.int32).reshape((-1, 1))
# Each line segment goes from it's starting point p to p+1, offset by the vertex index. # Each line segment goes from it's starting point p to p+1, offset by the vertex index.
# The -1 is to compensate for the necessarily True value of needed_points_list[0,0] which causes an unwanted +1 in cumsum above. # The -1 is to compensate for the necessarily True value of needed_points_list[0,0] which causes an unwanted +1 in cumsum above.
indices[self._index_begin:self._index_end, :] += numpy.array([self._vertex_begin - 1, self._vertex_begin]) indices[self._index_begin:self._index_end, :] += numpy.array([self._vertex_begin - 1, self._vertex_begin])
@ -214,13 +225,12 @@ class LayerPolygon:
""" """
normals = numpy.copy(self._data) normals = numpy.copy(self._data)
normals[:, 1] = 0.0 # We are only interested in 2D normals normals[:, 1] = 0.0 # We are only interested in 2D normals
# Calculate the edges between points. # Calculate the edges between points.
# The call to numpy.roll shifts the entire array by one so that # The call to numpy.roll shifts the entire array by one
# we end up subtracting each next point from the current, wrapping # so that we end up subtracting each next point from the current, wrapping around.
# around. This gives us the edges from the next point to the current # This gives us the edges from the next point to the current point.
# point.
normals = numpy.diff(normals, 1, 0) normals = numpy.diff(normals, 1, 0)
# Calculate the length of each edge using standard Pythagoras # Calculate the length of each edge using standard Pythagoras
@ -245,17 +255,17 @@ class LayerPolygon:
if cls.__color_map is None: if cls.__color_map is None:
theme = cast(Theme, QtApplication.getInstance().getTheme()) theme = cast(Theme, QtApplication.getInstance().getTheme())
cls.__color_map = numpy.array([ cls.__color_map = numpy.array([
theme.getColor("layerview_none").getRgbF(), # NoneType theme.getColor("layerview_none").getRgbF(), # NoneType
theme.getColor("layerview_inset_0").getRgbF(), # Inset0Type theme.getColor("layerview_inset_0").getRgbF(), # Inset0Type
theme.getColor("layerview_inset_x").getRgbF(), # InsetXType theme.getColor("layerview_inset_x").getRgbF(), # InsetXType
theme.getColor("layerview_skin").getRgbF(), # SkinType theme.getColor("layerview_skin").getRgbF(), # SkinType
theme.getColor("layerview_support").getRgbF(), # SupportType theme.getColor("layerview_support").getRgbF(), # SupportType
theme.getColor("layerview_skirt").getRgbF(), # SkirtType theme.getColor("layerview_skirt").getRgbF(), # SkirtType
theme.getColor("layerview_infill").getRgbF(), # InfillType theme.getColor("layerview_infill").getRgbF(), # InfillType
theme.getColor("layerview_support_infill").getRgbF(), # SupportInfillType theme.getColor("layerview_support_infill").getRgbF(), # SupportInfillType
theme.getColor("layerview_move_combing").getRgbF(), # MoveCombingType theme.getColor("layerview_move_combing").getRgbF(), # MoveCombingType
theme.getColor("layerview_move_retraction").getRgbF(), # MoveRetractionType theme.getColor("layerview_move_retraction").getRgbF(), # MoveRetractionType
theme.getColor("layerview_support_interface").getRgbF(), # SupportInterfaceType theme.getColor("layerview_support_interface").getRgbF(), # SupportInterfaceType
theme.getColor("layerview_prime_tower").getRgbF() # PrimeTowerType theme.getColor("layerview_prime_tower").getRgbF() # PrimeTowerType
]) ])

View file

@ -33,8 +33,11 @@ class MachineAction(QObject, PluginObject):
self._qml_url = "" self._qml_url = ""
self._view = None self._view = None
self._finished = False self._finished = False
self._open_as_dialog = True
self._visible = True
labelChanged = pyqtSignal() labelChanged = pyqtSignal()
visibilityChanged = pyqtSignal()
onFinished = pyqtSignal() onFinished = pyqtSignal()
def getKey(self) -> str: def getKey(self) -> str:
@ -79,6 +82,15 @@ class MachineAction(QObject, PluginObject):
pass pass
@pyqtSlot()
def execute(self) -> None:
self._execute()
def _execute(self) -> None:
"""Protected implementation of execute."""
pass
@pyqtSlot() @pyqtSlot()
def setFinished(self) -> None: def setFinished(self) -> None:
self._finished = True self._finished = True
@ -94,7 +106,7 @@ class MachineAction(QObject, PluginObject):
plugin_path = PluginRegistry.getInstance().getPluginPath(self.getPluginId()) plugin_path = PluginRegistry.getInstance().getPluginPath(self.getPluginId())
if plugin_path is None: if plugin_path is None:
Logger.log("e", "Cannot create QML view: cannot find plugin path for plugin [%s]", self.getPluginId()) Logger.error(f"Cannot create QML view: cannot find plugin path for plugin {self.getPluginId()}")
return None return None
path = os.path.join(plugin_path, self._qml_url) path = os.path.join(plugin_path, self._qml_url)
@ -106,7 +118,7 @@ class MachineAction(QObject, PluginObject):
def qmlPath(self) -> "QUrl": def qmlPath(self) -> "QUrl":
plugin_path = PluginRegistry.getInstance().getPluginPath(self.getPluginId()) plugin_path = PluginRegistry.getInstance().getPluginPath(self.getPluginId())
if plugin_path is None: if plugin_path is None:
Logger.log("e", "Cannot create QML view: cannot find plugin path for plugin [%s]", self.getPluginId()) Logger.error(f"Cannot create QML view: cannot find plugin path for plugin {self.getPluginId()}")
return QUrl("") return QUrl("")
path = os.path.join(plugin_path, self._qml_url) path = os.path.join(plugin_path, self._qml_url)
return QUrl.fromLocalFile(path) return QUrl.fromLocalFile(path)
@ -114,3 +126,30 @@ class MachineAction(QObject, PluginObject):
@pyqtSlot(result = QObject) @pyqtSlot(result = QObject)
def getDisplayItem(self) -> Optional["QObject"]: def getDisplayItem(self) -> Optional["QObject"]:
return self._createViewFromQML() return self._createViewFromQML()
@pyqtProperty(bool, constant=True)
def shouldOpenAsDialog(self) -> bool:
"""Whether this action will show a dialog.
If not, the action will directly run the function inside execute().
:return: Defaults to true to be in line with the old behaviour.
"""
return self._open_as_dialog
@pyqtSlot()
def setVisible(self, visible: bool) -> None:
if self._visible != visible:
self._visible = visible
self.visibilityChanged.emit()
@pyqtProperty(bool, notify = visibilityChanged)
def visible(self) -> bool:
"""Whether this action button will be visible.
Example: Show only when isLoggedIn
:return: Defaults to true to be in line with the old behaviour.
"""
return self._visible
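Taken together, the new _execute() hook, shouldOpenAsDialog property and visibility signal allow a machine action that runs immediately instead of opening a dialog. A hypothetical subclass sketch (the class name and the key/label constructor arguments are assumptions made for illustration):

from cura.MachineAction import MachineAction

class ResetPrinterAction(MachineAction):
    def __init__(self) -> None:
        super().__init__("ResetPrinter", "Reset printer")  # assumed key/label signature
        self._open_as_dialog = False   # shouldOpenAsDialog will now report False
        self.setVisible(True)          # could be toggled later, e.g. based on login state

    def _execute(self) -> None:
        # Runs directly when execute() is triggered, since no dialog is shown.
        pass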

View file

@ -43,13 +43,9 @@ class MachineErrorChecker(QObject):
self._application = cura.CuraApplication.CuraApplication.getInstance() self._application = cura.CuraApplication.CuraApplication.getInstance()
self._machine_manager = self._application.getMachineManager() self._machine_manager = self._application.getMachineManager()
self._start_time = 0. # measure checking time self._check_start_time = time.time()
# This timer delays the starting of error check so we can react less frequently if the user is frequently self._setCheckTimer()
# changing settings.
self._error_check_timer = QTimer(self)
self._error_check_timer.setInterval(100)
self._error_check_timer.setSingleShot(True)
self._keys_to_check = set() # type: Set[str] self._keys_to_check = set() # type: Set[str]
@ -66,6 +62,18 @@ class MachineErrorChecker(QObject):
self._onMachineChanged() self._onMachineChanged()
def _setCheckTimer(self) -> None:
"""A QTimer to regulate error check frequency
This timer delays the start of the error check,
so that we react less often while the user is
actively changing settings.
"""
self._error_check_timer = QTimer(self)
self._error_check_timer.setInterval(100)
self._error_check_timer.setSingleShot(True)
def _onMachineChanged(self) -> None: def _onMachineChanged(self) -> None:
if self._global_stack: if self._global_stack:
self._global_stack.propertyChanged.disconnect(self.startErrorCheckPropertyChanged) self._global_stack.propertyChanged.disconnect(self.startErrorCheckPropertyChanged)
@ -152,7 +160,7 @@ class MachineErrorChecker(QObject):
self._stacks_and_keys_to_check.append((stack, key)) self._stacks_and_keys_to_check.append((stack, key))
self._application.callLater(self._checkStack) self._application.callLater(self._checkStack)
self._start_time = time.time() self._check_start_time = time.time()
Logger.log("d", "New error check scheduled.") Logger.log("d", "New error check scheduled.")
def _checkStack(self) -> None: def _checkStack(self) -> None:
@ -204,12 +212,10 @@ class MachineErrorChecker(QObject):
self._has_errors = result self._has_errors = result
self.hasErrorUpdated.emit() self.hasErrorUpdated.emit()
self._machine_manager.stacksValidationChanged.emit() self._machine_manager.stacksValidationChanged.emit()
if keys_to_recheck is None: self._keys_to_check = keys_to_recheck if keys_to_recheck else set()
self._keys_to_check = set()
else:
self._keys_to_check = keys_to_recheck
self._need_to_check = False self._need_to_check = False
self._check_in_progress = False self._check_in_progress = False
self.needToWaitForResultChanged.emit() self.needToWaitForResultChanged.emit()
self.errorCheckFinished.emit() self.errorCheckFinished.emit()
Logger.log("i", "Error check finished, result = %s, time = %0.1fs", result, time.time() - self._start_time) execution_time = time.time() - self._check_start_time
Logger.info(f"Error check finished, result = {result}, time = {execution_time:.2f}s")

View file

@ -0,0 +1,83 @@
# Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Optional
from PyQt6.QtCore import Qt, QObject, pyqtSlot, pyqtProperty, pyqtSignal
from UM.Logger import Logger
from UM.Qt.ListModel import ListModel
from UM.i18n import i18nCatalog
class CompatibleMachineModel(ListModel):
NameRole = Qt.ItemDataRole.UserRole + 1
UniqueIdRole = Qt.ItemDataRole.UserRole + 2
ExtrudersRole = Qt.ItemDataRole.UserRole + 3
def __init__(self, parent: Optional[QObject] = None) -> None:
super().__init__(parent)
self._catalog = i18nCatalog("cura")
self.addRoleName(self.NameRole, "name")
self.addRoleName(self.UniqueIdRole, "unique_id")
self.addRoleName(self.ExtrudersRole, "extruders")
self._update()
from cura.CuraApplication import CuraApplication
machine_manager = CuraApplication.getInstance().getMachineManager()
machine_manager.globalContainerChanged.connect(self._update)
machine_manager.outputDevicesChanged.connect(self._update)
@pyqtSlot()
def forceUpdate(self):
self._update()
def _update(self) -> None:
self.clear()
def _makeMaterial(brand, name, color):
if name.lower() in ["", "empty"]:
return {"brand": "", "name": "(empty)", "hexcolor": "#ffffff"}
else:
return {"brand": brand, "name": name, "hexcolor": color}
from cura.CuraApplication import CuraApplication
machine_manager = CuraApplication.getInstance().getMachineManager()
# Loop over the output-devices, not the stacks; need all applicable configurations, not just the current loaded one.
for output_device in machine_manager.printerOutputDevices:
for printer in output_device.printers:
extruder_configs = dict()
# initialize & add current active material:
for extruder in printer.extruders:
if not extruder.activeMaterial:
continue
materials = [_makeMaterial(
extruder.activeMaterial.brand, extruder.activeMaterial.name, extruder.activeMaterial.color)]
extruder_configs[extruder.getPosition()] = {
"position": extruder.getPosition(),
"core": extruder.hotendID,
"materials": materials
}
# add currently inactive, but possible materials:
for configuration in printer.availableConfigurations:
for extruder in configuration.extruderConfigurations:
if not extruder.position in extruder_configs:
Logger.log("w", f"No active extruder for position {extruder.position}.")
continue
entry = _makeMaterial(extruder.material.brand, extruder.material.name, extruder.material.color)
if entry not in extruder_configs[extruder.position]["materials"]:
extruder_configs[extruder.position]["materials"].append(entry)
if any([len(extruder["materials"]) > 0 for extruder in extruder_configs.values()]):
self.appendItem({
"name": printer.name,
"unique_id": printer.name, # <- Can assume the cloud doesn't have duplicate names?
"extruders": list(extruder_configs.values())
})

View file

@ -44,6 +44,7 @@ class GlobalStacksModel(ListModel):
self._filter_connection_type = None # type: Optional[ConnectionType] self._filter_connection_type = None # type: Optional[ConnectionType]
self._filter_online_only = False self._filter_online_only = False
self._filter_capabilities: List[str] = [] # Required capabilities that all listed printers must have. self._filter_capabilities: List[str] = [] # Required capabilities that all listed printers must have.
self._filter_abstract_machines: Optional[bool] = None
# Listen to changes # Listen to changes
CuraContainerRegistry.getInstance().containerAdded.connect(self._onContainerChanged) CuraContainerRegistry.getInstance().containerAdded.connect(self._onContainerChanged)
@ -54,6 +55,7 @@ class GlobalStacksModel(ListModel):
filterConnectionTypeChanged = pyqtSignal() filterConnectionTypeChanged = pyqtSignal()
filterCapabilitiesChanged = pyqtSignal() filterCapabilitiesChanged = pyqtSignal()
filterOnlineOnlyChanged = pyqtSignal() filterOnlineOnlyChanged = pyqtSignal()
filterAbstractMachinesChanged = pyqtSignal()
def setFilterConnectionType(self, new_filter: Optional[ConnectionType]) -> None: def setFilterConnectionType(self, new_filter: Optional[ConnectionType]) -> None:
if self._filter_connection_type != new_filter: if self._filter_connection_type != new_filter:
@ -98,6 +100,22 @@ class GlobalStacksModel(ListModel):
""" """
return self._filter_capabilities return self._filter_capabilities
def setFilterAbstractMachines(self, new_filter: Optional[bool]) -> None:
if self._filter_abstract_machines != new_filter:
self._filter_abstract_machines = new_filter
self.filterAbstractMachinesChanged.emit()
@pyqtProperty(bool, fset = setFilterAbstractMachines, notify = filterAbstractMachinesChanged)
def filterAbstractMachines(self) -> Optional[bool]:
"""
Whether we include abstract printers, non-abstract printers or both.
If this is set to None, both abstract and non-abstract printers will be included in the list;
set to True to include only abstract printers;
set to False to include only non-abstract printers.
"""
return self._filter_abstract_machines
def _onContainerChanged(self, container) -> None: def _onContainerChanged(self, container) -> None:
"""Handler for container added/removed events from registry""" """Handler for container added/removed events from registry"""
@ -130,6 +148,10 @@ class GlobalStacksModel(ListModel):
if self._filter_online_only and not is_online: if self._filter_online_only and not is_online:
continue continue
is_abstract_machine = parseBool(container_stack.getMetaDataEntry("is_abstract_machine", False))
if self._filter_abstract_machines is not None and self._filter_abstract_machines is not is_abstract_machine:
continue
capabilities = set(container_stack.getMetaDataEntry(META_CAPABILITIES, "").split(",")) capabilities = set(container_stack.getMetaDataEntry(META_CAPABILITIES, "").split(","))
if set(self._filter_capabilities) - capabilities: # Not all required capabilities are met. if set(self._filter_capabilities) - capabilities: # Not all required capabilities are met.
continue continue
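The new abstract-machine filter is deliberately tri-state. A usage sketch from Python (the import path is assumed and this only works inside a running Cura session; in practice the property is normally set from QML):

from cura.Machines.Models.GlobalStacksModel import GlobalStacksModel  # assumed path

model = GlobalStacksModel()
model.setFilterAbstractMachines(True)    # only abstract machines (printer types)
model.setFilterAbstractMachines(False)   # only concrete, non-abstract printers
model.setFilterAbstractMachines(None)    # no filtering: include both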

View file

@ -0,0 +1,161 @@
# Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
# The MachineListModel is used to display the connected printers in the interface. Both the abstract machines and all
# online cloud connected printers are represented within this ListModel. Additional information such as the number of
# connected printers for each printer type is gathered.
from typing import Optional, List, cast
from PyQt6.QtCore import Qt, QTimer, QObject, pyqtSlot, pyqtProperty, pyqtSignal
from UM.Qt.ListModel import ListModel
from UM.Settings.ContainerStack import ContainerStack
from UM.Settings.Interfaces import ContainerInterface
from UM.i18n import i18nCatalog
from UM.Util import parseBool
from cura.Settings.CuraContainerRegistry import CuraContainerRegistry
from cura.Settings.GlobalStack import GlobalStack
class MachineListModel(ListModel):
NameRole = Qt.ItemDataRole.UserRole + 1
IdRole = Qt.ItemDataRole.UserRole + 2
HasRemoteConnectionRole = Qt.ItemDataRole.UserRole + 3
MetaDataRole = Qt.ItemDataRole.UserRole + 4
IsOnlineRole = Qt.ItemDataRole.UserRole + 5
MachineCountRole = Qt.ItemDataRole.UserRole + 6
IsAbstractMachineRole = Qt.ItemDataRole.UserRole + 7
ComponentTypeRole = Qt.ItemDataRole.UserRole + 8
IsNetworkedMachineRole = Qt.ItemDataRole.UserRole + 9
def __init__(self, parent: Optional[QObject] = None, machines_filter: List[GlobalStack] = None, listenToChanges: bool = True) -> None:
super().__init__(parent)
self._show_cloud_printers = False
self._machines_filter = machines_filter
self._catalog = i18nCatalog("cura")
self.addRoleName(self.NameRole, "name")
self.addRoleName(self.IdRole, "id")
self.addRoleName(self.HasRemoteConnectionRole, "hasRemoteConnection")
self.addRoleName(self.MetaDataRole, "metadata")
self.addRoleName(self.IsOnlineRole, "isOnline")
self.addRoleName(self.MachineCountRole, "machineCount")
self.addRoleName(self.IsAbstractMachineRole, "isAbstractMachine")
self.addRoleName(self.ComponentTypeRole, "componentType")
self.addRoleName(self.IsNetworkedMachineRole, "isNetworked")
self._change_timer = QTimer()
self._change_timer.setInterval(200)
self._change_timer.setSingleShot(True)
self._change_timer.timeout.connect(self._update)
if listenToChanges:
CuraContainerRegistry.getInstance().containerAdded.connect(self._onContainerChanged)
CuraContainerRegistry.getInstance().containerMetaDataChanged.connect(self._onContainerChanged)
CuraContainerRegistry.getInstance().containerRemoved.connect(self._onContainerChanged)
self._updateDelayed()
showCloudPrintersChanged = pyqtSignal(bool)
@pyqtProperty(bool, notify=showCloudPrintersChanged)
def showCloudPrinters(self) -> bool:
return self._show_cloud_printers
@pyqtSlot(bool)
def setShowCloudPrinters(self, show_cloud_printers: bool) -> None:
self._show_cloud_printers = show_cloud_printers
self._updateDelayed()
self.showCloudPrintersChanged.emit(show_cloud_printers)
def _onContainerChanged(self, container: ContainerInterface) -> None:
"""Handler for container added/removed events from registry"""
# We only need to update when the added / removed container GlobalStack
if isinstance(container, GlobalStack):
self._updateDelayed()
def _updateDelayed(self) -> None:
self._change_timer.start()
def _getMachineStacks(self) -> List[ContainerStack]:
return CuraContainerRegistry.getInstance().findContainerStacks(type = "machine")
def _getAbstractMachineStacks(self) -> List[ContainerStack]:
return CuraContainerRegistry.getInstance().findContainerStacks(is_abstract_machine = "True")
def set_machines_filter(self, machines_filter: Optional[List[GlobalStack]]) -> None:
self._machines_filter = machines_filter
self._update()
def _update(self) -> None:
self.clear()
from cura.CuraApplication import CuraApplication
machines_manager = CuraApplication.getInstance().getMachineManager()
other_machine_stacks = self._getMachineStacks()
other_machine_stacks.sort(key = lambda machine: machine.getName().upper())
abstract_machine_stacks = self._getAbstractMachineStacks()
abstract_machine_stacks.sort(key = lambda machine: machine.getName().upper(), reverse = True)
if self._machines_filter is not None:
filter_ids = [machine_filter.id for machine_filter in self._machines_filter]
other_machine_stacks = [machine for machine in other_machine_stacks if machine.id in filter_ids]
abstract_machine_stacks = [machine for machine in abstract_machine_stacks if machine.id in filter_ids]
for abstract_machine in abstract_machine_stacks:
definition_id = abstract_machine.definition.getId()
online_machine_stacks = machines_manager.getMachinesWithDefinition(definition_id, online_only = True)
online_machine_stacks = list(filter(lambda machine: machine.hasNetworkedConnection(), online_machine_stacks))
online_machine_stacks.sort(key=lambda machine: machine.getName().upper())
if abstract_machine in other_machine_stacks:
other_machine_stacks.remove(abstract_machine)
if abstract_machine in online_machine_stacks:
online_machine_stacks.remove(abstract_machine)
# Create a list item for abstract machine
self.addItem(abstract_machine, True, len(online_machine_stacks))
# Create list of machines that are children of the abstract machine
for stack in online_machine_stacks:
if self._show_cloud_printers:
self.addItem(stack, True)
# Remove this machine from the other stack list
if stack in other_machine_stacks:
other_machine_stacks.remove(stack)
if len(abstract_machine_stacks) > 0:
self.appendItem({
"componentType": "HIDE_BUTTON" if self._show_cloud_printers else "SHOW_BUTTON",
"isOnline": True,
"isAbstractMachine": False,
"machineCount": 0,
"catergory": "connected",
})
for stack in other_machine_stacks:
self.addItem(stack, False)
def addItem(self, container_stack: ContainerStack, is_online: bool, machine_count: int = 0) -> None:
if parseBool(container_stack.getMetaDataEntry("hidden", False)):
return
self.appendItem({
"componentType": "MACHINE",
"name": container_stack.getName(),
"id": container_stack.getId(),
"metadata": container_stack.getMetaData().copy(),
"isOnline": is_online,
"isAbstractMachine": parseBool(container_stack.getMetaDataEntry("is_abstract_machine", False)),
"isNetworked": cast(GlobalStack, container_stack).hasNetworkedConnection() if isinstance(container_stack, GlobalStack) else False,
"machineCount": machine_count,
"catergory": "connected" if is_online else "other",
})

View file

@ -184,7 +184,8 @@ class QualityManagementModel(ListModel):
container_registry.addContainer(container.duplicate(new_id, new_name)) container_registry.addContainer(container.duplicate(new_id, new_name))
@pyqtSlot(str) @pyqtSlot(str)
def createQualityChanges(self, base_name: str) -> None: @pyqtSlot(str, bool)
def createQualityChanges(self, base_name: str, activate_after_success: bool = False) -> None:
"""Create quality changes containers from the user containers in the active stacks. """Create quality changes containers from the user containers in the active stacks.
This will go through the global and extruder stacks and create quality_changes containers from the user This will go through the global and extruder stacks and create quality_changes containers from the user
@ -233,6 +234,14 @@ class QualityManagementModel(ListModel):
container_registry.addContainer(new_changes) container_registry.addContainer(new_changes)
if activate_after_success:
# At this point, the QualityChangesGroup object for the new changes may not exist yet.
# This can be forced by asking for all of them. At that point it's just as well to loop.
for quality_changes in ContainerTree.getInstance().getCurrentQualityChangesGroups():
if quality_changes.name == unique_name:
machine_manager.setQualityChangesGroup(quality_changes)
break
def _createQualityChanges(self, quality_type: str, intent_category: Optional[str], new_name: str, machine: "GlobalStack", extruder_stack: Optional["ExtruderStack"]) -> "InstanceContainer": def _createQualityChanges(self, quality_type: str, intent_category: Optional[str], new_name: str, machine: "GlobalStack", extruder_stack: Optional["ExtruderStack"]) -> "InstanceContainer":
"""Create a quality changes container with the given set-up. """Create a quality changes container with the given set-up.

View file

@ -118,12 +118,14 @@ class UserChangesModel(ListModel):
if original_value is not None: if original_value is not None:
break break
item_to_add = {"key": setting_key, item_to_add = {
"label": label, "key": setting_key,
"user_value": str(user_changes.getProperty(setting_key, "value")), "label": label,
"original_value": str(original_value), "user_value": str(user_changes.getProperty(setting_key, "value", default_value_resolve_context)),
"extruder": "", "original_value": str(original_value),
"category": category_label} "extruder": "",
"category": category_label,
}
if stack != global_stack: if stack != global_stack:
item_to_add["extruder"] = stack.getName() item_to_add["extruder"] = stack.getName()

View file

@ -274,7 +274,7 @@ class AuthorizationService:
self._unable_to_get_data_message.show() self._unable_to_get_data_message.show()
else: else:
self._unable_to_get_data_message = Message(i18n_catalog.i18nc("@info", self._unable_to_get_data_message = Message(i18n_catalog.i18nc("@info",
"Unable to reach the Ultimaker account server."), "Unable to reach the UltiMaker account server."),
title = i18n_catalog.i18nc("@info:title", "Log-in failed"), title = i18n_catalog.i18nc("@info:title", "Log-in failed"),
message_type = Message.MessageType.ERROR) message_type = Message.MessageType.ERROR)
Logger.warning("Unable to get user profile using auth data from preferences.") Logger.warning("Unable to get user profile using auth data from preferences.")

View file

@ -50,8 +50,13 @@ class PlatformPhysics:
if not self._enabled: if not self._enabled:
return return
app_instance = Application.getInstance()
app_preferences = app_instance.getPreferences()
app_automatic_drop_down = app_preferences.getValue("physics/automatic_drop_down")
app_automatic_push_free = app_preferences.getValue("physics/automatic_push_free")
root = self._controller.getScene().getRoot() root = self._controller.getScene().getRoot()
build_volume = Application.getInstance().getBuildVolume() build_volume = app_instance.getBuildVolume()
build_volume.updateNodeBoundaryCheck() build_volume.updateNodeBoundaryCheck()
# Keep a list of nodes that are moving. We use this so that we don't move two intersecting objects in the # Keep a list of nodes that are moving. We use this so that we don't move two intersecting objects in the
@ -75,7 +80,7 @@ class PlatformPhysics:
# Move it downwards if bottom is above platform # Move it downwards if bottom is above platform
move_vector = Vector() move_vector = Vector()
if Application.getInstance().getPreferences().getValue("physics/automatic_drop_down") and not (node.getParent() and node.getParent().callDecoration("isGroup") or node.getParent() != root) and node.isEnabled(): #If an object is grouped, don't move it down if node.getSetting(SceneNodeSettings.AutoDropDown, app_automatic_drop_down) and not (node.getParent() and node.getParent().callDecoration("isGroup") or node.getParent() != root) and node.isEnabled(): #If an object is grouped, don't move it down
z_offset = node.callDecoration("getZOffset") if node.getDecorator(ZOffsetDecorator.ZOffsetDecorator) else 0 z_offset = node.callDecoration("getZOffset") if node.getDecorator(ZOffsetDecorator.ZOffsetDecorator) else 0
move_vector = move_vector.set(y = -bbox.bottom + z_offset) move_vector = move_vector.set(y = -bbox.bottom + z_offset)
@ -84,7 +89,7 @@ class PlatformPhysics:
node.addDecorator(ConvexHullDecorator()) node.addDecorator(ConvexHullDecorator())
# only push away objects if this node is a printing mesh # only push away objects if this node is a printing mesh
if not node.callDecoration("isNonPrintingMesh") and Application.getInstance().getPreferences().getValue("physics/automatic_push_free"): if not node.callDecoration("isNonPrintingMesh") and app_automatic_push_free:
# Do not move locked nodes # Do not move locked nodes
if node.getSetting(SceneNodeSettings.LockPosition): if node.getSetting(SceneNodeSettings.LockPosition):
continue continue

View file

@ -13,9 +13,9 @@ class ExtruderConfigurationModel(QObject):
def __init__(self, position: int = -1) -> None: def __init__(self, position: int = -1) -> None:
super().__init__() super().__init__()
self._position = position # type: int self._position: int = position
self._material = None # type: Optional[MaterialOutputModel] self._material: Optional[MaterialOutputModel] = None
self._hotend_id = None # type: Optional[str] self._hotend_id: Optional[str] = None
def setPosition(self, position: int) -> None: def setPosition(self, position: int) -> None:
self._position = position self._position = position

View file

@ -1,4 +1,4 @@
# Copyright (c) 2018 Ultimaker B.V. # Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
from typing import Optional, TYPE_CHECKING, List from typing import Optional, TYPE_CHECKING, List
@ -6,6 +6,8 @@ from typing import Optional, TYPE_CHECKING, List
from PyQt6.QtCore import pyqtSignal, pyqtProperty, QObject, pyqtSlot, QUrl from PyQt6.QtCore import pyqtSignal, pyqtProperty, QObject, pyqtSlot, QUrl
from PyQt6.QtGui import QImage from PyQt6.QtGui import QImage
from cura.CuraApplication import CuraApplication
if TYPE_CHECKING: if TYPE_CHECKING:
from cura.PrinterOutput.PrinterOutputController import PrinterOutputController from cura.PrinterOutput.PrinterOutputController import PrinterOutputController
from cura.PrinterOutput.Models.PrinterOutputModel import PrinterOutputModel from cura.PrinterOutput.Models.PrinterOutputModel import PrinterOutputModel
@ -86,6 +88,18 @@ class PrintJobOutputModel(QObject):
self._owner = owner self._owner = owner
self.ownerChanged.emit() self.ownerChanged.emit()
@pyqtProperty(bool, notify = ownerChanged)
def isMine(self) -> bool:
"""
Returns whether this print job was sent by the currently logged in user.
This checks the owner of the print job with the owner of the currently
logged in account. Both of these are human-readable account names which
may be duplicate. In practice the harm here is limited, but it's the
best we can do with the information available to the API.
"""
return self._owner == CuraApplication.getInstance().getCuraAPI().account.userName
@pyqtProperty(QObject, notify=assignedPrinterChanged) @pyqtProperty(QObject, notify=assignedPrinterChanged)
def assignedPrinter(self): def assignedPrinter(self):
return self._assigned_printer return self._assigned_printer

View file

@ -350,5 +350,6 @@ class PrinterOutputModel(QObject):
self.availableConfigurationsChanged.emit() self.availableConfigurationsChanged.emit()
def setAvailableConfigurations(self, new_configurations: List[PrinterConfigurationModel]) -> None: def setAvailableConfigurations(self, new_configurations: List[PrinterConfigurationModel]) -> None:
self._available_printer_configurations = new_configurations if self._available_printer_configurations != new_configurations:
self.availableConfigurationsChanged.emit() self._available_printer_configurations = new_configurations
self.availableConfigurationsChanged.emit()

View file

@ -1,20 +1,19 @@
# Copyright (c) 2019 Ultimaker B.V. # Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
from dataclasses import dataclass
@dataclass
class Peripheral: class Peripheral:
"""Data class that represents a peripheral for a printer. """Data class that represents a peripheral for a printer.
Output device plug-ins may specify that the printer has a certain set of Output device plug-ins may specify that the printer has a certain set of
peripherals. This set is then possibly shown in the interface of the monitor peripherals. This set is then possibly shown in the interface of the monitor
stage. stage.
Args:
type (string): A unique ID for the type of peripheral.
name (string): A human-readable name for the peripheral.
""" """
type: str
def __init__(self, peripheral_type: str, name: str) -> None: name: str
"""Constructs the peripheral.
:param peripheral_type: A unique ID for the type of peripheral.
:param name: A human-readable name for the peripheral.
"""
self.type = peripheral_type
self.name = name

View file

@ -50,13 +50,12 @@ class PrinterOutputDevice(QObject, OutputDevice):
The assumption is made the printer is a FDM printer. The assumption is made the printer is a FDM printer.
Note that a number of settings are marked as "final". This is because decorators Note that a number of settings are marked as "final". This is because decorators
are not inherited by children. To fix this we use the private counter part of those are not inherited by children. To fix this we use the private counterpart of those
functions to actually have the implementation. functions to actually have the implementation.
For all other uses it should be used in the same way as a "regular" OutputDevice. For all other uses it should be used in the same way as a "regular" OutputDevice.
""" """
printersChanged = pyqtSignal() printersChanged = pyqtSignal()
connectionStateChanged = pyqtSignal(str) connectionStateChanged = pyqtSignal(str)
acceptsCommandsChanged = pyqtSignal() acceptsCommandsChanged = pyqtSignal()
@ -183,8 +182,8 @@ class PrinterOutputDevice(QObject, OutputDevice):
@pyqtProperty(QObject, constant = True) @pyqtProperty(QObject, constant = True)
def monitorItem(self) -> QObject: def monitorItem(self) -> QObject:
# Note that we specifically only check if the monitor component is created. # Note that we specifically only check if the monitor component is created.
# It could be that it failed to actually create the qml item! If we check if the item was created, it will try to # It could be that it failed to actually create the qml item! If we check if the item was created, it will try
# create the item (and fail) every time. # to create the item (and fail) every time.
if not self._monitor_component: if not self._monitor_component:
self._createMonitorViewFromQML() self._createMonitorViewFromQML()
return self._monitor_item return self._monitor_item
@ -237,9 +236,9 @@ class PrinterOutputDevice(QObject, OutputDevice):
self.acceptsCommandsChanged.emit() self.acceptsCommandsChanged.emit()
# Returns the unique configurations of the printers within this output device
@pyqtProperty("QVariantList", notify = uniqueConfigurationsChanged) @pyqtProperty("QVariantList", notify = uniqueConfigurationsChanged)
def uniqueConfigurations(self) -> List["PrinterConfigurationModel"]: def uniqueConfigurations(self) -> List["PrinterConfigurationModel"]:
""" Returns the unique configurations of the printers within this output device """
return self._unique_configurations return self._unique_configurations
def _updateUniqueConfigurations(self) -> None: def _updateUniqueConfigurations(self) -> None:
@ -248,17 +247,19 @@ class PrinterOutputDevice(QObject, OutputDevice):
if printer.printerConfiguration is not None and printer.printerConfiguration.hasAnyMaterialLoaded(): if printer.printerConfiguration is not None and printer.printerConfiguration.hasAnyMaterialLoaded():
all_configurations.add(printer.printerConfiguration) all_configurations.add(printer.printerConfiguration)
all_configurations.update(printer.availableConfigurations) all_configurations.update(printer.availableConfigurations)
if None in all_configurations: # Shouldn't happen, but it does. I don't see how it could ever happen. Skip adding that configuration. List could end up empty! if None in all_configurations:
# Shouldn't happen, but it does. I don't see how it could ever happen. Skip adding that configuration.
# List could end up empty!
Logger.log("e", "Found a broken configuration in the synced list!") Logger.log("e", "Found a broken configuration in the synced list!")
all_configurations.remove(None) all_configurations.remove(None)
new_configurations = sorted(all_configurations, key = lambda config: config.printerType or "") new_configurations = sorted(all_configurations, key = lambda config: config.printerType or "", reverse = True)
if new_configurations != self._unique_configurations: if new_configurations != self._unique_configurations:
self._unique_configurations = new_configurations self._unique_configurations = new_configurations
self.uniqueConfigurationsChanged.emit() self.uniqueConfigurationsChanged.emit()
# Returns the unique configurations of the printers within this output device
@pyqtProperty("QStringList", notify = uniqueConfigurationsChanged) @pyqtProperty("QStringList", notify = uniqueConfigurationsChanged)
def uniquePrinterTypes(self) -> List[str]: def uniquePrinterTypes(self) -> List[str]:
""" Returns the unique configurations of the printers within this output device """
return list(sorted(set([configuration.printerType or "" for configuration in self._unique_configurations]))) return list(sorted(set([configuration.printerType or "" for configuration in self._unique_configurations])))
def _onPrintersChanged(self) -> None: def _onPrintersChanged(self) -> None:

View file

@ -390,7 +390,7 @@ class ConvexHullDecorator(SceneNodeDecorator):
if self._global_stack.getProperty("print_sequence", "value") == "one_at_a_time": if self._global_stack.getProperty("print_sequence", "value") == "one_at_a_time":
# Find the root node that's placed in the scene; the root of the mesh group. # Find the root node that's placed in the scene; the root of the mesh group.
ancestor = self.getNode() ancestor = self.getNode()
while ancestor.getParent() != self._root: while ancestor.getParent() != self._root and ancestor.getParent() is not None:
ancestor = ancestor.getParent() ancestor = ancestor.getParent()
center = ancestor.getBoundingBox().center center = ancestor.getBoundingBox().center
else: else:

View file

@ -139,7 +139,7 @@ class CuraSceneController(QObject):
def setActiveBuildPlate(self, nr): def setActiveBuildPlate(self, nr):
if nr == self._active_build_plate: if nr == self._active_build_plate:
return return
Logger.log("d", "Select build plate: %s" % nr) Logger.debug(f"Selected build plate: {nr}")
self._active_build_plate = nr self._active_build_plate = nr
Selection.clear() Selection.clear()

View file

@ -0,0 +1,48 @@
from dataclasses import dataclass
from typing import List, Optional
from UM import i18nCatalog
catalog = i18nCatalog("cura")
@dataclass
class ActiveQuality:
""" Represents the active intent+profile combination, contains all information needed to display active quality. """
intent_category: str = "" # Name of the base intent. For example "visual" or "engineering".
intent_name: str = "" # Name of the base intent formatted for display. For Example "Visual" or "Engineering"
profile: str = "" # Name of the base profile. For example "Fine" or "Fast"
custom_profile: str = "" # Name of the custom profile, this is based on profile. For example "MyCoolCustomProfile"
layer_height: float = None # Layer height of quality in mm. For example 0.4
is_experimental: bool = False # If the quality experimental.
def getMainStringParts(self) -> List[str]:
string_parts = []
if self.custom_profile is not None:
string_parts.append(self.custom_profile)
else:
string_parts.append(self.profile)
if self.intent_category != "default":
string_parts.append(self.intent_name)
return string_parts
def getTailStringParts(self) -> List[str]:
string_parts = []
if self.custom_profile is not None:
string_parts.append(self.profile)
if self.intent_category != "default":
string_parts.append(self.intent_name)
if self.layer_height:
string_parts.append(f"{self.layer_height}mm")
if self.is_experimental:
string_parts.append(catalog.i18nc("@label", "Experimental"))
return string_parts
def getStringParts(self) -> List[str]:
return self.getMainStringParts() + self.getTailStringParts()
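A usage sketch for the dataclass above (the import path is hypothetical and the values are illustrative); note that the intent name appears in both the main and the tail parts when a custom profile is active:

from cura.Machines.ActiveQuality import ActiveQuality  # hypothetical import path

quality = ActiveQuality(
    intent_category="engineering",
    intent_name="Engineering",
    profile="Fine",
    custom_profile="MyCoolCustomProfile",
    layer_height=0.1,
    is_experimental=False,
)
print(" - ".join(quality.getStringParts()))
# MyCoolCustomProfile - Engineering - Fine - Engineering - 0.1mm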

View file

@ -108,7 +108,7 @@ class CuraContainerRegistry(ContainerRegistry):
:param container_type: :type{string} Type of the container (machine, quality, ...) :param container_type: :type{string} Type of the container (machine, quality, ...)
:param container_name: :type{string} Name to check :param container_name: :type{string} Name to check
""" """
container_class = ContainerStack if container_type == "machine" else InstanceContainer container_class = ContainerStack if "machine" in container_type else InstanceContainer
return self.findContainersMetadata(container_type = container_class, id = container_name, type = container_type, ignore_case = True) or \ return self.findContainersMetadata(container_type = container_class, id = container_name, type = container_type, ignore_case = True) or \
self.findContainersMetadata(container_type = container_class, name = container_name, type = container_type) self.findContainersMetadata(container_type = container_class, name = container_name, type = container_type)

View file

@@ -49,7 +49,7 @@ class CuraContainerStack(ContainerStack):
         self._empty_material = cura_empty_instance_containers.empty_material_container #type: InstanceContainer
         self._empty_variant = cura_empty_instance_containers.empty_variant_container #type: InstanceContainer

-        self._containers = [self._empty_instance_container for i in range(len(_ContainerIndexes.IndexTypeMap))] #type: List[ContainerInterface]
+        self._containers: List[ContainerInterface] = [self._empty_instance_container for i in _ContainerIndexes.IndexTypeMap]
         self._containers[_ContainerIndexes.QualityChanges] = self._empty_quality_changes
         self._containers[_ContainerIndexes.Quality] = self._empty_quality
         self._containers[_ContainerIndexes.Material] = self._empty_material

@@ -427,4 +427,4 @@ class _ContainerIndexes:
     }

     # Reverse lookup: type -> index
-    TypeIndexMap = dict([(v, k) for k, v in IndexTypeMap.items()])
+    TypeIndexMap = {v: k for k, v in IndexTypeMap.items()}


@@ -1,7 +1,9 @@
-# Copyright (c) 2019 Ultimaker B.V.
+# Copyright (c) 2022 Ultimaker B.V.
 # Cura is released under the terms of the LGPLv3 or higher.

-from typing import Optional
+import copy
+from typing import Optional, cast

 from UM.ConfigurationErrorMessage import ConfigurationErrorMessage
 from UM.Logger import Logger

@@ -27,7 +29,7 @@ class CuraStackBuilder:
         :return: The new global stack or None if an error occurred.
         """
-        from cura.CuraApplication import CuraApplication
+        from cura.CuraApplication import CuraApplication  # inline import needed due to circular import
         application = CuraApplication.getInstance()
         registry = application.getContainerRegistry()
         container_tree = ContainerTree.getInstance()

@@ -91,7 +93,7 @@ class CuraStackBuilder:
         :param extruder_position: The position of the current extruder.
         """
-        from cura.CuraApplication import CuraApplication
+        from cura.CuraApplication import CuraApplication  # inline import needed due to circular import
         application = CuraApplication.getInstance()
         registry = application.getContainerRegistry()
@@ -199,13 +201,21 @@ class CuraStackBuilder:
         :return: A new Global stack instance with the specified parameters.
         """
-        from cura.CuraApplication import CuraApplication
-        application = CuraApplication.getInstance()
-        registry = application.getContainerRegistry()
-
         stack = GlobalStack(new_stack_id)
         stack.setDefinition(definition)

+        cls.createUserContainer(new_stack_id, definition, stack, variant_container, material_container, quality_container)
+
+        return stack
+
+    @classmethod
+    def createUserContainer(cls, new_stack_id: str, definition: DefinitionContainerInterface,
+                            stack: GlobalStack,
+                            variant_container: "InstanceContainer",
+                            material_container: "InstanceContainer",
+                            quality_container: "InstanceContainer") -> None:
+        from cura.CuraApplication import CuraApplication
+        application = CuraApplication.getInstance()
+        registry = application.getContainerRegistry()
+
         # Create user container
         user_container = cls.createUserChangesContainer(new_stack_id + "_user", definition.getId(), new_stack_id,

@@ -221,8 +231,6 @@ class CuraStackBuilder:
         registry.addContainer(user_container)

-        return stack
-
     @classmethod
     def createUserChangesContainer(cls, container_name: str, definition_id: str, stack_id: str,
                                    is_global_stack: bool) -> "InstanceContainer":

@@ -259,3 +267,36 @@ class CuraStackBuilder:
         container_stack.definitionChanges = definition_changes_container
         return definition_changes_container
+
+    @classmethod
+    def createAbstractMachine(cls, definition_id: str) -> Optional[GlobalStack]:
+        """Create a new instance of an abstract machine.
+
+        :param definition_id: The ID of the machine definition to use.
+        :return: The new Abstract Machine or None if an error occurred.
+        """
+        abstract_machine_id = f"{definition_id}_abstract_machine"
+
+        from cura.CuraApplication import CuraApplication
+        application = CuraApplication.getInstance()
+        registry = application.getContainerRegistry()
+
+        abstract_machines = registry.findContainerStacks(id = abstract_machine_id)
+        if abstract_machines:
+            return cast(GlobalStack, abstract_machines[0])
+
+        definitions = registry.findDefinitionContainers(id=definition_id)
+        name = ""
+        if definitions:
+            name = definitions[0].getName()
+
+        stack = cls.createMachine(abstract_machine_id, definition_id, show_warning_message=False)
+        if not stack:
+            return None
+
+        stack.setName(name)
+        stack.setMetaDataEntry("is_abstract_machine", True)
+        stack.setMetaDataEntry("is_online", True)
+
+        return stack
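The new `createAbstractMachine` entry point can be exercised roughly as follows. This is a hedged sketch, not part of the changeset: it assumes a running CuraApplication, and "ultimaker_s5" is only an illustrative definition ID.

```python
# Hedged sketch: assumes Cura is running; the definition ID is illustrative.
from cura.Settings.CuraStackBuilder import CuraStackBuilder

stack = CuraStackBuilder.createAbstractMachine("ultimaker_s5")
if stack is not None:
    # Subsequent calls return the same abstract machine stack instead of recreating it.
    print(stack.getName(), stack.getMetaDataEntry("is_abstract_machine"))
```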


@@ -2,13 +2,15 @@
 # Cura is released under the terms of the LGPLv3 or higher.

 from PyQt6.QtCore import pyqtSignal, pyqtProperty, QObject, QVariant  # For communicating data and events to Qt.
+from UM.Application import Application
 from UM.FlameProfiler import pyqtSlot

 import cura.CuraApplication  # To get the global container stack to find the current machine.
+from UM.Util import parseBool
 from cura.Settings.GlobalStack import GlobalStack
 from UM.Logger import Logger
 from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
-from UM.Scene.SceneNode import SceneNode
 from UM.Scene.Selection import Selection
 from UM.Scene.Iterator.BreadthFirstIterator import BreadthFirstIterator
 from UM.Settings.ContainerRegistry import ContainerRegistry  # Finding containers by ID.
@@ -45,6 +47,7 @@ class ExtruderManager(QObject):
         self._selected_object_extruders = []  # type: List[Union[str, "ExtruderStack"]]

         Selection.selectionChanged.connect(self.resetSelectedObjectExtruders)
+        Application.getInstance().globalContainerStackChanged.connect(self.emitGlobalStackExtrudersChanged)  # When the machine is swapped we must update the active machine extruders

     extrudersChanged = pyqtSignal(QVariant)
     """Signal to notify other components when the list of extruders for a machine definition changes."""

@@ -52,6 +55,21 @@ class ExtruderManager(QObject):
     activeExtruderChanged = pyqtSignal()
     """Notify when the user switches the currently active extruder."""

+    def emitGlobalStackExtrudersChanged(self):
+        # HACK
+        # The emit function can't be directly connected to another signal. This wrapper function is required.
+        # The extrudersChanged signal is emitted early when changing machines. This triggers it a second time
+        # after the extruders have changed properly. This is important for any QML using ExtruderManager.extruderIds.
+        # This is a hack, but other behaviour relies on the updating in this order.
+        self.extrudersChanged.emit(self._application.getGlobalContainerStack().getId())
+
+    @pyqtProperty(int, notify = extrudersChanged)
+    def enabledExtruderCount(self) -> int:
+        global_container_stack = self._application.getGlobalContainerStack()
+        if global_container_stack:
+            return len([extruder for extruder in global_container_stack.extruderList if parseBool(extruder.getMetaDataEntry("enabled", "True"))])
+        return 0
+
     @pyqtProperty(str, notify = activeExtruderChanged)
     def activeExtruderStackId(self) -> Optional[str]:
         """Gets the unique identifier of the currently active extruder stack.
@@ -275,7 +293,7 @@ class ExtruderManager(QObject):
         for extruder_setting in used_adhesion_extruders:
             extruder_str_nr = str(global_stack.getProperty(extruder_setting, "value"))
             if extruder_str_nr == "-1":
-                extruder_str_nr = self._application.getMachineManager().defaultExtruderPosition
+                continue  # An optional extruder doesn't force any extruder to be used if it isn't used already
             if extruder_str_nr in self.extruderIds:
                 used_extruder_stack_ids.add(self.extruderIds[extruder_str_nr])

@@ -298,7 +316,7 @@ class ExtruderManager(QObject):
         # Starts with the adhesion extruder.
         adhesion_type = global_stack.getProperty("adhesion_type", "value")
         if adhesion_type in {"skirt", "brim"}:
-            return global_stack.getProperty("skirt_brim_extruder_nr", "value")
+            return max(0, int(global_stack.getProperty("skirt_brim_extruder_nr", "value")))  # optional skirt/brim extruder defaults to zero
         if adhesion_type == "raft":
             return global_stack.getProperty("raft_base_extruder_nr", "value")

@@ -448,7 +466,6 @@ class ExtruderManager(QObject):
             return False
         return list(active_material_node_qualities.keys())[0] != "empty_quality"
-
     @pyqtSlot(str, result="QVariant")
     def getInstanceExtruderValues(self, key: str) -> List:
         """Get all extruder values for a certain setting.


@@ -142,8 +142,6 @@ class ExtruderStack(CuraContainerStack):
         limit_to_extruder = super().getProperty(key, "limit_to_extruder", context)
         if limit_to_extruder is not None:
-            if limit_to_extruder == -1:
-                limit_to_extruder = int(cura.CuraApplication.CuraApplication.getInstance().getMachineManager().defaultExtruderPosition)
             limit_to_extruder = str(limit_to_extruder)
         if (limit_to_extruder is not None and limit_to_extruder != "-1") and self.getMetaDataEntry("position") != str(limit_to_extruder):


@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Ultimaker B.V.
+# Copyright (c) 2022 Ultimaker B.V.
 # Cura is released under the terms of the LGPLv3 or higher.

 from collections import defaultdict

@@ -8,10 +8,9 @@ import uuid
 from PyQt6.QtCore import pyqtProperty, pyqtSlot, pyqtSignal

-from UM.Decorators import deprecated, override
+from UM.Decorators import override
 from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
 from UM.Settings.ContainerStack import ContainerStack
-from UM.Settings.SettingInstance import InstanceState
 from UM.Settings.ContainerRegistry import ContainerRegistry
 from UM.Settings.Interfaces import PropertyEvaluationContext
 from UM.Logger import Logger

@@ -91,7 +90,6 @@ class GlobalStack(CuraContainerStack):
     @pyqtProperty("QVariantList", notify=configuredConnectionTypesChanged)
     def configuredConnectionTypes(self) -> List[int]:
         """The configured connection types can be used to find out if the global
         stack is configured to be connected with a printer, without having to
         know all the details as to how this is exactly done (and without
         actually setting the stack to be active).

@@ -228,8 +226,6 @@ class GlobalStack(CuraContainerStack):
         # Handle the "limit_to_extruder" property.
         limit_to_extruder = super().getProperty(key, "limit_to_extruder", context)
         if limit_to_extruder is not None:
-            if limit_to_extruder == -1:
-                limit_to_extruder = int(cura.CuraApplication.CuraApplication.getInstance().getMachineManager().defaultExtruderPosition)
             limit_to_extruder = str(limit_to_extruder)
         if limit_to_extruder is not None and limit_to_extruder != "-1" and limit_to_extruder in self._extruders:
             if super().getProperty(key, "settable_per_extruder", context):

@@ -293,7 +289,6 @@ class GlobalStack(CuraContainerStack):
         for extruder_train in extruder_trains:
             extruder_position = extruder_train.getMetaDataEntry("position")
             extruder_check_position.add(extruder_position)
         for check_position in range(machine_extruder_count):
             if str(check_position) not in extruder_check_position:
                 return False

@@ -344,13 +339,17 @@ class GlobalStack(CuraContainerStack):
     def getName(self) -> str:
         return self._metadata.get("group_name", self._metadata.get("name", ""))

-    def setName(self, name: "str") -> None:
+    def setName(self, name: str) -> None:
         super().setName(name)

     nameChanged = pyqtSignal()
     name = pyqtProperty(str, fget=getName, fset=setName, notify=nameChanged)

+    def hasNetworkedConnection(self) -> bool:
+        has_connection = False
+        for connection_type in [ConnectionType.NetworkConnection.value, ConnectionType.CloudConnection.value]:
+            has_connection |= connection_type in self.configuredConnectionTypes
+        return has_connection
+
     ## private:
     global_stack_mime = MimeType(


@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Ultimaker B.V.
+# Copyright (c) 2022 Ultimaker B.V.
 # Cura is released under the terms of the LGPLv3 or higher.

 import time

@@ -19,6 +19,7 @@ from UM.Logger import Logger
 from UM.Message import Message
 from UM.Settings.SettingFunction import SettingFunction
+from UM.Settings.ContainerStack import ContainerStack
 from UM.Signal import postponeSignals, CompressTechnique

 import cura.CuraApplication  # Imported like this to prevent circular references.

@@ -39,6 +40,7 @@ from cura.Settings.cura_empty_instance_containers import (empty_definition_chang
                                                            empty_material_container, empty_quality_container,
                                                            empty_quality_changes_container, empty_intent_container)
 from cura.UltimakerCloud.UltimakerCloudConstants import META_UM_LINKED_TO_ACCOUNT
+from .ActiveQuality import ActiveQuality
 from .CuraStackBuilder import CuraStackBuilder
@@ -98,7 +100,7 @@ class MachineManager(QObject):
         self._application.getPreferences().addPreference("cura/active_machine", "")

-        self._printer_output_devices = []  # type: List[PrinterOutputDevice]
+        self._printer_output_devices: List[PrinterOutputDevice] = []
         self._application.getOutputDeviceManager().outputDevicesChanged.connect(self._onOutputDevicesChanged)
         # There might already be some output devices by the time the signal is connected
         self._onOutputDevicesChanged()

@@ -111,7 +113,7 @@ class MachineManager(QObject):
         self._application.callLater(self.setInitialActiveMachine)

-        containers = CuraContainerRegistry.getInstance().findInstanceContainers(id = self.activeMaterialId)  # type: List[InstanceContainer]
+        containers: List[InstanceContainer] = CuraContainerRegistry.getInstance().findInstanceContainers(id = self.activeMaterialId)
         if containers:
             containers[0].nameChanged.connect(self._onMaterialNameChanged)
@@ -186,6 +188,32 @@ class MachineManager(QObject):
         self.outputDevicesChanged.emit()

+    def getMachinesWithDefinition(self, definition_id: str, online_only=False) -> List[ContainerStack]:
+        """ Fetches all container stacks that match definition_id.
+
+        :param definition_id: The id of the machine definition.
+        :return: A list of Containers that match definition_id
+        """
+        from cura.CuraApplication import CuraApplication  # In function to avoid circular import
+        application = CuraApplication.getInstance()
+        registry = application.getContainerRegistry()
+
+        machines = registry.findContainerStacks(type="machine")
+        # Filter machines that match definition
+        machines = filter(lambda machine: machine.definition.id == definition_id, machines)
+        # Filter only LAN and Cloud printers
+        machines = filter(lambda machine: ConnectionType.CloudConnection in machine.configuredConnectionTypes or
+                                          ConnectionType.NetworkConnection in machine.configuredConnectionTypes,
+                          machines)
+        if online_only:
+            # LAN printers can have is_online = False but should still be included,
+            # their online status is only checked when they are the active printer.
+            machines = filter(lambda machine: parseBool(machine.getMetaDataEntry("is_online", False) or
+                                              ConnectionType.NetworkConnection in machine.configuredConnectionTypes),
+                              machines)
+
+        return list(machines)
+
     @pyqtProperty(QObject, notify = currentConfigurationChanged)
     def currentConfiguration(self) -> PrinterConfigurationModel:
         return self._current_printer_configuration
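A hedged usage sketch, not part of the changeset, for the new `getMachinesWithDefinition` helper: it assumes a running CuraApplication, and "ultimaker_s5" is only an illustrative definition ID.

```python
# Hedged sketch: requires a running Cura instance; the definition ID is illustrative.
from cura.CuraApplication import CuraApplication

machine_manager = CuraApplication.getInstance().getMachineManager()
for stack in machine_manager.getMachinesWithDefinition("ultimaker_s5", online_only = True):
    print(stack.getId(), stack.getMetaDataEntry("is_online"))
```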
@@ -332,6 +360,7 @@ class MachineManager(QObject):
         extruder_manager = ExtruderManager.getInstance()
         extruder_manager.fixSingleExtrusionMachineExtruderDefinition(global_stack)
         if not global_stack.isValid():
+            Logger.warning("Global stack isn't valid, adding it to faulty container list")
             # Mark global stack as invalid
             ConfigurationErrorMessage.getInstance().addFaultyContainers(global_stack.getId())
             return  # We're done here
@@ -503,6 +532,10 @@ class MachineManager(QObject):
     def printerConnected(self) -> bool:
         return bool(self._printer_output_devices)

+    @pyqtProperty(bool, notify = globalContainerChanged)
+    def activeMachineIsAbstractCloudPrinter(self) -> bool:
+        return len(self._printer_output_devices) == 1 and self._printer_output_devices[0].__class__.__name__ == "AbstractCloudOutputDevice"
+
     @pyqtProperty(bool, notify = printerConnectedStatusChanged)
     def activeMachineIsGroup(self) -> bool:
         if self.activeMachine is None:
@@ -872,7 +905,8 @@ class MachineManager(QObject):
         if self._global_container_stack is None \
                 or self._global_container_stack.getProperty(setting_key, "value") == new_value \
-                or self.numberExtrudersEnabled < 2:
+                or self._global_container_stack.definitionChanges.getProperty("extruders_enabled_count", "value") is None \
+                or self._global_container_stack.definitionChanges.getProperty("extruders_enabled_count", "value") < 2:
             return

         user_changes_container = self._global_container_stack.userChanges
@@ -977,7 +1011,7 @@ class MachineManager(QObject):
         for position, extruder in enumerate(self._global_container_stack.extruderList):
             if extruder.isEnabled and int(position) < machine_extruder_count:
                 extruder_count += 1
-        if self.numberExtrudersEnabled != extruder_count:
+        if self._global_container_stack.definitionChanges.getProperty("extruders_enabled_count", "value") != extruder_count:
             definition_changes_container.setProperty("extruders_enabled_count", "value", extruder_count)
             self.numberExtrudersEnabledChanged.emit()
@@ -1598,33 +1632,31 @@ class MachineManager(QObject):
     #   Examples:
     #   - "my_profile - Fine" (only based on a default quality, no intent involved)
     #   - "my_profile - Engineering - Fine" (based on an intent)
-    @pyqtProperty("QVariantMap", notify = activeQualityDisplayNameChanged)
-    def activeQualityDisplayNameMap(self) -> Dict[str, str]:
-        global_stack = self._application.getGlobalContainerStack()
-        if global_stack is None:
-            return {"main": "",
-                    "suffix": ""}
-
-        display_name = global_stack.quality.getName()
-
-        intent_category = self.activeIntentCategory
-        if intent_category != "default":
-            intent_display_name = IntentCategoryModel.translation(intent_category,
-                                                                  "name",
-                                                                  intent_category.title())
-            display_name = "{intent_name} - {the_rest}".format(intent_name = intent_display_name,
-                                                               the_rest = display_name)
-
-        main_part = display_name
-        suffix_part = ""
-
-        # Not a custom quality
-        if global_stack.qualityChanges != empty_quality_changes_container:
-            main_part = self.activeQualityOrQualityChangesName
-            suffix_part = display_name
-
-        return {"main": main_part,
-                "suffix": suffix_part}
+    @pyqtProperty("QList<QString>", notify = activeQualityDisplayNameChanged)
+    def activeQualityDisplayNameStringParts(self) -> List[str]:
+        return self.activeQualityDisplayNameMap().getStringParts()
+
+    @pyqtProperty("QList<QString>", notify = activeQualityDisplayNameChanged)
+    def activeQualityDisplayNameMainStringParts(self) -> List[str]:
+        return self.activeQualityDisplayNameMap().getMainStringParts()
+
+    @pyqtProperty("QList<QString>", notify = activeQualityDisplayNameChanged)
+    def activeQualityDisplayNameTailStringParts(self) -> List[str]:
+        return self.activeQualityDisplayNameMap().getTailStringParts()
+
+    def activeQualityDisplayNameMap(self) -> ActiveQuality:
+        global_stack = self._application.getGlobalContainerStack()
+        if global_stack is None:
+            return ActiveQuality()
+
+        return ActiveQuality(
+            profile = global_stack.quality.getName(),
+            intent_category = self.activeIntentCategory,
+            intent_name = IntentCategoryModel.translation(self.activeIntentCategory, "name", self.activeIntentCategory.title()),
+            custom_profile = self.activeQualityOrQualityChangesName if global_stack.qualityChanges is not empty_quality_changes_container else None,
+            layer_height = self.activeQualityLayerHeight if self.isActiveQualitySupported else None,
+            is_experimental = self.isActiveQualityExperimental and self.isActiveQualitySupported
+        )

     @pyqtSlot(str)
     def setIntentByCategory(self, intent_category: str) -> None:
@@ -1743,7 +1775,9 @@ class MachineManager(QObject):
     @pyqtProperty(bool, notify = activeQualityGroupChanged)
     def hasNotSupportedQuality(self) -> bool:
         global_container_stack = self._application.getGlobalContainerStack()
-        return (not global_container_stack is None) and global_container_stack.quality == empty_quality_container and global_container_stack.qualityChanges == empty_quality_changes_container
+        return global_container_stack is not None \
+               and global_container_stack.quality == empty_quality_container \
+               and global_container_stack.qualityChanges == empty_quality_changes_container

     @pyqtProperty(bool, notify = activeQualityGroupChanged)
     def isActiveQualityCustom(self) -> bool:


@@ -42,21 +42,8 @@ class SimpleModeSettingsManager(QObject):
         for extruder_stack in global_stack.extruderList:
             user_setting_keys.update(extruder_stack.userChanges.getAllKeys())

-        # remove settings that are visible in recommended (we don't show the reset button for those)
-        for skip_key in self.__ignored_custom_setting_keys:
-            if skip_key in user_setting_keys:
-                user_setting_keys.remove(skip_key)
-
         has_customized_user_settings = len(user_setting_keys) > 0

         if has_customized_user_settings != self._is_profile_customized:
             self._is_profile_customized = has_customized_user_settings
             self.isProfileCustomizedChanged.emit()
-
-    # These are the settings included in the Simple ("Recommended") Mode, so only when the other settings have been
-    # changed, we consider it as a user customized profile in the Simple ("Recommended") Mode.
-    __ignored_custom_setting_keys = ["support_enable",
-                                     "infill_sparse_density",
-                                     "gradual_infill_steps",
-                                     "adhesion_type",
-                                     "support_extruder_nr"]


@@ -10,10 +10,13 @@ if TYPE_CHECKING:
 #
-# This class manages a all registered upon-exit checks that need to be perform when the application tries to exit.
-# For example, to show a confirmation dialog when there is USB printing in progress, etc. All callbacks will be called
-# in the order of when they got registered. If all callbacks "passes", that is, for example, if the user clicks "yes"
-# on the exit confirmation dialog or nothing that's blocking the exit, then the application will quit after that.
+# This class manages all registered upon-exit checks
+# that need to be performed when the application tries to exit.
+# For example, show a confirmation dialog when there is USB printing in progress.
+# All callbacks will be called in the order of when they were registered.
+# If all callbacks "pass", for example:
+# if the user clicks "yes" on the exit confirmation dialog
+# and nothing else is blocking the exit, then the application will quit.
 #
 class OnExitCallbackManager:

@@ -35,10 +38,12 @@ class OnExitCallbackManager:
     def getIsAllChecksPassed(self) -> bool:
         return self._is_all_checks_passed

-    # Trigger the next callback if available. If not, it means that all callbacks have "passed", which means we should
-    # not block the application to quit, and it will call the application to actually quit.
+    # Trigger the next callback if there is one.
+    # If not, all callbacks have "passed",
+    # which means we should not prevent the application from quitting,
+    # and we call the application to actually quit.
     def triggerNextCallback(self) -> None:
-        # Get the next callback and schedule that if
+        # Get the next callback and schedule it
         this_callback = None
         if self._current_callback_idx < len(self._on_exit_callback_list):
             this_callback = self._on_exit_callback_list[self._current_callback_idx]

@@ -55,10 +60,11 @@ class OnExitCallbackManager:
             # Tell the application to exit
             self._application.callLater(self._application.closeApplication)

-    # This is the callback function which an on-exit callback should call when it finishes, it should provide the
-    # "should_proceed" flag indicating whether this check has "passed", or in other words, whether quitting the
-    # application should be blocked. If the last on-exit callback doesn't block the quitting, it will call the next
-    # registered on-exit callback if available.
+    # Callback function which an on-exit callback calls when it finishes.
+    # It provides a "should_proceed" flag indicating whether the check has "passed",
+    # or whether quitting the application should be blocked.
+    # If the last on-exit callback doesn't block quitting, it will call the next
+    # registered on-exit callback if one is available.
     def onCurrentCallbackFinished(self, should_proceed: bool = True) -> None:
         if not should_proceed:
             Logger.log("d", "on-app-exit callback finished and we should not proceed.")
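A rough sketch, not from the changeset, of how an on-exit check is typically registered with this manager. It assumes `getOnExitCallbackManager` and `addCallback` behave as in current Cura, and the "still printing" condition is purely illustrative.

```python
# Hedged sketch: the condition is made up; the callback must always report back via
# onCurrentCallbackFinished() so that the chain either continues or blocks the exit.
import cura.CuraApplication  # assumes a running Cura instance

application = cura.CuraApplication.CuraApplication.getInstance()

def _check_print_in_progress():
    still_printing = False  # illustrative condition, e.g. an active USB print job
    application.getOnExitCallbackManager().onCurrentCallbackFinished(should_proceed = not still_printing)

application.getOnExitCallbackManager().addCallback(_check_print_in_progress)
```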


@@ -12,7 +12,7 @@ class AddPrinterPagesModel(WelcomePagesModel):
     def initialize(self, cancellable: bool = True) -> None:
         self._pages.append({"id": "add_network_or_local_printer",
-                            "page_url": self._getBuiltinWelcomePagePath("AddNetworkOrLocalPrinterContent.qml"),
+                            "page_url": self._getBuiltinWelcomePagePath("AddUltimakerOrThirdPartyPrinterStack.qml"),
                             "next_page_id": "machine_actions",
                             "next_page_button_text": self._catalog.i18nc("@action:button", "Add"),
                             })


@@ -23,9 +23,9 @@ catalog = i18nCatalog("cura")
 class PrintInformation(QObject):
-    """A class for processing and the print times per build plate as well as managing the job name
+    """A class for processing the print times per build plate and managing the job name

-    This class also mangles the current machine name and the filename of the first loaded mesh into a job name.
+    This class also combines the current machine name and the filename of the first loaded mesh into a job name.
     This job name is requested by the JobSpecs qml file.
     """

@@ -38,6 +38,8 @@ class PrintInformation(QObject):
         self.initializeCuraMessagePrintTimeProperties()

+        self.slice_uuid: Optional[str] = None
+
         # Indexed by build plate number
         self._material_lengths = {}  # type: Dict[int, List[float]]
         self._material_weights = {}  # type: Dict[int, List[float]]

@@ -184,7 +186,7 @@ class PrintInformation(QObject):
             if time != time:  # Check for NaN. Engine can sometimes give us weird values.
                 duration.setDuration(0)
-                Logger.log("w", "Received NaN for print duration message")
+                Logger.warning("Received NaN for print duration message")
                 continue

             total_estimated_time += time

@@ -366,7 +368,7 @@ class PrintInformation(QObject):
             mime_type = MimeTypeDatabase.getMimeTypeForFile(name)
             data = mime_type.stripExtension(name)
         except MimeTypeNotFoundError:
-            Logger.log("w", "Unsupported Mime Type Database file extension %s", name)
+            Logger.warning(f"Unsupported Mime Type Database file extension {name}")

         if data is not None and check_name is not None:
             self._base_name = data

@@ -390,7 +392,7 @@ class PrintInformation(QObject):
         return self._base_name

     def _defineAbbreviatedMachineName(self) -> None:
-        """Created an acronym-like abbreviated machine name from the currently active machine name.
+        """Creates an abbreviated machine name from the currently active machine name.

         Called each time the global stack is switched.
         """

@@ -444,7 +446,7 @@ class PrintInformation(QObject):
             self.setToZeroPrintInformation(self._active_build_plate)

     def _onOutputStart(self, output_device: OutputDevice) -> None:
-        """If this is the sort of output 'device' (like local or online file storage, rather than a printer),
+        """If this is a sort of output 'device' (like local or online file storage, rather than a printer),
         the user could have altered the file-name, and thus the project name should be altered as well."""
         if isinstance(output_device, ProjectOutputDevice):
             new_name = output_device.getLastOutputName()


@@ -265,7 +265,7 @@ class WelcomePagesModel(ListModel):
                        "should_show_function": self.shouldShowCloudPage,
                        },
                       {"id": "add_network_or_local_printer",
-                       "page_url": self._getBuiltinWelcomePagePath("AddNetworkOrLocalPrinterContent.qml"),
+                       "page_url": self._getBuiltinWelcomePagePath("AddUltimakerOrThirdPartyPrinterStack.qml"),
                        "next_page_id": "machine_actions",
                        },
                       {"id": "add_printer_by_ip",


@@ -62,15 +62,21 @@ class WhatsNewPagesModel(WelcomePagesModel):
     def initialize(self) -> None:
         self._pages = []
-        self._pages.append({"id": "whats_new",
-                            "page_url": self._getBuiltinWelcomePagePath("WhatsNewContent.qml"),
-                            "next_page_button_text": self._catalog.i18nc("@action:button", "Skip"),
-                            "next_page_id": "changelog"
-                            })
-        self._pages.append({"id": "changelog",
-                            "page_url": self._getBuiltinWelcomePagePath("ChangelogContent.qml"),
-                            "next_page_button_text": self._catalog.i18nc("@action:button", "Close"),
-                            })
+        try:
+            self._pages.append({"id": "whats_new",
+                                "page_url": self._getBuiltinWelcomePagePath("WhatsNewContent.qml"),
+                                "next_page_button_text": self._catalog.i18nc("@action:button", "Skip"),
+                                "next_page_id": "changelog"
+                                })
+        except FileNotFoundError:
+            Logger.warning("Unable to find what's new page")
+        try:
+            self._pages.append({"id": "changelog",
+                                "page_url": self._getBuiltinWelcomePagePath("ChangelogContent.qml"),
+                                "next_page_button_text": self._catalog.i18nc("@action:button", "Close"),
+                                })
+        except FileNotFoundError:
+            Logger.warning("Unable to find changelog page")
         self.setItems(self._pages)

         images, max_image = WhatsNewPagesModel._collectOrdinalFiles(Resources.Images, WhatsNewPagesModel.image_formats)


@@ -1,71 +0,0 @@
#!/usr/bin/env bash

# Abort at the first error.
set -e

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
PROJECT_DIR="$( cd "${SCRIPT_DIR}/.." && pwd )"

# Make sure that environment variables are set properly
export PATH="${CURA_BUILD_ENV_PATH}/bin:${PATH}"
export PKG_CONFIG_PATH="${CURA_BUILD_ENV_PATH}/lib/pkgconfig:${PKG_CONFIG_PATH}"
export LD_LIBRARY_PATH="${CURA_BUILD_ENV_PATH}/lib:${LD_LIBRARY_PATH}"

cd "${PROJECT_DIR}"

#
# Clone Uranium and set PYTHONPATH first
#

# Check the branch to use for Uranium.
# It tries the following branch names and uses the first one that's available.
# - GITHUB_HEAD_REF: the branch name of a PR. If it's not a PR, it will be empty.
# - GITHUB_BASE_REF: the branch a PR is based on. If it's not a PR, it will be empty.
# - GITHUB_REF: the branch name if it's a branch on the repository;
#               refs/pull/123/merge if it's a pull_request.
# - master: the master branch. It should always exist.

# For debugging.
echo "GITHUB_REF: ${GITHUB_REF}"
echo "GITHUB_HEAD_REF: ${GITHUB_HEAD_REF}"
echo "GITHUB_BASE_REF: ${GITHUB_BASE_REF}"

GIT_REF_NAME_LIST=( "${GITHUB_HEAD_REF}" "${GITHUB_BASE_REF}" "${GITHUB_REF}" "master" )
for git_ref_name in "${GIT_REF_NAME_LIST[@]}"
do
    if [ -z "${git_ref_name}" ]; then
        continue
    fi
    git_ref_name="$(basename "${git_ref_name}")"
    # Skip refs/pull/1234/merge as pull requests use it as GITHUB_REF
    if [[ "${git_ref_name}" == "merge" ]]; then
        echo "Skip [${git_ref_name}]"
        continue
    fi
    URANIUM_BRANCH="${git_ref_name}"
    output="$(git ls-remote --heads https://github.com/Ultimaker/Uranium.git "${URANIUM_BRANCH}")"
    if [ -n "${output}" ]; then
        echo "Found Uranium branch [${URANIUM_BRANCH}]."
        break
    else
        echo "Could not find Uranium branch [${URANIUM_BRANCH}], try next."
    fi
done

echo "Using Uranium branch ${URANIUM_BRANCH} ..."
git clone --depth=1 -b "${URANIUM_BRANCH}" https://github.com/Ultimaker/Uranium.git "${PROJECT_DIR}"/Uranium
export PYTHONPATH="${PROJECT_DIR}/Uranium:.:${PYTHONPATH}"

mkdir build
cd build
cmake \
    -DCMAKE_BUILD_TYPE=Debug \
    -DCMAKE_PREFIX_PATH="${CURA_BUILD_ENV_PATH}" \
    -DURANIUM_DIR="${PROJECT_DIR}/Uranium" \
    -DBUILD_TESTS=ON \
    -DPRINT_PLUGIN_LIST=OFF \
    -DGENERATE_TRANSLATIONS=OFF \
    ..
make

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
cd build
ctest -j4 --output-on-failure -T Test

docs/Report.md Normal file

@@ -0,0 +1,81 @@
# Reporting Issues
Please attach the following information when <br>
reporting crashes or similar issues.
<br>
## DxDiag
### ![Badge Windows]
The log as produced by **dxdiag**.
<kbd>start</kbd>  »  <kbd>run</kbd>  »  <kbd>dxdiag</kbd>  »  <kbd>save output</kbd>
<br>
<br>
## Cura GUI Log
If the Cura user interface still starts, you can also <br>
reach these directories from the application menu:
<kbd>Help</kbd>  »  <kbd>Show settings folder</kbd>
<br>
### ![Badge Windows]
```
%APPDATA%\cura\< >\cura.log
```
or
```
C:\Users\<your username>\AppData\Roaming\cura\< >\cura.log
```
<br>
### ![Badge Linux]
```
~/.local/share/cura/< >/cura.log
```
<br>
### ![Badge MacOS]
```
~/Library/Application Support/cura/< >/cura.log
```
<br>
<br>
## Alternative
An alternative is to install the **[ExtensiveSupportLogging]** <br>
plugin; this creates a zip folder of the relevant log files.
If you're experiencing performance issues, we might ask <br>
you to connect the CPU profiler in this plugin and attach <br>
the collected data to your support ticket.
<br>
<!----------------------------------------------------------------------------->
[ExtensiveSupportLogging]: https://marketplace.ultimaker.com/app/cura/plugins/UltimakerPackages/ExtensiveSupportLogging
<!---------------------------------[ Badges ]---------------------------------->
[Badge Windows]: https://img.shields.io/badge/Windows-0078D6?style=for-the-badge&logoColor=white&logo=Windows
[Badge Linux]: https://img.shields.io/badge/Linux-00A95C?style=for-the-badge&logoColor=white&logo=Linux
[Badge MacOS]: https://img.shields.io/badge/MacOS-403C3D?style=for-the-badge&logoColor=white&logo=MacOS


@@ -2,32 +2,77 @@ Setting Properties
====
Each setting in Cura has a number of properties. It's not just a key and a value. This page lists the properties that a setting can define; a sketch of a complete definition follows the list.

* `key` (string): __The identifier by which the setting is referenced.__
    * This is not a human-readable name, but just a reference string, such as `layer_height_0`.
    * This is not actually a real property but just an identifier; it can't be changed.
    * Typically these are named with the most significant category first, in order to sort them better, such as `material_print_temperature`.
* `value` (optional): __The current value of the setting.__
    * This can be a function (an arbitrary Python expression) that depends on the values of other settings.
    * If it's not present, the `default_value` is used.
* `default_value`: __A default value for the setting if `value` is undefined.__
    * This property is required.
    * It can't be a Python expression, but it can be any JSON type.
    * This is made separate so that CuraEngine can read it out for its debugging mode via the command line, without needing a complete Python interpreter.
* `label` (string): __The human-readable name for the setting.__
    * This label is translated.
* `description` (string): __A longer description of what the setting does when you change it.__
    * This description is translated.
* `type` (string): __The type of value that this setting contains.__
    * Allowed types are: `bool`, `str`, `float`, `int`, `enum`, `category`, `[int]`, `vec3`, `polygon` and `polygons`.
* `unit` (optional string): __A unit that is displayed at the right-hand side of the text field where the user enters the setting value.__
* `resolve` (optional string): __A Python expression that resolves disagreements for global settings if multiple per-extruder profiles define different values for a setting.__
    * Typically this takes the values for the setting from all stacks and computes one final value for it that will be used for the global setting. For instance, the `resolve` function for the build plate temperature is `max(extruderValues('material_bed_temperature'))`, meaning that it will use the hottest bed temperature of all materials of the extruders in use.
* `limit_to_extruder` (optional): __A Python expression that indicates which extruder a setting will be obtained from.__
    * This is used for settings that may be extruder-specific but the extruder is not necessarily the current extruder. For instance, support settings need to be evaluated for the support extruder. Infill settings need to be evaluated for the infill extruder if the infill extruder is changed.
* `enabled` (optional string or boolean): __Whether the setting can currently be made visible for the user.__
    * This can be a simple true/false, or a Python expression that depends on other settings.
    * Typically used for settings that don't apply when another setting is disabled, such as to hide the support settings if support is disabled.
* `minimum_value` (optional): __The lowest acceptable value for this setting.__
    * If it's any lower, Cura will not allow the user to slice.
    * This property only applies to numerical settings.
    * By convention this is used to prevent setting values that are technically or physically impossible, such as a layer height of 0mm.
* `maximum_value` (optional): __The highest acceptable value for this setting.__
    * If it's any higher, Cura will not allow the user to slice.
    * This property only applies to numerical settings.
    * By convention this is used to prevent setting values that are technically or physically impossible, such as a support overhang angle of more than 90 degrees.
* `minimum_value_warning` (optional): __The threshold under which a warning is displayed to the user.__
    * This property only applies to numerical settings.
    * By convention this is used to indicate that it will probably not print very nicely with such a low setting value.
* `maximum_value_warning` (optional): __The threshold above which a warning is displayed to the user.__
    * This property only applies to numerical settings.
    * By convention this is used to indicate that it will probably not print very nicely with such a high setting value.
* `settable_globally` (optional boolean): __Whether the setting can be changed globally.__
    * For some mesh-type settings such as `support_mesh` this doesn't make sense, so those can't be changed globally. They are not displayed in the main settings list then.
* `settable_per_meshgroup` (optional boolean): __Whether a setting can be changed per group of meshes.__
    * *This is currently unused by Cura.*
* `settable_per_extruder` (optional boolean): __Whether a setting can be changed per extruder.__
    * Some settings, like the build plate temperature, can't be adjusted separately for each extruder. An icon is shown in the interface to indicate this.
    * If the user changes these settings they are stored in the global stack.
* `settable_per_mesh` (optional boolean): __Whether a setting can be changed per mesh.__
    * The settings that can be changed per mesh are shown in the list of available settings in the per-object settings tool.
* `children` (optional list): __A list of child settings.__
    * These are displayed with an indentation. If all child settings are overridden by the user, the parent setting gets greyed out to indicate that the parent setting has no effect any more. This is not strictly always the case though, because that would depend on the inheritance functions in the `value`.
* `icon` (optional string): __A path to an icon to be displayed.__
    * Only applies to setting categories.
* `allow_empty` (optional bool): __Whether the setting is allowed to be empty.__
    * If it's not, this will be treated as a setting error and Cura will not allow the user to slice.
    * Only applies to string-type settings.
* `warning_description` (optional string): __A warning message to display when the setting has a warning value.__
    * *This is currently unused by Cura.*
* `error_description` (optional string): __An error message to display when the setting has an error value.__
    * *This is currently unused by Cura.*
* `options` (dictionary): __A list of values that the user can choose from.__
    * The keys of this dictionary are keys that CuraEngine identifies the option with.
    * The values are human-readable strings and will be translated.
    * Only applies to (and only required for) enum-type settings.
* `comments` (optional string): __Comments to other programmers about the setting.__
    * *This is currently unused by Cura.*
* `is_uuid` (optional boolean): __Whether or not this setting indicates a UUID-4.__
    * If it is, the setting will indicate an error if it's not in the correct format.
    * Only applies to string-type settings.
* `regex_blacklist_pattern` (optional string): __A regular expression, where if the setting value matches with this regular expression, it gets an error state.__
    * Only applies to string-type settings.
* `error_value` (optional): __If the setting value is equal to this value, it will show a setting error.__
    * This is used to display errors for non-numerical settings such as checkboxes.
* `warning_value` (optional): __If the setting value is equal to this value, it will show a setting warning.__
    * This is used to display warnings for non-numerical settings such as checkboxes.
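To make the list above concrete, here is a sketch of what a fictional setting definition using several of these properties could look like. Real definitions live in JSON files such as `fdmprinter.def.json`; the Python dict below only mirrors that structure for illustration, and the key `example_wall_thickness` does not exist in Cura.

```python
# Illustrative only: a made-up setting definition mirroring the JSON structure.
example_setting = {
    "example_wall_thickness": {
        "label": "Example Wall Thickness",
        "description": "Made-up setting used to illustrate the properties above.",
        "type": "float",
        "unit": "mm",
        "default_value": 0.8,
        "value": "wall_line_width_0 * 2",           # Python expression over other settings
        "minimum_value": "0",
        "minimum_value_warning": "wall_line_width_0",
        "settable_per_mesh": True,
        "settable_per_extruder": True,
        "enabled": "not magic_spiralize",
    }
}
```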


@ -1,21 +1,33 @@
Repositories
====

Cura uses a number of repositories where parts of our source code are separated, in order to get a cleaner architecture. Those repositories are:
* [Cura](https://github.com/Ultimaker/Cura) is the main repository for the front-end of Cura. This contains:
    - all of the business logic for the front-end, including the specific types of profiles that are available
    - the concept of 3D printers and materials
    - specific tools for handling 3D printed models
    - pretty much all of the GUI
    - Ultimaker services such as the Marketplace and accounts.
* [Uranium](https://github.com/Ultimaker/Uranium) is the underlying framework that the Cura repository is built on: a framework for desktop applications that handle 3D models and have a separate back-end. It provides Cura with:
    - a basic GUI framework ([Qt](https://www.qt.io/))
    - a 3D scene and a rendering system
    - a plug-in system
    - a system for stacked profiles that change settings.
* [CuraEngine](https://github.com/Ultimaker/CuraEngine) is the slicer that Cura starts in the background. It does the actual work of converting 3D models into a toolpath for the printer (a minimal invocation sketch follows this list).
* [libArcus](https://github.com/Ultimaker/libArcus) handles the communication with CuraEngine. It is a small library that wraps around [Protobuf](https://developers.google.com/protocol-buffers/) to make it run over a local socket.
* [cura-build](https://github.com/Ultimaker/cura-build) contains Cura's build scripts.
* [cura-build-environment](https://github.com/Ultimaker/cura-build-environment) contains the build scripts for building dependencies.
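As an illustration of that front-end/back-end split, the sketch below shows roughly how a slice can be started by invoking a CuraEngine binary directly from Python. This is not Cura's actual code path (Cura drives CuraEngine through libArcus over a local socket); the binary name, definition file and model paths are assumptions for the example.

```python
# Rough sketch only: calling a CuraEngine binary directly, outside of Cura.
# Assumes a `CuraEngine` executable on PATH; all file paths are placeholders.
import subprocess

result = subprocess.run(
    [
        "CuraEngine", "slice",
        "-j", "resources/definitions/fdmprinter.def.json",  # printer and setting definitions
        "-l", "model.stl",                                   # model to slice
        "-o", "model.gcode",                                 # output toolpath
    ],
    check=False,
)
print("CuraEngine exited with code", result.returncode)
```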
There are also a number of repositories under our control that are not integral parts of Cura's architecture, but more like separated side-gigs:
* [libSavitar](https://github.com/Ultimaker/libSavitar) is used for loading and writing 3MF files.
* [libCharon](https://github.com/Ultimaker/libCharon) is used for loading and writing UFP files.
* [cura-binary-data](https://github.com/Ultimaker/cura-binary-data) contains pre-compiled parts that keep the build system a bit simpler. It holds things which would require considerable tooling to build automatically, such as:
    - the machine-readable translation files
    - the Marlin builds for firmware updates
* [Cura-squish-tests](https://github.com/Ultimaker/Cura-squish-tests) contains automated GUI tests.
* [fdm_materials](https://github.com/Ultimaker/fdm_materials) stores material profiles. This is separated out and combined in our build process, so that the firmware for Ultimaker's printers can use the same set of profiles too.
Interplay
----
At a very high level, Cura's repositories interconnect as follows:

![Overview of interplay between repositories](resources/repositories.svg)

54
docs/resources/deps.dot Normal file
View file

@ -0,0 +1,54 @@
digraph {
"cpython/3.10.4@ultimaker/testing" -> "zlib/1.2.12"
"cpython/3.10.4@ultimaker/testing" -> "openssl/1.1.1l"
"cpython/3.10.4@ultimaker/testing" -> "expat/2.4.1"
"cpython/3.10.4@ultimaker/testing" -> "libffi/3.2.1"
"cpython/3.10.4@ultimaker/testing" -> "mpdecimal/2.5.0@ultimaker/testing"
"cpython/3.10.4@ultimaker/testing" -> "libuuid/1.0.3"
"cpython/3.10.4@ultimaker/testing" -> "libxcrypt/4.4.25"
"cpython/3.10.4@ultimaker/testing" -> "bzip2/1.0.8"
"cpython/3.10.4@ultimaker/testing" -> "gdbm/1.19"
"cpython/3.10.4@ultimaker/testing" -> "sqlite3/3.36.0"
"cpython/3.10.4@ultimaker/testing" -> "tk/8.6.10"
"cpython/3.10.4@ultimaker/testing" -> "ncurses/6.2"
"cpython/3.10.4@ultimaker/testing" -> "xz_utils/5.2.5"
"pynest2d/5.1.0-beta+3@ultimaker/stable" -> "libnest2d/5.1.0-beta+3@ultimaker/stable"
"pynest2d/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"freetype/2.12.1" -> "libpng/1.6.37"
"freetype/2.12.1" -> "zlib/1.2.12"
"freetype/2.12.1" -> "bzip2/1.0.8"
"freetype/2.12.1" -> "brotli/1.0.9"
"savitar/5.1.0-beta+3@ultimaker/stable" -> "pugixml/1.12.1"
"savitar/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"arcus/5.1.0-beta+3@ultimaker/stable" -> "protobuf/3.17.1"
"arcus/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"arcus/5.1.0-beta+3@ultimaker/stable" -> "zlib/1.2.12"
"libpng/1.6.37" -> "zlib/1.2.12"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "clipper/6.4.2"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "boost/1.78.0"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "rapidjson/1.1.0"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "stb/20200203"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "protobuf/3.17.1"
"curaengine/5.1.0-beta+3@ultimaker/stable" -> "arcus/5.1.0-beta+3@ultimaker/stable"
"tcl/8.6.10" -> "zlib/1.2.12"
"uranium/5.1.0-beta+3@ultimaker/stable" -> "arcus/5.1.0-beta+3@ultimaker/stable"
"uranium/5.1.0-beta+3@ultimaker/stable" -> "cpython/3.10.4@ultimaker/testing"
"libnest2d/5.1.0-beta+3@ultimaker/stable" -> "boost/1.78.0"
"libnest2d/5.1.0-beta+3@ultimaker/stable" -> "clipper/6.4.2"
"libnest2d/5.1.0-beta+3@ultimaker/stable" -> "nlopt/2.7.0"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "arcus/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "curaengine/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "savitar/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "pynest2d/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "uranium/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "fdm_materials/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "cura_binary_data/5.1.0-beta+3@ultimaker/stable"
"conanfile.py (cura/5.1.0-beta+3@ultimaker/testing)" -> "cpython/3.10.4@ultimaker/testing"
"fontconfig/2.13.93" -> "freetype/2.12.1"
"fontconfig/2.13.93" -> "expat/2.4.1"
"fontconfig/2.13.93" -> "libuuid/1.0.3"
"tk/8.6.10" -> "tcl/8.6.10"
"tk/8.6.10" -> "fontconfig/2.13.93"
"tk/8.6.10" -> "xorg/system"
"protobuf/3.17.1" -> "zlib/1.2.12"
}
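The file above is plain Graphviz DOT, so the dependency graph can be rendered locally for inspection. A minimal sketch, assuming the Graphviz `dot` tool is installed and the file is saved as `docs/resources/deps.dot`:

```python
# Render the Conan dependency graph to SVG with the Graphviz command-line tool.
# Assumes `dot` is installed and on PATH; paths are relative to the repository root.
import subprocess

subprocess.run(
    ["dot", "-Tsvg", "docs/resources/deps.dot", "-o", "docs/resources/deps.svg"],
    check=True,
)
```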

Binary file not shown.

25
packaging/AppImage/AppRun Normal file
View file

@ -0,0 +1,25 @@
#!/bin/bash
scriptdir=$(dirname $0)
export PYTHONPATH="$scriptdir/lib/python3.10"
export LD_LIBRARY_PATH=$scriptdir
export QT_PLUGIN_PATH="$scriptdir/qt/plugins"
export QML2_IMPORT_PATH="$scriptdir/qt/qml"
export QT_QPA_FONTDIR=/usr/share/fonts
export QT_QPA_PLATFORMTHEME=xdgdesktopportal
export QT_XKB_CONFIG_ROOT=/usr/share/X11/xkb
# Use the openssl.cnf packaged in the AppImage
export OPENSSL_CONF="$scriptdir/openssl.cnf"
# If this variable is set on Zorin OS 16, Cura crashes,
# so unset `QT_STYLE_OVERRIDE` as a precaution
unset QT_STYLE_OVERRIDE
BIN=`basename "$ARGV0" .AppImage`
if [ -f $scriptdir/$BIN ]; then
$scriptdir/$BIN "$@"
else
$scriptdir/UltiMaker-Cura "$@"
fi;

View file

@ -0,0 +1,77 @@
# Copyright (c) 2022 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import argparse # Command line arguments parsing and help.
from jinja2 import Template
import os # Finding installation directory.
import os.path # Finding files.
import shutil # Copying files.
import stat # For setting file permissions.
import subprocess # For calling system commands.
def build_appimage(dist_path, version, appimage_filename):
    """
    Creates an AppImage file from the build artefacts created so far.
    """
    copy_metadata_files(dist_path, version)

    try:
        os.remove(os.path.join(dist_path, appimage_filename))  # Ensure any old file is removed, if it exists.
    except FileNotFoundError:
        pass  # If it didn't exist, that's even better.

    generate_appimage(dist_path, appimage_filename)

    sign_appimage(dist_path, appimage_filename)


def copy_metadata_files(dist_path, version):
    """
    Copy metadata files for the metadata of the AppImage.
    """
    copied_files = {
        os.path.join("..", "icons", "cura-icon_256x256.png"): "cura-icon.png",
        "cura.appdata.xml": "cura.appdata.xml",
        "AppRun": "AppRun"
    }

    packaging_dir = os.path.dirname(__file__)
    for source, dest in copied_files.items():
        print("Copying", os.path.join(packaging_dir, source), "to", os.path.join(dist_path, dest))
        shutil.copyfile(os.path.join(packaging_dir, source), os.path.join(dist_path, dest))

    # Ensure that AppRun has the proper permissions: 755 (user reads, writes and executes, group reads and executes, world reads and executes).
    print("Changing permissions for AppRun")
    os.chmod(os.path.join(dist_path, "AppRun"), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)

    # Provision the Desktop file with the correct version number.
    template_path = os.path.join(packaging_dir, "cura.desktop.jinja")
    desktop_path = os.path.join(dist_path, "cura.desktop")
    print("Provisioning desktop file from", template_path, "to", desktop_path)
    with open(template_path, "r") as f:
        desktop_file = Template(f.read())
    with open(desktop_path, "w") as f:
        f.write(desktop_file.render(cura_version = version))


def generate_appimage(dist_path, appimage_filename):
    appimage_path = os.path.join(dist_path, "..", appimage_filename)
    appimagetool = os.getenv("APPIMAGETOOL_LOCATION", "appimagetool")
    command = [appimagetool, "--appimage-extract-and-run", f"{dist_path}/", appimage_path]
    result = subprocess.call(command)
    if result != 0:
        raise RuntimeError(f"The AppImageTool command returned non-zero: {result}")


def sign_appimage(dist_path, appimage_filename):
    appimage_path = os.path.join(dist_path, "..", appimage_filename)
    command = ["gpg", "--yes", "--armor", "--detach-sig", appimage_path]
    result = subprocess.call(command)
    if result != 0:
        raise RuntimeError(f"The GPG command returned non-zero: {result}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description = "Create AppImages of Cura.")
    parser.add_argument("dist_path", type=str, help="Path to where PyInstaller installed the distribution of Cura.")
    parser.add_argument("version", type=str, help="Full version number of Cura (e.g. '5.1.0-beta')")
    parser.add_argument("filename", type = str, help = "Filename of the AppImage (e.g. 'UltiMaker-Cura-5.1.0-beta-Linux-X64.AppImage')")
    args = parser.parse_args()
    build_appimage(args.dist_path, args.version, args.filename)
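As a usage illustration, the builder above could also be driven directly from Python rather than via the command line. A minimal sketch, assuming the file is importable as `build_appimage.py` and that a PyInstaller dist folder already exists; the paths and version below are placeholders.

```python
# Hypothetical direct invocation of the AppImage builder; in practice this script
# is run from the command line by the packaging workflow. All values are placeholders.
from build_appimage import build_appimage

build_appimage(
    dist_path = "dist/UltiMaker-Cura",  # PyInstaller output folder (assumed location)
    version = "5.1.0-beta",
    appimage_filename = "UltiMaker-Cura-5.1.0-beta-Linux-X64.AppImage",
)
```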

View file

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
    <id>com.ultimaker.cura</id>
    <metadata_license>CC0-1.0</metadata_license>
    <project_license>LGPL-3.0</project_license>
    <name>UltiMaker Cura</name>
    <summary>Slicer to prepare your 3D printing projects</summary>
    <description>
        <p>UltiMaker Cura is a slicer, an application that prepares your model for 3D printing. Optimized, expert-tested profiles for 3D printers and materials mean you can start printing reliably in no time. And with industry-standard software integration, you can streamline your workflow for maximum efficiency.</p>
    </description>
    <url type="homepage">https://ultimaker.com/en/software/ultimaker-cura</url>
    <screenshots>
        <screenshot type="default">
            <caption>Print preparation screen</caption>
            <image>https://raw.githubusercontent.com/Ultimaker/Cura/main/cura-logo.PNG</image>
        </screenshot>
    </screenshots>
</component>

View file

@ -0,0 +1,15 @@
[Desktop Entry]
Name=UltiMaker Cura
Name[de]=UltiMaker Cura
GenericName=3D Printing Software
GenericName[de]=3D-Druck-Software
GenericName[nl]=3D-Print Software
Comment=Cura converts 3D models into paths for a 3D printer. It prepares your print for maximum accuracy, minimum printing time and good reliability with many extra features that make your print come out great.
Exec=UltiMaker-Cura %F
Icon=cura-icon
Terminal=false
Type=Application
MimeType=model/stl;application/vnd.ms-3mfdocument;application/prs.wavefront-obj;image/bmp;image/gif;image/jpeg;image/png;text/x-gcode;application/x-amf;application/x-ply;application/x-ctm;model/vnd.collada+xml;model/gltf-binary;model/gltf+json;model/vnd.collada+xml+zip;
Categories=Graphics;
Keywords=3D;Printing;
X-AppImage-Version={{ cura_version }}

View file

@ -0,0 +1,155 @@
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import os
import argparse # Command line arguments parsing and help.
import subprocess
from pathlib import Path
ULTIMAKER_CURA_DOMAIN = os.environ.get("ULTIMAKER_CURA_DOMAIN", "nl.ultimaker.cura")
def build_dmg(source_path: str, dist_path: str, filename: str, app_name: str) -> None:
    create_dmg_executable = os.environ.get("CREATE_DMG_EXECUTABLE", "create-dmg")

    arguments = [create_dmg_executable,
                 "--window-pos", "640", "360",
                 "--window-size", "690", "503",
                 "--app-drop-link", "520", "272",
                 "--volicon", f"{source_path}/packaging/icons/VolumeIcons_Cura.icns",
                 "--icon-size", "90",
                 "--icon", app_name, "169", "272",
                 "--eula", f"{source_path}/packaging/cura_license.txt",
                 "--background", f"{source_path}/packaging/MacOs/cura_background_dmg.png",
                 f"{dist_path}/{filename}",
                 f"{dist_path}/{app_name}"]

    subprocess.run(arguments)

def build_pkg(dist_path: str, app_filename: str, component_filename: str, cura_version: str, installer_filename: str) -> None:
    """ Builds and signs the pkg installer.

    @param dist_path: Path to put output pkg in
    @param app_filename: name of the .app file to bundle inside the pkg
    @param component_filename: Name of the pkg component package to bundle the app in
    @param cura_version: The version is used when automatically replacing existing versions with the installer.
    @param installer_filename: Name of the installer that contains the component package
    """
    pkg_build_executable = os.environ.get("PKG_BUILD_EXECUTABLE", "pkgbuild")
    product_build_executable = os.environ.get("PRODUCT_BUILD_EXECUTABLE", "productbuild")
    codesign_identity = os.environ.get("CODESIGN_IDENTITY")

    # This builds the component package that contains UltiMaker-Cura.app. This component package will be bundled in a distribution package.
    pkg_build_arguments = [
        pkg_build_executable,
        "--identifier", f"{ULTIMAKER_CURA_DOMAIN}_{cura_version}",  # If we want to replace the previous version automatically, remove {cura_version}
        "--component",
        Path(dist_path, app_filename),
        Path(dist_path, component_filename),
        "--install-location", "/Applications",
    ]

    if codesign_identity:
        pkg_build_arguments.extend(["--sign", codesign_identity])
    else:
        print("CODESIGN_IDENTITY missing. The installer is not being signed")

    subprocess.run(pkg_build_arguments)
    # This automatically generates a distribution.xml file that is used to build the installer.
    # If you want to make any changes to how the installer functions, this file should be changed to do that.
    # TODO: Use --product {property_list_file} to pull keys out of file for distribution.xml. This can be used to set min requirements
    distribution_creation_arguments = [
        product_build_executable,
        "--synthesize",
        "--package", Path(dist_path, component_filename),  # Package that will be inside installer
        Path(dist_path, "distribution.xml"),  # Output location for synthesized distribution file
    ]
    subprocess.run(distribution_creation_arguments)

    # This creates the distributable package (Installer)
    installer_creation_arguments = [
        product_build_executable,
        "--distribution", Path(dist_path, "distribution.xml"),
        "--package-path", dist_path,  # Where to find the component packages mentioned in distribution.xml (UltiMaker-Cura.pkg)
        Path(dist_path, installer_filename),
    ]

    if codesign_identity:
        installer_creation_arguments.extend(["--sign", codesign_identity])

    subprocess.run(installer_creation_arguments)

def notarize_file(dist_path: str, filename: str) -> None:
    """ Notarize a file. This takes 5+ minutes; there is no indication that this step is successful."""
    notarize_user = os.environ.get("MAC_NOTARIZE_USER")
    notarize_password = os.environ.get("MAC_NOTARIZE_PASS")
    altool_executable = os.environ.get("ALTOOL_EXECUTABLE", "altool")

    notarize_arguments = [
        "xcrun", altool_executable,
        "--notarize-app",
        "--primary-bundle-id", ULTIMAKER_CURA_DOMAIN,
        "--username", notarize_user,
        "--password", notarize_password,
        "--file", Path(dist_path, filename)
    ]

    subprocess.run(notarize_arguments)

def create_pkg_installer(filename: str, dist_path: str, cura_version: str, app_name: str) -> None:
    """ Creates a pkg installer from {filename}.app called {filename}-Installer.pkg

    The final package structure is UltiMaker-Cura-XXX-Installer.pkg[UltiMaker-Cura.pkg[UltiMaker-Cura.app]]. The outer
    pkg file is a distributable pkg (Installer). Inside the distributable pkg there is a component pkg. The component
    pkg contains the .app file that will be installed in the user's Applications folder.

    @param filename: The name of the app file and the app component package file without the extension
    @param dist_path: The location to read the app from and save the pkg to
    @param cura_version: The version of Cura, used when automatically replacing existing versions with the installer
    @param app_name: Filename of the .app bundle to package inside the component package
    """
    filename_stem = Path(filename).stem
    cura_component_package_name = f"{filename_stem}-Component.pkg"  # Component package nested inside the installer; it contains the UltiMaker-Cura.app file that will end up in the user's Applications folder.
    build_pkg(dist_path, app_name, cura_component_package_name, cura_version, filename)
    notarize = os.environ.get("NOTARIZE_INSTALLER", "FALSE").upper() == "TRUE"  # Only notarize when the variable is explicitly set to "TRUE".
    if notarize:
        notarize_file(dist_path, filename)

def create_dmg(filename: str, dist_path: str, source_path: str, app_name: str) -> None:
    """ Creates a dmg executable from UltiMaker-Cura.app named {filename}.dmg

    @param filename: The name of the app file and the output dmg file without the extension
    @param dist_path: The location to read the app from and save the dmg to
    @param source_path: The location of the project source files
    @param app_name: Filename of the .app bundle that is put inside the dmg
    """
    build_dmg(source_path, dist_path, filename, app_name)
    notarize_dmg = os.environ.get("NOTARIZE_DMG", "TRUE").upper() == "TRUE"  # Only notarize when the variable is set to "TRUE".
    if notarize_dmg:
        notarize_file(dist_path, filename)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description = "Create installer for Cura.")
    parser.add_argument("source_path", type = str, help = "Path to Pyinstaller source folder")
    parser.add_argument("dist_path", type = str, help = "Path to Pyinstaller dist folder")
    parser.add_argument("cura_conan_version", type = str, help="The version of cura")
    parser.add_argument("filename", type = str, help = "Filename of the pkg/dmg (e.g. 'UltiMaker-Cura-5.1.0-beta-Macos-X64.pkg' or 'UltiMaker-Cura-5.1.0-beta-Macos-X64.dmg')")
    parser.add_argument("app_name", type = str, help = "Filename of the .app that will be contained within the dmg/pkg")
    args = parser.parse_args()

    cura_version = args.cura_conan_version.split("/")[-1]
    app_name = f"{args.app_name}.app"

    if Path(args.filename).suffix == ".pkg":
        create_pkg_installer(args.filename, args.dist_path, cura_version, app_name)
    else:
        create_dmg(args.filename, args.dist_path, args.source_path, app_name)
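Similarly, as a usage illustration, the pkg installer build above could be invoked directly from Python. A minimal sketch, assuming the file is importable as `build_macos.py`; all paths and names below are placeholders.

```python
# Hypothetical direct invocation of the macOS pkg installer build; in practice this
# script is run from the command line by the packaging workflow. Values are placeholders.
from build_macos import create_pkg_installer

create_pkg_installer(
    filename = "UltiMaker-Cura-5.1.0-beta-Macos-X64.pkg",  # output installer name
    dist_path = "dist",                                     # PyInstaller dist folder
    cura_version = "5.1.0-beta",
    app_name = "UltiMaker-Cura.app",
)
```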

View file

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
    <dict>
        <key>com.apple.security.cs.allow-dyld-environment-variables</key>
        <true/>
        <key>com.apple.security.cs.disable-executable-page-protection</key>
        <true/>
        <key>com.apple.security.cs.disable-library-validation</key>
        <true/>
    </dict>
</plist>

Binary file not shown.


Some files were not shown because too many files have changed in this diff.