Mirror of https://github.com/Ultimaker/Cura.git, synced 2025-12-11 16:00:47 -07:00

Merge branch 'master' into xray_in_solid_view
Commit 0150f37937
315 changed files with 33845 additions and 1731 deletions
@@ -86,7 +86,7 @@ class ThreeMFReader(MeshReader):
    ##  Convenience function that converts a SceneNode object (as obtained from libSavitar) to a scene node.
    #   \returns Scene node.
    def _convertSavitarNodeToUMNode(self, savitar_node: Savitar.SceneNode) -> Optional[SceneNode]:
    def _convertSavitarNodeToUMNode(self, savitar_node: Savitar.SceneNode, file_name: str = "") -> Optional[SceneNode]:
        self._object_count += 1
        node_name = "Object %s" % self._object_count
@@ -104,6 +104,10 @@ class ThreeMFReader(MeshReader):
            vertices = numpy.resize(data, (int(data.size / 3), 3))
            mesh_builder.setVertices(vertices)
            mesh_builder.calculateNormals(fast=True)
            if file_name:
                # The filename is used to give the user the option to reload the file if it is changed on disk
                # It is only set for the root node of the 3mf file
                mesh_builder.setFileName(file_name)
            mesh_data = mesh_builder.build()

            if len(mesh_data.getVertices()):
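The numpy.resize call above turns the flat coordinate buffer coming out of the parser into an (n, 3) vertex array before it is handed to the mesh builder. A minimal sketch of just that reshaping step (the vertex values here are made up):

    import numpy

    # Flat [x0, y0, z0, x1, y1, z1, ...] buffer, as a mesh file would provide it.
    data = numpy.array([0.0, 0.0, 0.0,
                        10.0, 0.0, 0.0,
                        0.0, 10.0, 0.0], dtype = numpy.float32)

    # One row per vertex, three columns for x/y/z.
    vertices = numpy.resize(data, (int(data.size / 3), 3))
    print(vertices.shape)   # (3, 3)
    print(vertices[1])      # [10.  0.  0.]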
@@ -171,7 +175,7 @@ class ThreeMFReader(MeshReader):
            scene_3mf = parser.parse(archive.open("3D/3dmodel.model").read())
            self._unit = scene_3mf.getUnit()
            for node in scene_3mf.getSceneNodes():
                um_node = self._convertSavitarNodeToUMNode(node)
                um_node = self._convertSavitarNodeToUMNode(node, file_name)
                if um_node is None:
                    continue
                # compensate for original center position, if object(s) is/are not around its zero position
@@ -285,13 +285,13 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
                serialized = archive.open(instance_container_file_name).read().decode("utf-8")

                # Qualities and variants don't have upgrades, so don't upgrade them
                parser = ConfigParser(interpolation = None)
                parser = ConfigParser(interpolation = None, comment_prefixes = ())
                parser.read_string(serialized)
                container_type = parser["metadata"]["type"]
                if container_type not in ("quality", "variant"):
                    serialized = InstanceContainer._updateSerialized(serialized, instance_container_file_name)

                    parser = ConfigParser(interpolation = None)
                    parser = ConfigParser(interpolation = None, comment_prefixes = ())
                    parser.read_string(serialized)
                container_info = ContainerInfo(instance_container_file_name, serialized, parser)
                instance_container_info_dict[container_id] = container_info
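The switch to comment_prefixes = () matters because ConfigParser treats lines starting with ';' or '#' as comments by default, so lines inside a serialized value that happen to begin with one of those characters (g-code snippets, for instance) silently disappear on read. A minimal sketch of the difference, using a made-up profile snippet:

    from configparser import ConfigParser

    serialized = (
        "[metadata]\n"
        "type = quality_changes\n"
        "\n"
        "[values]\n"
        "machine_start_gcode = G28 ; home all axes\n"
        "\t;PRINT_START marker kept as part of the value\n"
        "\tM109 S210\n"
    )

    # Default parser: the indented continuation line starting with ';' is dropped as a comment.
    default_parser = ConfigParser(interpolation = None)
    default_parser.read_string(serialized)
    print(default_parser["values"]["machine_start_gcode"])

    # With comment_prefixes = () nothing is treated as a comment, so the ';' line survives.
    safe_parser = ConfigParser(interpolation = None, comment_prefixes = ())
    safe_parser.read_string(serialized)
    print(safe_parser["values"]["machine_start_gcode"])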
@@ -118,7 +118,7 @@ class AMFReader(MeshReader):
                mesh.merge_vertices()
                mesh.remove_unreferenced_vertices()
                mesh.fix_normals()
                mesh_data = self._toMeshData(mesh)
                mesh_data = self._toMeshData(mesh, file_name)

                new_node = CuraSceneNode()
                new_node.setSelectable(True)
@@ -147,7 +147,13 @@ class AMFReader(MeshReader):

        return group_node

    def _toMeshData(self, tri_node: trimesh.base.Trimesh) -> MeshData:
    ##  Converts a Trimesh to Uranium's MeshData.
    #   \param tri_node A Trimesh containing the contents of a file that was
    #   just read.
    #   \param file_name The full original filename used to watch for changes
    #   \return Mesh data from the Trimesh in a way that Uranium can understand
    #   it.
    def _toMeshData(self, tri_node: trimesh.base.Trimesh, file_name: str = "") -> MeshData:
        tri_faces = tri_node.faces
        tri_vertices = tri_node.vertices
@@ -169,5 +175,5 @@ class AMFReader(MeshReader):
        indices = numpy.asarray(indices, dtype = numpy.int32)
        normals = calculateNormalsFromIndexedVertices(vertices, indices, face_count)

        mesh_data = MeshData(vertices = vertices, indices = indices, normals = normals)
        mesh_data = MeshData(vertices = vertices, indices = indices, normals = normals,file_name = file_name)
        return mesh_data
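For reference, computing normals from indexed vertices boils down to one cross product per triangle. A rough, self-contained sketch of that idea in plain numpy (this is an illustration, not the actual Uranium implementation of calculateNormalsFromIndexedVertices):

    import numpy

    def face_normals(vertices: numpy.ndarray, indices: numpy.ndarray) -> numpy.ndarray:
        # vertices: (n, 3) float array; indices: (m, 3) int array of triangles.
        a = vertices[indices[:, 0]]
        b = vertices[indices[:, 1]]
        c = vertices[indices[:, 2]]
        normals = numpy.cross(b - a, c - a)          # one normal per triangle
        lengths = numpy.linalg.norm(normals, axis = 1)
        lengths[lengths == 0] = 1.0                  # avoid dividing by zero for degenerate faces
        return normals / lengths[:, None]

    vertices = numpy.array([[0, 0, 0], [1, 0, 0], [0, 1, 0]], dtype = numpy.float32)
    indices = numpy.array([[0, 1, 2]], dtype = numpy.int32)
    print(face_normals(vertices, indices))   # [[0. 0. 1.]]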
plugins/CuraDrive/src/CreateBackupJob.py (new file, 119 lines)

@@ -0,0 +1,119 @@
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import json
import threading
from datetime import datetime
from typing import Any, Dict, Optional

from PyQt5.QtNetwork import QNetworkReply, QNetworkRequest

from UM.Job import Job
from UM.Logger import Logger
from UM.Message import Message
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
from UM.i18n import i18nCatalog
from cura.CuraApplication import CuraApplication
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope

catalog = i18nCatalog("cura")


class CreateBackupJob(Job):
    """Creates backup zip, requests upload url and uploads the backup file to cloud storage."""

    MESSAGE_TITLE = catalog.i18nc("@info:title", "Backups")
    DEFAULT_UPLOAD_ERROR_MESSAGE = catalog.i18nc("@info:backup_status", "There was an error while uploading your backup.")

    def __init__(self, api_backup_url: str) -> None:
        """ Create a new backup Job. start the job by calling start()

        :param api_backup_url: The url of the 'backups' endpoint of the Cura Drive Api
        """

        super().__init__()

        self._api_backup_url = api_backup_url
        self._json_cloud_scope = JsonDecoratorScope(UltimakerCloudScope(CuraApplication.getInstance()))

        self._backup_zip = None  # type: Optional[bytes]
        self._job_done = threading.Event()
        """Set when the job completes. Does not indicate success."""
        self.backup_upload_error_message = ""
        """After the job completes, an empty string indicates success. Othrerwise, the value is a translated message."""

    def run(self) -> None:
        upload_message = Message(catalog.i18nc("@info:backup_status", "Creating your backup..."), title = self.MESSAGE_TITLE, progress = -1)
        upload_message.show()
        CuraApplication.getInstance().processEvents()
        cura_api = CuraApplication.getInstance().getCuraAPI()
        self._backup_zip, backup_meta_data = cura_api.backups.createBackup()

        if not self._backup_zip or not backup_meta_data:
            self.backup_upload_error_message = catalog.i18nc("@info:backup_status", "There was an error while creating your backup.")
            upload_message.hide()
            return

        upload_message.setText(catalog.i18nc("@info:backup_status", "Uploading your backup..."))
        CuraApplication.getInstance().processEvents()

        # Create an upload entry for the backup.
        timestamp = datetime.now().isoformat()
        backup_meta_data["description"] = "{}.backup.{}.cura.zip".format(timestamp, backup_meta_data["cura_release"])
        self._requestUploadSlot(backup_meta_data, len(self._backup_zip))

        self._job_done.wait()
        if self.backup_upload_error_message == "":
            upload_message.setText(catalog.i18nc("@info:backup_status", "Your backup has finished uploading."))
            upload_message.setProgress(None)  # Hide progress bar
        else:
            # some error occurred. This error is presented to the user by DrivePluginExtension
            upload_message.hide()

    def _requestUploadSlot(self, backup_metadata: Dict[str, Any], backup_size: int) -> None:
        """Request a backup upload slot from the API.

        :param backup_metadata: A dict containing some meta data about the backup.
        :param backup_size: The size of the backup file in bytes.
        """

        payload = json.dumps({"data": {"backup_size": backup_size,
                                       "metadata": backup_metadata
                                       }
                              }).encode()

        HttpRequestManager.getInstance().put(
            self._api_backup_url,
            data = payload,
            callback = self._onUploadSlotCompleted,
            error_callback = self._onUploadSlotCompleted,
            scope = self._json_cloud_scope)

    def _onUploadSlotCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
        if error is not None:
            Logger.warning(str(error))
            self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE
            self._job_done.set()
            return
        if reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) >= 300:
            Logger.warning("Could not request backup upload: %s", HttpRequestManager.readText(reply))
            self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE
            self._job_done.set()
            return

        backup_upload_url = HttpRequestManager.readJSON(reply)["data"]["upload_url"]

        # Upload the backup to storage.
        HttpRequestManager.getInstance().put(
            backup_upload_url,
            data=self._backup_zip,
            callback=self._uploadFinishedCallback,
            error_callback=self._uploadFinishedCallback
        )

    def _uploadFinishedCallback(self, reply: QNetworkReply, error: QNetworkReply.NetworkError = None):
        if not HttpRequestManager.replyIndicatesSuccess(reply, error):
            Logger.log("w", "Could not upload backup file: %s", HttpRequestManager.readText(reply))
            self.backup_upload_error_message = self.DEFAULT_UPLOAD_ERROR_MESSAGE

        self._job_done.set()
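CreateBackupJob.run() uses a pattern worth noting: run() fires asynchronous HTTP requests and then blocks on a threading.Event, which the callbacks set once the work (or an error) is done, so the Job only finishes after the upload completes. A stripped-down sketch of the same idea; the fake_async timer below stands in for the network layer and is invented for the illustration:

    import threading

    class UploadLikeJob:
        def __init__(self) -> None:
            self._job_done = threading.Event()
            self.error_message = ""

        def _on_reply(self, ok: bool) -> None:
            # Called from another thread when the "network request" finishes.
            if not ok:
                self.error_message = "upload failed"
            self._job_done.set()

        def run(self) -> None:
            # Start the asynchronous work, then wait for the callback to flag completion.
            fake_async = threading.Timer(0.1, self._on_reply, args = (True,))
            fake_async.start()
            self._job_done.wait()
            print("done, error message:", repr(self.error_message))

    UploadLikeJob().run()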
@@ -1,90 +1,70 @@
# Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.

import base64
import hashlib
from datetime import datetime
from tempfile import NamedTemporaryFile
from typing import Any, Optional, List, Dict
from typing import Any, Optional, List, Dict, Callable

import requests
from PyQt5.QtNetwork import QNetworkReply

from UM.Logger import Logger
from UM.Message import Message
from UM.Signal import Signal, signalemitter
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
from UM.i18n import i18nCatalog
from cura.CuraApplication import CuraApplication

from .UploadBackupJob import UploadBackupJob
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
from .CreateBackupJob import CreateBackupJob
from .RestoreBackupJob import RestoreBackupJob
from .Settings import Settings

from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")


## The DriveApiService is responsible for interacting with the CuraDrive API and Cura's backup handling.
@signalemitter
class DriveApiService:
    """The DriveApiService is responsible for interacting with the CuraDrive API and Cura's backup handling."""

    BACKUP_URL = "{}/backups".format(Settings.DRIVE_API_URL)

    # Emit signal when restoring backup started or finished.
    restoringStateChanged = Signal()
    """Emits signal when restoring backup started or finished."""

    # Emit signal when creating backup started or finished.
    creatingStateChanged = Signal()
    """Emits signal when creating backup started or finished."""

    def __init__(self) -> None:
        self._cura_api = CuraApplication.getInstance().getCuraAPI()
        self._json_cloud_scope = JsonDecoratorScope(UltimakerCloudScope(CuraApplication.getInstance()))

    def getBackups(self) -> List[Dict[str, Any]]:
        access_token = self._cura_api.account.accessToken
        if not access_token:
            Logger.log("w", "Could not get access token.")
            return []
        try:
            backup_list_request = requests.get(self.BACKUP_URL, headers = {
                "Authorization": "Bearer {}".format(access_token)
            })
        except requests.exceptions.ConnectionError:
            Logger.logException("w", "Unable to connect with the server.")
            return []
    def getBackups(self, changed: Callable[[List[Dict[str, Any]]], None]) -> None:
        def callback(reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
            if error is not None:
                Logger.log("w", "Could not get backups: " + str(error))
                changed([])
                return

        # HTTP status 300s mean redirection. 400s and 500s are errors.
        # Technically 300s are not errors, but the use case here relies on "requests" to handle redirects automatically.
        if backup_list_request.status_code >= 300:
            Logger.log("w", "Could not get backups list from remote: %s", backup_list_request.text)
            Message(catalog.i18nc("@info:backup_status", "There was an error listing your backups."), title = catalog.i18nc("@info:title", "Backup")).show()
            return []
            backup_list_response = HttpRequestManager.readJSON(reply)
            if "data" not in backup_list_response:
                Logger.log("w", "Could not get backups from remote, actual response body was: %s",
                           str(backup_list_response))
                changed([])  # empty list of backups
                return

        backup_list_response = backup_list_request.json()
        if "data" not in backup_list_response:
            Logger.log("w", "Could not get backups from remote, actual response body was: %s", str(backup_list_response))
            return []
            changed(backup_list_response["data"])

        return backup_list_response["data"]
        HttpRequestManager.getInstance().get(
            self.BACKUP_URL,
            callback= callback,
            error_callback = callback,
            scope=self._json_cloud_scope
        )

    def createBackup(self) -> None:
        self.creatingStateChanged.emit(is_creating = True)

        # Create the backup.
        backup_zip_file, backup_meta_data = self._cura_api.backups.createBackup()
        if not backup_zip_file or not backup_meta_data:
            self.creatingStateChanged.emit(is_creating = False, error_message ="Could not create backup.")
            return

        # Create an upload entry for the backup.
        timestamp = datetime.now().isoformat()
        backup_meta_data["description"] = "{}.backup.{}.cura.zip".format(timestamp, backup_meta_data["cura_release"])
        backup_upload_url = self._requestBackupUpload(backup_meta_data, len(backup_zip_file))
        if not backup_upload_url:
            self.creatingStateChanged.emit(is_creating = False, error_message ="Could not upload backup.")
            return

        # Upload the backup to storage.
        upload_backup_job = UploadBackupJob(backup_upload_url, backup_zip_file)
        upload_backup_job = CreateBackupJob(self.BACKUP_URL)
        upload_backup_job.finished.connect(self._onUploadFinished)
        upload_backup_job.start()

    def _onUploadFinished(self, job: "UploadBackupJob") -> None:
    def _onUploadFinished(self, job: "CreateBackupJob") -> None:
        if job.backup_upload_error_message != "":
            # If the job contains an error message we pass it along so the UI can display it.
            self.creatingStateChanged.emit(is_creating = False, error_message = job.backup_upload_error_message)
@@ -96,96 +76,38 @@ class DriveApiService:
        download_url = backup.get("download_url")
        if not download_url:
            # If there is no download URL, we can't restore the backup.
            return self._emitRestoreError()
            Logger.warning("backup download_url is missing. Aborting backup.")
            self.restoringStateChanged.emit(is_restoring = False,
                                            error_message = catalog.i18nc("@info:backup_status",
                                                                          "There was an error trying to restore your backup."))
            return

        try:
            download_package = requests.get(download_url, stream = True)
        except requests.exceptions.ConnectionError:
            Logger.logException("e", "Unable to connect with the server")
            return self._emitRestoreError()
        restore_backup_job = RestoreBackupJob(backup)
        restore_backup_job.finished.connect(self._onRestoreFinished)
        restore_backup_job.start()

        if download_package.status_code >= 300:
            # Something went wrong when attempting to download the backup.
            Logger.log("w", "Could not download backup from url %s: %s", download_url, download_package.text)
            return self._emitRestoreError()
    def _onRestoreFinished(self, job: "RestoreBackupJob") -> None:
        if job.restore_backup_error_message != "":
            # If the job contains an error message we pass it along so the UI can display it.
            self.restoringStateChanged.emit(is_restoring=False)
        else:
            self.restoringStateChanged.emit(is_restoring = False, error_message = job.restore_backup_error_message)

        # We store the file in a temporary path fist to ensure integrity.
        temporary_backup_file = NamedTemporaryFile(delete = False)
        with open(temporary_backup_file.name, "wb") as write_backup:
            for chunk in download_package:
                write_backup.write(chunk)
    def deleteBackup(self, backup_id: str, finished_callable: Callable[[bool], None]):

        if not self._verifyMd5Hash(temporary_backup_file.name, backup.get("md5_hash", "")):
            # Don't restore the backup if the MD5 hashes do not match.
            # This can happen if the download was interrupted.
            Logger.log("w", "Remote and local MD5 hashes do not match, not restoring backup.")
            return self._emitRestoreError()
        def finishedCallback(reply: QNetworkReply, ca: Callable[[bool], None] = finished_callable) -> None:
            self._onDeleteRequestCompleted(reply, ca)

        # Tell Cura to place the backup back in the user data folder.
        with open(temporary_backup_file.name, "rb") as read_backup:
            self._cura_api.backups.restoreBackup(read_backup.read(), backup.get("metadata", {}))
            self.restoringStateChanged.emit(is_restoring = False)
        def errorCallback(reply: QNetworkReply, error: QNetworkReply.NetworkError, ca: Callable[[bool], None] = finished_callable) -> None:
            self._onDeleteRequestCompleted(reply, ca, error)

    def _emitRestoreError(self) -> None:
        self.restoringStateChanged.emit(is_restoring = False,
                                        error_message = catalog.i18nc("@info:backup_status",
                                                                      "There was an error trying to restore your backup."))
        HttpRequestManager.getInstance().delete(
            url = "{}/{}".format(self.BACKUP_URL, backup_id),
            callback = finishedCallback,
            error_callback = errorCallback,
            scope= self._json_cloud_scope
        )

    # Verify the MD5 hash of a file.
    # \param file_path Full path to the file.
    # \param known_hash The known MD5 hash of the file.
    # \return: Success or not.
    @staticmethod
    def _verifyMd5Hash(file_path: str, known_hash: str) -> bool:
        with open(file_path, "rb") as read_backup:
            local_md5_hash = base64.b64encode(hashlib.md5(read_backup.read()).digest(), altchars = b"_-").decode("utf-8")
            return known_hash == local_md5_hash

    def deleteBackup(self, backup_id: str) -> bool:
        access_token = self._cura_api.account.accessToken
        if not access_token:
            Logger.log("w", "Could not get access token.")
            return False

        try:
            delete_backup = requests.delete("{}/{}".format(self.BACKUP_URL, backup_id), headers = {
                "Authorization": "Bearer {}".format(access_token)
            })
        except requests.exceptions.ConnectionError:
            Logger.logException("e", "Unable to connect with the server")
            return False

        if delete_backup.status_code >= 300:
            Logger.log("w", "Could not delete backup: %s", delete_backup.text)
            return False
        return True

    # Request a backup upload slot from the API.
    # \param backup_metadata: A dict containing some meta data about the backup.
    # \param backup_size The size of the backup file in bytes.
    # \return: The upload URL for the actual backup file if successful, otherwise None.
    def _requestBackupUpload(self, backup_metadata: Dict[str, Any], backup_size: int) -> Optional[str]:
        access_token = self._cura_api.account.accessToken
        if not access_token:
            Logger.log("w", "Could not get access token.")
            return None
        try:
            backup_upload_request = requests.put(
                self.BACKUP_URL,
                json = {"data": {"backup_size": backup_size,
                                 "metadata": backup_metadata
                                 }
                        },
                headers = {
                    "Authorization": "Bearer {}".format(access_token)
                })
        except requests.exceptions.ConnectionError:
            Logger.logException("e", "Unable to connect with the server")
            return None

        # Any status code of 300 or above indicates an error.
        if backup_upload_request.status_code >= 300:
            Logger.log("w", "Could not request backup upload: %s", backup_upload_request.text)
            return None

        return backup_upload_request.json()["data"]["upload_url"]
    def _onDeleteRequestCompleted(reply: QNetworkReply, callable: Callable[[bool], None], error: Optional["QNetworkReply.NetworkError"] = None) -> None:
        callable(HttpRequestManager.replyIndicatesSuccess(reply, error))
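In the new deleteBackup() above, the nested finishedCallback and errorCallback capture finished_callable through a default argument (ca = finished_callable) rather than a plain closure reference. Binding at definition time like this fixes the value the callback will see when it eventually fires, which is the usual reason for the idiom. A small, generic illustration of the difference:

    from typing import Callable, List

    callbacks_late: List[Callable[[], int]] = []
    callbacks_bound: List[Callable[[], int]] = []

    for i in range(3):
        # Plain closure: every callback sees the final value of i.
        callbacks_late.append(lambda: i)
        # Default argument: each callback keeps the value i had right here.
        callbacks_bound.append(lambda i = i: i)

    print([cb() for cb in callbacks_late])   # [2, 2, 2]
    print([cb() for cb in callbacks_bound])  # [0, 1, 2]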
@@ -133,7 +133,10 @@ class DrivePluginExtension(QObject, Extension):

    @pyqtSlot(name = "refreshBackups")
    def refreshBackups(self) -> None:
        self._backups = self._drive_api_service.getBackups()
        self._drive_api_service.getBackups(self._backupsChangedCallback)

    def _backupsChangedCallback(self, backups: List[Dict[str, Any]]) -> None:
        self._backups = backups
        self.backupsChanged.emit()

    @pyqtProperty(bool, notify = restoringStateChanged)
@@ -158,5 +161,8 @@ class DrivePluginExtension(QObject, Extension):

    @pyqtSlot(str, name = "deleteBackup")
    def deleteBackup(self, backup_id: str) -> None:
        self._drive_api_service.deleteBackup(backup_id)
        self.refreshBackups()
        self._drive_api_service.deleteBackup(backup_id, self._backupDeletedCallback)

    def _backupDeletedCallback(self, success: bool):
        if success:
            self.refreshBackups()
plugins/CuraDrive/src/RestoreBackupJob.py (new file, 92 lines)

@@ -0,0 +1,92 @@
import base64
import hashlib
import threading
from tempfile import NamedTemporaryFile
from typing import Optional, Any, Dict

from PyQt5.QtNetwork import QNetworkReply, QNetworkRequest

from UM.Job import Job
from UM.Logger import Logger
from UM.PackageManager import catalog
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from cura.CuraApplication import CuraApplication


class RestoreBackupJob(Job):
    """Downloads a backup and overwrites local configuration with the backup.

    When `Job.finished` emits, `restore_backup_error_message` will either be `""` (no error) or an error message
    """

    DISK_WRITE_BUFFER_SIZE = 512 * 1024
    DEFAULT_ERROR_MESSAGE = catalog.i18nc("@info:backup_status", "There was an error trying to restore your backup.")

    def __init__(self, backup: Dict[str, Any]) -> None:
        """ Create a new restore Job. start the job by calling start()

        :param backup: A dict containing a backup spec
        """

        super().__init__()
        self._job_done = threading.Event()

        self._backup = backup
        self.restore_backup_error_message = ""

    def run(self) -> None:

        url = self._backup.get("download_url")
        assert url is not None

        HttpRequestManager.getInstance().get(
            url = url,
            callback = self._onRestoreRequestCompleted,
            error_callback = self._onRestoreRequestCompleted
        )

        self._job_done.wait()  # A job is considered finished when the run function completes

    def _onRestoreRequestCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
        if not HttpRequestManager.replyIndicatesSuccess(reply, error):
            Logger.warning("Requesting backup failed, response code %s while trying to connect to %s",
                           reply.attribute(QNetworkRequest.HttpStatusCodeAttribute), reply.url())
            self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
            self._job_done.set()
            return

        # We store the file in a temporary path fist to ensure integrity.
        temporary_backup_file = NamedTemporaryFile(delete = False)
        with open(temporary_backup_file.name, "wb") as write_backup:
            app = CuraApplication.getInstance()
            bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
            while bytes_read:
                write_backup.write(bytes_read)
                bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
                app.processEvents()

        if not self._verifyMd5Hash(temporary_backup_file.name, self._backup.get("md5_hash", "")):
            # Don't restore the backup if the MD5 hashes do not match.
            # This can happen if the download was interrupted.
            Logger.log("w", "Remote and local MD5 hashes do not match, not restoring backup.")
            self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE

        # Tell Cura to place the backup back in the user data folder.
        with open(temporary_backup_file.name, "rb") as read_backup:
            cura_api = CuraApplication.getInstance().getCuraAPI()
            cura_api.backups.restoreBackup(read_backup.read(), self._backup.get("metadata", {}))

        self._job_done.set()

    @staticmethod
    def _verifyMd5Hash(file_path: str, known_hash: str) -> bool:
        """Verify the MD5 hash of a file.

        :param file_path: Full path to the file.
        :param known_hash: The known MD5 hash of the file.
        :return: Success or not.
        """

        with open(file_path, "rb") as read_backup:
            local_md5_hash = base64.b64encode(hashlib.md5(read_backup.read()).digest(), altchars = b"_-").decode("utf-8")
            return known_hash == local_md5_hash
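_verifyMd5Hash compares the hash recorded for the backup against an MD5 digest of the downloaded file, base64-encoded with '_' and '-' as the two extra characters. A standalone sketch of producing such a hash for an arbitrary file, assuming the server stores the digest in the same encoding (the file name here is made up):

    import base64
    import hashlib

    def md5_for_file(file_path: str) -> str:
        # Hash the whole file, then encode the raw digest with the same altchars as above.
        with open(file_path, "rb") as handle:
            digest = hashlib.md5(handle.read()).digest()
        return base64.b64encode(digest, altchars = b"_-").decode("utf-8")

    # Example usage with a throwaway file:
    with open("backup.zip", "wb") as handle:
        handle.write(b"not a real backup")
    print(md5_for_file("backup.zip"))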
@@ -1,7 +1,7 @@
# Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.

from cura import UltimakerCloudAuthentication
from cura.UltimakerCloud import UltimakerCloudAuthentication


class Settings:
@@ -1,41 +0,0 @@
# Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.

import requests

from UM.Job import Job
from UM.Logger import Logger
from UM.Message import Message

from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")


class UploadBackupJob(Job):
    MESSAGE_TITLE = catalog.i18nc("@info:title", "Backups")

    # This job is responsible for uploading the backup file to cloud storage.
    # As it can take longer than some other tasks, we schedule this using a Cura Job.
    def __init__(self, signed_upload_url: str, backup_zip: bytes) -> None:
        super().__init__()
        self._signed_upload_url = signed_upload_url
        self._backup_zip = backup_zip
        self._upload_success = False
        self.backup_upload_error_message = ""

    def run(self) -> None:
        upload_message = Message(catalog.i18nc("@info:backup_status", "Uploading your backup..."), title = self.MESSAGE_TITLE, progress = -1)
        upload_message.show()

        backup_upload = requests.put(self._signed_upload_url, data = self._backup_zip)
        upload_message.hide()

        if backup_upload.status_code >= 300:
            self.backup_upload_error_message = backup_upload.text
            Logger.log("w", "Could not upload backup file: %s", backup_upload.text)
            Message(catalog.i18nc("@info:backup_status", "There was an error while uploading your backup."), title = self.MESSAGE_TITLE).show()
        else:
            self._upload_success = True
            Message(catalog.i18nc("@info:backup_status", "Your backup has finished uploading."), title = self.MESSAGE_TITLE).show()

        self.finished.emit(self)
@@ -421,7 +421,10 @@ class CuraEngineBackend(QObject, Backend):

        if job.getResult() == StartJobResult.NothingToSlice:
            if self._application.platformActivity:
                self._error_message = Message(catalog.i18nc("@info:status", "Nothing to slice because none of the models fit the build volume or are assigned to a disabled extruder. Please scale or rotate models to fit, or enable an extruder."),
                self._error_message = Message(catalog.i18nc("@info:status", "Please review settings and check if your models:"
                                                            "\n- Fit within the build volume"
                                                            "\n- Are assigned to an enabled extruder"
                                                            "\n- Are not all set as modifier meshes"),
                                              title = catalog.i18nc("@info:title", "Unable to slice"))
                self._error_message.show()
                self.setState(BackendState.Error)
@@ -422,13 +422,14 @@ class StartSliceJob(Job):

        # Pre-compute material material_bed_temp_prepend and material_print_temp_prepend
        start_gcode = settings["machine_start_gcode"]
        # Remove all the comments from the start g-code
        start_gcode = re.sub(r";.+?(\n|$)", "\n", start_gcode)
        bed_temperature_settings = ["material_bed_temperature", "material_bed_temperature_layer_0"]
        pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(bed_temperature_settings) # match {setting} as well as {setting, extruder_nr}
        settings["material_bed_temp_prepend"] = re.search(pattern, start_gcode) == None
        print_temperature_settings = ["material_print_temperature", "material_print_temperature_layer_0", "default_material_print_temperature", "material_initial_print_temperature", "material_final_print_temperature", "material_standby_temperature"]
        pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(print_temperature_settings) # match {setting} as well as {setting, extruder_nr}
        settings["material_print_temp_prepend"] = re.search(pattern, start_gcode) == None

        # Replace the setting tokens in start and end g-code.
        # Use values from the first used extruder by default so we get the expected temperatures
        initial_extruder_stack = CuraApplication.getInstance().getExtruderManager().getUsedExtruderStacks()[0]
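The pattern built above matches both the bare replacement token and the extruder-qualified form. A quick check of what the expression accepts, using the bed-temperature settings from this hunk (the g-code lines are only examples):

    import re

    bed_temperature_settings = ["material_bed_temperature", "material_bed_temperature_layer_0"]
    pattern = r"\{(%s)(,\s?\w+)?\}" % "|".join(bed_temperature_settings)

    print(bool(re.search(pattern, "M190 S{material_bed_temperature}")))            # True
    print(bool(re.search(pattern, "M190 S{material_bed_temperature_layer_0, 0}"))) # True
    print(bool(re.search(pattern, "M190 S{material_print_temperature}")))          # False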
@@ -44,6 +44,7 @@ class FirmwareUpdateCheckerJob(Job):
        try:
            # CURA-6698 Create an SSL context and use certifi CA certificates for verification.
            context = ssl.SSLContext(protocol = ssl.PROTOCOL_TLSv1_2)
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(cafile = certifi.where())

            request = urllib.request.Request(url, headers = self._headers)
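The certifi-backed SSL context shown above is a common recipe for HTTPS requests through urllib when the system certificate store cannot be relied on. A standalone sketch of the same setup; the URL and header are placeholders, not Cura's actual endpoint:

    import ssl
    import urllib.request

    import certifi

    # Verify the server certificate against certifi's CA bundle.
    context = ssl.SSLContext(protocol = ssl.PROTOCOL_TLSv1_2)
    context.verify_mode = ssl.CERT_REQUIRED
    context.load_verify_locations(cafile = certifi.where())

    request = urllib.request.Request("https://example.com", headers = {"User-Agent": "example"})
    with urllib.request.urlopen(request, context = context) as response:
        print(response.status)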
@@ -7,20 +7,6 @@ from UM.Mesh.MeshReader import MeshReader #The class we're extending/implementin
from UM.MimeTypeDatabase import MimeTypeDatabase, MimeType #To add the .gcode.gz files to the MIME type database.
from UM.PluginRegistry import PluginRegistry

import contextlib
import resource


@contextlib.contextmanager
def limit(limit, type=resource.RLIMIT_AS):
    soft_limit, hard_limit = resource.getrlimit(type)
    resource.setrlimit(type, (limit, hard_limit)) # set soft limit
    try:
        yield
    finally:
        resource.setrlimit(type, (soft_limit, hard_limit)) # restore


## A file reader that reads gzipped g-code.
#
#  If you're zipping g-code, you might as well use gzip!
@@ -39,9 +25,7 @@ class GCodeGzReader(MeshReader):
    def _read(self, file_name):
        with open(file_name, "rb") as file:
            file_data = file.read()

        with limit(1 << 30): # Prevent a gzip bomb (by setting the max size to 1 gig)
            uncompressed_gcode = gzip.decompress(file_data).decode("utf-8")
        uncompressed_gcode = gzip.decompress(file_data).decode("utf-8")
        PluginRegistry.getInstance().getPluginObject("GCodeReader").preReadFromStream(uncompressed_gcode)
        result = PluginRegistry.getInstance().getPluginObject("GCodeReader").readFromStream(uncompressed_gcode, file_name)
@@ -1,4 +1,4 @@
# Copyright (c) 2015 Ultimaker B.V.
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.

import numpy
@@ -96,7 +96,7 @@ class ImageReader(MeshReader):
        texel_width = 1.0 / (width_minus_one) * scale_vector.x
        texel_height = 1.0 / (height_minus_one) * scale_vector.z

        height_data = numpy.zeros((height, width), dtype=numpy.float32)
        height_data = numpy.zeros((height, width), dtype = numpy.float32)

        for x in range(0, width):
            for y in range(0, height):
@@ -112,7 +112,7 @@ class ImageReader(MeshReader):
            height_data = 1 - height_data

        for _ in range(0, blur_iterations):
            copy = numpy.pad(height_data, ((1, 1), (1, 1)), mode= "edge")
            copy = numpy.pad(height_data, ((1, 1), (1, 1)), mode = "edge")

            height_data += copy[1:-1, 2:]
            height_data += copy[1:-1, :-2]
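The blur loop pads the height map by one texel on every side (mode = "edge" repeats the border values) and then adds shifted copies of the padded array, which amounts to a cheap box blur. A toy-sized version of the same stencil (the hunk above is truncated; the real loop continues with more shifted copies):

    import numpy

    height_data = numpy.array([[0.0, 0.0, 0.0],
                               [0.0, 9.0, 0.0],
                               [0.0, 0.0, 0.0]], dtype = numpy.float32)

    copy = numpy.pad(height_data, ((1, 1), (1, 1)), mode = "edge")

    # Add the four direct neighbours of every cell.
    height_data += copy[1:-1, 2:]    # right neighbour
    height_data += copy[1:-1, :-2]   # left neighbour
    height_data += copy[2:, 1:-1]    # bottom neighbour
    height_data += copy[:-2, 1:-1]   # top neighbour
    print(height_data)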
@@ -165,7 +165,7 @@ class ImageReader(MeshReader):
        offsetsz = numpy.array(offsetsz, numpy.float32).reshape(-1, 1) * texel_height

        # offsets for each texel quad
        heightmap_vertex_offsets = numpy.concatenate([offsetsx, numpy.zeros((offsetsx.shape[0], offsetsx.shape[1]), dtype=numpy.float32), offsetsz], 1)
        heightmap_vertex_offsets = numpy.concatenate([offsetsx, numpy.zeros((offsetsx.shape[0], offsetsx.shape[1]), dtype = numpy.float32), offsetsz], 1)
        heightmap_vertices += heightmap_vertex_offsets.repeat(6, 0).reshape(-1, 6, 3)

        # apply height data to y values
@@ -174,7 +174,7 @@ class ImageReader(MeshReader):
        heightmap_vertices[:, 2, 1] = heightmap_vertices[:, 3, 1] = height_data[1:, 1:].reshape(-1)
        heightmap_vertices[:, 4, 1] = height_data[:-1, 1:].reshape(-1)

        heightmap_indices = numpy.array(numpy.mgrid[0:heightmap_face_count * 3], dtype=numpy.int32).reshape(-1, 3)
        heightmap_indices = numpy.array(numpy.mgrid[0:heightmap_face_count * 3], dtype = numpy.int32).reshape(-1, 3)

        mesh._vertices[0:(heightmap_vertices.size // 3), :] = heightmap_vertices.reshape(-1, 3)
        mesh._indices[0:(heightmap_indices.size // 3), :] = heightmap_indices
@@ -223,7 +223,7 @@ class ImageReader(MeshReader):
        mesh.addFaceByPoints(geo_width, 0, y, geo_width, 0, ny, geo_width, he1, ny)
        mesh.addFaceByPoints(geo_width, he1, ny, geo_width, he0, y, geo_width, 0, y)

        mesh.calculateNormals(fast=True)
        mesh.calculateNormals(fast = True)

        scene_node.setMeshData(mesh.build())
@@ -1,4 +1,4 @@
# Copyright (c) 2015 Ultimaker B.V.
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.

import os
@@ -33,9 +33,9 @@ class ImageReaderUI(QObject):
        self.base_height = 0.4
        self.peak_height = 2.5
        self.smoothing = 1
        self.lighter_is_higher = False;
        self.use_transparency_model = True;
        self.transmittance_1mm = 50.0; # based on pearl PLA
        self.lighter_is_higher = False
        self.use_transparency_model = True
        self.transmittance_1mm = 50.0 # based on pearl PLA

        self._ui_lock = threading.Lock()
        self._cancelled = False
@@ -85,7 +85,7 @@ class ImageReaderUI(QObject):
        Logger.log("d", "Creating ImageReader config UI")
        path = os.path.join(PluginRegistry.getInstance().getPluginPath("ImageReader"), "ConfigUI.qml")
        self._ui_view = Application.getInstance().createQmlComponent(path, {"manager": self})
        self._ui_view.setFlags(self._ui_view.flags() & ~Qt.WindowCloseButtonHint & ~Qt.WindowMinimizeButtonHint & ~Qt.WindowMaximizeButtonHint);
        self._ui_view.setFlags(self._ui_view.flags() & ~Qt.WindowCloseButtonHint & ~Qt.WindowMinimizeButtonHint & ~Qt.WindowMaximizeButtonHint)
        self._disable_size_callbacks = False

    @pyqtSlot()
@@ -107,7 +107,7 @@ Item
            labelWidth: base.labelWidth
            controlWidth: base.controlWidth
            unitText: catalog.i18nc("@label", "mm")
            allowNegativeValue: true
            minimum: Number.NEGATIVE_INFINITY
            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -122,7 +122,7 @@ Item
            labelWidth: base.labelWidth
            controlWidth: base.controlWidth
            unitText: catalog.i18nc("@label", "mm")
            allowNegativeValue: true
            minimum: Number.NEGATIVE_INFINITY
            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -72,6 +72,7 @@ Item
            labelWidth: base.labelWidth
            controlWidth: base.controlWidth
            unitText: catalog.i18nc("@label", "mm")
            maximum: 2000000
            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -86,6 +87,7 @@ Item
            labelWidth: base.labelWidth
            controlWidth: base.controlWidth
            unitText: catalog.i18nc("@label", "mm")
            maximum: 2000000
            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -204,8 +206,8 @@ Item

            axisName: "x"
            axisMinOrMax: "min"
            allowNegativeValue: true
            allowPositiveValue: false
            minimum: Number.NEGATIVE_INFINITY
            maximum: 0

            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -224,8 +226,8 @@ Item

            axisName: "y"
            axisMinOrMax: "min"
            allowNegativeValue: true
            allowPositiveValue: false
            minimum: Number.NEGATIVE_INFINITY
            maximum: 0

            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -244,8 +246,6 @@ Item

            axisName: "x"
            axisMinOrMax: "max"
            allowNegativeValue: false
            allowPositiveValue: true

            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -266,8 +266,6 @@ Item

            axisName: "y"
            axisMinOrMax: "max"
            allowNegativeValue: false
            allowPositiveValue: true

            forceUpdateOnChangeFunction: forceUpdateFunction
        }
@@ -73,6 +73,8 @@ class ModelChecker(QObject, Extension):
        # Check node material shrinkage and bounding box size
        for node in self.sliceableNodes():
            node_extruder_position = node.callDecoration("getActiveExtruderPosition")
            if node_extruder_position is None:
                continue

            # This function can be triggered in the middle of a machine change, so do not proceed if the machine change
            # has not done yet.
@@ -71,7 +71,7 @@ class PerObjectSettingVisibilityHandler(UM.Settings.Models.SettingVisibilityHand

        # Add all instances that are not added, but are in visibility list
        for item in visible:
            if settings.getInstance(item) is not None: # Setting was not added already.
            if settings.getInstance(item) is None: # Setting was not added already.
                definition = self._stack.getSettingDefinition(item)
                if definition:
                    new_instance = SettingInstance(definition, settings)
@@ -49,18 +49,6 @@ Item
        visibility_handler.addSkipResetSetting(currentMeshType)
    }

    function setOverhangsMeshType()
    {
        if (infillOnlyCheckbox.checked)
        {
            setMeshType(infillMeshType)
        }
        else
        {
            setMeshType(cuttingMeshType)
        }
    }

    function setMeshType(type)
    {
        UM.ActiveTool.setProperty("MeshType", type)
@@ -140,26 +128,43 @@ Item
                verticalAlignment: Text.AlignVCenter
            }

            CheckBox

            ComboBox
            {
                id: infillOnlyCheckbox
                id: infillOnlyComboBox
                width: parent.width / 2 - UM.Theme.getSize("default_margin").width

                text: catalog.i18nc("@action:checkbox", "Infill only");
                model: ListModel
                {
                    id: infillOnlyComboBoxModel

                style: UM.Theme.styles.checkbox;
                    Component.onCompleted: {
                        append({ text: catalog.i18nc("@item:inlistbox", "Infill mesh only") })
                        append({ text: catalog.i18nc("@item:inlistbox", "Cutting mesh") })
                    }
                }

                visible: currentMeshType === infillMeshType || currentMeshType === cuttingMeshType
                onClicked: setOverhangsMeshType()


                onActivated:
                {
                    if (index == 0){
                        setMeshType(infillMeshType)
                    } else {
                        setMeshType(cuttingMeshType)
                    }
                }

                Binding
                {
                    target: infillOnlyCheckbox
                    property: "checked"
                    value: currentMeshType === infillMeshType
                    target: infillOnlyComboBox
                    property: "currentIndex"
                    value: currentMeshType === infillMeshType ? 0 : 1
                }
            }

            Column // Settings Dialog
            Column // List of selected Settings to override for the selected object
            {
                // This is to ensure that the panel is first increasing in size up to 200 and then shows a scrollbar.
                // It kinda looks ugly otherwise (big panel, no content on it)
@@ -82,6 +82,7 @@ class PerObjectSettingsTool(Tool):
            selected_object.addDecorator(SettingOverrideDecorator())
        stack = selected_object.callDecoration("getStack")

        settings_visibility_changed = False
        settings = stack.getTop()
        for property_key in ["infill_mesh", "cutting_mesh", "support_mesh", "anti_overhang_mesh"]:
            if property_key != mesh_type:
@@ -97,17 +98,20 @@ class PerObjectSettingsTool(Tool):

        for property_key in ["top_bottom_thickness", "wall_thickness"]:
            if mesh_type == "infill_mesh":
                if not settings.getInstance(property_key):
                if settings.getInstance(property_key) is None:
                    definition = stack.getSettingDefinition(property_key)
                    new_instance = SettingInstance(definition, settings)
                    new_instance.setProperty("value", 0)
                    new_instance.resetState() # Ensure that the state is not seen as a user state.
                    settings.addInstance(new_instance)
                    visible = self.visibility_handler.getVisible()
                    visible.add(property_key)
                    self.visibility_handler.setVisible(visible)
                    settings_visibility_changed = True

            elif old_mesh_type == "infill_mesh" and settings.getInstance(property_key) and settings.getProperty(property_key, "value") == 0:
                settings.removeInstance(property_key)
                settings_visibility_changed = True

        if settings_visibility_changed:
            self.visibility_handler.forceVisibilityChanged()

        self.propertyChanged.emit()
        return True
@@ -484,15 +484,53 @@ UM.Dialog
            onClicked: dialog.accept()
        }

        Cura.SecondaryButton
        Item
        {
            objectName: "postProcessingSaveAreaButton"
            visible: activeScriptsList.count > 0
            height: UM.Theme.getSize("action_button").height
            width: height
            tooltip: catalog.i18nc("@info:tooltip", "Change active post-processing scripts")
            onClicked: dialog.show()
            iconSource: "postprocessing.svg"
            fixedWidthMode: true

            Cura.SecondaryButton
            {
                height: UM.Theme.getSize("action_button").height
                tooltip:
                {
                    var tipText = catalog.i18nc("@info:tooltip", "Change active post-processing scripts.");
                    if (activeScriptsList.count > 0)
                    {
                        tipText += "<br><br>" + catalog.i18ncp("@info:tooltip",
                            "The following script is active:",
                            "The following scripts are active:",
                            activeScriptsList.count
                        ) + "<ul>";
                        for(var i = 0; i < activeScriptsList.count; i++)
                        {
                            tipText += "<li>" + manager.getScriptLabelByKey(manager.scriptList[i]) + "</li>";
                        }
                        tipText += "</ul>";
                    }
                    return tipText
                }
                toolTipContentAlignment: Cura.ToolTip.ContentAlignment.AlignLeft
                onClicked: dialog.show()
                iconSource: "postprocessing.svg"
                fixedWidthMode: false
            }

            Cura.NotificationIcon
            {
                id: activeScriptCountIcon
                visible: activeScriptsList.count > 0
                anchors
                {
                    top: parent.top
                    right: parent.right
                    rightMargin: (-0.5 * width) | 0
                    topMargin: (-0.5 * height) | 0
                }

                labelText: activeScriptsList.count
            }
        }
    }
@@ -6,14 +6,22 @@
#Authors of the 2-1 ColorMix plug-in / script:
# Written by John Hryb - john.hryb.4@gmail.com

##history / change-log:
##V1.0.0

## Uses -
## M163 - Set Mix Factor
## M164 - Save Mix - saves to T3 as a unique mix

import re #To perform the search and replace.
#history / change-log:
#V1.0.0 - Initial
#V1.1.0 -
#   additions:
#Object number - To select individual models or all when using "one at a time" print sequence
#V1.2.0
#   fixed layer heights Cura starts at 1 while G-code starts at 0
#   removed notes
#   changed Units of measurement to Units
#V1.2.1
#   Fixed mm bug when not in multiples of layer height
# Uses -
# M163 - Set Mix Factor
# M164 - Save Mix - saves to T2 as a unique mix

import re #To perform the search and replace.
from ..Script import Script

class ColorMix(Script):
@@ -22,20 +30,28 @@ class ColorMix(Script):

    def getSettingDataString(self):
        return """{
            "name":"ColorMix 2-1",
            "name":"ColorMix 2-1 V1.2.1",
            "key":"ColorMix 2-1",
            "metadata": {},
            "version": 2,
            "settings":
            {
                "measurement_units":
                "units_of_measurement":
                {
                    "label": "Units of measurement",
                    "label": "Units",
                    "description": "Input value as mm or layer number.",
                    "type": "enum",
                    "options": {"mm":"mm","layer":"Layer"},
                    "default_value": "layer"
                },
                "object_number":
                {
                    "label": "Object Number",
                    "description": "Select model to apply to for print one at a time print sequence. 0 = everything",
                    "type": "int",
                    "default_value": 0,
                    "minimum_value": "0"
                },
                "start_height":
                {
                    "label": "Start Height",
@@ -59,10 +75,10 @@ class ColorMix(Script):
                    "type": "float",
                    "default_value": 0,
                    "minimum_value": "0",
                    "minimum_value_warning": "0.1",
                    "enabled": "c_behavior == 'blend_value'"
                    "minimum_value_warning": "start_height",
                    "enabled": "behavior == 'blend_value'"
                },
                "mix_start_ratio":
                "mix_start":
                {
                    "label": "Start mix ratio",
                    "description": "First extruder percentage 0-100",
@@ -72,7 +88,7 @@ class ColorMix(Script):
                    "minimum_value_warning": "0",
                    "maximum_value_warning": "100"
                },
                "mix_finish_ratio":
                "mix_finish":
                {
                    "label": "End mix ratio",
                    "description": "First extruder percentage 0-100 to finish blend",
@@ -81,14 +97,7 @@ class ColorMix(Script):
                    "minimum_value": "0",
                    "minimum_value_warning": "0",
                    "maximum_value_warning": "100",
                    "enabled": "c_behavior == 'blend_value'"
                },
                "notes":
                {
                    "label": "Notes",
                    "description": "A spot to put a note",
                    "type": "str",
                    "default_value": ""
                    "enabled": "behavior == 'blend_value'"
                }
            }
        }"""
@@ -112,52 +121,53 @@ class ColorMix(Script):
        return default

    def execute(self, data):
        #get user variables
        firstHeight = 0.0
        secondHeight = 0.0
        firstMix = 0.0
        SecondMix = 0.0

        firstHeight = self.getSettingValueByKey("start_height")
        secondHeight = self.getSettingValueByKey("finish_height")
        firstMix = self.getSettingValueByKey("mix_start_ratio")
        SecondMix = self.getSettingValueByKey("mix_finish_ratio")

        #locals
        layer = 0

        firstMix = self.getSettingValueByKey("mix_start")
        secondMix = self.getSettingValueByKey("mix_finish")
        modelOfInterest = self.getSettingValueByKey("object_number")

        #get layer height
        layerHeight = .2
        layerHeight = 0
        for active_layer in data:
            lines = active_layer.split("\n")
            for line in lines:
                if ";Layer height: " in line:
                    layerHeight = self.getValue(line, ";Layer height: ", layerHeight)
                    break
            if layerHeight != 0:
                break

        #default layerHeight if not found
        if layerHeight == 0:
            layerHeight = .2

        #get layers to use
        startLayer = 0
        endLayer = 0
        if self.getSettingValueByKey("measurement_units") == "mm":
            if firstHeight == 0:
                startLayer = 0
            else:
                startLayer = firstHeight / layerHeight
            if secondHeight == 0:
                endLayer = 0
            else:
                endLayer = secondHeight / layerHeight
        else: #layer height
            startLayer = firstHeight
            endLayer = secondHeight
        if self.getSettingValueByKey("units_of_measurement") == "mm":
            startLayer = round(firstHeight / layerHeight)
            endLayer = round(secondHeight / layerHeight)
        else: #layer height shifts down by one for g-code
            if firstHeight <= 0:
                firstHeight = 1
            if secondHeight <= 0:
                secondHeight = 1
            startLayer = firstHeight - 1
            endLayer = secondHeight - 1
        #see if one-shot
        if self.getSettingValueByKey("behavior") == "fixed_value":
            endLayer = startLayer
            firstExtruderIncrements = 0
        else: #blend
            firstExtruderIncrements = (SecondMix - firstMix) / (endLayer - startLayer)
            firstExtruderIncrements = (secondMix - firstMix) / (endLayer - startLayer)
        firstExtruderValue = 0
        index = 0

        #start scanning
        layer = -1
        modelNumber = 0
        for active_layer in data:
            modified_gcode = ""
            lineIndex = 0;
@@ -169,22 +179,30 @@ class ColorMix(Script):
                # find current layer
                if ";LAYER:" in line:
                    layer = self.getValue(line, ";LAYER:", layer)
                    if (layer >= startLayer) and (layer <= endLayer): #find layers of interest
                        if lines[lineIndex + 4] == "T2": #check if needing to delete old data
                            del lines[(lineIndex + 1):(lineIndex + 5)]
                        firstExtruderValue = int(((layer - startLayer) * firstExtruderIncrements) + firstMix)
                        if firstExtruderValue == 100:
                            modified_gcode += "M163 S0 P1\n"
                            modified_gcode += "M163 S1 P0\n"
                        elif firstExtruderValue == 0:
                            modified_gcode += "M163 S0 P0\n"
                            modified_gcode += "M163 S1 P1\n"
                        else:
                            modified_gcode += "M163 S0 P0.{:02d}\n".format(firstExtruderValue)
                            modified_gcode += "M163 S1 P0.{:02d}\n".format(100 - firstExtruderValue)
                        modified_gcode += "M164 S2\n"
                        modified_gcode += "T2\n"
                    #get model number by layer 0 repeats
                    if layer == 0:
                        modelNumber = modelNumber + 1
                    #search for layers to manipulate
                    if (layer >= startLayer) and (layer <= endLayer):
                        #make sure correct model is selected
                        if (modelOfInterest == 0) or (modelOfInterest == modelNumber):
                            #Delete old data if required
                            if lines[lineIndex + 4] == "T2":
                                del lines[(lineIndex + 1):(lineIndex + 5)]
                            #add mixing commands
                            firstExtruderValue = int(((layer - startLayer) * firstExtruderIncrements) + firstMix)
                            if firstExtruderValue == 100:
                                modified_gcode += "M163 S0 P1\n"
                                modified_gcode += "M163 S1 P0\n"
                            elif firstExtruderValue == 0:
                                modified_gcode += "M163 S0 P0\n"
                                modified_gcode += "M163 S1 P1\n"
                            else:
                                modified_gcode += "M163 S0 P0.{:02d}\n".format(firstExtruderValue)
                                modified_gcode += "M163 S1 P0.{:02d}\n".format(100 - firstExtruderValue)
                            modified_gcode += "M164 S2\n"
                            modified_gcode += "T2\n"
                lineIndex += 1 #for deleting index
            data[index] = modified_gcode
            index += 1
        return data
        return data
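The core of the script is the block that linearly interpolates the first extruder's share between the start and end layers and writes M163 (set mix factor) plus M164 (save mix) lines. A cut-down sketch of just that arithmetic, runnable outside of Cura:

    def mixing_commands(layer: int, start_layer: int, end_layer: int,
                        first_mix: float, second_mix: float) -> str:
        # Linearly interpolate the first extruder's percentage over the layer range.
        if end_layer == start_layer:
            increments = 0.0
        else:
            increments = (second_mix - first_mix) / (end_layer - start_layer)
        first_extruder_value = int((layer - start_layer) * increments + first_mix)

        gcode = ""
        if first_extruder_value == 100:
            gcode += "M163 S0 P1\nM163 S1 P0\n"
        elif first_extruder_value == 0:
            gcode += "M163 S0 P0\nM163 S1 P1\n"
        else:
            gcode += "M163 S0 P0.{:02d}\n".format(first_extruder_value)
            gcode += "M163 S1 P0.{:02d}\n".format(100 - first_extruder_value)
        gcode += "M164 S2\nT2\n"
        return gcode

    # Halfway through a blend from 100% to 0% first extruder:
    print(mixing_commands(layer = 5, start_layer = 0, end_layer = 10, first_mix = 100, second_mix = 0))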
@@ -1,4 +1,4 @@
//Copyright (c) 2019 Ultimaker B.V.
//Copyright (c) 2020 Ultimaker B.V.
//Cura is released under the terms of the LGPLv3 or higher.

import QtQuick 2.4
@@ -42,7 +42,7 @@ Item
            rightMargin: UM.Theme.getSize("wide_margin").width
        }
        height: UM.Theme.getSize("toolbox_footer_button").height
        text: catalog.i18nc("@info:button", "Quit Cura")
        text: catalog.i18nc("@info:button, %1 is the application name", "Quit %1").arg(CuraApplication.applicationDisplayName)
        onClicked: toolbox.restart()
    }
@@ -1,6 +1,7 @@
from typing import Union

from cura import ApplicationMetadata, UltimakerCloudAuthentication
from cura import ApplicationMetadata
from cura.UltimakerCloud import UltimakerCloudAuthentication


class CloudApiModel:
@@ -1,8 +1,9 @@
from UM.Logger import Logger
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
from cura.CuraApplication import CuraApplication
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
from ..CloudApiModel import CloudApiModel
from ..UltimakerCloudScope import UltimakerCloudScope


class CloudApiClient:

@@ -26,7 +27,7 @@ class CloudApiClient:
        if self.__instance is not None:
            raise RuntimeError("This is a Singleton. use getInstance()")

        self._scope = UltimakerCloudScope(app) # type: UltimakerCloudScope
        self._scope = JsonDecoratorScope(UltimakerCloudScope(app)) # type: JsonDecoratorScope

        app.getPackageManager().packageInstalled.connect(self._onPackageInstalled)
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
# Cura is released under the terms of the LGPLv3 or higher.
|
||||
|
||||
import json
|
||||
from typing import List, Dict, Any
|
||||
from typing import Optional
|
||||
|
||||
from PyQt5.QtCore import QObject
|
||||
|
|
@ -11,12 +12,12 @@ from UM import i18nCatalog
|
|||
from UM.Logger import Logger
|
||||
from UM.Message import Message
|
||||
from UM.Signal import Signal
|
||||
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
|
||||
from cura.CuraApplication import CuraApplication, ApplicationMetadata
|
||||
from ..CloudApiModel import CloudApiModel
|
||||
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
|
||||
from .SubscribedPackagesModel import SubscribedPackagesModel
|
||||
from ..UltimakerCloudScope import UltimakerCloudScope
|
||||
from ..CloudApiModel import CloudApiModel
|
||||
|
||||
from typing import List, Dict, Any
|
||||
|
||||
class CloudPackageChecker(QObject):
|
||||
def __init__(self, application: CuraApplication) -> None:
|
||||
|
|
@ -24,7 +25,7 @@ class CloudPackageChecker(QObject):
|
|||
|
||||
self.discrepancies = Signal() # Emits SubscribedPackagesModel
|
||||
self._application = application # type: CuraApplication
|
||||
self._scope = UltimakerCloudScope(application)
|
||||
self._scope = JsonDecoratorScope(UltimakerCloudScope(application))
|
||||
self._model = SubscribedPackagesModel()
|
||||
self._message = None # type: Optional[Message]
|
||||
|
||||
|
|
@ -111,4 +112,4 @@ class CloudPackageChecker(QObject):
|
|||
|
||||
def _onSyncButtonClicked(self, sync_message: Message, sync_message_action: str) -> None:
|
||||
sync_message.hide()
|
||||
self.discrepancies.emit(self._model)
|
||||
self.discrepancies.emit(self._model)
|
||||
|
|
|
|||
|
|
@@ -12,8 +12,8 @@ from UM.Message import Message
from UM.Signal import Signal
from UM.TaskManagement.HttpRequestManager import HttpRequestManager
from cura.CuraApplication import CuraApplication
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
from .SubscribedPackagesModel import SubscribedPackagesModel
from ..UltimakerCloudScope import UltimakerCloudScope


## Downloads a set of packages from the Ultimaker Cloud Marketplace
@@ -9,22 +9,20 @@ from typing import cast, Any, Dict, List, Set, TYPE_CHECKING, Tuple, Optional, U
from PyQt5.QtCore import QObject, pyqtProperty, pyqtSignal, pyqtSlot
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest, QNetworkReply

from UM.Extension import Extension
from UM.Logger import Logger
from UM.PluginRegistry import PluginRegistry
from UM.Extension import Extension
from UM.i18n import i18nCatalog
from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
from UM.Version import Version

from UM.i18n import i18nCatalog
from cura import ApplicationMetadata

from cura.CuraApplication import CuraApplication
from cura.Machines.ContainerTree import ContainerTree

from .CloudApiModel import CloudApiModel
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
from .AuthorsModel import AuthorsModel
from .CloudApiModel import CloudApiModel
from .CloudSync.LicenseModel import LicenseModel
from .PackagesModel import PackagesModel
from .UltimakerCloudScope import UltimakerCloudScope

if TYPE_CHECKING:
from UM.TaskManagement.HttpRequestData import HttpRequestData

@@ -54,7 +52,8 @@ class Toolbox(QObject, Extension):
self._download_request_data = None # type: Optional[HttpRequestData]
self._download_progress = 0 # type: float
self._is_downloading = False # type: bool
self._scope = UltimakerCloudScope(application) # type: UltimakerCloudScope
self._cloud_scope = UltimakerCloudScope(application) # type: UltimakerCloudScope
self._json_scope = JsonDecoratorScope(self._cloud_scope) # type: JsonDecoratorScope

self._request_urls = {} # type: Dict[str, str]
self._to_update = [] # type: List[str] # Package_ids that are waiting to be updated

@@ -151,7 +150,7 @@ class Toolbox(QObject, Extension):
url = "{base_url}/packages/{package_id}/ratings".format(base_url = CloudApiModel.api_url, package_id = package_id)
data = "{\"data\": {\"cura_version\": \"%s\", \"rating\": %i}}" % (Version(self._application.getVersion()), rating)

self._application.getHttpRequestManager().put(url, data = data.encode(), scope = self._scope)
self._application.getHttpRequestManager().put(url, data = data.encode(), scope = self._json_scope)

def getLicenseDialogPluginFileLocation(self) -> str:
return self._license_dialog_plugin_file_location

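The rating payload in the hunk above is assembled as a raw string; for readability, the same JSON structure can be expressed with the standard library (the version and rating values below are made-up examples, not taken from this diff):

import json

# Illustrative only: the committed code formats the string by hand, as shown above.
payload = json.dumps({"data": {"cura_version": "4.6.0", "rating": 4}})
# payload == '{"data": {"cura_version": "4.6.0", "rating": 4}}'
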
@@ -541,7 +540,7 @@ class Toolbox(QObject, Extension):
self._application.getHttpRequestManager().get(url,
callback = callback,
error_callback = error_callback,
scope=self._scope)
scope=self._json_scope)

@pyqtSlot(str)
def startDownload(self, url: str) -> None:

@@ -554,7 +553,7 @@ class Toolbox(QObject, Extension):
callback = callback,
error_callback = error_callback,
download_progress_callback = download_progress_callback,
scope=self._cloud_scope
)

self._download_request_data = request_data

@@ -1,28 +0,0 @@
from PyQt5.QtNetwork import QNetworkRequest

from UM.Logger import Logger
from UM.TaskManagement.HttpRequestScope import DefaultUserAgentScope
from cura.API import Account
from cura.CuraApplication import CuraApplication


## Add a Authorization header to the request for Ultimaker Cloud Api requests.
# When the user is not logged in or a token is not available, a warning will be logged
# Also add the user agent headers (see DefaultUserAgentScope)
class UltimakerCloudScope(DefaultUserAgentScope):
def __init__(self, application: CuraApplication):
super().__init__(application)
api = application.getCuraAPI()
self._account = api.account # type: Account

def request_hook(self, request: QNetworkRequest):
super().request_hook(request)
token = self._account.accessToken
if not self._account.isLoggedIn or token is None:
Logger.warning("Cannot add authorization to Cloud Api request")
return

header_dict = {
"Authorization": "Bearer {}".format(token)
}
self.add_headers(request, header_dict)

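The module deleted above is relocated to cura/UltimakerCloud/UltimakerCloudScope.py, which is why the import paths change in the other hunks. A scope like this is handed to the HTTP request manager so every outgoing cloud call carries the Authorization and User-Agent headers; a minimal usage sketch, assuming the new import path and the get() call pattern already shown in this diff (the URL and callbacks are placeholders):

# Sketch, not production code: issue an authenticated GET against a cloud endpoint.
from cura.CuraApplication import CuraApplication
from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope

app = CuraApplication.getInstance()
scope = UltimakerCloudScope(app)  # adds "Authorization: Bearer <token>" when logged in

app.getHttpRequestManager().get(
    "https://api.example.com/some/endpoint",        # placeholder URL, not from this diff
    callback = lambda reply: None,                   # handle the QNetworkReply here
    error_callback = lambda reply, error: None,      # handle network errors here
    scope = scope,
)
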
@@ -108,7 +108,7 @@ class TrimeshReader(MeshReader):
mesh.merge_vertices()
mesh.remove_unreferenced_vertices()
mesh.fix_normals()
mesh_data = self._toMeshData(mesh)
mesh_data = self._toMeshData(mesh, file_name)

file_base_name = os.path.basename(file_name)
new_node = CuraSceneNode()

@@ -133,9 +133,10 @@ class TrimeshReader(MeshReader):
## Converts a Trimesh to Uranium's MeshData.
# \param tri_node A Trimesh containing the contents of a file that was
# just read.
# \param file_name The full original filename used to watch for changes
# \return Mesh data from the Trimesh in a way that Uranium can understand
# it.
def _toMeshData(self, tri_node: trimesh.base.Trimesh) -> MeshData:
def _toMeshData(self, tri_node: trimesh.base.Trimesh, file_name: str = "") -> MeshData:
tri_faces = tri_node.faces
tri_vertices = tri_node.vertices

@@ -157,5 +158,5 @@ class TrimeshReader(MeshReader):
indices = numpy.asarray(indices, dtype = numpy.int32)
normals = calculateNormalsFromIndexedVertices(vertices, indices, face_count)

mesh_data = MeshData(vertices = vertices, indices = indices, normals = normals)
mesh_data = MeshData(vertices = vertices, indices = indices, normals = normals, file_name = file_name)
return mesh_data

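_toMeshData() flattens the Trimesh into per-face-corner vertex and index arrays before constructing the MeshData above, so each face can carry its own normal. A vectorized stand-alone sketch of that flattening idea (names are illustrative, not the Cura implementation, which builds the lists in a Python loop):

import numpy
import trimesh

def trimesh_to_arrays(tri_mesh: trimesh.base.Trimesh):
    # Duplicate each vertex once per face corner it appears in.
    faces = numpy.asarray(tri_mesh.faces, dtype = numpy.int32)
    vertices = numpy.asarray(tri_mesh.vertices, dtype = numpy.float32)[faces].reshape(-1, 3)
    # With duplicated vertices, the index buffer is simply 0, 1, 2, ... grouped per triangle.
    indices = numpy.arange(len(vertices), dtype = numpy.int32).reshape(-1, 3)
    return vertices, indices
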
@@ -9,18 +9,16 @@ from PyQt5.QtCore import QUrl
from PyQt5.QtNetwork import QNetworkRequest, QNetworkReply, QNetworkAccessManager

from UM.Logger import Logger
from cura import UltimakerCloudAuthentication
from cura.API import Account

from cura.UltimakerCloud import UltimakerCloudAuthentication
from .ToolPathUploader import ToolPathUploader
from ..Models.BaseModel import BaseModel
from ..Models.Http.CloudClusterResponse import CloudClusterResponse
from ..Models.Http.CloudError import CloudError
from ..Models.Http.CloudClusterStatus import CloudClusterStatus
from ..Models.Http.CloudError import CloudError
from ..Models.Http.CloudPrintJobResponse import CloudPrintJobResponse
from ..Models.Http.CloudPrintJobUploadRequest import CloudPrintJobUploadRequest
from ..Models.Http.CloudPrintResponse import CloudPrintResponse
from ..Models.Http.CloudPrintJobResponse import CloudPrintJobResponse


## The generic type variable used to document the methods below.
CloudApiClientModel = TypeVar("CloudApiClientModel", bound=BaseModel)

@@ -1,6 +1,7 @@
# Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from time import time
import os
from typing import List, Optional, cast

from PyQt5.QtCore import QObject, QUrl, pyqtProperty, pyqtSignal, pyqtSlot

@@ -191,8 +192,9 @@ class CloudOutputDevice(UltimakerNetworkedPrinterOutputDevice):
def _onPrintJobCreated(self, job: ExportFileJob) -> None:
output = job.getOutput()
self._tool_path = output # store the tool path to prevent re-uploading when printing the same file again
file_name = job.getFileName()
request = CloudPrintJobUploadRequest(
job_name=job.getFileName(),
job_name=os.path.splitext(file_name)[0],
file_size=len(output),
content_type=job.getMimeType(),
)

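The new job_name drops the file extension before the upload request is built; a quick standard-library illustration (the file name below is a made-up example):

import os

file_name = "example_model.ufp"  # hypothetical file name
job_name = os.path.splitext(file_name)[0]
# job_name == "example_model"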