├── .dockerignore ├── .github └── workflows │ └── build_on_push_pr.yml ├── .gitignore ├── .gitmodules ├── CMakeLists.txt ├── CPackConfig.cmake ├── Charon ├── Client │ ├── DBusInterface.py │ ├── Request.py │ ├── __init__.py │ ├── test_glib.py │ └── test_qt.py ├── FileInterface.py ├── OpenMode.py ├── ReadOnlyError.py ├── Service │ ├── FileService.py │ ├── RequestQueue.py │ ├── __init__.py │ └── main.py ├── VirtualFile.py ├── WriteOnlyError.py ├── __init__.py └── filetypes │ ├── GCodeFile.py │ ├── GCodeGzFile.py │ ├── GCodeSocket.py │ ├── OpenPackagingConvention.py │ ├── UltimakerFormatPackage.py │ └── __init__.py ├── LICENSE ├── README.md ├── build.sh ├── build_for_ultimaker.sh ├── charon_requirements.txt ├── coverage.ini ├── docker_env ├── Dockerfile ├── buildenv_check.sh └── make_docker.sh ├── docs ├── class_diagram.plantuml ├── class_diagram.png ├── library.md ├── service.md ├── service_sequence.plantuml ├── service_sequence.png └── ultimaker_format_package.md ├── mypy.ini ├── pycodestyle.ini ├── pytest.ini ├── release.sh ├── requirements-testing.txt ├── run_all_tests.sh ├── run_complexity_analysis.sh ├── run_dead_code_analysis.sh ├── run_mypy.sh ├── run_pytest.sh ├── run_shellcheck.sh ├── run_style_analysis.sh ├── service ├── charon.service ├── nl.ultimaker.charon.conf └── postinst ├── setup.py └── tests ├── __init__.py └── filetypes ├── TestGCodeFile.py ├── TestGCodeFormat.py ├── TestOpenPackagingConvention.py ├── __init__.py └── resources ├── empty.opc ├── hello.opc ├── um3.gcode └── um3.gcode.gz /.dockerignore: -------------------------------------------------------------------------------- 1 | # Items that don't need to be in a Docker image. 2 | # Anything not used by the build system should go here. 
3 | Dockerfile 4 | Dockerfile.local 5 | Jenkinsfile 6 | .dockerignore 7 | .gitignore 8 | .gitmodules 9 | .git 10 | .coverage 11 | .build.sh 12 | README.md -------------------------------------------------------------------------------- /.github/workflows/build_on_push_pr.yml: -------------------------------------------------------------------------------- 1 | name: Build package on Push or Pull Request 2 | 3 | on: 4 | push: 5 | pull_request: 6 | jobs: 7 | Prepare: 8 | name: 'Prepare' 9 | uses: Ultimaker/embedded-workflows/.github/workflows/prepare_env.yml@main 10 | with: 11 | BUILD_DOCKER_CACHE: true 12 | secrets: inherit 13 | 14 | Shellcheck: 15 | name: 'Test' 16 | needs: Prepare 17 | uses: Ultimaker/embedded-workflows/.github/workflows/shellcheck.yml@main 18 | secrets: inherit 19 | 20 | Flake8: 21 | name: 'Test' 22 | needs: Prepare 23 | uses: Ultimaker/python-quality-control/.github/workflows/flake8.yml@master 24 | with: 25 | PARENT_BRANCH: 'main' 26 | secrets: inherit 27 | 28 | MyPy: 29 | name: 'Test' 30 | needs: Prepare 31 | uses: Ultimaker/python-quality-control/.github/workflows/mypy.yml@master 32 | with: 33 | PARENT_BRANCH: 'main' 34 | secrets: inherit 35 | 36 | PyCodeStyle: 37 | name: 'Test' 38 | needs: Prepare 39 | uses: Ultimaker/python-quality-control/.github/workflows/pycodestyle.yml@master 40 | with: 41 | PARENT_BRANCH: 'main' 42 | secrets: inherit 43 | 44 | Vulture: 45 | name: 'Test' 46 | needs: Prepare 47 | uses: Ultimaker/python-quality-control/.github/workflows/vulture.yml@master 48 | secrets: inherit 49 | 50 | PyTest: 51 | name: 'Test' 52 | needs: Prepare 53 | uses: Ultimaker/python-quality-control/.github/workflows/pytest.yml@master 54 | secrets: inherit 55 | 56 | Build: 57 | name: 'Build' 58 | needs: Prepare 59 | uses: Ultimaker/embedded-workflows/.github/workflows/build.yml@main 60 | with: 61 | RELEASE_VERSION: ${{ needs.Prepare.outputs.RELEASE_VERSION }} 62 | secrets: inherit 63 | 64 | Release_Package: 65 | name: 'Release' 66 | needs: [Prepare, Shellcheck,
Build, Flake8, MyPy, PyCodeStyle, Vulture, PyTest] 67 | if: ${{ (success() && needs.Prepare.outputs.RELEASE_REPO != 'none') || 68 | (failure() && needs.Build.result == 'success' && needs.Prepare.outputs.RELEASE_REPO == 'packages-dev') }} 69 | uses: Ultimaker/embedded-workflows/.github/workflows/release_pkg.yml@main 70 | with: 71 | RELEASE_REPO: ${{ needs.Prepare.outputs.RELEASE_REPO }} 72 | secrets: inherit 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | .pytest_cache 6 | 7 | # Build folder 8 | _build_armhf/* 9 | 10 | # Ignore MyTest data 11 | .pytest_cache/ 12 | pstats/ 13 | 14 | # VIM temp files 15 | *.swp 16 | 17 | # C extensions 18 | *.so 19 | 20 | # Distribution / packaging 21 | .Python 22 | env/ 23 | build/ 24 | develop-eggs/ 25 | dist/ 26 | downloads/ 27 | eggs/ 28 | .eggs/ 29 | lib/ 30 | lib64/ 31 | parts/ 32 | sdist/ 33 | var/ 34 | wheels/ 35 | *.egg-info/ 36 | .installed.cfg 37 | *.egg 38 | *.deb 39 | 40 | # PyInstaller 41 | # Usually these files are written by a python script from a template 42 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
43 | *.manifest 44 | *.spec 45 | 46 | # Installer logs 47 | pip-log.txt 48 | pip-delete-this-directory.txt 49 | 50 | # Unit test / coverage reports 51 | htmlcov/ 52 | cov_report/ 53 | .tox/ 54 | .coverage 55 | .coverage.* 56 | .cache 57 | nosetests.xml 58 | coverage.xml 59 | *.cover 60 | .hypothesis/ 61 | .pytest_cache/ 62 | 63 | # Translations 64 | *.mo 65 | *.pot 66 | 67 | # Django stuff: 68 | *.log 69 | local_settings.py 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # celery beat schedule file 91 | celerybeat-schedule 92 | 93 | # SageMath parsed files 94 | *.sage.py 95 | 96 | # dotenv 97 | .env 98 | .idea 99 | 100 | # virtualenv 101 | .venv 102 | venv/ 103 | ENV/ 104 | 105 | # Spyder project settings 106 | .spyderproject 107 | .spyproject 108 | 109 | # Rope project settings 110 | .ropeproject 111 | 112 | # mkdocs documentation 113 | /site 114 | 115 | # mypy 116 | .mypy_cache/ 117 | 118 | # KDevelop project files 119 | *.kdev4 120 | 121 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "ci"] 2 | path = ci 3 | url = git@github.com:Ultimaker/python-quality-control.git 4 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(charon NONE) 2 | cmake_minimum_required(VERSION 3.6) #Tested only with 3.6.1 and 3.9.1. 3 | 4 | # FIXME: Remove the code for CMake <3.12 once we have switched over completely. 5 | # FindPython3 is a new module since CMake 3.12. It deprecates FindPythonInterp and FindPythonLibs. 
6 | if(${CMAKE_VERSION} VERSION_LESS 3.12) 7 | # Use FindPythonInterp and FindPythonLibs for CMake <3.12 8 | find_package(PythonInterp 3.4 REQUIRED) 9 | 10 | set(Python3_EXECUTABLE ${PYTHON_EXECUTABLE}) 11 | set(Python3_VERSION_MAJOR ${PYTHON_VERSION_MAJOR}) 12 | set(Python3_VERSION_MINOR ${PYTHON_VERSION_MINOR}) 13 | else() 14 | # Use FindPython3 for CMake >=3.12 15 | find_package(Python3 ${CURA_PYTHON_VERSION} EXACT REQUIRED COMPONENTS Interpreter) 16 | endif() 17 | 18 | option(INSTALL_SERVICE "Install the Charon DBus-service" ON) 19 | option(INSTALL_CLIENT "Install the Charon Client library" ON) 20 | 21 | if(EXISTS /etc/debian_version) 22 | set(CHARON_INSTALL_PATH lib${LIB_SUFFIX}/python${Python3_VERSION_MAJOR}/dist-packages) 23 | else() 24 | set(CHARON_INSTALL_PATH lib${LIB_SUFFIX}/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}/site-packages) 25 | endif() 26 | 27 | set(_excludes PATTERN __pycache__ EXCLUDE) 28 | if(NOT INSTALL_SERVICE) 29 | set(_excludes ${_excludes} PATTERN "Service" EXCLUDE) 30 | endif() 31 | if(NOT INSTALL_CLIENT) 32 | set(_excludes ${_excludes} PATTERN "Client" EXCLUDE) 33 | endif() 34 | 35 | install(DIRECTORY Charon DESTINATION ${CHARON_INSTALL_PATH} ${_excludes}) 36 | 37 | if(INSTALL_SERVICE) 38 | install(FILES service/charon.service DESTINATION lib/systemd/system) 39 | install(FILES service/nl.ultimaker.charon.conf DESTINATION share/dbus-1/system.d) 40 | endif() 41 | 42 | include(CPackConfig.cmake) 43 | 44 | ####################Loading the unit tests.################### 45 | enable_testing() 46 | 47 | include(CMakeParseArguments) 48 | 49 | if(NOT _PYTHONPATH) 50 | set(_PYTHONPATH ${CMAKE_SOURCE_DIR}) 51 | endif() 52 | if(WIN32) 53 | string(REPLACE "|" "\\;" _PYTHONPATH ${_PYTHONPATH}) 54 | set(_PYTHONPATH "${_PYTHONPATH}\\;$ENV{PYTHONPATH}") 55 | else() 56 | string(REPLACE "|" ":" _PYTHONPATH ${_PYTHONPATH}) 57 | set(_PYTHONPATH "${_PYTHONPATH}:$ENV{PYTHONPATH}") 58 | endif() 59 | 60 | add_test( 61 | NAME pytest-main 62 | 
COMMAND ${Python3_EXECUTABLE} -m pytest --junitxml=${CMAKE_BINARY_DIR}/junit-pytest-main.xml ${CMAKE_SOURCE_DIR}/tests 63 | ) 64 | set_tests_properties(pytest-main PROPERTIES ENVIRONMENT LANG=C) 65 | set_tests_properties(pytest-main PROPERTIES ENVIRONMENT "PYTHONPATH=${_PYTHONPATH}") 66 | -------------------------------------------------------------------------------- /CPackConfig.cmake: -------------------------------------------------------------------------------- 1 | set(CPACK_PACKAGE_VENDOR "Ultimaker") 2 | set(CPACK_PACKAGE_CONTACT "Ultimaker ") 3 | set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "Library to read and write 3D printer related files including G-Code and Ultimaker File Package.") 4 | set(CPACK_PACKAGE_VERSION_MAJOR 1) 5 | set(CPACK_PACKAGE_VERSION_MINOR 0) 6 | set(CPACK_PACKAGE_VERSION_PATCH 0) 7 | set(CPACK_GENERATOR "DEB") 8 | 9 | set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE all) 10 | 11 | set(DEB_DEPENDS 12 | "python3 (>= 3.4.2)" 13 | "python3-dbus (>= 1.2.0)" 14 | "dbus (>= 1.8.0)" 15 | ) 16 | string(REPLACE ";" "," DEB_DEPENDS "${DEB_DEPENDS}") 17 | set(CPACK_DEBIAN_PACKAGE_DEPENDS ${DEB_DEPENDS}) 18 | set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${CMAKE_CURRENT_SOURCE_DIR}/service/postinst") 19 | 20 | include(CPack) 21 | -------------------------------------------------------------------------------- /Charon/Client/DBusInterface.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | from typing import Callable, Optional, Union, Any 4 | 5 | # We want to use either dbus-python or QtDBus for handling DBus. 6 | # We first need to try importing the module, if that fails we know 7 | # there is no chance of using Qt in the first place. If it succeeds, 8 | # we still may not be using Qt for the main loop, but this check is 9 | # done at runtime. 
10 | _has_qt = False 11 | try: 12 | from PyQt5.QtCore import QCoreApplication, QObject, pyqtSlot 13 | from PyQt5.QtDBus import QDBusConnection, QDBusMessage, QDBusReply, QDBusInterface, QDBusPendingCallWatcher 14 | _has_qt = True 15 | except ImportError: 16 | pass 17 | 18 | # Always also try to import dbus-python, since we need to determine things 19 | # at runtime. 20 | try: 21 | import dbus 22 | import dbus.mainloop.glib 23 | from gi.repository import GLib 24 | except ImportError: 25 | if not _has_qt: 26 | raise ImportError("Either QtDBus or dbus-python should be available!") 27 | 28 | GLib.threads_init() 29 | dbus.mainloop.glib.threads_init() 30 | 31 | log = logging.getLogger(__name__) 32 | 33 | 34 | ## Provides a wrapper around dbus-python or QtDBus to make DBus calls 35 | # 36 | # Since signals and async method calls are pretty tightly linked to the main 37 | # loop implementation, we try to use the DBus implementation that matches with 38 | # the main loop. This class abstracts those details away. 39 | # 40 | # There are two levels of checks, the first is an import check listed above. The 41 | # second is a runtime check to see if there is a Qt main loop. If both of those 42 | # pass, we use QtDBus. If it fails, we use dbus-python. 43 | class DBusInterface: 44 | # Define default paths that can be used. 45 | DefaultServicePath = "nl.ultimaker.charon" 46 | DefaultObjectPath = "/nl/ultimaker/charon" 47 | DefaultInterface = "nl.ultimaker.charon" 48 | 49 | ## Make a synchronous call to a DBus method. 50 | # 51 | # \param method_name The name of the method to call. 52 | # \param signature The method's argument signature. 53 | # \param args Arguments to pass to the DBus method. 54 | # 55 | # The following can only be used as keyword arguments. They default to the 56 | # Default* constants defined in this class. 57 | # 58 | # \param service_path The path to the service to call the method on. 
59 | # \param object_path The object path of the service to call the method on. 60 | # \param interface The interface name of the method to call. 61 | @classmethod 62 | def callMethod(cls, method_name: str, signature: str, *args, service_path: str = DefaultServicePath, object_path: str = DefaultObjectPath, interface: str = DefaultInterface) -> Any: 63 | cls.__ensureDBusSetup() 64 | assert cls.__connection is not None 65 | 66 | if cls.__use_qt: 67 | message = QDBusMessage.createMethodCall(service_path, object_path, interface, method_name) 68 | message.setArguments(args) 69 | result = QDBusReply(cls.__connection.call(message)) 70 | if result.isValid(): 71 | return result.value() 72 | else: 73 | log.warning("Did not receive a valid reply for method call %s", method_name) 74 | log.warning(result.error().message()) 75 | return None 76 | 77 | else: 78 | return cls.__connection.call_blocking(service_path, object_path, interface, method_name, signature, args) 79 | 80 | ## Make an asynchronous call to a DBus method. 81 | # 82 | # \param method_name The name of the method to call. 83 | # \param success_callback The Callable to call if the method call was successful. 84 | # \param error_callback The Callable to call if the method call was unsuccessful. 85 | # \param signature The method's argument signature. 86 | # \param args Arguments to pass to the DBus method. 87 | # 88 | # The following can only be used as keyword arguments. They default to the 89 | # Default* constants defined in this class. 90 | # 91 | # \param service_path The path to the service to call the method on. 92 | # \param object_path The object path of the service to call the method on. 93 | # \param interface The interface name of the method to call. 
94 | @classmethod 95 | def callAsync(cls, method_name: str, success_callback: Callable[..., None], error_callback: Callable[..., None], signature: str, *args, service_path: str = DefaultServicePath, object_path: str = DefaultObjectPath, interface: str = DefaultInterface) -> None: 96 | cls.__ensureDBusSetup() 97 | assert cls.__connection is not None 98 | 99 | if cls.__use_qt: 100 | assert cls.__signal_forwarder is not None 101 | 102 | message = QDBusMessage.createMethodCall(service_path, object_path, interface, method_name) 103 | message.setArguments(args) 104 | cls.__signal_forwarder.asyncCall(message, success_callback, error_callback) 105 | else: 106 | cls.__connection.call_async(service_path, object_path, interface, method_name, signature, args, success_callback, error_callback) 107 | 108 | ## Connect to a DBus signal. 109 | # 110 | # \param signal_name The name of the signal to connect to. 111 | # \param callback The callable to call when the signal is received. 112 | # 113 | # The following can only be used as keyword arguments. They default to the 114 | # Default* constants defined in this class. 115 | # 116 | # \param service_path The path to the service to call the method on. 117 | # \param object_path The object path of the service to call the method on. 118 | # \param interface The interface name of the method to call. 
119 | @classmethod 120 | def connectSignal(cls, signal_name: str, callback: Callable[..., None], *, service_path: str = DefaultServicePath, object_path: str = DefaultObjectPath, interface: str = DefaultInterface) -> bool: 121 | cls.__ensureDBusSetup() 122 | 123 | if cls.__use_qt: 124 | assert cls.__signal_forwarder is not None 125 | return cls.__signal_forwarder.addConnection(service_path, object_path, interface, signal_name, callback) 126 | else: 127 | assert cls.__connection is not None 128 | cls.__connection.add_signal_receiver(callback, signal_name, interface, service_path, object_path) 129 | return True 130 | 131 | ## Disconnect from a DBus signal connection. 132 | # 133 | # \param signal_name The name of the signal to disconnect from. 134 | # \param callback The Callable to disconnect from the signal. 135 | # 136 | # The following can only be used as keyword arguments. They default to the 137 | # Default* constants defined in this class. 138 | # 139 | # \param service_path The path to the service to call the method on. 140 | # \param object_path The object path of the service to call the method on. 141 | # \param interface The interface name of the method to call. 142 | @classmethod 143 | def disconnectSignal(cls, signal_name: str, callback: Callable[..., None], *, service_path: str = DefaultServicePath, object_path: str = DefaultObjectPath, interface: str = DefaultInterface) -> bool: 144 | cls.__ensureDBusSetup() 145 | 146 | if cls.__use_qt: 147 | assert cls.__signal_forwarder is not None 148 | return cls.__signal_forwarder.removeConnection(service_path, object_path, interface, signal_name, callback) 149 | else: 150 | assert cls.__connection is not None 151 | cls.__connection.remove_signal_receiver(callback, signal_name, interface, service_path, object_path) 152 | return True 153 | 154 | # Private method to ensure we have a DBus connection. 
155 | @classmethod 156 | def __ensureDBusSetup(cls): 157 | if cls.__connection: 158 | return 159 | 160 | if _has_qt and QCoreApplication.instance(): 161 | if os.environ.get("CHARON_USE_SESSION_BUS", 1) == 1: 162 | cls.__connection = QDBusConnection.sessionBus() 163 | else: 164 | cls.__connection = QDBusConnection.systemBus() 165 | 166 | cls.__signal_forwarder = DBusSignalForwarder(cls.__connection) 167 | cls.__use_qt = True 168 | return 169 | 170 | if os.environ.get("CHARON_USE_SESSION_BUS", 0) == 1: 171 | cls.__connection = dbus.Bus.get_session() 172 | else: 173 | GLib.MainLoop().run() 174 | cls.__connection = dbus.SystemBus(private=True, mainloop=dbus.mainloop.glib.DBusGMainLoop()) 175 | 176 | __use_qt = False 177 | __connection = None # type: Optional[Union[dbus.SystemBus]] 178 | __signal_forwarder = None # type: Optional[DBusSignalForwarder] 179 | 180 | if _has_qt: 181 | ## Helper class to handle QtDBus signal connections. 182 | # 183 | # QtDBus wants a QObject for its signal connections. Since we do not want 184 | # to make Request a QObject, we need to add an intermediary which receives 185 | # the signal and calls the appropriate Callable. 186 | # 187 | # In addition, to make it properly handle success/error callbacks for async 188 | # method calls, we need to create a QDBusPendingCallWatcher object that we 189 | # can listen to. This has the same limitations as QtDBus signals. 190 | class DBusSignalForwarder(QObject): 191 | def __init__(self, dbus_connection): 192 | super().__init__() 193 | self.__connection = dbus_connection 194 | self.__connection.registerObject("/" + str(id(self)), self) 195 | 196 | self.__interface_objects = {} 197 | self.__connected_signals = set() 198 | self.__callbacks = {} 199 | 200 | self.__pending_async_calls = {} 201 | 202 | ## Add a signal connection to process. 
203 | def addConnection(self, service_path, object_path, interface, signal_name, callback): 204 | connection = (object_path, interface, signal_name) 205 | if connection not in self.__connected_signals: 206 | self.__connection.connect(service_path, object_path, interface, signal_name, self.handleSignal) 207 | self.__connected_signals.add(connection) 208 | 209 | if connection not in self.__callbacks: 210 | self.__callbacks[connection] = [] 211 | self.__callbacks[connection].append(callback) 212 | 213 | ## Remove a signal connection. 214 | def removeConnection(self, service_path, object_path, interface, signal_name, callback): 215 | connection = (object_path, interface, signal_name) 216 | if connection not in self.__connected_signals: 217 | return 218 | 219 | self.__callbacks[connection].remove(callback) 220 | 221 | # Essentially, we do reference counting of the signal here. If the list 222 | # of connections for the specified signal becomes empty, also remove the 223 | # signal handler. This prevents us from listening on signals that are 224 | # not used. 225 | if not self.__callbacks[connection]: 226 | self.__connection.disconnect(service_path, object_path, interface, signal_name, self.handleSignal) 227 | self.__connected_signals.remove(connection) 228 | del self.__callbacks[connection] 229 | 230 | # Process a signal from DBus. 231 | @pyqtSlot(QDBusMessage) 232 | def handleSignal(self, message): 233 | connection = (message.path(), message.interface(), message.member()) 234 | if connection not in self.__callbacks: 235 | return 236 | 237 | for callback in self.__callbacks[connection]: 238 | callback(*message.arguments()) 239 | 240 | # Make an asynchronous DBus call. This will trigger __onAsyncCallFinished once it is done. 
241 | def asyncCall(self, message, success_callback, error_callback): 242 | watcher = QDBusPendingCallWatcher(self.__connection.asyncCall(message)) 243 | watcher.finished.connect(self.__onAsyncCallFinished) 244 | self.__pending_async_calls[watcher] = (success_callback, error_callback) 245 | 246 | # Handle async call completion. 247 | @pyqtSlot(QDBusPendingCallWatcher) 248 | def __onAsyncCallFinished(self, watcher): 249 | assert watcher in self.__pending_async_calls 250 | 251 | success_callback = self.__pending_async_calls[watcher][0] 252 | error_callback = self.__pending_async_calls[watcher][1] 253 | del self.__pending_async_calls[watcher] 254 | 255 | reply = QDBusReply(watcher) 256 | if reply.isValid(): 257 | if success_callback: 258 | success_callback(reply.value()) 259 | else: 260 | if error_callback: 261 | error_callback(reply.error().message()) 262 | -------------------------------------------------------------------------------- /Charon/Client/Request.py: -------------------------------------------------------------------------------- 1 | import enum 2 | import threading 3 | import uuid 4 | from typing import List, Dict, Any, Optional, Callable 5 | 6 | from .DBusInterface import DBusInterface 7 | 8 | 9 | ## Wrapper around the Charon DBus service that hides the DBus details. 10 | # 11 | # This class encapsulates all the data and information needed for 12 | # retrieving some data from a file supported by the Charon file service. 13 | # 14 | # It can be used to simplify dealing with the DBus service. 15 | class Request: 16 | # The request state. 17 | class State(enum.IntEnum): 18 | Initial = 0 # Request was created, but not started yet. 19 | Running = 1 # Request was started. 20 | Completed = 2 # Request completed successfully. 21 | Error = 3 # Request encountered an error. 22 | 23 | ## Constructor. 24 | # 25 | # \param file_path The path to a file to get data from. 26 | # \param virtual_paths A list of virtual paths with the data to retrieve. 
27 | def __init__(self, file_path: str, virtual_paths: List[str]) -> None: 28 | self.__file_path = file_path 29 | self.__virtual_paths = virtual_paths 30 | 31 | self.__state = self.State.Initial 32 | self.__request_id = 0 33 | self.__data = {} # type: Dict[str, Any] 34 | self.__error_string = "" 35 | 36 | self.__event = threading.Event() 37 | 38 | self.__request_data_callback = None # type: Optional[Callable[["Request", Dict[str, Any]], None]] 39 | self.__request_completed_callback = None # type: Optional[Callable[["Request"], None]] 40 | self.__request_error_callback = None # type: Optional[Callable[["Request", str], None]] 41 | 42 | ## Cleanup function. 43 | def __del__(self): 44 | if self.__state != self.State.Initial: 45 | self.stop() 46 | 47 | DBusInterface.disconnectSignal("requestData", self.__onRequestData) 48 | DBusInterface.disconnectSignal("requestCompleted", self.__onRequestCompleted) 49 | DBusInterface.disconnectSignal("requestError", self.__onRequestError) 50 | 51 | ## The file path for this request. 52 | @property 53 | def filePath(self) -> str: 54 | return self.__file_path 55 | 56 | ## The virtual paths for this request. 57 | @property 58 | def virtualPaths(self) -> List[str]: 59 | return self.__virtual_paths 60 | 61 | ## The state of this request. 62 | @property 63 | def state(self) -> State: 64 | return self.__state 65 | 66 | ## The data associated with this request. 67 | # 68 | # Note that this will be an empty dictionary until the request 69 | # completed. 70 | @property 71 | def data(self) -> Dict[str, Any]: 72 | return self.__data 73 | 74 | ## A description of the error that was encountered during the request. 75 | # 76 | # Note that this will be an empty string if there was no error. 77 | @property 78 | def errorString(self) -> str: 79 | return self.__error_string 80 | 81 | ## Set the callbacks that should be called while the request is running. 82 | # 83 | # Note: These parameters can only be passed as keyword arguments. 
84 | # \param data The callback to call when data is received. Will be passed the request object and a dict with data. 85 | # \param completed The callback to call when the request has completed. Will be passed the request object. 86 | # \param error The callback to call when the request encountered an error. Will be passed the request object and a string describing the error. 87 | # 88 | def setCallbacks(self, *, 89 | data: Callable[["Request", Dict[str, Any]], None] = None, 90 | completed: Callable[["Request"], None] = None, 91 | error: Callable[["Request", str], None] = None) -> None: 92 | self.__request_data_callback = data 93 | self.__request_completed_callback = completed 94 | self.__request_error_callback = error 95 | 96 | ## Start the request. 97 | def start(self): 98 | if self.__state != self.State.Initial: 99 | return 100 | 101 | self.__request_id = str(uuid.uuid4()) 102 | 103 | DBusInterface.connectSignal("requestData", self.__onRequestData) 104 | DBusInterface.connectSignal("requestCompleted", self.__onRequestCompleted) 105 | DBusInterface.connectSignal("requestError", self.__onRequestError) 106 | 107 | self.__state = self.State.Running 108 | 109 | DBusInterface.callAsync("startRequest", self.__startSuccess, self.__startError, "ssas", self.__request_id, self.__file_path, self.__virtual_paths) 110 | 111 | ## Stop the request. 112 | # 113 | # Note that this may fail if the file service was already processing the request. 114 | def stop(self): 115 | if self.__state != self.State.Running: 116 | return 117 | 118 | DBusInterface.callAsync("cancelRequest", None, None, "s", self.__request_id) 119 | 120 | ## Wait until the request is finished. 121 | # 122 | # Warning! This method will block the calling thread until it is finished. The DBus implementations 123 | # require a running event loop for signal delivery to work. This means that if you block the main 124 | # loop with this method, you will deadlock since the completed signal is never received. 
125 | def waitForFinished(self): 126 | if self.__state == self.State.Initial: 127 | self.start() 128 | 129 | self.__event.clear() 130 | self.__event.wait() 131 | 132 | def __startSuccess(self, start_success: bool): 133 | if not start_success: 134 | self.__startError("Could not start the request") 135 | return 136 | 137 | def __startError(self, error: str): 138 | self.__state = self.State.Error 139 | self.__error_string = error 140 | self.__event.set() 141 | 142 | if self.__request_error_callback: 143 | self.__request_error_callback(self, error) 144 | 145 | def __onRequestData(self, request_id: str, data: Dict[str, Any]): 146 | if self.__state != self.State.Running: 147 | return 148 | 149 | if self.__request_id != request_id: 150 | return 151 | 152 | self.__data.update(data) 153 | 154 | if self.__request_data_callback: 155 | self.__request_data_callback(self, data) 156 | 157 | def __onRequestCompleted(self, request_id: str): 158 | if self.__state != self.State.Running: 159 | return 160 | 161 | if self.__request_id != request_id: 162 | return 163 | 164 | self.__state = self.State.Completed 165 | 166 | if self.__request_completed_callback: 167 | self.__request_completed_callback(self) 168 | 169 | self.__event.set() 170 | 171 | def __onRequestError(self, request_id: str, error_string: str): 172 | if self.__request_id != request_id: 173 | return 174 | 175 | self.__state = self.State.Error 176 | self.__error_string = error_string 177 | 178 | if self.__request_error_callback: 179 | self.__request_error_callback(self, error_string) 180 | 181 | self.__event.set() 182 | 183 | def __repr__(self): 184 | return "".format(id = id(self), path = self.__file_path, virtual = self.__virtual_paths) 185 | -------------------------------------------------------------------------------- /Charon/Client/__init__.py: -------------------------------------------------------------------------------- 1 | from .Request import Request 2 | 
-------------------------------------------------------------------------------- /Charon/Client/test_glib.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import dbus 4 | import dbus.mainloop.glib 5 | from gi.repository import GLib 6 | 7 | import Charon.Client 8 | 9 | if len(sys.argv) != 2: 10 | print("Usage: test.py [file]") 11 | exit(1) 12 | 13 | GLib.threads_init() 14 | dbus.mainloop.glib.threads_init() 15 | 16 | loop = GLib.MainLoop() 17 | dbus.set_default_main_loop(dbus.mainloop.glib.DBusGMainLoop()) 18 | 19 | request = Charon.Client.Request(sys.argv[1], ["/Metadata/thumbnail.png"]) 20 | request.setCallbacks(completed=lambda request: loop.quit()) 21 | 22 | request.start() 23 | 24 | loop.run() 25 | 26 | if request.state == Charon.Client.Request.State.Completed: 27 | print("Request Complete") 28 | print(request.data) 29 | elif request.state == Charon.Client.Request.State.Error: 30 | print("Request Error") 31 | print(request.errorString) 32 | else: 33 | print("Request did not finish properly") 34 | print(request.state) 35 | -------------------------------------------------------------------------------- /Charon/Client/test_qt.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from PyQt5.QtCore import QCoreApplication, QTimer 4 | 5 | import Charon.Client 6 | 7 | if len(sys.argv) != 2: 8 | print("Usage: test.py [file]") 9 | exit(1) 10 | 11 | app = QCoreApplication(sys.argv) 12 | 13 | request = Charon.Client.Request(sys.argv[1], ["/Metadata/thumbnail.png"]) 14 | request.start() 15 | 16 | while(request.state == Charon.Client.Request.State.Running): 17 | app.processEvents() 18 | 19 | if request.state == Charon.Client.Request.State.Completed: 20 | print("Request Complete") 21 | print(request.data) 22 | elif request.state == Charon.Client.Request.State.Error: 23 | print("Request Error") 24 | print(request.errorString) 25 | else: 26 | print("Request did not 
class FileInterface:
    """An interface for accessing files.

    This interface is designed to be able to access 3D-printing related files,
    and for container-type files to access the resources therein.
    """

    # Callable used to obtain the raw byte stream of a file; implementations
    # may override this to customise how files are opened.
    stream_handler = open  # type: Callable[[str, str], IO[bytes]]

    # The MIME type handled by the implementation; concrete subclasses fill this in.
    mime_type = ""

    def open(self, path: str, mode: OpenMode = OpenMode.ReadOnly) -> None:
        """Opens a file for reading or writing.

        After opening the file, this instance will represent that file from
        then on, meaning that the metadata getters/setters and the streams
        will be functioning on that file.

        :param path: The path to the file on local disk, relative or absolute.
        :param mode: The mode with which to open the file (see OpenMode).
        """
        raise NotImplementedError("The open() function of {} is not implemented.".format(self.__class__.__qualname__))

    def openStream(self, stream: IO[bytes], mime: str, mode: OpenMode = OpenMode.ReadOnly) -> None:
        """Opens a stream for reading or writing.

        After opening the stream, this instance will represent that stream
        from then on, meaning that the metadata getters/setters and the
        streams will be functioning on that stream.

        :param stream: The stream to read from or write to.
        :param mime: The MIME type of the stream. This determines what
            implementation is used to read/write it.
        :param mode: The mode with which to open the file (see OpenMode).
        """
        raise NotImplementedError("The openStream() function of {} is not implemented.".format(self.__class__.__qualname__))

    def close(self) -> None:
        """Closes the opened file, releasing the resources in use for it.

        After the file is closed, this instance can no longer be used until
        the ``open`` method is called again.
        """
        raise NotImplementedError("The close() function of {} is not implemented.".format(self.__class__.__qualname__))

    def flush(self) -> None:
        """Ensures that no buffered data is still pending to be read or written."""
        raise NotImplementedError("The flush() function of {} is not implemented.".format(self.__class__.__qualname__))

    def listPaths(self) -> List[str]:
        """Returns a list of all resources and metadata in the file."""
        raise NotImplementedError("The listPaths() function of {} is not implemented.".format(self.__class__.__qualname__))

    def getData(self, virtual_path: str) -> Dict[str, Any]:
        """Gets the data stored at the specified virtual path and all its descendants.

        The returned dictionary may contain normal resources as well as
        metadata. If it is a normal resource, the value will contain the
        serialised data (either ``bytes`` or ``str``, depending on whether the
        file opens in binary mode or not). If it is metadata, all metadata
        keys under the specified path are returned (all descendants in the
        tree). If there is no metadata and no resource under the selected
        virtual path, an empty dictionary is returned.

        :param virtual_path: The path inside the file to get the data from.
        :return: The data and metadata under the specified virtual path.
        """
        raise NotImplementedError("The getData() function of {} is not implemented.".format(self.__class__.__qualname__))

    def setData(self, data: Dict[str, Any]) -> None:
        """Sets the data of several virtual paths at once.

        The ``data`` parameter provides a dictionary mapping virtual paths to
        the new data that should be provided in the path.
        """
        raise NotImplementedError("The setData() function of {} is not implemented.".format(self.__class__.__qualname__))

    def getMetadata(self, virtual_path: str) -> Dict[str, Any]:
        """Gets metadata entries in the opened file.

        The metadata is a dictionary, where the keys are virtual paths in the
        subtree of the resource tree specified by ``virtual_path``. For
        instance, when requesting the metadata of the resource with virtual
        path ``/metadata``, this function could return a dictionary containing:

        * ``/metadata/size``: 12354
        * ``/metadata/toolpath/default/size``: 12000
        * ``/metadata/toolpath/default/machine_type``: ``ultimaker3``
        * ``/metadata/toolpath/default/print_time``: 121245
        * ``/metadata/toolpath/default/print_size``: (0, 0, 0) x (100, 100, 100)

        But a subtree can be requested as well, such as
        ``/metadata/toolpath/default/size``, which would then return a
        dictionary containing only the key ``/metadata/toolpath/default/size``
        and its value, because there are no other subitems in that subtree.

        If there is no metadata in the requested path, an empty dictionary is
        returned.

        :param virtual_path: The subtree of metadata entries to get the metadata of.
        :return: A dictionary of all the metadata entries in the selected subtree.
        """
        raise NotImplementedError("The getMetadata() function of {} is not implemented.".format(self.__class__.__qualname__))

    def setMetadata(self, metadata: Dict[str, Any]) -> None:
        """Changes some metadata entries in the opened file.

        The provided dictionary must have the full virtual paths of the
        metadata entries it wants to change as its keys, and the new values
        along with every key.

        If a metadata entry didn't exist yet, it is created. If a metadata
        entry cannot be changed (such as the file size of a resource) then a
        ``ReadOnlyError`` must be raised for that resource, and none of the
        changes of this function call may be applied (or everything must be
        undone).

        :param metadata: A dictionary of metadata entries to change.
        :raises ReadOnlyError: A metadata entry cannot be changed (such as the
            file size of a resource).
        """
        raise NotImplementedError("The setMetadata() function of {} is not implemented.".format(self.__class__.__qualname__))

    def getStream(self, virtual_path: str) -> IO[bytes]:
        """Gets an I/O stream to the resource or metadata at the specified virtual path.

        This stream may be a normal resource or it may be metadata. If it is
        metadata, a stream will be returned in the form of a JSON document
        (encoded in UTF-8 for binary streams) containing all the metadata that
        would be returned by the getMetadata method.

        Whether the returned stream is an input or an output stream depends on
        the mode that was provided in the ``open`` method. This determines
        whether you can read from and/or write to the stream.

        If a resource didn't exist and you can write, the resource is created.

        :param virtual_path: The virtual path to the resource that you want to
            read or write.
        :raises ReadOnlyError: The resource doesn't exist and there are no
            write permissions to create it.
        """
        raise NotImplementedError("The getStream() function of {} is not implemented.".format(self.__class__.__qualname__))

    def toByteArray(self, offset: int = 0, count: int = -1) -> bytes:
        """Gets a bytes representation of the file.

        Resources inside the file are not supported by this method. Use
        ``getStream`` for that.

        :param offset: The number of bytes to skip at the beginning of the file.
        :param count: The maximum number of bytes to return. If the file is
            longer than this, it is truncated. If the file is shorter than
            this, fewer bytes than this might be returned. If not specified,
            the entire file will be returned except the initial offset.
        :return: A bytes array representing the file or a part of it.
        """
        raise NotImplementedError("The toByteArray() function of {} is not implemented.".format(self.__class__.__qualname__))
## Exception to indicate that an attempt was made to write to a resource that
#  is read-only.
#
#  Normally this sort of thing would be a ``PermissionError`` (the built-in
#  Python exception), but we want to be able to distinguish these errors from
#  ordinary ``PermissionError``s raised by the file system not having access
#  to that file.
class ReadOnlyError(PermissionError):
    ## Creates the exception instance.
    # \param virtual_path The resource that could not be written to. If not
    # provided, an empty string is used which indicates that the entire file
    # could not be written to.
    def __init__(self, virtual_path: str = "") -> None:
        # Pass a message to the base class so that str(exception) is
        # informative. Previously the base constructor was never called and
        # str() of this exception was always empty, which produced empty
        # error messages wherever the exception text was forwarded.
        super().__init__("Resource is read-only: {resource}".format(resource = virtual_path or "<entire file>"))
        self.virtual_path = virtual_path  # The virtual path that was write-protected.

    ## Provides a human-readable version of this error for in the stack trace.
    def __repr__(self) -> str:
        return "ReadOnlyError({resource})".format(resource = self.virtual_path)
20 | class FileService(dbus.service.Object): 21 | 22 | def __init__(self, dbus_bus: dbus.Bus) -> None: 23 | self.__dbus_bus = dbus_bus 24 | super().__init__( 25 | conn=self.__dbus_bus, 26 | object_path="/nl/ultimaker/charon", 27 | # Postpone claiming a well-known name until the class is fully initialized. 28 | # If we do this right now, the DBus service will be published to the 29 | # bus in an incomplete state, and clients connecting early on may 30 | # encounter, for instance, emptry introspection data on this service. 31 | bus_name=None, 32 | ) 33 | self.__bus_name = None 34 | 35 | log.debug("FileService initialized") 36 | self.__queue = RequestQueue.RequestQueue() 37 | 38 | ## Publish the fully initialized DBus service to the bus 39 | def publish(self) -> None: 40 | # Store a reference to the BusName for as long as the DBus service is alive; 41 | # otherwise it gets GC'd, and the well-known name gets released again. 42 | # Taking ownership of this well-known name is also the trigger for other services 43 | # to notice that we are alive & available. 44 | self.__bus_name = dbus.service.BusName("nl.ultimaker.charon", self.__dbus_bus) 45 | 46 | ## Start a request for data from a file. 47 | # 48 | # This function will start a request for data from a certain file. 49 | # It will be processed in a separate thread. 50 | # 51 | # When the request has finished, `requestFinished` will be emitted. 52 | # 53 | # \param request_id A unique identifier to track this request with. 54 | # \param file_path The path to a file to load. 55 | # \param virtual_paths A list of virtual paths that define what set of data to retrieve. 56 | # 57 | # \return A boolean indicating whether the request was successfully started. 
58 | @dbus.decorators.method("nl.ultimaker.charon", "ssas", "b") 59 | def startRequest(self, request_id, file_path, virtual_paths): 60 | log.debug("Received request {id} for {virtual} from {path}".format(id = request_id, virtual = virtual_paths, path = file_path)) 61 | request = RequestQueue.Request(self, request_id, file_path, virtual_paths) 62 | return self.__queue.enqueue(request) 63 | 64 | ## Cancel a pending request for data. 65 | # 66 | # This will cancel a request that was previously posted. 67 | # 68 | # Note that if the request is already being processed, the request will not be 69 | # canceled. If the cancel was successful, `requestError` will be emitted with the 70 | # specified request and an error string describing it was canceled. 71 | # 72 | # \param request_id The ID of the request to cancel. 73 | @dbus.decorators.method("nl.ultimaker.charon", "s", "") 74 | def cancelRequest(self, request_id): 75 | log.debug("Cancel request '{id}'".format(id = request_id)) 76 | if self.__queue.dequeue(request_id): 77 | self.requestError(request_id, "Request canceled") 78 | 79 | ## Emitted whenever data for a request is available. 80 | # 81 | # This will be emitted while a request is processing and requested data has become 82 | # available. 83 | # 84 | # \param request_id The ID of the request that data is available for. 85 | # \param data A dictionary with virtual paths and data for those paths. 86 | @dbus.decorators.signal("nl.ultimaker.charon", "sa{sv}") 87 | def requestData(self, request_id, data): 88 | pass 89 | 90 | ## Emitted whenever a request for data has been completed. 91 | # 92 | # This signal will be emitted once a request is completed successfully. 93 | # 94 | # \param request_id The ID of the request that completed. 95 | @dbus.decorators.signal("nl.ultimaker.charon", "s") 96 | def requestCompleted(self, request_id): 97 | pass 98 | 99 | ## Emitted whenever a request that is processing encounters an error. 
## A request for data that needs to be processed.
#
# Each request will be processed by a worker thread to actually perform the data
# retrieval.
class Request:
    ## Constructor.
    #
    # \param file_service The main FileService object. Used to emit signals.
    # \param request_id The ID used to identify this request.
    # \param file_path A path to a file to retrieve data from.
    # \param virtual_paths The virtual paths to retrieve for this request.
    def __init__(self, file_service: FileService.FileService, request_id: str, file_path: str, virtual_paths: List[str]) -> None:
        self.file_service = file_service
        self.file_path = file_path
        self.virtual_paths = virtual_paths
        self.request_id = request_id

        # This is used as a workaround for limitations of Python's Queue class.
        # Queue does not implement a "remove arbitrary item" method. So instead,
        # keep a removed request in the queue and set this flag to true, after
        # which a worker thread can dispose of the object when it encounters
        # the request.
        self.should_remove = False

    ## Perform the actual data retrieval.
    #
    # This is a potentially long-running operation that should be handled by a
    # thread.
    #
    # Opens the file read-only, emits a requestData signal for each requested
    # virtual path, then closes the file and emits requestCompleted. Any
    # exception is reported to the client through requestError, with
    # str(exception) as the error message.
    def run(self) -> None:
        try:
            virtual_file = Charon.VirtualFile.VirtualFile()
            virtual_file.open(self.file_path, Charon.OpenMode.OpenMode.ReadOnly)

            for path in self.virtual_paths:
                data = virtual_file.getData(path)

                # Convert bytes values to dbus.ByteArray so dbus sends them as
                # byte arrays rather than strings.
                # NOTE(review): this loop looks redundant — _convertDictionary
                # below performs the same bytes conversion on every key;
                # confirm before removing.
                for key, value in data.items():
                    if isinstance(value, bytes):
                        data[key] = dbus.ByteArray(value)

                # dbus-python is stupid and we need to convert the entire nested dictionary
                # into something it understands.
                data = self._convertDictionary(data)

                self.file_service.requestData(self.request_id, data)

            virtual_file.close()
            self.file_service.requestCompleted(self.request_id)
        except Exception as e:
            # The full stack trace only goes to the debug log; the client just
            # receives the exception message via the requestError signal.
            log.log(logging.DEBUG, "", exc_info = 1)
            self.file_service.requestError(self.request_id, str(e))

    # Helper for dbus-python to convert a nested dict into a dbus.Dictionary
    # with signature "sv" (string keys, variant values), recursing into nested
    # dictionaries and converting bytes values to dbus.ByteArray.
    #
    # Yes, really, apparently dbus-python does some really stupid things with dictionaries
    # making this necessary.
    def _convertDictionary(self, dictionary: Dict[str, Any]) -> dbus.Dictionary:
        result = dbus.Dictionary({}, signature = "sv")

        for key, value in dictionary.items():
            key = str(key)  # Since we are sending a dict of str, Any, make sure the keys are strings.
            if isinstance(value, bytes):
                # Workaround dbus-python being stupid and not realizing that a bytes object
                # should be sent as byte array, not as string.
                result[key] = dbus.ByteArray(value)
            elif isinstance(value, dict):
                result[key] = self._convertDictionary(value)
            else:
                result[key] = value

        return result
## A queue of requests that need to be processed.
#
# This class will maintain a queue of requests to process along with the worker threads
# to process them. It processes the requests in LIFO order.
class RequestQueue:
    # Maximum number of requests waiting in the queue; enqueue() refuses new
    # requests beyond this limit.
    __maximum_queue_size = 100

    # Number of daemon worker threads processing requests concurrently.
    __worker_count = 2

    def __init__(self) -> None:
        self.__queue = queue.LifoQueue(self.__maximum_queue_size)

        # This map is used to keep track of which requests we already received.
        # This is mostly intended to be able to cancel requests that are
        # still waiting in the queue.
        self.__request_map = {}

        # Daemon threads do not keep the service process alive on shutdown.
        self.__workers = []
        for _ in range(self.__worker_count):
            worker = threading.Thread(target = self.__worker_thread_run, daemon = True)
            worker.start()
            self.__workers.append(worker)

    ## Add a new request to the queue.
    #
    # \param request The request to add.
    #
    # \return True if successful, False if the request could not be enqueued for some reason
    # (duplicate request ID or a full queue).
    def enqueue(self, request: "Request") -> bool:
        if request.request_id in self.__request_map:
            log.debug("Tried to enqueue a request with ID {id} which is already in the queue".format(id = request.request_id))
            return False

        try:
            self.__queue.put(request, block = False)
        except queue.Full:
            log.debug("Tried to enqueue a request with ID {id} but the queue is full".format(id = request.request_id))
            return False

        self.__request_map[request.request_id] = request
        return True

    ## Remove a request from the queue.
    #
    # The request is only flagged for removal here (see Request.should_remove);
    # a worker thread discards it when taking it off the queue, because
    # queue.Queue cannot remove an arbitrary item.
    #
    # \param request_id The ID of the request to remove.
    #
    # \return True if the request was successfully removed, False if the request was not in the queue.
    def dequeue(self, request_id: str) -> bool:
        if request_id not in self.__request_map:
            log.debug("Unable to remove request with ID {id} which is not in the queue".format(id = request_id))
            return False

        self.__request_map[request_id].should_remove = True
        return True

    ## Take the next request off the queue.
    #
    # Note that this method will block if there are no current requests on the queue.
    #
    # \return The next request on the queue.
    def takeNext(self) -> "Request":
        request = self.__queue.get()
        del self.__request_map[request.request_id]
        return request

    # Implementation of the worker thread run method.
    def __worker_thread_run(self) -> None:
        while True:
            request = self.takeNext()
            if request.should_remove:
                # The request was canceled while waiting in the queue; drop it.
                continue

            try:
                request.run()
            except Exception:
                # A worker thread must never die because of a broken request;
                # log the stack trace at debug level and keep serving.
                log.log(logging.DEBUG, "Request caused an uncaught exception when running!", exc_info = True)
28 | if os.environ.get("CHARON_USE_SESSION_BUS", "1") == "1": 29 | _bus = dbus.SessionBus(private=True, mainloop=dbus.mainloop.glib.DBusGMainLoop()) 30 | else: 31 | _bus = dbus.SystemBus(private=True, mainloop=dbus.mainloop.glib.DBusGMainLoop()) 32 | 33 | _service = Charon.Service.FileService(_bus) 34 | _service.publish() 35 | 36 | _loop.run() 37 | -------------------------------------------------------------------------------- /Charon/VirtualFile.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 3 | import os 4 | 5 | from Charon.FileInterface import FileInterface # The interface we're implementing. 6 | from Charon.OpenMode import OpenMode #To open local files with the selected open mode. 7 | # The supported file types. 8 | from Charon.filetypes.UltimakerFormatPackage import UltimakerFormatPackage 9 | from Charon.filetypes.GCodeFile import GCodeFile 10 | from Charon.filetypes.GCodeGzFile import GCodeGzFile 11 | from Charon.filetypes.GCodeSocket import GCodeSocket 12 | 13 | extension_to_mime = { 14 | ".ufp": "application/x-ufp", 15 | ".gcode": "text/x-gcode", 16 | ".gz": "text/x-gcode-gz", 17 | ".gcode.gz": "text/x-gcode-gz", 18 | ".gsock": "text/x-gcode-socket" 19 | } 20 | 21 | mime_to_implementation = { 22 | "application/x-ufp": UltimakerFormatPackage, 23 | "text/x-gcode": GCodeFile, 24 | "text/x-gcode-gz": GCodeGzFile, 25 | "text/x-gcode-socket": GCodeSocket 26 | } 27 | 28 | 29 | ## A facade for a file object. 30 | # 31 | # This facade finds the correct implementation based on the MIME type of the 32 | # file it needs to open. 
## A facade for a file object.
#
# This facade finds the correct implementation based on the MIME type of the
# file it needs to open.
class VirtualFile(FileInterface):
    def __init__(self):
        # The FileInterface implementation (selected by MIME type) that all
        # calls are delegated to. None while no file is open.
        self._implementation = None

    ## Opens a local file, selecting an implementation by its file extension.
    # \param path Path of the file on disk.
    # \param mode The mode to open with (see OpenMode); its value is also used
    # to build the binary mode string for the implementation's stream handler.
    # \raises IOError When the file extension is not a known type.
    # NOTE(review): the extension lookup is case-sensitive and splitext yields
    # only the last suffix ("foo.gcode.gz" gives ".gz") — confirm intended.
    def open(self, path, mode = OpenMode.ReadOnly, *args, **kwargs):
        _, extension = os.path.splitext(path)
        if extension not in extension_to_mime:
            raise IOError("Unknown extension \"{extension}\".".format(extension = extension))
        mime = extension_to_mime[extension]
        implementation = mime_to_implementation[mime]
        return self.openStream(implementation.stream_handler(path, mode.value + "b"), mime, mode, *args, **kwargs)

    ## Opens an already-open stream, selecting an implementation by MIME type.
    # \param stream The binary stream to read from or write to.
    # \param mime The MIME type that determines which implementation is used.
    # \param mode The mode to open with (see OpenMode).
    def openStream(self, stream, mime, mode = OpenMode.ReadOnly, *args, **kwargs):
        self._implementation = mime_to_implementation[mime]()
        return self._implementation.openStream(stream, mime, mode, *args, **kwargs)

    ## Closes the opened file and forgets the current implementation.
    # \raises IOError When no file is currently open.
    def close(self, *args, **kwargs):
        if self._implementation is None:
            raise IOError("Can't close a file before it's opened.")
        result = self._implementation.close(*args, **kwargs)
        self._implementation = None # You have to open a file again, which might need a different implementation.
        return result

    ## Causes all calls to functions that aren't defined in this class to be
    # passed through to the implementation.
    # \raises IOError When an attribute is accessed while no file is open.
    def __getattribute__(self, item):
        if item == "open" or item == "openStream" or item == "close" or item == "__del__" or item == "_implementation":
            # Attributes that VirtualFile overwrites should be called normally.
            return object.__getattribute__(self, item)
        if not object.__getattribute__(self, "_implementation"):
            # Use object.__getattribute__ here to avoid recursing into this method.
            raise IOError("Can't use '{attribute}' before a file is opened.".format(attribute = item))
        return getattr(self._implementation, item)

    ## When the object is deleted, close the file.
    def __del__(self):
        if self._implementation is not None:
            self.close()
## Checks whether a parsed header value is a number greater than or equal to zero.
#
# The value is round-tripped through repr(): plain ints and floats parse back
# with float(), while str values (e.g. "5") fail because their repr() includes
# quotes, and are therefore reported as not a positive number.
# NOTE: despite the name, zero is accepted (>= 0).
# \param a The header value to check. May be of any type.
# \return True if the value is a number >= 0, False otherwise.
def isAPositiveNumber(a: str) -> bool:
    try:
        # float() on a string only ever raises ValueError, so catching just
        # that (instead of a bare except) no longer swallows KeyboardInterrupt
        # and SystemExit.
        return float(repr(a)) >= 0
    except ValueError:
        return False
54 | metadata["header_version"] = line.split(":")[1].strip() 55 | elif line.startswith(";") and ":" in line: 56 | key, value = line[1:].split(":") 57 | key = key.strip().lower() 58 | value = value.strip() 59 | try: 60 | value = ast.literal_eval(value.strip()) 61 | except: 62 | pass 63 | key_elements = key.split(".") 64 | GCodeFile.__insertKeyValuePair(metadata, key_elements, value) 65 | 66 | if stream.seekable(): 67 | stream.seek(0) 68 | 69 | flavor = metadata.get("flavor", None) 70 | if flavor in ("Griffin", "Cheetah"): 71 | if metadata["header_version"] != "0.1": 72 | raise InvalidHeaderException("Unsupported Griffin header version: {0}".format(metadata["header_version"])) 73 | GCodeFile.__validateGriffinHeader(metadata) 74 | GCodeFile.__cleanGriffinHeader(metadata) 75 | elif flavor == "UltiGCode": 76 | metadata["machine_type"] = "ultimaker2" 77 | else: 78 | raise InvalidHeaderException("Flavor must be defined!") 79 | 80 | if prefix: 81 | prefixed_metadata = {} 82 | for key, value in metadata.items(): 83 | prefixed_metadata[prefix + key] = value 84 | metadata = prefixed_metadata 85 | 86 | return metadata 87 | except Exception as e: 88 | raise InvalidHeaderException("Unable to parse the header. An exception occured; %s" % e) 89 | 90 | 91 | ## Add a key-value pair to the metadata dictionary. 92 | # Splits up key each element to it's own dictionary. 
93 | # @param metadata Metadata collection 94 | # @param key_elements List of separate key name elements 95 | # @param value Key value 96 | @staticmethod 97 | def __insertKeyValuePair( 98 | metadata: Dict[str, Any], 99 | key_elements: Any, 100 | value: Any 101 | ) -> Any: 102 | if not key_elements: 103 | return value 104 | 105 | sub_dict = {} 106 | 107 | if key_elements[0] in metadata: 108 | sub_dict = metadata[key_elements[0]] 109 | 110 | metadata[key_elements[0]] = GCodeFile.__insertKeyValuePair(sub_dict, key_elements[1:], value) 111 | 112 | return metadata 113 | 114 | def getData(self, virtual_path: str) -> Dict[str, Any]: 115 | assert self.__stream is not None 116 | 117 | if virtual_path.startswith("/metadata"): 118 | result = {} 119 | for key, value in self.__metadata.items(): 120 | if key.startswith(virtual_path): 121 | result[key] = value 122 | return result 123 | 124 | if virtual_path == "/toolpath" or virtual_path == "/toolpath/default": 125 | return { virtual_path: self.__stream.read() } 126 | 127 | return {} 128 | 129 | ## Cleans a parsed GRIFFIN flavoured GCODE header. 130 | @staticmethod 131 | def __cleanGriffinHeader(metadata: Dict[str, Any]) -> None: 132 | metadata["machine_type"] = metadata["target_machine"]["name"] 133 | del metadata["target_machine"] 134 | 135 | if GCodeFile.__isAvailable(metadata, ["time"]): 136 | GCodeFile.__insertKeyValuePair(metadata, ["print", "time"], metadata["time"]) 137 | # del metadata["time"] # We want to delete the old key, but it's behavior of how the code was. 
138 | 139 | GCodeFile.__insertKeyValuePair(metadata, ["print", "min_size"], metadata["print"]["size"]["min"]) 140 | GCodeFile.__insertKeyValuePair(metadata, ["print", "max_size"], metadata["print"]["size"]["max"]) 141 | del metadata["print"]["size"] 142 | 143 | for key, value in metadata["extruder_train"].items(): 144 | GCodeFile.__insertKeyValuePair(metadata, ["extruders", int(key)], value) 145 | 146 | del metadata["extruder_train"] 147 | 148 | ## Checks if a path to a key is available 149 | # @param metadata Metadata collection to check for the presence of the key 150 | # @param keys List of key elements describing the path to a value. If a key element is a list, then all the elements 151 | # must exist on the location of that key element 152 | # @return True if the key is available and not empty 153 | @staticmethod 154 | def __isAvailable(metadata: Dict[str, Any], keys: List[Any]) -> bool: 155 | if not keys: 156 | return True 157 | 158 | key = keys[0] 159 | 160 | if isinstance(key, list): 161 | key_is_valid = True 162 | for sub_key in key: 163 | key_is_valid = key_is_valid and GCodeFile.__isAvailable(metadata, [sub_key] + [keys[1:]]) 164 | else: 165 | key_is_valid = key in metadata and metadata[key] is not None and not str(metadata[key]) == "" 166 | key_is_valid = key_is_valid and GCodeFile.__isAvailable(metadata[key], keys[1:]) 167 | 168 | return key_is_valid 169 | 170 | ## Validates a parsed GRIFFIN flavoured GCODE header. 171 | # Will raise an InvalidHeader exception when the header is invalid. 172 | # @param metadata Key/value dictionary based on the header. 
173 | @staticmethod 174 | def __validateGriffinHeader(metadata: Dict[str, Any]) -> None: 175 | 176 | # Validate target settings 177 | if not GCodeFile.__isAvailable(metadata, ["target_machine", "name"]): 178 | raise InvalidHeaderException("TARGET_MACHINE.NAME must be set") 179 | 180 | # Validate generator settings 181 | if not GCodeFile.__isAvailable(metadata, ["generator", "name"]): 182 | raise InvalidHeaderException("GENERATOR.NAME must be set") 183 | if not GCodeFile.__isAvailable(metadata, ["generator", "version"]): 184 | raise InvalidHeaderException("GENERATOR.VERSION must be set") 185 | if not GCodeFile.__isAvailable(metadata, ["generator", "build_date"]): 186 | raise InvalidHeaderException("GENERATOR.BUILD_DATE must be set") 187 | 188 | # Validate build plate temperature 189 | if not GCodeFile.__isAvailable(metadata, ["build_plate", "initial_temperature"]) or \ 190 | not isAPositiveNumber(metadata["build_plate"]["initial_temperature"]): 191 | raise InvalidHeaderException("BUILD_PLATE.INITIAL_TEMPERATURE must be set and be a positive real") 192 | 193 | # Validate dimensions 194 | if not GCodeFile.__isAvailable(metadata, ["print", "size", "min", ["x", "y", "z"]]): 195 | raise InvalidHeaderException("PRINT.SIZE.MIN.[x,y,z] must be set. Ensure all three are defined.") 196 | if not GCodeFile.__isAvailable(metadata, ["print", "size", "max", ["x", "y", "z"]]): 197 | raise InvalidHeaderException("PRINT.SIZE.MAX.[x,y,z] must be set. 
Ensure all three are defined.") 198 | 199 | # Validate print time 200 | print_time = -1 201 | 202 | if GCodeFile.__isAvailable(metadata, ["print", "time"]): 203 | print_time = int(metadata["print"]["time"]) 204 | elif GCodeFile.__isAvailable(metadata, ["time"]): 205 | print_time = int(metadata["time"]) 206 | else: 207 | raise InvalidHeaderException("TIME or PRINT.TIME must be set") 208 | 209 | if print_time < 0: 210 | raise InvalidHeaderException("Print Time should be a positive integer") 211 | 212 | # Validate extruder train 213 | for index in range(0, 10): 214 | index_str = str(index) 215 | if GCodeFile.__isAvailable(metadata, ["extruder_train", index_str]): 216 | 217 | if not GCodeFile.__isAvailable(metadata, ["extruder_train", index_str, "nozzle", "diameter"]) or \ 218 | not isAPositiveNumber(metadata["extruder_train"][index_str]["nozzle"]["diameter"]): 219 | raise InvalidHeaderException( 220 | "extruder_train.{}.nozzle.diameter must be defined and be a positive real".format(index)) 221 | 222 | if not GCodeFile.__isAvailable(metadata, ["extruder_train", index_str, "material", "volume_used"]) or \ 223 | not isAPositiveNumber(metadata["extruder_train"][index_str]["material"]["volume_used"]): 224 | raise InvalidHeaderException( 225 | "extruder_train.{}.material.volume_used must be defined and positive".format(index)) 226 | 227 | if not GCodeFile.__isAvailable(metadata, ["extruder_train", index_str, "initial_temperature"]) or \ 228 | not isAPositiveNumber(metadata["extruder_train"][index_str]["initial_temperature"]): 229 | raise InvalidHeaderException( 230 | "extruder_train.{}.initial_temperature must be defined and positive".format(index)) 231 | 232 | def getStream(self, virtual_path: str) -> IO[bytes]: 233 | assert self.__stream is not None 234 | 235 | if virtual_path != "/toolpath" and virtual_path != "/toolpath/default": 236 | raise NotImplementedError("G-code files only support /toolpath as stream") 237 | 238 | return self.__stream 239 | 240 | def close(self) 
-> None: 241 | assert self.__stream is not None 242 | 243 | self.__stream.close() 244 | 245 | 246 | class InvalidHeaderException(Exception): 247 | pass 248 | -------------------------------------------------------------------------------- /Charon/filetypes/GCodeGzFile.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 3 | import gzip 4 | 5 | from Charon.filetypes.GCodeFile import GCodeFile 6 | 7 | 8 | class GCodeGzFile(GCodeFile): 9 | stream_handler = gzip.open 10 | mime_type = "text/x-gcode-gz" 11 | 12 | def __init__(self) -> None: 13 | super().__init__() 14 | -------------------------------------------------------------------------------- /Charon/filetypes/GCodeSocket.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2021 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 3 | 4 | import socket 5 | import struct 6 | from io import BytesIO, SEEK_SET, SEEK_CUR 7 | 8 | from typing import Any, Dict, IO, Optional, List 9 | 10 | from Charon.filetypes.GCodeFile import GCodeFile 11 | from urllib.parse import urlparse 12 | 13 | 14 | ## This class is used to read GCode stream that are served 15 | # dynamically over a TCP connection. 
16 | class SocketFileStream(BytesIO): 17 | def __init__(self, sock_object: socket.socket) -> None: 18 | super().__init__() 19 | self.current_line = 0 20 | self.__socket = sock_object 21 | 22 | def seekable(self) -> bool: 23 | return True 24 | 25 | def seek(self, offset: int, whence: Optional[int] = None) -> int: 26 | if whence is None or whence == SEEK_SET: 27 | self.current_line = offset 28 | elif whence == SEEK_CUR: 29 | self.current_line += offset 30 | else: 31 | raise ValueError('Unsupported whence mode in seek: %d' % whence) 32 | return offset 33 | 34 | def readline(self, _size: int = -1) -> bytes: 35 | self.__socket.send(struct.pack('>I', self.current_line)) 36 | line = b'' 37 | char = b'' 38 | 39 | while char != b'\n': 40 | char = self.__socket.recv(1) 41 | line += char 42 | 43 | self.current_line += 1 44 | return line 45 | 46 | def read(self, _size: int = -1) -> bytes: 47 | raise NotImplementedError("Only readline has been implemented") 48 | 49 | def readlines(self, _hint: int = -1) -> List[bytes]: 50 | raise NotImplementedError("Only readline has been implemented") 51 | 52 | def tell(self) -> int: 53 | raise NotImplementedError("Only readline has been implemented") 54 | 55 | def close(self) -> None: 56 | self.__socket.close() 57 | 58 | def __iter__(self): 59 | return self 60 | 61 | def __next__(self): 62 | return self.readline() 63 | 64 | 65 | class GCodeSocket(GCodeFile): 66 | mime_type = "text/x-gcode-socket" 67 | 68 | MaximumHeaderLength = 100 69 | 70 | def __init__(self) -> None: 71 | super().__init__() 72 | self.__stream = None # type: Optional[IO[bytes]] 73 | self.__metadata = {} # type: Dict[str, Any] 74 | self.__sock = None 75 | 76 | @staticmethod 77 | def stream_handler(path: str, mode: str) -> IO: 78 | url = urlparse(path) 79 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 80 | sock.connect((url.hostname, 1337)) 81 | return SocketFileStream(sock) 82 | -------------------------------------------------------------------------------- 
# ---- Charon/filetypes/OpenPackagingConvention.py ----
# Copyright (c) 2018 Ultimaker B.V.
# libCharon is released under the terms of the LGPLv3 or higher.
from collections import OrderedDict  # To specify the aliases in order.
from io import BytesIO
import json  # The metadata format.
import re  # To find the path aliases.
from typing import Any, Dict, List, IO, Optional
import xml.etree.ElementTree as ET  # For writing XML manifest files.
import zipfile

from Charon.FileInterface import FileInterface  # The interface we're implementing.
from Charon.OpenMode import OpenMode  # To detect whether we want to read and/or write to the file.
from Charon.ReadOnlyError import ReadOnlyError  # To be thrown when trying to write while in read-only mode.
from Charon.WriteOnlyError import WriteOnlyError  # To be thrown when trying to read while in write-only mode.
from Charon.filetypes.GCodeFile import GCodeFile  # Required for fallback G-Code header parsing.


## A container file type that contains multiple 3D-printing related files
#  that belong together.
class OpenPackagingConvention(FileInterface):
    # Constants describing the on-disk layout of this format.

    # Header element being put atop every XML file.
    _xml_header = ET.ProcessingInstruction("xml",
                                           "version=\"1.0\" encoding=\"UTF-8\"")

    # Where the content types file is.
    _content_types_file = "/[Content_Types].xml"

    # Where the global metadata file is.
    _global_metadata_file = "/Metadata/OPC_Global.json"

    # Unique identifier of the relationship type that relates OPC metadata to files.
    _opc_metadata_relationship_type = "http://schemas.ultimaker.org/package/2018/relationships/opc_metadata"

    # Virtual-path prefix under which metadata is exposed.
    _metadata_prefix = "/metadata"

    # A standard OPC file does not have default aliases.
    # These must be implemented in inherited classes.
    _aliases = OrderedDict([])  # type: Dict[str, str]

    mime_type = "application/x-opc"

    ## Initialises the fields of this class.
    def __init__(self) -> None:
        self._mode = None  # type: Optional[OpenMode]  # Whether we're in read or write mode.
        self._stream = None  # type: Optional[IO[bytes]]  # The currently open stream.
        self._zipfile = None  # type: Optional[zipfile.ZipFile]  # The zip interface to the currently open stream.
        self._metadata = {}  # type: Dict[str, Any]  # The metadata in the currently open file.
        self._content_types_element = None  # type: Optional[ET.Element]  # An XML element holding all the content types.
        self._relations = {}  # type: Dict[str, ET.Element]  # For each virtual path, a relations XML element (left out of the file if empty).
        self._open_bytes_streams = {}  # type: Dict[str, IO[bytes]]  # On old Python versions, the open BytesIO streams that still need flushing, by virtual path.

        # The zipfile module may only have one write stream open at a time,
        # so when a new stream is opened the previous one must be closed.
        self._last_open_path = None  # type: Optional[str]
        self._last_open_stream = None  # type: Optional[IO[bytes]]

    ## Loads a package from the given stream.
    #  @param stream The stream holding the zip archive (kept for toByteArray()).
    #  @param mime The MIME type of the package.
    #  @param mode Whether to open for reading, writing or both.
    def openStream(self, stream: IO[bytes], mime: str = "application/x-opc",
                   mode: OpenMode = OpenMode.ReadOnly) -> None:
        self._mode = mode
        self._stream = stream  # Kept so toByteArray() can rewind; normal reads go through self._zipfile.
        self._zipfile = zipfile.ZipFile(self._stream, self._mode.value, compression=zipfile.ZIP_DEFLATED)

        self._readContentTypes()  # Load or create the content types element.
        self._readRels()  # Load or create the relations.
        self._readMetadata()  # Load the metadata, if any.
55 | 56 | def close(self) -> None: 57 | if not self._stream: 58 | raise ValueError("This file is already closed.") 59 | if self._zipfile is None: 60 | return 61 | 62 | self.flush() 63 | self._zipfile.close() 64 | 65 | def flush(self) -> None: 66 | if not self._stream: 67 | raise ValueError("Can't flush a closed file.") 68 | assert self._zipfile is not None 69 | 70 | if self._mode == OpenMode.ReadOnly: 71 | return # No need to flush reading of zip archives as they are blocking calls. 72 | 73 | if self._last_open_stream is not None and self._last_open_path not in self._open_bytes_streams: 74 | self._last_open_stream.close() 75 | 76 | # If using old Python versions (<= 3.5), the write streams were kept in memory to be written all at once when flushing. 77 | for virtual_path, stream in self._open_bytes_streams.items(): 78 | stream.seek(0) 79 | self._zipfile.writestr(virtual_path, stream.read()) 80 | stream.close() 81 | 82 | self._writeMetadata() # Metadata must be updated first, because that adds rels and a content type. 
83 | self._writeContentTypes() 84 | self._writeRels() 85 | 86 | def listPaths(self) -> List[str]: 87 | if not self._stream: 88 | raise ValueError("Can't list the paths in a closed file.") 89 | paths = [self._zipNameToVirtualPath(zip_name) for zip_name in self._zipfile.namelist()] 90 | return list(self._metadata.keys()) + paths 91 | 92 | def getData(self, virtual_path: str) -> Dict[str, Any]: 93 | if not self._stream: 94 | raise ValueError("Can't get data from a closed file.") 95 | assert self._zipfile is not None 96 | 97 | if self._mode == OpenMode.WriteOnly: 98 | raise WriteOnlyError(virtual_path) 99 | 100 | result = {} # type: Dict[str, Any] 101 | if virtual_path.startswith(self._metadata_prefix): 102 | result = self.getMetadata(virtual_path[len(self._metadata_prefix):]) 103 | else: 104 | canonical_path = self._processAliases(virtual_path) 105 | if self._resourceExists(canonical_path): 106 | result[virtual_path] = self.getStream( 107 | canonical_path).read() # In case of a name clash, the file wins. But that shouldn't be possible. 108 | 109 | return result 110 | 111 | def setData(self, data: Dict[str, Any]) -> None: 112 | if not self._stream: 113 | raise ValueError("Can't change the data in a closed file.") 114 | if self._mode == OpenMode.ReadOnly: 115 | raise ReadOnlyError() 116 | for virtual_path, value in data.items(): 117 | if virtual_path.startswith( 118 | self._metadata_prefix): # Detect metadata by virtue of being in the Metadata folder. 119 | self.setMetadata({virtual_path: value[len(self._metadata_prefix):]}) 120 | else: # Virtual file resources. 
121 | self.getStream(virtual_path).write(value) 122 | 123 | def getMetadata(self, virtual_path: str) -> Dict[str, Any]: 124 | if not self._stream: 125 | raise ValueError("Can't get metadata from a closed file.") 126 | assert self._zipfile is not None 127 | 128 | if self._mode == OpenMode.WriteOnly: 129 | raise WriteOnlyError(virtual_path) 130 | canonical_path = self._processAliases(virtual_path) 131 | 132 | # Find all metadata that begins with the specified virtual path! 133 | result = {} 134 | 135 | if canonical_path in self._metadata: # The exact match. 136 | result[self._metadata_prefix + virtual_path] = self._metadata[canonical_path] 137 | for entry_path, value in self._metadata.items(): 138 | # We only want to match subdirectories of the provided virtual paths. 139 | # So if you provide "/foo" then we don't want to match on "/foobar" 140 | # but we do want to match on "/foo/zoo". This is why we check if they 141 | # start with the provided virtual path plus a slash. 142 | if entry_path.startswith(canonical_path + "/"): 143 | # We need to return the originally requested alias, so replace the canonical path with the virtual path. 144 | result[self._metadata_prefix + virtual_path + "/" + entry_path[len(canonical_path) + 1:]] = value 145 | 146 | # If requesting the size of a file. 
147 | if canonical_path.endswith("/size"): 148 | requested_resource = canonical_path[:-len("/size")] 149 | if self._resourceExists(requested_resource): 150 | result[self._metadata_prefix + virtual_path] = self._zipfile.getinfo( 151 | requested_resource.strip("/")).file_size 152 | 153 | return result 154 | 155 | def setMetadata(self, metadata: Dict[str, Any]) -> None: 156 | if not self._stream: 157 | raise ValueError("Can't change metadata in a closed file.") 158 | if self._mode == OpenMode.ReadOnly: 159 | raise ReadOnlyError() 160 | metadata = {self._processAliases(virtual_path): metadata[virtual_path] for virtual_path in metadata} 161 | self._metadata.update(metadata) 162 | 163 | def getStream(self, virtual_path: str) -> IO[bytes]: 164 | if not self._stream: 165 | raise ValueError("Can't get a stream from a closed file.") 166 | assert self._zipfile is not None 167 | assert self._mode is not None 168 | 169 | if virtual_path.startswith("/_rels"): 170 | raise OPCError("Writing directly to a relationship file is forbidden.") 171 | 172 | if virtual_path.startswith(self._metadata_prefix): 173 | return BytesIO(json.dumps(self.getMetadata(virtual_path[len(self._metadata_prefix):])).encode("UTF-8")) 174 | 175 | virtual_path = self._processAliases(virtual_path) 176 | if not self._resourceExists(virtual_path) and self._mode == OpenMode.ReadOnly: # In write-only mode, create a new file instead of reading metadata. 177 | raise FileNotFoundError(virtual_path) 178 | 179 | # The zipfile module may only have one write stream open at a time. So when you open a new stream, close the previous one. 180 | if self._last_open_stream is not None and self._last_open_path not in self._open_bytes_streams: # Don't close streams that we still need to flush. 181 | self._last_open_stream.close() 182 | 183 | # If we are requesting a stream of an image resized, resize the image and return that. 
184 | if self._mode == OpenMode.ReadOnly and ".png/" in virtual_path: 185 | png_file = virtual_path[:virtual_path.find(".png/") + 4] 186 | size_spec = virtual_path[virtual_path.find(".png/") + 5:] 187 | if re.match(r"^\s*\d+\s*x\s*\d+\s*$", size_spec): 188 | dimensions = [] 189 | for dimension in re.finditer(r"\d+", size_spec): 190 | dimensions.append(int(dimension.group())) 191 | return self._resizeImage(png_file, dimensions[0], dimensions[1]) 192 | 193 | self._last_open_path = virtual_path 194 | try: # If it happens to match some existing PNG file, we have to rescale that file and return the result. 195 | self._last_open_stream = self._zipfile.open(virtual_path, self._mode.value) 196 | except RuntimeError: # Python 3.5 and before couldn't open resources in the archive in write mode. 197 | self._last_open_stream = BytesIO() 198 | self._open_bytes_streams[virtual_path] = self._last_open_stream # Save this for flushing later. 199 | return self._last_open_stream 200 | 201 | def toByteArray(self, offset: int = 0, count: int = -1) -> bytes: 202 | if not self._stream: 203 | raise ValueError("Can't get the bytes from a closed file.") 204 | if self._mode == OpenMode.WriteOnly: 205 | raise WriteOnlyError() 206 | assert self._zipfile is not None 207 | assert self._mode is not None 208 | 209 | self._zipfile.close() # Close the zipfile first so that we won't be messing with the stream without its consent. 210 | 211 | self._stream.seek(offset) 212 | result = self._stream.read(count) 213 | 214 | self._zipfile = zipfile.ZipFile(self._stream, self._mode.value, compression=zipfile.ZIP_DEFLATED) 215 | return result 216 | 217 | ## Adds a new content type to the archive. 
218 | # \param extension The file extension of the type 219 | def addContentType(self, extension: str, mime_type: str) -> None: 220 | if not self._stream: 221 | raise ValueError("Can't add a content type to a closed file.") 222 | if self._mode == OpenMode.ReadOnly: 223 | raise ReadOnlyError() 224 | assert self._content_types_element is not None 225 | 226 | # First check if it already exists. 227 | for content_type in self._content_types_element.iterfind("Default"): 228 | if "Extension" in content_type.attrib and content_type.attrib["Extension"] == extension: 229 | raise OPCError("Content type for extension {extension} already exists.".format(extension=extension)) 230 | 231 | ET.SubElement(self._content_types_element, "Default", Extension=extension, ContentType=mime_type) 232 | 233 | ## Adds a relation concerning a file type. 234 | # \param virtual_path The target file that the relation is about. 235 | # \param relation_type The type of the relation. Any reader of OPC should 236 | # be able to understand all types that are added via relations. 237 | # \param origin The origin of the relation. If the relation concerns a 238 | # specific directory or specific file, then you should point to the 239 | # virtual path of that file here. 240 | def addRelation(self, virtual_path: str, relation_type: str, origin: str = "") -> None: 241 | if not self._stream: 242 | raise ValueError("Can't add a relation to a closed file.") 243 | if self._mode == OpenMode.ReadOnly: 244 | raise ReadOnlyError(virtual_path) 245 | virtual_path = self._processAliases(virtual_path) 246 | 247 | # First check if it already exists. 
248 | if origin not in self._relations: 249 | self._relations[origin] = ET.Element("Relationships", 250 | xmlns="http://schemas.openxmlformats.org/package/2006/relationships") 251 | else: 252 | for relationship in self._relations[origin].iterfind("Relationship"): 253 | if "Target" in relationship.attrib and relationship.attrib["Target"] == virtual_path: 254 | raise OPCError("Relation for virtual path {target} already exists.".format(target=virtual_path)) 255 | 256 | # Find a unique name. 257 | unique_id = 0 258 | while True: 259 | for relationship in self._relations[origin].iterfind("Relationship"): 260 | if "Id" in relationship.attrib and relationship.attrib["Id"] == "rel" + str(unique_id): 261 | break 262 | else: # Unique ID didn't exist yet! It's safe to use 263 | break 264 | unique_id += 1 265 | unique_name = "rel" + str(unique_id) 266 | 267 | # Create the element itself. 268 | ET.SubElement(self._relations[origin], "Relationship", Target=virtual_path, Type=relation_type, Id=unique_name) 269 | 270 | ## Figures out if a resource exists in the archive. 271 | # 272 | # This will not match on metadata, only on normal resources. 273 | # \param virtual_path: The path to test for. 274 | # \return ``True`` if it exists as a normal resource, or ``False`` if it 275 | # doesn't. 276 | def _resourceExists(self, virtual_path: str) -> bool: 277 | assert self._zipfile is not None 278 | 279 | for zip_name in self._zipfile.namelist(): 280 | zip_virtual_path = self._zipNameToVirtualPath(zip_name) 281 | if virtual_path == zip_virtual_path: 282 | return True 283 | if zip_virtual_path.endswith(".png") and virtual_path.startswith( 284 | zip_virtual_path + "/"): # We can rescale PNG images if you want. 285 | if re.match(r"^\s*\d+\s*x\s*\d+\s*$", virtual_path[len( 286 | zip_virtual_path) + 1:]): # Matches the form "NxM" with optional whitespace. 287 | return True 288 | return False 289 | 290 | ## Dereference the aliases for OPC files. 
291 | # 292 | # This also adds a slash in front of every virtual path if it has no slash 293 | # yet, to allow referencing virtual paths with or without the initial 294 | # slash. 295 | def _processAliases(self, virtual_path: str) -> str: 296 | if not virtual_path.startswith("/"): 297 | virtual_path = "/" + virtual_path 298 | 299 | # Replace all aliases. 300 | for regex, replacement in self._aliases.items(): 301 | if regex.startswith("/"): 302 | expression = r"^" + regex 303 | else: 304 | expression = regex 305 | virtual_path = re.sub(expression, replacement, virtual_path) 306 | 307 | return virtual_path 308 | 309 | ## Convert the resource name inside the zip to a virtual path as this 310 | # library specifies it should be. 311 | # \param zip_name The name in the zip file according to zipfile module. 312 | # \return The virtual path of that resource. 313 | def _zipNameToVirtualPath(self, zip_name: str) -> str: 314 | if not zip_name.startswith("/"): 315 | return "/" + zip_name 316 | return zip_name 317 | 318 | ## Resize an image to the specified dimensions. 319 | # 320 | # For now you may assume that the input image is PNG formatted. 321 | # \param virtual_path The virtual path pointing to an image in the 322 | # zipfile. 323 | # \param width The desired width of the image. 324 | # \param height The desired height of the image. 325 | # \return A bytes stream representing a new PNG image with the desired 326 | # width and height. 
327 | def _resizeImage(self, virtual_path: str, width: int, height: int) -> IO[bytes]: 328 | input = self.getStream(virtual_path) 329 | try: 330 | from PyQt5.QtGui import QImage 331 | from PyQt5.QtCore import Qt, QBuffer 332 | 333 | image = QImage() 334 | image.loadFromData(input.read()) 335 | image = image.scaled(width, height, Qt.IgnoreAspectRatio, Qt.SmoothTransformation) 336 | output_buffer = QBuffer() 337 | output_buffer.open(QBuffer.ReadWrite) 338 | image.save(output_buffer, "PNG") 339 | output_buffer.seek(0) # Reset that buffer so that the next guy can request it. 340 | return BytesIO(output_buffer.readAll()) 341 | except ImportError: 342 | # TODO: Try other image loaders. 343 | raise # Raise import error again if we find no other image loaders. 344 | 345 | #### Below follow some methods to read/write components of the archive. #### 346 | 347 | ## When loading a file, load the relations from the archive. 348 | # 349 | # If the relations are missing, empty elements are created. 350 | def _readRels(self) -> None: 351 | assert self._zipfile is not None 352 | 353 | self._relations[""] = ET.Element("Relationships", 354 | xmlns="http://schemas.openxmlformats.org/package/2006/relationships") # There must always be a global relationships document. 355 | 356 | # Below is some parsing of paths and extensions. 357 | # Normally you'd use os.path for this. But this is platform-dependent. 358 | # For instance, the path separator in Windows is a backslash, but zipfile still uses a slash on Windows. 359 | # So instead we have custom implementations here. Sorry. 360 | 361 | for virtual_path in self._zipfile.namelist(): 362 | virtual_path = self._zipNameToVirtualPath(virtual_path) 363 | if not virtual_path.endswith(".rels"): # We only want to read rels files. 364 | continue 365 | directory = virtual_path[:virtual_path.rfind("/")] # Before the last slash. 366 | if directory != "_rels" and not directory.endswith("/_rels"): # Rels files must be in a directory _rels. 
367 | continue 368 | 369 | document = ET.fromstring(self._zipfile.open(virtual_path).read()) 370 | 371 | # Find out what file or directory this relation is about. 372 | origin_filename = virtual_path[virtual_path.rfind("/") + 1:-len( 373 | ".rels")] # Just the filename (no path) and without .rels extension. 374 | origin_directory = directory[ 375 | :-len("/_rels")] # The parent path. We already know it's in the _rels directory. 376 | origin = (origin_directory + "/" if (origin_directory != "") else "") + origin_filename 377 | 378 | self._relations[origin] = document 379 | 380 | ## At the end of writing a file, write the relations to the archive. 381 | # 382 | # This should be written at the end of writing an archive, when all 383 | # relations are known. 384 | def _writeRels(self) -> None: 385 | assert self._zipfile is not None 386 | # Below is some parsing of paths and extensions. 387 | # Normally you'd use os.path for this. But this is platform-dependent. 388 | # For instance, the path separator in Windows is a backslash, but zipfile still uses a slash on Windows. 389 | # So instead we have custom implementations here. Sorry. 390 | 391 | for origin, element in self._relations.items(): 392 | # Find out where to store the rels file. 393 | if "/" not in origin: # Is in root. 394 | origin_directory = "" 395 | origin_filename = origin 396 | else: 397 | origin_directory = origin[:origin.rfind("/")] 398 | origin_filename = origin[origin.rfind("/") + 1:] 399 | relations_file = origin_directory + "/_rels/" + origin_filename + ".rels" 400 | 401 | self._indent(element) 402 | self._zipfile.writestr(relations_file, ET.tostring(self._xml_header) + b"\n" + ET.tostring(element)) 403 | 404 | ## When loading a file, load the content types from the archive. 405 | # 406 | # If the content types are missing, an empty element is created. 
407 | def _readContentTypes(self) -> None: 408 | assert self._zipfile is not None 409 | 410 | if self._content_types_file in self._zipfile.namelist(): 411 | content_types_element = ET.fromstring(self._zipfile.open(self._content_types_file).read()) 412 | if content_types_element: 413 | self._content_types_element = content_types_element 414 | if not self._content_types_element: 415 | self._content_types_element = ET.Element("Types", 416 | xmlns="http://schemas.openxmlformats.org/package/2006/content-types") 417 | # If there is no type for the .rels file, create it. 418 | if self._mode != OpenMode.ReadOnly: 419 | for type_element in self._content_types_element.iterfind( 420 | "{http://schemas.openxmlformats.org/package/2006/content-types}Default"): 421 | if "Extension" in type_element.attrib and type_element.attrib["Extension"] == "rels": 422 | break 423 | else: 424 | ET.SubElement(self._content_types_element, "Default", Extension="rels", 425 | ContentType="application/vnd.openxmlformats-package.relationships+xml") 426 | 427 | ## At the end of writing a file, write the content types to the archive. 428 | # 429 | # This should be written at the end of writing an archive, when all 430 | # content types are known. 431 | def _writeContentTypes(self) -> None: 432 | assert self._zipfile is not None 433 | assert self._content_types_element is not None 434 | 435 | self._indent(self._content_types_element) 436 | self._zipfile.writestr(self._content_types_file, 437 | ET.tostring(self._xml_header) + b"\n" + ET.tostring(self._content_types_element)) 438 | 439 | ## When loading a file, read its metadata from the archive. 440 | # 441 | # This depends on the relations! Read the relations first! 
442 | def _readMetadata(self) -> None: 443 | assert self._zipfile is not None 444 | 445 | for origin, relations_element in self._relations.items(): 446 | for relationship in relations_element.iterfind( 447 | "{http://schemas.openxmlformats.org/package/2006/relationships}Relationship"): 448 | if "Target" not in relationship.attrib or "Type" not in relationship.attrib: # These two are required, and we actually need them here. Better ignore this one. 449 | continue 450 | if relationship.attrib[ 451 | "Type"] != self._opc_metadata_relationship_type: # Not interested in this one. It's not metadata that we recognise. 452 | continue 453 | metadata_file = relationship.attrib["Target"] 454 | if metadata_file not in self._zipfile.namelist(): # The metadata file is unknown to us. 455 | continue 456 | 457 | metadata = json.loads(self._zipfile.open(metadata_file).read().decode("utf-8")) 458 | if metadata_file == self._global_metadata_file: # Store globals as if coming from root. 459 | metadata_file = "" 460 | elif metadata_file.endswith( 461 | ".json"): # Metadata files should be named .json, meaning that they are metadata about . 462 | metadata_file = metadata_file[:-len(".json")] 463 | self._readMetadataElement(metadata, metadata_file) 464 | 465 | if self._mode != OpenMode.WriteOnly and not self.getMetadata("/3D/model.gcode"): 466 | try: 467 | # Check if the G-code file actually exists in the package. 468 | self._zipfile.getinfo("/3D/model.gcode") 469 | except KeyError: 470 | return 471 | 472 | gcode_stream = self._zipfile.open("/3D/model.gcode") 473 | header_data = GCodeFile.parseHeader(gcode_stream, prefix="/3D/model.gcode/") 474 | self._metadata.update(header_data) 475 | 476 | ## Reads a single node of metadata from a JSON document (recursively). 477 | # \param element The node in the JSON document to read. 478 | # \param current_path The path towards the current document. 
## Reads a single node of the metadata tree into the metadata dictionary.
#
#  JSON encodes subtrees as nested dicts; any other value is a metadata leaf.
#  \param element The JSON node to read.
#  \param current_path The virtual path accumulated so far, used as key prefix.
def _readMetadataElement(self, element: Dict[str, Any], current_path: str) -> None:
    for key, value in element.items():
        if isinstance(value, dict):  # json structures stuff in dicts if it is a subtree.
            self._readMetadataElement(value, current_path + "/" + key)
        else:
            self._metadata[current_path + "/" + key] = value

## At the end of writing a file, write the metadata to the archive.
#
#  This should be written at the end of writing an archive, when all
#  metadata is known.
#
#  ALWAYS WRITE METADATA BEFORE UPDATING RELS AND CONTENT TYPES.
def _writeMetadata(self) -> None:
    assert self._zipfile is not None

    # Keys that end up not being claimed by any archive member below are global metadata.
    keys_left = set(self._metadata.keys())
    metadata_per_file = {}  # type: Dict[str, Dict[str, Any]]
    for file_name in self._zipfile.namelist():
        metadata_per_file[file_name] = {}
        for metadata_key in self._metadata:
            if metadata_key.startswith(file_name + "/"):
                # Strip the prefix: "/a/b/c.stl/print_time" becomes just "print_time" about the file "/a/b/c.stl".
                metadata_per_file[file_name][metadata_key[len(file_name) + 1:]] = self._metadata[metadata_key]
                # discard() rather than remove(): when one archive name is a "/"-prefix of
                # another (e.g. "a" and "a/b"), the same key matches both files and a second
                # remove() of an already-removed key would raise KeyError.
                keys_left.discard(metadata_key)
    # keys_left now contains only global metadata keys.

    global_metadata = {key: self._metadata[key] for key in keys_left}
    if len(global_metadata) > 0:
        self._writeMetadataToFile(global_metadata, self._global_metadata_file)
        self.addRelation(self._global_metadata_file, self._opc_metadata_relationship_type)
    for file_name, metadata in metadata_per_file.items():
        if len(metadata) > 0:
            self._writeMetadataToFile(metadata, file_name + ".json")
            self.addRelation(file_name + ".json", self._opc_metadata_relationship_type)
    if len(self._metadata) > 0:  # If we've written any metadata at all, we must include the content type as well.
        try:
            # NOTE(review): "text/json" is not an IANA-registered MIME type ("application/json" is),
            # but it is kept as-is because existing readers of these packages may match on it.
            self.addContentType(extension="json", mime_type="text/json")
        except OPCError:  # User may already have defined this content type himself.
            pass

## Writes one dictionary of metadata to a JSON file.
#  \param metadata The metadata dictionary to write.
#  \param file_name The virtual path of the JSON file to write to.
def _writeMetadataToFile(self, metadata: Dict[str, Any], file_name: str) -> None:
    assert self._zipfile is not None

    # Split the metadata into a hierarchical structure.
    document = {}  # type: Dict[str, Any]
    for key, value in metadata.items():
        key = key.strip("/")  # TODO: Should paths ending in a slash give an error?
        path = key.split("/")
        current_element = document
        for element in path:
            if element not in current_element:
                current_element[element] = {}
            current_element = current_element[element]
        current_element[""] = value

    # We've created some empty-string keys to allow values to occur next to subelements.
    # If this empty-string key is the only key inside a node, fold it in to be just the value.
    for key in metadata:
        key = key.strip("/")
        path = key.split("/")
        current_element = document
        parent = document
        for element in path:
            parent = current_element
            current_element = current_element[element]
        if len(current_element) == 1:  # The empty string is the only element.
            assert "" in current_element
            parent[path[-1]] = current_element[""]  # Fold down the singleton dictionary.

    self._zipfile.writestr(file_name, json.dumps(document, sort_keys=True, indent=4))

## Helper method to write data directly into an aliased path.
#  \param path_alias The alias (e.g. "/preview") to write under.
#  \param package_filename The file name within the aliased location.
#  \param file_data The raw bytes to write.
def _writeToAlias(self, path_alias: str, package_filename: str, file_data: bytes) -> None:
    stream = self.getStream("{}/{}".format(path_alias, package_filename))
    stream.write(file_data)

## Helper method to ensure a relationship exists.
#  Creates the relationship if it does not exist; ignores an OPC error if it already does.
#  \param virtual_path The target of the relationship.
#  \param relation_type The type (identifying URL) of the relationship.
#  \param origin The file the relationship originates from.
def _ensureRelationExists(self, virtual_path: str, relation_type: str, origin: str) -> None:
    try:
        # We try to add the relation. If this throws an OPCError, we know the relation already exists and ignore it.
        self.addRelation(virtual_path, relation_type, origin)
    except OPCError:
        pass

## Helper function for pretty-printing XML because ETree is stupid.
570 | # 571 | # Source: https://stackoverflow.com/questions/749796/pretty-printing-xml-in-python 572 | def _indent(self, elem: ET.Element, level: int = 0) -> None: 573 | i = "\n" + level * " " 574 | if len(elem): 575 | if not elem.text or not elem.text.strip(): 576 | elem.text = i + " " 577 | if not elem.tail or not elem.tail.strip(): 578 | elem.tail = i 579 | for elem in elem: 580 | self._indent(elem, level + 1) 581 | if not elem.tail or not elem.tail.strip(): 582 | elem.tail = i 583 | else: 584 | if level and (not elem.tail or not elem.tail.strip()): 585 | elem.tail = i 586 | 587 | 588 | ## Error to raise that something went wrong with reading/writing a OPC file. 589 | class OPCError(Exception): 590 | pass # This is just a marker class. 591 | -------------------------------------------------------------------------------- /Charon/filetypes/UltimakerFormatPackage.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 3 | from collections import OrderedDict 4 | 5 | from Charon.OpenMode import OpenMode 6 | from Charon.filetypes.GCodeFile import GCodeFile 7 | from Charon.filetypes.OpenPackagingConvention import OpenPackagingConvention 8 | 9 | 10 | ## A container file type that contains multiple 3D-printing related files that belong together. 11 | class UltimakerFormatPackage(OpenPackagingConvention): 12 | 13 | # Where the global metadata file is. 14 | _global_metadata_file = "/Metadata/UFP_Global.json" 15 | 16 | # Unique identifier of the relationship type that relates UFP metadata to files. 17 | _metadata_relationship_type = "http://schemas.ultimaker.org/package/2018/relationships/ufp_metadata" 18 | 19 | # Where the global metadata file is. 20 | global_metadata_file = "/Metadata/UFP_Global.json" 21 | 22 | # Unique identifier of the relationship type that relates UFP metadata to files. 
23 | metadata_relationship_type = "http://schemas.ultimaker.org/package/2018/relationships/ufp_metadata" 24 | 25 | # Virtual path aliases. Keys are regex. Order matters! 26 | _aliases = OrderedDict([ 27 | (r"^/preview/default", "/Metadata/thumbnail.png"), 28 | (r"^/preview", "/Metadata/thumbnail.png"), 29 | (r"^/toolpath/default", "/3D/model.gcode"), 30 | (r"^/toolpath", "/3D/model.gcode"), 31 | ]) 32 | 33 | mime_type = "application/x-ufp" 34 | 35 | ## Initialises the fields of this class. 36 | def __init__(self): 37 | super().__init__() 38 | 39 | ## When loading a file, read its metadata from the archive. 40 | # 41 | # This depends on the relations! Read the relations first! 42 | def _readMetadata(self) -> None: 43 | super()._readMetadata() 44 | if self._mode != OpenMode.WriteOnly and not self.getMetadata("/3D/model.gcode"): 45 | try: 46 | # Check if the G-code file actually exists in the package. 47 | self._zipfile.getinfo("/3D/model.gcode") 48 | except KeyError: 49 | return 50 | 51 | gcode_stream = self._zipfile.open("/3D/model.gcode") 52 | header_data = GCodeFile.parseHeader(gcode_stream, prefix="/3D/model.gcode/") 53 | self._metadata.update(header_data) 54 | -------------------------------------------------------------------------------- /Charon/filetypes/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 3 | 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 
7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 
48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 
90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 
129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 
160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # libCharon 2 | File metadata and streaming library 3 | 4 | The Charon library is the responsibility of the Embedded Applications team. 5 | Pull requests to MASTER have to be verified by the Embedded Applications team. 6 | 7 | ## Documentation 8 | - [Library](docs/library.md) 9 | - [UFP](docs/ultimaker_format_package.md) 10 | - [Service](docs/service.md) 11 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ARCH="armhf" 4 | 5 | # common directory variables 6 | SYSCONFDIR="${SYSCONFDIR:-/etc}" 7 | SRC_DIR="$(pwd)" 8 | BUILD_DIR_TEMPLATE="_build_${ARCH}" 9 | BUILD_DIR="${BUILD_DIR:-${SRC_DIR}/${BUILD_DIR_TEMPLATE}}" 10 | 11 | # Debian package information 12 | PACKAGE_NAME="${PACKAGE_NAME:-libCharon}" 13 | RELEASE_VERSION="${RELEASE_VERSION:-999.999.999}" 14 | 15 | build() 16 | { 17 | mkdir -p "${BUILD_DIR}" 18 | cd "${BUILD_DIR}" || return 19 | echo "Building with cmake" 20 | cmake \ 21 | -DCMAKE_BUILD_TYPE=Debug \ 22 | -DCMAKE_PREFIX_PATH="${CURA_BUILD_ENV_PATH}" \ 23 | -DCPACK_PACKAGE_VERSION="${RELEASE_VERSION}" \ 24 | .. 
25 | } 26 | 27 | create_debian_package() 28 | { 29 | make package 30 | cp ./*.deb ../ || true 31 | } 32 | 33 | cleanup() 34 | { 35 | rm -rf "${BUILD_DIR:?}" 36 | } 37 | 38 | usage() 39 | { 40 | echo "Usage: ${0} [OPTIONS]" 41 | echo " -c Explicitly cleanup the build directory" 42 | echo " -h Print this usage" 43 | echo "NOTE: This script requires root permissions to run." 44 | } 45 | 46 | while getopts ":hcs" options; do 47 | case "${options}" in 48 | c) 49 | cleanup 50 | exit 0 51 | ;; 52 | h) 53 | usage 54 | exit 0 55 | ;; 56 | s) 57 | # Ignore for compatibility with other build scripts 58 | ;; 59 | :) 60 | echo "Option -${OPTARG} requires an argument." 61 | exit 1 62 | ;; 63 | ?) 64 | echo "Invalid option: -${OPTARG}" 65 | exit 1 66 | ;; 67 | esac 68 | done 69 | shift "$((OPTIND - 1))" 70 | 71 | cleanup 72 | build 73 | create_debian_package 74 | -------------------------------------------------------------------------------- /build_for_ultimaker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright (C) 2019 Ultimaker B.V. 4 | # 5 | 6 | set -eu 7 | 8 | ARCH="armhf" 9 | 10 | SRC_DIR="$(pwd)" 11 | RELEASE_VERSION="${RELEASE_VERSION:-999.999.999}" 12 | DOCKER_WORK_DIR="/build" 13 | BUILD_DIR_TEMPLATE="_build_${ARCH}" 14 | BUILD_DIR="${BUILD_DIR:-${SRC_DIR}/${BUILD_DIR_TEMPLATE}}" 15 | 16 | run_env_check="yes" 17 | run_verification="yes" 18 | action="none" 19 | 20 | env_check() 21 | { 22 | run_in_docker "./docker_env/buildenv_check.sh" 23 | } 24 | 25 | run_build() 26 | { 27 | run_in_docker "./build.sh" "${@}" 28 | } 29 | 30 | run_verification() 31 | { 32 | echo "Testing!" 33 | # These verifications should never fail! 
See .gitlab-ci.yml 34 | ./ci/local/run_all.sh 35 | } 36 | 37 | run_shellcheck() 38 | { 39 | docker run \ 40 | --rm \ 41 | -v "$(pwd):${DOCKER_WORK_DIR}" \ 42 | -w "${DOCKER_WORK_DIR}" \ 43 | "registry.hub.docker.com/koalaman/shellcheck-alpine:stable" \ 44 | "./run_shellcheck.sh" 45 | } 46 | 47 | usage() 48 | { 49 | echo "Usage: ${0} [OPTIONS]" 50 | echo " -c Skip build environment checks" 51 | echo " -h Print usage" 52 | echo " -s Skip code verification" 53 | } 54 | 55 | while getopts ":a:chls" options; do 56 | case "${options}" in 57 | a) 58 | action="${OPTARG}" 59 | ;; 60 | c) 61 | run_env_check="no" 62 | ;; 63 | h) 64 | usage 65 | exit 0 66 | ;; 67 | s) 68 | run_verification="no" 69 | ;; 70 | :) 71 | echo "Option -${OPTARG} requires an argument." 72 | exit 1 73 | ;; 74 | ?) 75 | echo "Invalid option: -${OPTARG}" 76 | exit 1 77 | ;; 78 | esac 79 | done 80 | shift "$((OPTIND - 1))" 81 | 82 | if ! command -V docker; then 83 | echo "Docker not found, docker-less builds are not supported." 84 | exit 1 85 | fi 86 | 87 | case "${action}" in 88 | shellcheck) 89 | run_shellcheck 90 | exit 0 91 | ;; 92 | build) 93 | source ./docker_env/make_docker.sh "" 94 | run_build 95 | exit 0 96 | ;; 97 | build_docker_cache) 98 | DOCKER_BUILD_ONLY_CACHE="yes" 99 | source ./docker_env/make_docker.sh "" 100 | exit 0 101 | ;; 102 | none) 103 | ;; 104 | ?) 105 | echo "Invalid action: -${OPTARG}" 106 | exit 1 107 | ;; 108 | esac 109 | 110 | # Make sure to pass an empty argument to make_docker, else any arguments passed to build_for_ultimaker is passed to make_docker instead! 
111 | source ./docker_env/make_docker.sh "" 112 | 113 | if [ "${run_env_check}" = "yes" ]; then 114 | env_check 115 | fi 116 | 117 | run_build "${@}" 118 | 119 | if [ "${run_verification}" = "yes" ]; then 120 | run_verification 121 | fi 122 | 123 | exit 0 124 | -------------------------------------------------------------------------------- /charon_requirements.txt: -------------------------------------------------------------------------------- 1 | flake8==4.0.1 2 | flake8-polyfill==1.0.2 3 | flake8-quotes==3.3.1 4 | mypy==0.910 5 | pep8-naming==0.12.1 6 | pytest==6.2.5 7 | pytest-cov==3.0.0 8 | pytest-mock==1.10.4 9 | pytest-raises==0.11 10 | pytest-profiling==1.7.0 11 | pytest-raises==0.11 12 | vulture==2.3 13 | pycodestyle==2.8.0 14 | pylint==2.13.9 15 | coverage 16 | lizard 17 | typing 18 | -------------------------------------------------------------------------------- /coverage.ini: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | include = ./Charon 4 | [html] 5 | directory = ./cov_report 6 | -------------------------------------------------------------------------------- /docker_env/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM registry.hub.docker.com/library/debian:buster-slim 2 | 3 | RUN apt-get update && apt-get -y install cmake make python3 python3-pip git 4 | 5 | COPY docker_env/buildenv_check.sh buildenv_check.sh 6 | 7 | COPY charon_requirements.txt charon_requirements.txt 8 | 9 | RUN pip3 install -r charon_requirements.txt 10 | -------------------------------------------------------------------------------- /docker_env/buildenv_check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | CROSS_COMPILE="${CROSS_COMPILE:-""}" 6 | 7 | COMMANDS=" \ 8 | cmake \ 9 | make \ 10 | python3 \ 11 | pip3 \ 12 | git \ 13 | " 14 | result=0 15 | 16 | echo_line(){ 17 | echo 
"--------------------------------------------------------------------------------" 18 | } 19 | 20 | check_command_installation() 21 | { 22 | for pkg in ${COMMANDS}; do 23 | PATH="${PATH}:/sbin:/usr/sbin:/usr/local/sbin" command -V "${pkg}" || result=1 24 | done 25 | } 26 | 27 | echo_line 28 | echo "Verifying build environment commands:" 29 | check_command_installation 30 | echo_line 31 | 32 | if [ "${result}" -ne 0 ]; then 33 | echo "ERROR: Missing preconditions, cannot continue." 34 | exit 1 35 | fi 36 | 37 | echo_line 38 | echo "Build environment OK" 39 | echo_line 40 | 41 | exit 0 42 | -------------------------------------------------------------------------------- /docker_env/make_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright (C) 2019 Ultimaker B.V. 4 | # 5 | 6 | set -eu 7 | 8 | DOCKER_BUILD_ONLY_CACHE="${DOCKER_BUILD_ONLY_CACHE:-no}" 9 | DOCKER_IMAGE_NAME="${DOCKER_IMAGE_NAME:-libcharon}" 10 | DOCKER_REGISTRY_NAME="ghcr.io/ultimaker/${DOCKER_IMAGE_NAME}" 11 | 12 | echo "Checking for image updates" 13 | 14 | # Creates a new docker driver named "ultimaker" if it doesnt exist yet. 15 | docker buildx create --name ultimaker --driver=docker-container 2> /dev/null || true 16 | 17 | if [ "${DOCKER_BUILD_ONLY_CACHE}" = "yes" ]; then 18 | docker buildx build --builder ultimaker --cache-to "${DOCKER_REGISTRY_NAME}" --cache-from "${DOCKER_REGISTRY_NAME}" -f docker_env/Dockerfile -t "${DOCKER_IMAGE_NAME}" . 19 | else 20 | docker buildx build --builder ultimaker --load --cache-from "${DOCKER_REGISTRY_NAME}" -f docker_env/Dockerfile -t "${DOCKER_IMAGE_NAME}" . 21 | 22 | if ! docker run --rm --privileged "${DOCKER_IMAGE_NAME}" "./buildenv_check.sh"; then 23 | echo "Something is wrong with the build environment, please check your Dockerfile." 
24 | docker image rm "${DOCKER_IMAGE_NAME}" 25 | exit 1 26 | fi 27 | fi; 28 | 29 | DOCKER_WORK_DIR="${WORKDIR:-/build/libcharon}" 30 | PREFIX="/usr" 31 | 32 | run_in_docker() 33 | { 34 | echo "Running '${*}' in docker." 35 | docker run \ 36 | --rm \ 37 | --privileged \ 38 | -u "$(id -u):$(id -g)" \ 39 | -v "$(pwd):${DOCKER_WORK_DIR}" \ 40 | -v "$(pwd)/../:${DOCKER_WORK_DIR}/.." \ 41 | -e "USE_DUMMY_DBUS=true" \ 42 | -e "PYTHONPATH=:../dbus-interface-lib:../libpalantir:../libPalantir:../charon:../libCharon:../libsmeagol:../libSmeagol:../marvin-service/src:../libLogger:../ultiLib/libs:../mqttHandler" \ 43 | -e "PREFIX=${PREFIX}" \ 44 | -e "RELEASE_VERSION=${RELEASE_VERSION:-}" \ 45 | -e "ONLY_CHECK_STAGED=${ONLY_CHECK_STAGED:-}" \ 46 | -w "${DOCKER_WORK_DIR}" \ 47 | "${DOCKER_IMAGE_NAME}" \ 48 | "${@}" 49 | } 50 | -------------------------------------------------------------------------------- /docs/class_diagram.plantuml: -------------------------------------------------------------------------------- 1 | @startuml 2 | 3 | package Service { 4 | class FileService << DBusService >> { 5 | +startRequest(file: String, paths: List[str]): int 6 | +cancelRequest(requestId: int) 7 | --- 8 | +signal requestData(requestId: int, data: Dict[str, Any]) 9 | +signal requestFinished(requestId: int) 10 | +signal requestError(requestId: int, error: str) 11 | } 12 | 13 | class Queue { 14 | +enqueue(job: Job) 15 | +dequeue(job: Job) 16 | +takeNext() : Job 17 | } 18 | 19 | class Worker << Thread >> { 20 | +run() 21 | } 22 | 23 | class Job { 24 | +execute() 25 | +requestId: int 26 | } 27 | 28 | FileService *-- Queue 29 | 30 | Queue o-- Job 31 | 32 | Worker --> Queue : Take 33 | Worker --> Job : Execute 34 | } 35 | 36 | package Library { 37 | 38 | interface FileInterface { 39 | +open(path : str, mode : OpenMode = ReadOnly) 40 | +openStream(stream: BufferedIOBase, mimetype: String) 41 | +close() 42 | +flush() 43 | +getData(virtual_path: String) : Dict[str, Any] 44 | +setData(data: Dict[str, 
Any]) 45 | +getStream(virtual_path : String) : BufferedIOBase 46 | +listPaths() : List[str] 47 | +toByteArray(offset: int = 0, count: int = -1) : Bytes 48 | } 49 | 50 | class VirtualFile << ContextManager >> { 51 | -implementation : FileImplementation 52 | } 53 | 54 | abstract class FileImplementation { 55 | } 56 | 57 | FileInterface <|-- VirtualFile 58 | FileInterface <|-- FileImplementation 59 | 60 | VirtualFile *-- FileImplementation 61 | note on link 62 | VirtualFile creates a FileImplementation 63 | based on the mimetype of the file it should 64 | open. 65 | end note 66 | 67 | FileImplementation <|-- GCodeFile 68 | FileImplementation <|-- ContainerFile 69 | } 70 | 71 | class FileRequest { 72 | +state: RequestState 73 | +file_path: str 74 | +virtual_paths: List[str] 75 | +result: Dict[str, Any] 76 | +start() 77 | +waitUntilFinished() 78 | --- 79 | signal dataReceived(request: FileRequest, data: Dict[str, any]) 80 | signal finished(request: FileRequest) 81 | } 82 | 83 | @enduml 84 | -------------------------------------------------------------------------------- /docs/class_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ultimaker/libCharon/a0d407a6bfd3f8ecade4faadc13c8f0f3e3ff29b/docs/class_diagram.png -------------------------------------------------------------------------------- /docs/library.md: -------------------------------------------------------------------------------- 1 | File Library 2 | ============ 3 | 4 | This library will read and write several 3D-printing related file formats. 5 | 6 | ![Class Diagram](class_diagram.png) 7 | 8 | Metadata 9 | -------- 10 | 11 | Each file implementation is expected to provide some global metadata for the file and additionally some per-resource metadata. This metadata is represented as a list of key-value pairs, with the key being a virtual path to a specific metadata entry. 
12 | 13 | Each file implementation is required to provide the following metadata entries for all resources: 14 | 15 | - size: Total file size. Note that when dealing with compressed files, this should be the uncompressed size. 16 | 17 | For toolpath resources, the following metadata entries are also required to be provided: 18 | 19 | - machine_type: The type of machine this toolpath targets. 20 | - print_time: The time in seconds it is estimated this toolpath will take to print. 21 | - print_size: A volume describing the total print size. 22 | 23 | Additional metadata may be available but is not required. 24 | 25 | ### The Default Set 26 | 27 | The default set of metadata as referenced below, contains the required properties for the entire file and the required properties for the default toolpath. 28 | 29 | Virtual Paths 30 | ------------- 31 | 32 | The data of the file is retrieved based on paths. These paths represent virtual locations in the file and can be mapped by the file implementation to different locations or files in the container file. The library provides a method to list all the available virtual paths of a file. 33 | 34 | The following virtual paths are guaranteed to be available: 35 | 36 | - `/metadata` or `/metadata/default`: Retrieve a "default" set of metadata. 37 | - `/toolpath` or `/toolpath/default`: Retrieve the primary or default toolpath. 38 | 39 | The following virtual paths are optionally also available. These are considered optional because they can represent non-existing resources or capabilities the file format does not support. Client code should always check before using these resources. 40 | 41 | - `/metadata/{key}`: Retrieve the named key from the file's metadata. 42 | - `/metadata/{path}`: Retrieve metadata for a specific resource. {path} can be any valid virtual path except those starting with /metadata. 43 | - `/preview` or `/preview/default`: Retrieve the default preview at a default size. 
44 | - `/preview/default/{size}`: Retrieve the default preview at the specified size. 45 | - `/preview/{name}`: Retrieve the named preview. 46 | - `/preview/{name}/{size}`: Retrieve the named preview at the specified size. 47 | - `/toolpath/{name}`: Retrieve the named toolpath. 48 | - `/{file path}`: Retrieve a named file. 49 | 50 | Note that virtual paths are case-sensitive and should only contain alphanumeric characters, dots, underscores and forward slashes. 51 | 52 | Examples 53 | -------- 54 | 55 | To retrieve the default set of metadata, use `/metadata`. This would return a dictionary with something like: 56 | 57 | ``` 58 | { 59 | /metadata/size: 12354 60 | /metadata/toolpath/default/size: 12000 61 | /metadata/toolpath/default/machine_type: ultimaker3 62 | /metadata/toolpath/default/print_time: 121245 63 | /metadata/toolpath/default/print_size: (0,0,0)x(100,100,100) 64 | } 65 | ``` 66 | 67 | To retrieve a stream for the preview named "top left" at a size of 117x117 pixels, use the path `/preview/top_left/117x117`. 
68 | 69 | ### Read a gcode file: 70 | ``` 71 | from Charon.VirtualFile import VirtualFile 72 | 73 | f = VirtualFile() 74 | f.open("file.gcode") 75 | print(f.getData("/metadata")) 76 | for line in f.getStream("/toolpath"): 77 | print(line) 78 | f.close() 79 | ``` 80 | -------------------------------------------------------------------------------- /docs/service.md: -------------------------------------------------------------------------------- 1 | # Service 2 | TODO 3 | -------------------------------------------------------------------------------- /docs/service_sequence.plantuml: -------------------------------------------------------------------------------- 1 | @startuml 2 | 3 | hide footbox 4 | 5 | == Synchronous == 6 | 7 | Client -> Request: Create 8 | activate Request 9 | Request --> Client 10 | 11 | Client -> Request: waitForFinished 12 | 13 | Request -> FileService: startRequest 14 | FileService --> Request: return requestId 15 | 16 | FileService -> Queue: enqueue 17 | Queue --> FileService 18 | 19 | Worker -> Queue: takeNext 20 | Queue --> Worker: job 21 | 22 | activate Worker 23 | Worker -> Worker: Process File 24 | 25 | Worker -> FileService: requestData(id, data) 26 | FileService -->>o Request: requestData(id, data) 27 | 28 | Worker -> FileService: requestFinished(id) 29 | FileService -->>o Request: requestFinished(id) 30 | deactivate Worker 31 | 32 | Request --> Client: data 33 | destroy Request 34 | 35 | == Asynchronous == 36 | 37 | Client -> Request: Create 38 | activate Request 39 | Request --> Client 40 | 41 | Client -> Request: start 42 | Request --> Client 43 | 44 | Request -> FileService: startRequest 45 | FileService --> Request: return requestId 46 | 47 | FileService -> Queue: enqueue 48 | Queue --> FileService 49 | 50 | Worker -> Queue: takeNext 51 | Queue --> Worker: job 52 | 53 | activate Worker 54 | Worker -> Worker: Process File 55 | 56 | Worker -> FileService: requestData(id, data) 57 | FileService -->>o Request: requestData(id, data) 
58 | Request -->>o Client: requestData(request, data) 59 | 60 | Worker -> FileService: requestFinished(id) 61 | FileService -->>o Request: requestFinished(id) 62 | deactivate Worker 63 | Request -->>o Client: requestFinished(request) 64 | 65 | destroy Request 66 | 67 | @enduml 68 | -------------------------------------------------------------------------------- /docs/service_sequence.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ultimaker/libCharon/a0d407a6bfd3f8ecade4faadc13c8f0f3e3ff29b/docs/service_sequence.png -------------------------------------------------------------------------------- /docs/ultimaker_format_package.md: -------------------------------------------------------------------------------- 1 | # Ultimaker Format Package (UFP) 2 | 3 | Create a UltimakerFormatPackage 4 | ``` 5 | from Charon.VirtualFile import VirtualFile 6 | from Charon.OpenMode import OpenMode 7 | 8 | f = VirtualFile() 9 | f.open("output.ufp", OpenMode.WriteOnly) 10 | f.setData("/toolpath", "TEST123") 11 | f.close() 12 | ``` 13 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.4 3 | disallow_untyped_calls = False 4 | disallow_untyped_defs = False 5 | disallow_incomplete_defs = False 6 | check_untyped_defs = False 7 | warn_incomplete_stub = True 8 | warn_redundant_casts = True 9 | warn_no_return = True 10 | warn_return_any = False 11 | disallow_subclassing_any = False 12 | disallow_any_unimported = False 13 | disallow_any_expr = False 14 | disallow_any_decorated = False 15 | disallow_any_explicit = False 16 | disallow_any_generics = False 17 | warn_unused_ignores = False 18 | ignore_missing_imports = True 19 | strict_optional = False 20 | no_implicit_optional = False 21 | -------------------------------------------------------------------------------- 
/pycodestyle.ini: -------------------------------------------------------------------------------- 1 | [pycodestyle] 2 | select = E101, E111, E112, E113, E201, E202, E203, E211, E221, E222, E223, E224, E225, E226, E227, E228, E241, E242, E4, E7, E9, W1, W292, W6 3 | ignore = E501 4 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | testpaths = tests 3 | python_files = Test*.py 4 | python_classes = Test 5 | timeout = 30 6 | log_cli = 1 7 | log_cli_level = WARNING 8 | -------------------------------------------------------------------------------- /release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright (C) 2019 Ultimaker B.V. 3 | # Copyright (C) 2019 Raymond Siudak 4 | # 5 | # SPDX-License-Identifier: LGPL-3.0+ 6 | 7 | set -eu 8 | set -o pipefail 9 | 10 | usage() 11 | { 12 | echo "Usage: ${0} [OPTIONS] " 13 | echo "Triggers the release of this package to the CloudSmith package storage, given the" 14 | echo "release version passed as argument to the script, e.g. 6.0.1 or 6.0.1-dev." 15 | echo "" 16 | echo "This script wil create a tag and push that to origin, this triggers the CI job to release" 17 | echo "to CloudSmith. The CI release job will differentiate between pushing to official release" 18 | echo "storage or development release storage, pushing to development release storage is " 19 | echo "triggerred by adding the '-dev' postfix to the release version e.g. 6.2.0-dev." 20 | echo "" 21 | echo " -h Print usage" 22 | } 23 | 24 | is_tag_existing_locally() 25 | { 26 | if git rev-parse "${TAG}" > /dev/null 2>&1; then 27 | echo "WARNING: Local Git tag '${TAG}' already exists." 28 | return 0 29 | fi 30 | return 1 31 | } 32 | 33 | 34 | is_tag_on_github() 35 | { 36 | if ! 
git ls-remote origin ":refs/tags/${TAG}"; then 37 | echo "WARNING: GitHub tag '${TAG}' already exists." 38 | return 0 39 | fi 40 | return 1 41 | } 42 | 43 | trigger_release() 44 | { 45 | if is_tag_existing_locally; then 46 | if ! git tag -d "${TAG}"; then 47 | echo "Error: failed to clear local tag'${TAG}'." 48 | exit 1 49 | fi 50 | fi 51 | 52 | if ! git tag "${TAG}"; then 53 | echo "Error: failed to tag with '${TAG}'." 54 | exit 1 55 | fi 56 | 57 | if ! is_tag_on_github; then 58 | if ! git push origin "${TAG}"; then 59 | echo "Error: failed to push tag: '${TAG}'." 60 | exit 1 61 | fi 62 | return 0 63 | fi 64 | 65 | return 1 66 | } 67 | 68 | while getopts ":h" options; do 69 | case "${options}" in 70 | h) 71 | usage 72 | exit 0 73 | ;; 74 | :) 75 | echo "Option -${OPTARG} requires an argument." 76 | exit 1 77 | ;; 78 | ?) 79 | echo "Invalid option: -${OPTARG}" 80 | exit 1 81 | ;; 82 | esac 83 | done 84 | shift "$((OPTIND - 1))" 85 | 86 | if [ "${#}" -ne 1 ]; then 87 | echo "Too much or too little arguments, arguments should be exactly one: ." 88 | usage 89 | exit 1 90 | fi 91 | 92 | RELEASE_VERSION="${1}" 93 | TAG="$(git rev-parse --abbrev-ref HEAD)-v${RELEASE_VERSION}" 94 | 95 | if echo "${RELEASE_VERSION}" | grep -E '^[0-9]{1,3}+\.[0-9]{1,3}+\.[0-9]{1,3}+(-dev)?$'; then 96 | 97 | if is_tag_on_github; then 98 | echo "Error: Cannot continue, tag is already on GitHub." 99 | exit 1 100 | fi 101 | 102 | if trigger_release; then 103 | echo "Successfully triggered release '${RELEASE_VERSION}', follow the build at: http://34.90.73.76/dashboard." 104 | exit 0 105 | fi 106 | 107 | echo "Something went wrong triggering the release, please check the warnings and correct manually." 108 | fi 109 | 110 | echo "Invalid release version: '${RELEASE_VERSION}' given." 
111 | usage 112 | 113 | exit 1 114 | -------------------------------------------------------------------------------- /requirements-testing.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | coverage -------------------------------------------------------------------------------- /run_all_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -eu 4 | 5 | # Run the make_docker.sh script here, within the context of the run_all_tests.sh script 6 | . ./docker_env/make_docker.sh 7 | 8 | git fetch 9 | 10 | for test in ci/*.sh ; do 11 | run_in_docker "${test}" || echo "Failed!" 12 | done 13 | 14 | echo "Testing done!" 15 | 16 | exit 0 17 | -------------------------------------------------------------------------------- /run_complexity_analysis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -eu 4 | 5 | . ./docker_env/make_docker.sh 6 | 7 | run_in_docker "ci/complexity_analysis.sh" || echo "Failed!" 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /run_dead_code_analysis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -eu 4 | 5 | . ./docker_env/make_docker.sh 6 | 7 | run_in_docker "ci/dead_code_analysis.sh" || echo "Failed!" 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /run_mypy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -eu 4 | 5 | . ./docker_env/make_docker.sh 6 | 7 | git fetch 8 | 9 | run_in_docker "ci/mypy.sh" || echo "Failed!" 10 | 11 | exit 0 12 | -------------------------------------------------------------------------------- /run_pytest.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -eu 4 | 5 | . 
./docker_env/make_docker.sh 6 | 7 | run_in_docker "ci/pytest.sh" || echo "Failed!" 8 | 9 | exit 0 10 | -------------------------------------------------------------------------------- /run_shellcheck.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright (C) 2019 Ultimaker B.V. 4 | # 5 | # SPDX-License-Identifier: LGPL-3.0+ 6 | 7 | # This script is mandatory in a repository, to make sure the shell scripts are correct and of good quality. 8 | 9 | set -eu 10 | 11 | SHELLCHECK_FAILURE="false" 12 | 13 | # Add your scripts or search paths here 14 | SHELLCHECK_PATHS=" \ 15 | *.sh \ 16 | ./docker_env/*.sh \ 17 | ci/*.sh 18 | " 19 | 20 | # shellcheck disable=SC2086 21 | SCRIPTS="$(find ${SHELLCHECK_PATHS} -name '*.sh')" 22 | 23 | for script in ${SCRIPTS}; do 24 | if [ ! -r "${script}" ]; then 25 | echo_line 26 | echo "WARNING: skipping shellcheck for '${script}'." 27 | echo_line 28 | continue 29 | fi 30 | 31 | echo "Running shellcheck on '${script}'" 32 | shellcheck -x -C -f tty "${script}" || SHELLCHECK_FAILURE="true" 33 | done 34 | 35 | if [ "${SHELLCHECK_FAILURE}" = "true" ]; then 36 | echo "WARNING: One or more scripts did not pass shellcheck." 37 | exit 1 38 | fi 39 | 40 | echo "All scripts passed shellcheck." 41 | 42 | exit 0 43 | -------------------------------------------------------------------------------- /run_style_analysis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -eu 4 | 5 | . ./docker_env/make_docker.sh 6 | 7 | git fetch 8 | 9 | run_in_docker "ci/style_analysis.sh" || echo "Failed!" 
10 | 11 | exit 0 12 | -------------------------------------------------------------------------------- /service/charon.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Charon File Metadata service 3 | Requires=rc-local.service 4 | After=rc-local.service 5 | 6 | [Service] 7 | Environment=CHARON_USE_SESSION_BUS=0 8 | Environment='PYTHONPATH=$PYTHONPATH:/opt/pyqt' 9 | ExecStart=/usr/bin/python3 /usr/lib/python3/dist-packages/Charon/Service/main.py 10 | BusName=nl.ultimaker.charon 11 | User=ultimaker 12 | Type=simple 13 | Restart=always 14 | SyslogIdentifier=Charon 15 | 16 | [Install] 17 | WantedBy=griffin.target 18 | -------------------------------------------------------------------------------- /service/nl.ultimaker.charon.conf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /service/postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # DBus currently does not load config files from /usr but the 4 | # system-supplied config files really should be installed there. 5 | # So symlink things to /etc instead. 6 | ln -s /usr/share/dbus-1/system.d/nl.ultimaker.charon.conf /etc/dbus-1/system.d/ 7 | # Then, make sure DBus knows the policy file exists. 8 | systemctl reload dbus 9 | 10 | # Finally, enable the service 11 | systemctl daemon-reload 12 | systemctl enable charon.service 13 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 
3 | 4 | from distutils.core import setup 5 | 6 | setup( 7 | name = "Charon", 8 | version = "1.0", 9 | description = "Library to read and write file packages.", 10 | author = "Ultimaker", 11 | author_email = "plugins@ultimaker.com", 12 | url = "https://github.com/Ultimaker/libCharon", 13 | packages = ["Charon", "Charon.Client", "Charon.Service", "Charon.filetypes"] 14 | ) 15 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 3 | -------------------------------------------------------------------------------- /tests/filetypes/TestGCodeFile.py: -------------------------------------------------------------------------------- 1 | import io 2 | import unittest 3 | 4 | from Charon.filetypes.GCodeFile import GCodeFile, InvalidHeaderException 5 | 6 | 7 | class TestGcodeFile(unittest.TestCase): 8 | 9 | __minimal_griffin_header = ";START_OF_HEADER\n" \ 10 | ";FLAVOR:Griffin\n" \ 11 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 12 | ";GENERATOR.NAME:generator_foo\n" \ 13 | ";GENERATOR.VERSION: generator_version_foo\n" \ 14 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 15 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 16 | ";PRINT.SIZE.MIN.X:1\n" \ 17 | ";PRINT.SIZE.MIN.Y:2\n" \ 18 | ";PRINT.SIZE.MIN.Z:3\n" \ 19 | ";PRINT.SIZE.MAX.X:1\n" \ 20 | ";PRINT.SIZE.MAX.Y:2\n" \ 21 | ";PRINT.SIZE.MAX.Z:3\n" \ 22 | ";HEADER_VERSION:0.1\n" \ 23 | ";TIME:11\n" \ 24 | ";EXTRUDER_TRAIN.1.NOZZLE.DIAMETER:1.5\n" \ 25 | ";EXTRUDER_TRAIN.1.MATERIAL.VOLUME_USED:1.5\n" \ 26 | ";EXTRUDER_TRAIN.1.INITIAL_TEMPERATURE:666\n" \ 27 | "{}\n" \ 28 | ";END_OF_HEADER" 29 | 30 | def _print(self, d, prefix=""): 31 | for k, v in d.items(): 32 | if type(v) is dict: 33 | self._print(v, prefix="{}.{}".format(prefix, k) if prefix else "{}".format(k)) 34 | else: 35 | if prefix: 
36 | print("{}.{}: {}".format(prefix, k, v)) 37 | else: 38 | print("{}: {}".format(k, v)) 39 | 40 | def testParseGenericParameter_HappyTrail(self) -> None: 41 | gcode = self.__minimal_griffin_header.format(";A.B.C:5") 42 | gcode_stream = io.BytesIO(str.encode(gcode)) 43 | metadata = GCodeFile.parseHeader(gcode_stream) 44 | 45 | self._print(metadata) # print if any assert fails 46 | self.assertEqual(metadata["a"]["b"]["c"], 5) 47 | self.assertEqual(metadata["generator"]["name"], "generator_foo") 48 | self.assertEqual(metadata["build_plate"]["initial_temperature"], 30) 49 | self.assertEqual(metadata["extruders"][1]["nozzle"]["diameter"], 1.5) 50 | self.assertEqual(metadata["print"]["time"], 11) 51 | self.assertEqual(metadata["time"], 11) # This was the behavior of the old code. 52 | 53 | def testParseHeader_MissingFlavor(self) -> None: 54 | gcode = ";START_OF_HEADER\n" \ 55 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 56 | ";GENERATOR.NAME:generator_foo\n" \ 57 | ";GENERATOR.VERSION: generator_version_foo\n" \ 58 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 59 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 60 | ";PRINT.SIZE.MIN.X:1\n" \ 61 | ";PRINT.SIZE.MIN.Y:2\n" \ 62 | ";PRINT.SIZE.MIN.Z:3\n" \ 63 | ";PRINT.SIZE.MAX.X:1\n" \ 64 | ";PRINT.SIZE.MAX.Y:2\n" \ 65 | ";PRINT.SIZE.MAX.Z:3\n" \ 66 | ";HEADER_VERSION:0.1\n" \ 67 | ";TIME:11\n" 68 | 69 | self.__parseWithInvalidHeaderException(gcode, "Flavor") 70 | 71 | def testParseHeader_MissingHeaderVersion(self) -> None: 72 | gcode = ";START_OF_HEADER\n" \ 73 | ";FLAVOR:Griffin\n" \ 74 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 75 | ";GENERATOR.NAME:generator_foo\n" \ 76 | ";GENERATOR.VERSION: generator_version_foo\n" \ 77 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 78 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 79 | ";PRINT.SIZE.MIN.X:1\n" \ 80 | ";PRINT.SIZE.MIN.Y:2\n" \ 81 | ";PRINT.SIZE.MIN.Z:3\n" \ 82 | ";PRINT.SIZE.MAX.X:1\n" \ 83 | ";PRINT.SIZE.MAX.Y:2\n" \ 84 | ";PRINT.SIZE.MAX.Z:3\n" \ 85 | ";TIME:11\n" 
86 | 87 | self.__parseWithInvalidHeaderException(gcode, "version") 88 | 89 | def testParseHeader_MissingTargetMachine(self) -> None: 90 | gcode = ";START_OF_HEADER\n" \ 91 | ";FLAVOR:Griffin\n" \ 92 | ";GENERATOR.NAME:generator_foo\n" \ 93 | ";GENERATOR.VERSION: generator_version_foo\n" \ 94 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 95 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 96 | ";PRINT.SIZE.MIN.X:1\n" \ 97 | ";PRINT.SIZE.MIN.Y:2\n" \ 98 | ";PRINT.SIZE.MIN.Z:3\n" \ 99 | ";PRINT.SIZE.MAX.X:1\n" \ 100 | ";PRINT.SIZE.MAX.Y:2\n" \ 101 | ";PRINT.SIZE.MAX.Z:3\n" \ 102 | ";HEADER_VERSION:0.1\n" \ 103 | ";TIME:11\n" 104 | self.__parseWithInvalidHeaderException(gcode, "TARGET_MACHINE") 105 | 106 | def testParseHeader_MissingGeneratorName(self) -> None: 107 | gcode = ";START_OF_HEADER\n" \ 108 | ";FLAVOR:Griffin\n" \ 109 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 110 | ";GENERATOR.VERSION: generator_version_foo\n" \ 111 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 112 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 113 | ";PRINT.SIZE.MIN.X:1\n" \ 114 | ";PRINT.SIZE.MIN.Y:2\n" \ 115 | ";PRINT.SIZE.MIN.Z:3\n" \ 116 | ";PRINT.SIZE.MAX.X:1\n" \ 117 | ";PRINT.SIZE.MAX.Y:2\n" \ 118 | ";PRINT.SIZE.MAX.Z:3\n" \ 119 | ";HEADER_VERSION:0.1\n" \ 120 | ";TIME:11\n" 121 | self.__parseWithInvalidHeaderException(gcode, "GENERATOR.NAME") 122 | 123 | def testParseHeader_MissingGeneratorVersion(self) -> None: 124 | gcode = ";START_OF_HEADER\n" \ 125 | ";FLAVOR:Griffin\n" \ 126 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 127 | ";GENERATOR.NAME:generator_foo\n" \ 128 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 129 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 130 | ";PRINT.SIZE.MIN.X:1\n" \ 131 | ";PRINT.SIZE.MIN.Y:2\n" \ 132 | ";PRINT.SIZE.MIN.Z:3\n" \ 133 | ";PRINT.SIZE.MAX.X:1\n" \ 134 | ";PRINT.SIZE.MAX.Y:2\n" \ 135 | ";PRINT.SIZE.MAX.Z:3\n" \ 136 | ";HEADER_VERSION:0.1\n" \ 137 | ";TIME:11\n" 138 | self.__parseWithInvalidHeaderException(gcode, "GENERATOR.VERSION") 139 | 
140 | def testParseHeader_MissingGeneratorBuildDate(self) -> None: 141 | gcode = ";START_OF_HEADER\n" \ 142 | ";FLAVOR:Griffin\n" \ 143 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 144 | ";GENERATOR.NAME:generator_foo\n" \ 145 | ";GENERATOR.VERSION: generator_version_foo\n" \ 146 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 147 | ";PRINT.SIZE.MIN.X:1\n" \ 148 | ";PRINT.SIZE.MIN.Y:2\n" \ 149 | ";PRINT.SIZE.MIN.Z:3\n" \ 150 | ";PRINT.SIZE.MAX.X:1\n" \ 151 | ";PRINT.SIZE.MAX.Y:2\n" \ 152 | ";PRINT.SIZE.MAX.Z:3\n" \ 153 | ";HEADER_VERSION:0.1\n" \ 154 | ";TIME:11\n" 155 | self.__parseWithInvalidHeaderException(gcode, "GENERATOR.BUILD_DATE") 156 | 157 | def testParseHeader_MissingInitialBuildPlateTemp(self) -> None: 158 | gcode = ";START_OF_HEADER\n" \ 159 | ";FLAVOR:Griffin\n" \ 160 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 161 | ";GENERATOR.NAME:generator_foo\n" \ 162 | ";GENERATOR.VERSION: generator_version_foo\n" \ 163 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 164 | ";PRINT.SIZE.MIN.X:1\n" \ 165 | ";PRINT.SIZE.MIN.Y:2\n" \ 166 | ";PRINT.SIZE.MIN.Z:3\n" \ 167 | ";PRINT.SIZE.MAX.X:1\n" \ 168 | ";PRINT.SIZE.MAX.Y:2\n" \ 169 | ";PRINT.SIZE.MAX.Z:3\n" \ 170 | ";HEADER_VERSION:0.1\n" \ 171 | ";TIME:11\n" 172 | 173 | self.__parseWithInvalidHeaderException(gcode, "BUILD_PLATE.INITIAL_TEMPERATURE") 174 | 175 | def testParseHeader_MissingMinSizeX(self) -> None: 176 | gcode = ";START_OF_HEADER\n" \ 177 | ";FLAVOR:Griffin\n" \ 178 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 179 | ";GENERATOR.NAME:generator_foo\n" \ 180 | ";GENERATOR.VERSION: generator_version_foo\n" \ 181 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 182 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 183 | ";PRINT.SIZE.MIN.Y:2\n" \ 184 | ";PRINT.SIZE.MIN.Z:3\n" \ 185 | ";PRINT.SIZE.MAX.X:1\n" \ 186 | ";PRINT.SIZE.MAX.Y:2\n" \ 187 | ";PRINT.SIZE.MAX.Z:3\n" \ 188 | ";HEADER_VERSION:0.1\n" \ 189 | ";TIME:11\n" 190 | 191 | self.__parseWithInvalidHeaderException(gcode, "PRINT.SIZE.MIN") 192 | 193 | def 
testParseHeader_MissingMaxSizeZ(self) -> None: 194 | gcode = ";START_OF_HEADER\n" \ 195 | ";FLAVOR:Griffin\n" \ 196 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 197 | ";GENERATOR.NAME:generator_foo\n" \ 198 | ";GENERATOR.VERSION: generator_version_foo\n" \ 199 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 200 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 201 | ";PRINT.SIZE.MIN.X:1\n" \ 202 | ";PRINT.SIZE.MIN.Y:2\n" \ 203 | ";PRINT.SIZE.MIN.Z:3\n" \ 204 | ";PRINT.SIZE.MAX.X:1\n" \ 205 | ";PRINT.SIZE.MAX.Y:2\n" \ 206 | ";HEADER_VERSION:0.1\n" \ 207 | ";TIME:11\n" 208 | 209 | self.__parseWithInvalidHeaderException(gcode, "PRINT.SIZE.MAX") 210 | 211 | def testParseHeader_MissingPrintTime(self) -> None: 212 | gcode = ";START_OF_HEADER\n" \ 213 | ";FLAVOR:Griffin\n" \ 214 | ";TARGET_MACHINE.NAME:target.foobar\n" \ 215 | ";GENERATOR.NAME:generator_foo\n" \ 216 | ";GENERATOR.VERSION: generator_version_foo\n" \ 217 | ";GENERATOR.BUILD_DATE: generator_build_foo\n" \ 218 | ";BUILD_PLATE.INITIAL_TEMPERATURE:30\n" \ 219 | ";PRINT.SIZE.MIN.X:1\n" \ 220 | ";PRINT.SIZE.MIN.Y:2\n" \ 221 | ";PRINT.SIZE.MIN.Z:3\n" \ 222 | ";PRINT.SIZE.MAX.X:1\n" \ 223 | ";PRINT.SIZE.MAX.Y:2\n" \ 224 | ";PRINT.SIZE.MAX.Z:3\n" \ 225 | ";HEADER_VERSION:0.1\n" 226 | 227 | self.__parseWithInvalidHeaderException(gcode, "TIME") 228 | self.__parseWithInvalidHeaderException(gcode, "PRINT.TIME") 229 | 230 | def __parseWithInvalidHeaderException(self, gcode, text) -> None: 231 | gcode_stream = io.BytesIO(str.encode(gcode)) 232 | 233 | with self.assertRaises(InvalidHeaderException) as cm: 234 | metadata = GCodeFile.parseHeader(gcode_stream) 235 | self.assertTrue(text in str(cm.exception)) 236 | 237 | def testParseGenericParameter_NoValue(self) -> None: 238 | gcode = self.__minimal_griffin_header.format(";A.B.C:") 239 | gcode_stream = io.BytesIO(str.encode(gcode)) 240 | metadata = GCodeFile.parseHeader(gcode_stream) 241 | 242 | self.assertEqual(metadata["a"]["b"]["c"], '') 243 | 244 | 
-------------------------------------------------------------------------------- /tests/filetypes/TestGCodeFormat.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # Charon is released under the terms of the LGPLv3 or higher. 3 | import os 4 | 5 | from Charon.VirtualFile import VirtualFile 6 | 7 | 8 | def test_GCodeReader(): 9 | f = VirtualFile() 10 | f.open(os.path.join(os.path.dirname(__file__), "resources", "um3.gcode")) 11 | assert f.getData("/metadata")["/metadata/toolpath/default/flavor"] == "Griffin" 12 | assert b"M104" in f.getStream("/toolpath").read() 13 | f.close() 14 | 15 | 16 | def test_GCodeGzReader(): 17 | f = VirtualFile() 18 | f.open(os.path.join(os.path.dirname(__file__), "resources", "um3.gcode.gz")) 19 | assert f.getData("/metadata")["/metadata/toolpath/default/flavor"] == "Griffin" 20 | assert b"M104" in f.getStream("/toolpath").read() 21 | f.close() 22 | -------------------------------------------------------------------------------- /tests/filetypes/TestOpenPackagingConvention.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # Charon is released under the terms of the LGPLv3 or higher. 3 | import io #To create fake streams to write to and read from. 4 | import os #To find the resources with test packages. 5 | import pytest #This module contains unit tests. 6 | import zipfile #To inspect the contents of the zip archives. 7 | import xml.etree.ElementTree as ET #To inspect the contents of the OPC-spec files in the archives. 8 | from collections import OrderedDict 9 | from typing import List, Generator 10 | 11 | from Charon.filetypes.OpenPackagingConvention import OpenPackagingConvention, OPCError # The class we're testing. 12 | from Charon.OpenMode import OpenMode #To open archives. 13 | 14 | ## Returns an empty package that you can read from. 
15 | # 16 | # The package has no resources at all, so reading from it will not find 17 | # anything. 18 | @pytest.fixture() 19 | def empty_read_opc() -> Generator[OpenPackagingConvention, None, None]: 20 | result = OpenPackagingConvention() 21 | result.openStream(open(os.path.join(os.path.dirname(__file__), "resources", "empty.opc"), "rb")) 22 | yield result 23 | result.close() 24 | 25 | 26 | ## Returns a package that has a single file in it. 27 | # 28 | # The file is called "hello.txt" and contains the text "Hello world!" encoded 29 | # in UTF-8. 30 | @pytest.fixture() 31 | def single_resource_read_opc() -> Generator[OpenPackagingConvention, None, None]: 32 | result = OpenPackagingConvention() 33 | result.openStream(open(os.path.join(os.path.dirname(__file__), "resources", "hello.opc"), "rb")) 34 | yield result 35 | result.close() 36 | 37 | 38 | ## Returns an empty package that you can write to. 39 | # 40 | # Note that you can't really test the output of the write since you don't have 41 | # the stream it writes to. 42 | @pytest.fixture() 43 | def empty_write_opc() -> Generator[OpenPackagingConvention, None, None]: 44 | result = OpenPackagingConvention() 45 | result.openStream(io.BytesIO(), "application/x-opc", OpenMode.WriteOnly) 46 | yield result 47 | result.close() 48 | 49 | 50 | #### Now follow the actual tests. #### 51 | 52 | ## Tests whether an empty file is recognised as empty. 53 | def test_listPathsEmpty(empty_read_opc: OpenPackagingConvention): 54 | assert len(empty_read_opc.listPaths()) == 0 55 | 56 | 57 | ## Tests getting write streams of various resources that may or may not exist. 58 | # 59 | # Every test will write some arbitrary data to it to see that that also works. 60 | @pytest.mark.parametrize("virtual_path", ["/dir/file", "/file", "dir/file", "file", "/Metadata"]) #Some extra tests without initial slash to test robustness. 
61 | def test_getWriteStream(empty_write_opc: OpenPackagingConvention, virtual_path: str): 62 | stream = empty_write_opc.getStream(virtual_path) 63 | stream.write(b"The test is successful.") 64 | 65 | 66 | ## Tests not allowing to open relationship file directly to prevent mistakes. 67 | @pytest.mark.parametrize("virtual_path", ["/_rels/.rels"]) 68 | def test_getWriteStream_forbidOnRels(empty_write_opc: OpenPackagingConvention, virtual_path: str): 69 | with pytest.raises(OPCError): 70 | empty_write_opc.getStream(virtual_path) 71 | 72 | 73 | ## Tests writing data to an archive, then reading it back. 74 | @pytest.mark.parametrize("virtual_path", ["/dir/file", "/file", "/Metadata"]) #Don't try to read .rels back. That won't work. 75 | def test_cycleSetDataGetData(virtual_path: str): 76 | test_data = b"Let's see if we can read this data back." 77 | 78 | stream = io.BytesIO() 79 | package = OpenPackagingConvention() 80 | package.openStream(stream, mode = OpenMode.WriteOnly) 81 | package.setData({virtual_path: test_data}) 82 | package.close() 83 | 84 | stream.seek(0) 85 | package = OpenPackagingConvention() 86 | package.openStream(stream) 87 | result = package.getData(virtual_path) 88 | 89 | assert len(result) == 1 #This data must be the only data we've found. 90 | assert virtual_path in result #The path must be in the dictionary. 91 | assert result[virtual_path] == test_data #The data itself is still correct. 92 | 93 | 94 | @pytest.mark.parametrize("virtual_path, path_list", [ 95 | ("/foo/materials", ["/foo/materials", "/[Content_Types].xml", "/_rels/.rels"]), 96 | ("/materials", ["/files/materials", "/[Content_Types].xml", "/_rels/.rels"]) 97 | ]) 98 | def test_aliases_replacement(virtual_path: str, path_list: List[str]): 99 | test_data = b"Let's see if we can read this data back." 
100 | 101 | stream = io.BytesIO() 102 | package = OpenPackagingConvention() 103 | package._aliases = OrderedDict([ 104 | (r"/materials", "/files/materials") 105 | ]) 106 | package.openStream(stream, mode = OpenMode.WriteOnly) 107 | package.setData({virtual_path: test_data}) 108 | package.close() 109 | 110 | stream.seek(0) 111 | package = OpenPackagingConvention() 112 | package.openStream(stream) 113 | result = package.listPaths() 114 | 115 | assert result == path_list 116 | 117 | ## Tests writing data via a stream to an archive, then reading it back via a 118 | # stream. 119 | @pytest.mark.parametrize("virtual_path", ["/dir/file", "/file", "/Metadata"]) 120 | def test_cycleStreamWriteRead(virtual_path: str): 121 | test_data = b"Softly does the river flow, flow, flow." 122 | 123 | stream = io.BytesIO() 124 | package = OpenPackagingConvention() 125 | package.openStream(stream, mode = OpenMode.WriteOnly) 126 | resource = package.getStream(virtual_path) 127 | resource.write(test_data) 128 | package.close() 129 | 130 | stream.seek(0) 131 | package = OpenPackagingConvention() 132 | package.openStream(stream) 133 | resource = package.getStream(virtual_path) 134 | result = resource.read() 135 | 136 | assert result == test_data 137 | 138 | 139 | ## Tests setting metadata in an archive, then reading that metadata back. 140 | @pytest.mark.parametrize("virtual_path", ["/Metadata/some/global/setting", "/hello.txt/test", "/also/global/entry"]) 141 | def test_cycleSetMetadataGetMetadata(virtual_path: str): 142 | test_data = "Hasta la vista, baby." 143 | 144 | stream = io.BytesIO() 145 | package = OpenPackagingConvention() 146 | package.openStream(stream, mode = OpenMode.WriteOnly) 147 | package.setData({"/hello.txt": b"Hello world!"}) #Add a file to attach non-global metadata to. 
148 | package.setMetadata({virtual_path: test_data}) 149 | package.close() 150 | 151 | stream.seek(0) 152 | package = OpenPackagingConvention() 153 | package.openStream(stream) 154 | result = package.getMetadata(virtual_path) 155 | 156 | prefixed_virtual_path = "/metadata{}".format(virtual_path) 157 | 158 | assert len(result) == 1 #Only one metadata entry was set. 159 | assert prefixed_virtual_path in result #And it was the correct entry. 160 | assert result[prefixed_virtual_path] == test_data #With the correct value. 161 | 162 | 163 | ## Tests toByteArray with its parameters. 164 | # 165 | # This doesn't test if the bytes are correct, because that is the task of the 166 | # zipfile module. We merely test that it gets some bytes array and that the 167 | # offset and size parameters work. 168 | def test_toByteArray(single_resource_read_opc): 169 | original = single_resource_read_opc.toByteArray() 170 | original_length = len(original) 171 | 172 | #Even empty zip archives are already 22 bytes, so offsets and sizes of less than that should be okay. 173 | result = single_resource_read_opc.toByteArray(offset = 10) 174 | assert len(result) == original_length - 10 #The first 10 bytes have fallen off. 175 | 176 | result = single_resource_read_opc.toByteArray(count = 8) 177 | assert len(result) == 8 #Limited to size 8. 178 | 179 | result = single_resource_read_opc.toByteArray(offset = 10, count = 8) 180 | assert len(result) == 8 #Still limited by the size, even though there is an offset. 181 | 182 | result = single_resource_read_opc.toByteArray(count = 999999) #This is a small file, definitely smaller than 1MiB. 183 | assert len(result) == original_length #Should be limited to the actual file length. 184 | 185 | 186 | ## Tests toByteArray when loading from a stream. 
187 | def test_toByteArrayStream(): 188 | stream = io.BytesIO() 189 | package = OpenPackagingConvention() 190 | package.openStream(stream, mode = OpenMode.WriteOnly) 191 | package.setData({"/hello.txt": b"Hello world!"}) #Add some arbitrary data so that the file size is not trivial regardless of what format is used. 192 | package.close() 193 | 194 | stream.seek(0) 195 | package = OpenPackagingConvention() 196 | package.openStream(stream) 197 | result = package.toByteArray() 198 | 199 | assert len(result) > 0 #There must be some data in it. 200 | 201 | 202 | ## Tests whether a content type gets added and that it gets added in the 203 | # correct location. 204 | def test_addContentType(): 205 | stream = io.BytesIO() 206 | package = OpenPackagingConvention() 207 | package.openStream(stream, mode = OpenMode.WriteOnly) 208 | package.addContentType("lol", "audio/x-laughing") 209 | package.close() 210 | 211 | stream.seek(0) 212 | #This time, open as .zip to just inspect the file contents. 213 | archive = zipfile.ZipFile(stream) 214 | assert "/[Content_Types].xml" in archive.namelist() 215 | content_types = archive.open("/[Content_Types].xml").read() 216 | content_types_element = ET.fromstring(content_types) 217 | 218 | defaults = content_types_element.findall("{http://schemas.openxmlformats.org/package/2006/content-types}Default") 219 | assert len(defaults) == 2 #We only added one content type, but there must also be the .rels content type. 
220 | for default in defaults: 221 | assert "Extension" in default.attrib 222 | assert "ContentType" in default.attrib 223 | assert default.attrib["Extension"] in ["lol", "rels"] 224 | if default.attrib["Extension"] == "lol": 225 | assert default.attrib["ContentType"] == "audio/x-laughing" 226 | elif default.attrib["Extension"] == "rels": 227 | assert default.attrib["ContentType"] == "application/vnd.openxmlformats-package.relationships+xml" 228 | 229 | 230 | ## Tests whether a relation gets added and that it gets saved in the correct 231 | # location. 232 | def test_addRelation(): 233 | stream = io.BytesIO() 234 | package = OpenPackagingConvention() 235 | package.openStream(stream, mode = OpenMode.WriteOnly) 236 | package.setData({"/whoo.txt": b"Boo", "/whoo.enhanced.txt": b"BOOOO!", "/whoo.enforced.txt": b"BOOOOOOOOOO!"}) #Need 3 files: One base and two that are related. 237 | package.addRelation("whoo.enhanced.txt", "An enhanced version of it.", "whoo.txt") 238 | package.addRelation("whoo.enforced.txt", "A greatly enhanced version of it.", "whoo.txt") 239 | package.close() 240 | 241 | stream.seek(0) 242 | #This time, open as .zip to just inspect the file contents. 243 | archive = zipfile.ZipFile(stream) 244 | assert "/_rels/whoo.txt.rels" in archive.namelist() #It must create a file specifically for whoo.txt 245 | relations = archive.open("/_rels/whoo.txt.rels").read() 246 | relations_element = ET.fromstring(relations) 247 | 248 | both_relations = relations_element.findall("{http://schemas.openxmlformats.org/package/2006/relationships}Relationship") 249 | assert len(both_relations) == 2 #We added two relations. 
250 | for relation in both_relations: 251 | assert "Id" in relation.attrib 252 | assert "Target" in relation.attrib 253 | assert "Type" in relation.attrib 254 | assert relation.attrib["Target"] == "/whoo.enhanced.txt" or relation.attrib["Target"] == "/whoo.enforced.txt" 255 | if relation.attrib["Target"] == "/whoo.enhanced.txt": 256 | assert relation.attrib["Type"] == "An enhanced version of it." 257 | elif relation.attrib["Target"] == "/whoo.enforced.txt": 258 | assert relation.attrib["Type"] == "A greatly enhanced version of it." 259 | assert both_relations[0].attrib["Id"] != both_relations[1].attrib["Id"] #Id must be unique. 260 | 261 | 262 | ## Tests getting the size of a file. 263 | # 264 | # This is implemented knowing the contents of single_resource_read_opc. 265 | def test_getMetadataSize(single_resource_read_opc): 266 | metadata = single_resource_read_opc.getMetadata("/hello.txt/size") 267 | assert "/metadata/hello.txt/size" in metadata 268 | assert metadata["/metadata/hello.txt/size"] == len("Hello world!\n".encode("UTF-8")) #Compare with the length of the file's contents as encoded in UTF-8. 269 | -------------------------------------------------------------------------------- /tests/filetypes/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018 Ultimaker B.V. 2 | # libCharon is released under the terms of the LGPLv3 or higher. 
3 | -------------------------------------------------------------------------------- /tests/filetypes/resources/empty.opc: -------------------------------------------------------------------------------- 1 | PK -------------------------------------------------------------------------------- /tests/filetypes/resources/hello.opc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ultimaker/libCharon/a0d407a6bfd3f8ecade4faadc13c8f0f3e3ff29b/tests/filetypes/resources/hello.opc -------------------------------------------------------------------------------- /tests/filetypes/resources/um3.gcode: -------------------------------------------------------------------------------- 1 | ;START_OF_HEADER 2 | ;HEADER_VERSION:0.1 3 | ;FLAVOR:Griffin 4 | ;GENERATOR.NAME:Cura_SteamEngine 5 | ;GENERATOR.VERSION:2.7.0 6 | ;GENERATOR.BUILD_DATE:2017-08-30 7 | ;TARGET_MACHINE.NAME:Ultimaker 3 8 | ;EXTRUDER_TRAIN.0.INITIAL_TEMPERATURE:205 9 | ;EXTRUDER_TRAIN.0.MATERIAL.VOLUME_USED:782066 10 | ;EXTRUDER_TRAIN.0.MATERIAL.GUID:0e01be8c-e425-4fb1-b4a3-b79f255f1db9 11 | ;EXTRUDER_TRAIN.0.NOZZLE.DIAMETER:0.4 12 | ;EXTRUDER_TRAIN.0.NOZZLE.NAME:AA 0.4 13 | ;BUILD_PLATE.INITIAL_TEMPERATURE:60 14 | ;PRINT.TIME:342521 15 | ;PRINT.SIZE.MIN.X:9 16 | ;PRINT.SIZE.MIN.Y:6 17 | ;PRINT.SIZE.MIN.Z:0.27 18 | ;PRINT.SIZE.MAX.X:198.325 19 | ;PRINT.SIZE.MAX.Y:189.325 20 | ;PRINT.SIZE.MAX.Z:149.97 21 | ;END_OF_HEADER 22 | ;Generated with Cura_SteamEngine 2.7.0 23 | 24 | M104 S200 25 | G1 X10 Y10 F1000 26 | -------------------------------------------------------------------------------- /tests/filetypes/resources/um3.gcode.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ultimaker/libCharon/a0d407a6bfd3f8ecade4faadc13c8f0f3e3ff29b/tests/filetypes/resources/um3.gcode.gz --------------------------------------------------------------------------------