├── tests ├── __init__.py └── core │ └── test_core.py ├── pyccx ├── version.py ├── utils │ ├── __init__.py │ └── exporters.py ├── loadcase │ ├── __init__.py │ └── loadcase.py ├── material │ ├── __init__.py │ └── material.py ├── results │ └── __init__.py ├── analysis │ ├── __init__.py │ └── analysis.py ├── bc │ ├── __init__.py │ └── boundarycondition.py ├── mesh │ ├── __init__.py │ ├── utils.py │ ├── mesh.py │ └── elements.py ├── __init__.py └── core.py ├── docs ├── requirements.txt ├── index.rst ├── modules.rst └── conf.py ├── .readthedocs.yaml ├── .github └── workflows │ ├── pythonpublish.yml │ ├── cd.yml │ └── ci.yml ├── LICENSE ├── .gitignore ├── pyproject.toml ├── examples ├── example.py └── example_thermal.py ├── README.rst ├── CHANGELOG.md └── models └── cornerCube.step /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pyccx/version.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.2.0' 2 | -------------------------------------------------------------------------------- /pyccx/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .exporters import exportToVTK 2 | -------------------------------------------------------------------------------- /pyccx/loadcase/__init__.py: -------------------------------------------------------------------------------- 1 | from .loadcase import LoadCase, LoadCaseType 2 | -------------------------------------------------------------------------------- /pyccx/material/__init__.py: -------------------------------------------------------------------------------- 1 | from .material import Material, ElastoPlasticMaterial 2 | -------------------------------------------------------------------------------- /pyccx/results/__init__.py: -------------------------------------------------------------------------------- 1 | from .results import Result, NodalResult, ElementResult, ResultProcessor, ResultsValue 2 | -------------------------------------------------------------------------------- /pyccx/analysis/__init__.py: -------------------------------------------------------------------------------- 1 | from .analysis import AnalysisType, AnalysisError, MaterialAssignment, Simulation, SolidMaterialAssignment, ShellMaterialAssignment 2 | -------------------------------------------------------------------------------- /pyccx/bc/__init__.py: -------------------------------------------------------------------------------- 1 | from .boundarycondition import BoundaryCondition, BoundaryConditionType, Acceleration, Film, Fixed, Force, HeatFlux, Pressure, Radiation 2 | -------------------------------------------------------------------------------- /tests/core/test_core.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import math 3 | 4 | import pytest 5 | import pyccx 6 | import numpy as np 7 | 8 | class TestBasic: 9 | 10 | def test_version(self): 11 | 12 | assert pyccx.__version__ -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx-automodapi 3 | sphinx-autodoc-typehints 4 | sphinx-paramlinks 5 | sphinx_rtd_theme 6 | autodocsumm 7 | pypandoc 8 | mock 9 | furo 10 | numpy 11 | matplotlib 12 | trimesh 13 | m2r2==0.3.4 14 | 
gmsh>=4.14.0 15 | docutils==0.21.2 -------------------------------------------------------------------------------- /pyccx/mesh/__init__.py: -------------------------------------------------------------------------------- 1 | from .mesher import (Mesher, 2 | MeshingAlgorithm2D, MeshingAlgorithm3D, RecombinationAlgorithm) 3 | from .mesh import getSurfaceFacesFromRegion, getNodesFromRegion, getNodesFromVolume, removeSurfaceMeshes 4 | from .utils import Ent 5 | -------------------------------------------------------------------------------- /pyccx/mesh/utils.py: -------------------------------------------------------------------------------- 1 | class classproperty(property): 2 | def __get__(self, owner_self, owner_cls): 3 | return self.fget(owner_cls) 4 | 5 | 6 | class Ent: 7 | """ 8 | Entity types for Elementary MSH Geometry 9 | """ 10 | Point = 0 11 | Curve = 1 12 | Surface = 2 13 | Volume = 3 14 | All = -1 15 | -------------------------------------------------------------------------------- /pyccx/__init__.py: -------------------------------------------------------------------------------- 1 | from . import material 2 | from . import mesh 3 | from . import utils 4 | 5 | from .analysis import MaterialAssignment, Simulation, SolidMaterialAssignment, ShellMaterialAssignment 6 | from .bc import BoundaryConditionType, BoundaryCondition, Acceleration, Film, Fixed, HeatFlux, Pressure, Radiation 7 | from .core import Connector, DOF, ElementSet, MeshSet, NodeSet, SurfaceSet 8 | from .loadcase import LoadCaseType, LoadCase 9 | from .results import ElementResult, NodalResult, ResultProcessor 10 | from .version import __version__ 11 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. sample documentation master file, created by 2 | sphinx-quickstart on Mon Apr 16 21:22:43 2012. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to PyCCX's documentation! 7 | ================================== 8 | 9 | .. include:: ../README.rst 10 | 11 | Links 12 | ========== 13 | .. toctree:: 14 | PyCCX On Github 15 | 16 | Install 17 | ========== 18 | .. toctree:: 19 | :maxdepth: 2 20 | 21 | Module Reference 22 | ================== 23 | .. toctree:: 24 | :maxdepth: 2 25 | 26 | modules 27 | 28 | 29 | ===================== 30 | * :ref:`genindex` 31 | * :ref:`modindex` 32 | * :ref:`search` 33 | 34 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | .. automodapi:: pyccx.analysis 2 | :no-inheritance-diagram: 3 | :no-inherited-members: 4 | :toctree: api 5 | 6 | .. automodapi:: pyccx.bc 7 | :toctree: api 8 | :no-inheritance-diagram: 9 | :no-inherited-members: 10 | 11 | .. automodapi:: pyccx.core 12 | :toctree: api 13 | :no-inheritance-diagram: 14 | :no-inherited-members: 15 | 16 | .. automodapi:: pyccx.loadcase 17 | :no-inheritance-diagram: 18 | :no-inherited-members: 19 | :toctree: api 20 | 21 | .. automodapi:: pyccx.material 22 | :no-inheritance-diagram: 23 | :no-inherited-members: 24 | :toctree: api 25 | 26 | .. automodapi:: pyccx.results 27 | :no-inheritance-diagram: 28 | :no-inherited-members: 29 | :toctree: api 30 | 31 | .. 
automodapi:: pyccx.mesh 32 | :no-inheritance-diagram: 33 | :no-inherited-members: 34 | :toctree: api 35 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | 2 | # .readthedocs.yaml 3 | # Read the Docs configuration file 4 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 5 | 6 | # Required 7 | version: 2 8 | 9 | # Set the version of Python and other tools you might need 10 | build: 11 | os: ubuntu-22.04 12 | tools: 13 | python: "3.11" 14 | # You can also specify other tool versions: 15 | # nodejs: "16" 16 | # rust: "1.55" 17 | # golang: "1.17" 18 | apt_packages: 19 | - build-essential 20 | - gmsh 21 | - calculix-ccx 22 | - libglu1-mesa 23 | 24 | 25 | # Build documentation in the docs/ directory with Sphinx 26 | sphinx: 27 | configuration: docs/conf.py 28 | 29 | # If using Sphinx, optionally build your docs in additional formats such as PDF 30 | # formats: 31 | # - pdf 32 | 33 | # Optionally declare the Python requirements required to build your docs 34 | python: 35 | install: 36 | - requirements: docs/requirements.txt 37 | -------------------------------------------------------------------------------- /.github/workflows/pythonpublish.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package to PyPI 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | 10 | jobs: 11 | deploy: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up Python 18 | uses: actions/setup-python@v1 19 | with: 20 | python-version: '3.x' 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install setuptools wheel twine 25 | - name: Build and publish 26 | env: 27 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 28 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 29 | run: | 30 | python setup.py sdist bdist_wheel 31 | twine upload dist/* 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2025, Luke Parry 2 | Copyright (c) 2025, Luktug Ltd. 3 | 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the 7 | following conditions are met: 8 | 9 | Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 10 | Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following 11 | disclaimer in the documentation and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, 14 | INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 15 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 16 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 17 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 18 | WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 19 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # Results files from calculix 7 | *.frd 8 | *.dat 9 | *.opt 10 | 11 | # VTK Outputs 12 | *.vtk 13 | *.vtu 14 | *.vtp 15 | 16 | # GMSH output files 17 | *.msh4 18 | *.pos 19 | *.geo 20 | *.mesh 21 | *.msh 22 | 23 | # C extensions 24 | *.so 25 | 26 | # Mac 27 | .DS_Store 28 | 29 | # Calculix input, log and output files 30 | *.cvg 31 | *.inp 32 | *.frd 33 | *.out 34 | *.spool 35 | *.sta 36 | 37 | # Inspection profiles 38 | .idea/ 39 | 40 | # Distribution / packaging 41 | .Python 42 | env/ 43 | build/ 44 | develop-eggs/ 45 | docs/api/ 46 | docs/output/ 47 | dist/ 48 | downloads/ 49 | eggs/ 50 | .eggs/ 51 | lib/ 52 | lib64/ 53 | parts/ 54 | sdist/ 55 | var/ 56 | *.egg-info/ 57 | .installed.cfg 58 | *.egg 59 | 60 | # PyInstaller 61 | # Usually these files are written by a python script from a template 62 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 63 | *.manifest 64 | *.spec 65 | 66 | # Installer logs 67 | pip-log.txt 68 | pip-delete-this-directory.txt 69 | 70 | # Unit test / coverage reports 71 | htmlcov/ 72 | .tox/ 73 | .coverage 74 | .coverage.* 75 | .cache 76 | nosetests.xml 77 | coverage.xml 78 | *,cover 79 | .hypothesis/ 80 | 81 | # Translations 82 | *.mo 83 | *.pot 84 | 85 | # Django stuff: 86 | *.log 87 | local_settings.py 88 | 89 | # Flask stuff: 90 | instance/ 91 | .webassets-cache 92 | 93 | # Scrapy stuff: 94 | .scrapy 95 | 96 | # Sphinx documentation 97 | docs/_build/ 98 | 99 | # PyBuilder 100 | target/ 101 | 102 | # IPython Notebook 103 | .ipynb_checkpoints 104 | 105 | # pyenv 106 | .python-version 107 | 108 | # celery beat schedule file 109 | celerybeat-schedule 110 | 111 | # dotenv 112 | .env 113 | 114 | # virtualenv 115 | .venv/ 116 | venv/ 117 | ENV/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | 122 | # Rope project settings 123 | .ropeproject 124 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | # Github Actions script to produce binary wheels. 2 | # 3 | # * One build to create all wheels (cross-platform universal wheel). 4 | # * One build (with matrix) test the wheels on a selection of platforms. 5 | # * One build to publish the wheels on GitHub and Pypi. 
6 | 7 | name: CD 8 | 9 | on: 10 | workflow_dispatch: 11 | push: 12 | tags: 13 | - 'v*' 14 | pull_request: 15 | branches: 16 | - main 17 | 18 | jobs: 19 | 20 | build-wheels: 21 | name: Build all wheels 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@v4 25 | - name: Set up Python 26 | uses: actions/setup-python@v5 27 | with: 28 | python-version: '3.12' 29 | - name: Install dev dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install -U setuptools flit build twine hatchling 33 | - name: Create source distribution 34 | run: | 35 | python -m build -n -s 36 | - name: Build wheel 37 | run: | 38 | python -m build -n -w 39 | - name: Twine check 40 | run: | 41 | twine check dist/* 42 | - name: Upload distributions 43 | uses: actions/upload-artifact@v4 44 | with: 45 | path: dist 46 | name: all_wheels 47 | 48 | publish: 49 | name: Publish to Github and Pypi 50 | runs-on: ubuntu-latest 51 | needs: [build-wheels] 52 | if: success() && startsWith(github.ref, 'refs/tags/v') 53 | steps: 54 | - uses: actions/checkout@v4 55 | - name: Set up Python 56 | uses: actions/setup-python@v5 57 | with: 58 | python-version: '3.12' 59 | - name: Download assets 60 | uses: actions/download-artifact@v4 61 | with: 62 | path: dist 63 | - name: Flatten dist dir 64 | run: | 65 | find dist -mindepth 2 -type f -exec mv -f '{}' dist/ ';' 66 | rm -rf dist/*/ 67 | - name: Set version from git ref 68 | run: echo "PYCCX_PY_VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV 69 | - name: Upload Release Assets 70 | uses: softprops/action-gh-release@v2 71 | with: 72 | tag_name: ${{ env.PYCCX_PY_VERSION }} 73 | name: ${{ env.PYCCX_PY_VERSION }} 74 | token: ${{ secrets.GITHUB_TOKEN }} 75 | files: | 76 | dist/*.tar.gz 77 | dist/*.whl 78 | body: | 79 | Autogenerated wheels for PyCCX 80 | draft: false 81 | prerelease: false 82 | - name: Publish to PyPI 83 | uses: pypa/gh-action-pypi-publish@release/v1 84 | with: 85 | user: __token__ 86 | password: ${{ secrets.PYPI_PASSWORD }} -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "PyCCX" 3 | dynamic = ["version"] 4 | description = "Simulation and FEA environment for Python built upon Calculix and GMSH" 5 | readme = "README.rst" 6 | license = {file = "LICENSE"} 7 | requires-python = ">=3.8" 8 | authors = [ 9 | { name = "Luke Parry", email = "dev@lukeparry.uk" }, 10 | { name = "Luktug", email = "contact@luktug.co.uk" } 11 | ] 12 | 13 | keywords = [ 14 | "FEA", 15 | "Finite Element Analysis", 16 | "Simulation", 17 | "Calculix", 18 | "GMSH" 19 | ] 20 | 21 | classifiers = [ 22 | "License :: OSI Approved :: BSD License", 23 | "Programming Language :: Python", 24 | "Programming Language :: Python :: 3.8", 25 | "Programming Language :: Python :: 3.9", 26 | "Programming Language :: Python :: 3.10", 27 | "Programming Language :: Python :: 3.11", 28 | "Programming Language :: Python :: 3.12", 29 | "Programming Language :: Python :: 3.13", 30 | "Topic :: Scientific/Engineering", 31 | ] 32 | dependencies = [ 33 | "matplotlib", 34 | "gmsh>=4.14.0", 35 | "numpy>1.21", 36 | "colorlog", 37 | "setuptools", 38 | ] 39 | 40 | [project.optional-dependencies] 41 | docs = [ 42 | 'numpy', 43 | 'sphinx', 44 | 'jupyter', 45 | 'sphinx_rtd_theme', 46 | 'sphinx-paramlinks', 47 | 'sphinx_automodapi', 48 | 'sphinx_rtd_theme', 49 | 'furo', 50 | 'sphinx-autodoc-typehints', 51 | 'autodocsumm', 52 | 'm2r2==0.3.4', 53 | 
'docutils==0.21.2', 54 | 'pypandoc', 55 | 'autodocsumm', 56 | 'mock', 57 | 'matplotlib', 58 | 'trimesh' 59 | ] 60 | 61 | tests = [ 62 | "pytest", 63 | "pytest-cov", 64 | "coverage-badge" 65 | ] 66 | 67 | build = ["build", "hatchling", "requests", "twine"] 68 | codegen = ["pytest", "numpy"] 69 | lint = ["ruff", "pre-commit"] 70 | 71 | [project.urls] 72 | Homepage = "https://github.com/drlukeparry/pyccx" 73 | Documentation = "https://pyccx.readthedocs.io/en/latest/" 74 | Source = "https://github.com/drlukeparry/pyccx/" 75 | Tracker = "https://github.com/drlukeparry/pyccx/issues" 76 | 77 | [build-system] 78 | requires = ["requests", "hatchling", "setuptools"] 79 | build-backend = "hatchling.build" 80 | 81 | [tool.hatch.version] 82 | path = "pyccx/version.py" 83 | 84 | [tool.hatch.build.targets.sdist] 85 | include = [ 86 | "pyccx", 87 | "examples", 88 | "docs" 89 | ] 90 | 91 | [tool.hatch.build.targets.wheel] 92 | include = [ 93 | "pyccx", 94 | "examples", 95 | "docs" 96 | ] 97 | 98 | 99 | [tool.pytest.ini_options] 100 | minversion = "6.0" 101 | addopts = "-ra -q" 102 | testpaths = [ 103 | "tests" 104 | ] 105 | 106 | [tool.ruff.lint] 107 | select = ["F", "E", "W", "B", "RUF"] 108 | exclude = ["setup.py", "build/*", "dist/*", "docs/*", "examples/*", "tests/*", "__pycache__", ".github"] 109 | ignore = [ 110 | "B904", # Bare exception 111 | "E501", # Line too long 112 | "E722", # Bare exceptions 113 | "F401", # Unused imports 114 | "F841", # Unused variables 115 | "E731", # Do not assign a `lambda` expression, use a `def` 116 | "RUF005" 117 | ] 118 | 119 | [tool.mypy] 120 | python_version = "3.9" 121 | strict = true 122 | show_error_codes = true 123 | enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] 124 | disable_error_code = ["import-untyped", "var-annotated"] 125 | warn_unreachable = true 126 | warn_return_any = true 127 | warn_unused_configs = true 128 | warn_unused_ignores = true 129 | strict_optional = true 130 | ignore_missing_imports = true 131 | check_untyped_defs = true 132 | disallow_untyped_defs = true 133 | no_implicit_optional = true -------------------------------------------------------------------------------- /pyccx/mesh/mesh.py: -------------------------------------------------------------------------------- 1 | import gmsh 2 | import numpy as np 3 | from .mesher import Mesher 4 | 5 | 6 | def removeSurfaceMeshes(model: Mesher) -> None: 7 | """ 8 | In order to assign face based boundary conditions to surfaces (e.g. flux, convection), the surface mesh is compared 9 | to the volumetric mesh to identify the actual surface mesh. This is then removed afterwards. 
10 | 11 | :param model: Mesher: The GMSH model 12 | """ 13 | tags = model.getPhysicalGroups(2) 14 | 15 | for tag in tags: 16 | # Remove all tri group surfaces 17 | print('removing surface {:s}'.format(model.getPhysicalName(2, tag[1]))) 18 | model.removePhysicalGroups(tag) 19 | 20 | def getNodesFromVolume(volumeName : str, model: Mesher): 21 | """ 22 | Gets the nodes for a specified volume 23 | 24 | :param volumeName: str - The volume domain in the model to obtain the nodes from 25 | :param model: Mesher: The GMSH model 26 | :return: 27 | """ 28 | vols = model.getPhysicalGroups(3) 29 | names = [(model.getPhysicalName(3, x[1]), x[1]) for x in vols] 30 | 31 | volTagId = -1 32 | for name in names: 33 | if name[0] == volumeName: 34 | volTagId = name[1] 35 | 36 | if volTagId == -1: 37 | raise ValueError('Volume region ({:s}) was not found'.format(volumeName)) 38 | 39 | return model.mesh.getNodesForPhysicalGroup(3, volTagId)[0] 40 | 41 | def getNodesFromRegion(surfaceRegionName: str, model : Mesher): 42 | """ 43 | Gets the nodes for a specified surface region 44 | 45 | :param surfaceRegionName: str - The volume domain in the model to obtain the nodes from 46 | :param model: Mesher: The GMSH model 47 | :return: 48 | """ 49 | surfs = model.getPhysicalGroups(2) 50 | names = [(model.getPhysicalName(2, x[1]), x[1]) for x in surfs] 51 | 52 | surfTagId = -1 53 | for name in names: 54 | if name[0] == surfaceRegionName: 55 | surfTagId = name[1] 56 | 57 | if surfTagId == -1: 58 | raise ValueError('Surface region ({:s}) was not found'.format(surfaceRegionName)) 59 | 60 | return model.mesh.getNodesForPhysicalGroup(2, surfTagId)[0] 61 | 62 | 63 | def getSurfaceFacesFromRegion(regionName, model): 64 | """ 65 | Gets the faces from a surface region, which are compatible with GMSH in order to apply surface BCs to. 
66 | 67 | :param surfaceRegionName: str - The volume domain in the model to obtain the nodes from 68 | :param model: Mesher: The GMSH model 69 | :return: 70 | """ 71 | 72 | surfs = model.getPhysicalGroups(2) 73 | names = [(model.getPhysicalName(2, x[1]), x[1]) for x in surfs] 74 | 75 | surfTagId = -1 76 | for name in names: 77 | if name[0] == regionName: 78 | surfTagId = name[1] 79 | 80 | if surfTagId == -1: 81 | raise ValueError('Surface region ({:s}) was not found'.format(regionName)) 82 | 83 | mesh = model.mesh 84 | 85 | surfNodeList2 = mesh.getNodesForPhysicalGroup(2, surfTagId)[0] 86 | 87 | # Get tet elements 88 | tetElList = mesh.getElementsByType(4) 89 | 90 | tetNodes = tetElList[1].reshape(-1, 4) 91 | tetMinEl = np.min(tetElList[0]) 92 | 93 | mask = np.isin(tetNodes, surfNodeList2) # Mark nodes which are on boundary 94 | ab = np.sum(mask, axis=1) # Count how many nodes were marked for each element 95 | fndIdx = np.argwhere(ab > 2) # For all tets 96 | elIdx = tetElList[0][fndIdx] 97 | 98 | if np.sum(ab > 3) > 0: 99 | raise ValueError('Instance of all nodes of tet where found') 100 | 101 | # Tet elements for Film [masks] 102 | F1 = [1, 1, 1, 0] # 1: 1 2 3 = [1,1,1,0] 103 | F2 = [1, 1, 0, 1] # 2: 1 4 2 = [1,1,0,1] 104 | F3 = [0, 1, 1, 1] # 3: 2 4 3 = [0,1,1,1] 105 | F4 = [1, 0, 1, 1] # 4: 3 4 1 = [1,0,1,1] 106 | 107 | surfFaces = np.zeros((len(elIdx), 2), dtype=np.uint32) 108 | surfFaces[:, 0] = elIdx.flatten() 109 | 110 | surfFaces[mask[fndIdx.ravel()].dot(F1) == 3, 1] = 1 # Mask 111 | surfFaces[mask[fndIdx.ravel()].dot(F2) == 3, 1] = 2 # Mask 112 | surfFaces[mask[fndIdx.ravel()].dot(F3) == 3, 1] = 3 # Mask 113 | surfFaces[mask[fndIdx.ravel()].dot(F4) == 3, 1] = 4 # Mask 114 | 115 | # sort by faces 116 | surfFaces = surfFaces[surfFaces[:, 1].argsort()] 117 | 118 | surfFaces[:, 0] = surfFaces[:, 0] - (tetMinEl + 1) 119 | 120 | return surfFaces 121 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - dev 7 | pull_request: 8 | branches: 9 | - dev 10 | 11 | concurrency: 12 | group: ci-${{ github.ref }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | 17 | lint-build: 18 | name: Linting 19 | runs-on: ubuntu-latest 20 | strategy: 21 | fail-fast: false 22 | steps: 23 | - uses: actions/checkout@v4 24 | - name: Set up Python 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: 3.12 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install ruff 32 | - name: Ruff lint 33 | run: | 34 | ruff check --output-format=github . 35 | 36 | docs-build: 37 | name: Docs 38 | runs-on: ubuntu-latest 39 | strategy: 40 | fail-fast: false 41 | steps: 42 | - uses: actions/checkout@v4 43 | - name: Set up Python 44 | uses: actions/setup-python@v5 45 | with: 46 | python-version: 3.11 47 | - name: Install calculix dependencies 48 | run: | 49 | sudo apt-get update -y -qq 50 | sudo apt-get install -qq -y calculix-ccx libglu1-mesa gmsh 51 | - name: Install dev dependencies 52 | run: | 53 | python -m pip install --upgrade pip 54 | pip install -U -e .[docs] 55 | - name: Build docs 56 | run: | 57 | cd docs 58 | sphinx-build --builder html . 
out 59 | test-builds: 60 | name: ${{ matrix.name }} 61 | runs-on: ${{ matrix.os }} 62 | strategy: 63 | fail-fast: false 64 | matrix: 65 | include: 66 | - name: Test py39 67 | os: ubuntu-latest 68 | pyversion: '3.9' 69 | - name: Test py310 70 | os: ubuntu-latest 71 | pyversion: '3.10' 72 | - name: Test py311 73 | os: ubuntu-latest 74 | pyversion: '3.11' 75 | - name: Test py312 76 | os: ubuntu-latest 77 | pyversion: '3.12' 78 | - name: Test py313 79 | os: ubuntu-latest 80 | pyversion: '3.13' 81 | steps: 82 | - uses: actions/checkout@v4 83 | - name: Set up Python ${{ matrix.pyversion }} 84 | uses: actions/setup-python@v5 85 | with: 86 | python-version: ${{ matrix.pyversion }} 87 | - name: Install calculix 88 | if: matrix.os == 'ubuntu-latest' 89 | run: | 90 | sudo apt-get update -y -qq 91 | sudo apt-get install -qq -y calculix-ccx libglu1-mesa gmsh 92 | - name: Install package and dev dependencies 93 | run: | 94 | python -m pip install --upgrade pip 95 | pip install . 96 | pip install .[tests] 97 | pip install .[support] 98 | rm -r pyccx 99 | - name: Unit tests with pytest 100 | run: | 101 | pytest tests/ 102 | test-coverage: 103 | name: Test Coverage 104 | runs-on: ubuntu-latest 105 | strategy: 106 | fail-fast: false 107 | steps: 108 | - uses: actions/checkout@v4 109 | - name: Set up Python 110 | uses: actions/setup-python@v5 111 | with: 112 | python-version: 3.12 113 | - name: Install calculix 114 | run: | 115 | sudo apt-get update -y -qq 116 | sudo apt-get install -qq -y calculix-ccx gmsh libglu1-mesa 117 | - name: Install package and dev dependencies 118 | run: | 119 | python -m pip install --upgrade pip 120 | pip install . 121 | pip install .[tests] 122 | pip install .[support] 123 | rm -r pyccx 124 | - name: Run Pytest coverage 125 | run: | 126 | pytest --junitxml=pytest.xml --cov-report=term-missing:skip-covered --cov=pyccx tests/ | tee pytest-coverage.txt 127 | - name: Pytest coverage comment 128 | uses: MishaKav/pytest-coverage-comment@main 129 | with: 130 | pytest-coverage-path: ./pytest-coverage.txt 131 | junitxml-path: ./pytest.xml 132 | test-examples-build: 133 | name: Test examples ${{ matrix.pyversion }} 134 | runs-on: ${{ matrix.os }} 135 | strategy: 136 | fail-fast: false 137 | matrix: 138 | include: 139 | - os: ubuntu-latest 140 | pyversion: '3.10' 141 | - os: ubuntu-latest 142 | pyversion: '3.12' 143 | steps: 144 | - uses: actions/checkout@v4 145 | - name: Set up Python 146 | uses: actions/setup-python@v5 147 | with: 148 | python-version: 3.12 149 | - name: Install calculix solver 150 | run: | 151 | sudo apt-get update -y -qq 152 | sudo apt-get install -qq -y calculix-ccx gmsh libglu1-mesa 153 | - name: Install dev dependencies 154 | run: | 155 | python -m pip install --upgrade pip 156 | pip install -e .[examples] 157 | - name: Show pyccx version and calculix version 158 | run: | 159 | python -c "import pyccx; print(pyccx.__version__)" 160 | python -c "import pyccx; print(pyccx.Simulation.version())" 161 | 162 | release-build: 163 | name: Build release on ubuntu-latest 164 | runs-on: ubuntu-latest 165 | strategy: 166 | fail-fast: false 167 | steps: 168 | - uses: actions/checkout@v4 169 | - name: Set up Python 170 | uses: actions/setup-python@v5 171 | with: 172 | python-version: 3.12 173 | - name: Install Hatch 174 | uses: pypa/hatch@install 175 | - name: Install calculix solver 176 | run: | 177 | sudo apt-get update -y -qq 178 | sudo apt-get install -qq -y calculix-ccx gmsh libglu1-mesa 179 | - name: Install dev dependencies 180 | run: | 181 | python -m pip install 
--upgrade pip 182 | pip install -U setuptools flit build twine hatchling 183 | - name: Create source distribution 184 | run: | 185 | python -m build -n -s 186 | - name: Build wheel 187 | run: | 188 | python -m build -n -w 189 | - name: Test sdist 190 | shell: bash 191 | run: | 192 | rm -rf ./pyccx 193 | pushd $HOME 194 | pip install $GITHUB_WORKSPACE/dist/*.tar.gz 195 | python -c "import pyccx; print(pyccx.__version__)" 196 | popd 197 | # don't run tests, we just want to know if the sdist can be installed 198 | pip uninstall -y pyccx 199 | git reset --hard HEAD 200 | - name: Upload distributions 201 | uses: actions/upload-artifact@v4 202 | with: 203 | path: dist 204 | name: dist 205 | -------------------------------------------------------------------------------- /examples/example.py: -------------------------------------------------------------------------------- 1 | 2 | import pyccx 3 | 4 | from pyccx.mesh import ElementType, Mesher 5 | 6 | from pyccx.bc import Fixed, HeatFlux 7 | from pyccx.analysis import Simulation 8 | from pyccx.core import DOF, ElementSet, NodeSet, SurfaceSet 9 | from pyccx.results import ElementResult, NodalResult, ResultProcessor 10 | from pyccx.loadcase import LoadCase, LoadCaseType 11 | from pyccx.material import ElastoPlasticMaterial 12 | 13 | # Create a Mesher object to interface with GMSH. Provide a unique name. Multiple instances of this can be created. 14 | myMeshModel = Mesher('myModel') 15 | 16 | # Set the number of threads to use for any multi-threaded meshing algorithms e.g. HXT 17 | myMeshModel.setNumThreads(4) 18 | myMeshModel.setOptimiseNetgen(True) 19 | 20 | # Set the meshing algorithm (optional) to use globally. 21 | myMeshModel.setMeshingAlgorithm(pyccx.mesh.MeshingAlgorithm.FRONTAL_DELAUNAY) 22 | 23 | # Add the geometry and assign a physical name 'PartA' which can reference the elements generated for the volume 24 | myMeshModel.addGeometry('../models/cornerCube.step', 'PartA') 25 | 26 | """ 27 | Merges an assembly together. This is necessary where there are multiple bodies or volumes which share coincident faces. GMSH 28 | will automatically stitch these surfaces together and create a shared boundary, which is often useful when performing 29 | analyses where fields overlap (e.g. heat transfer). This should not be done if contact analysis is performed.
30 | """ 31 | 32 | myMeshModel.mergeGeometry() 33 | 34 | # Optionally set the names of the boundary entities using the GMSH geometry identities 35 | myMeshModel.setEntityName((2,1), 'MySurface1') 36 | myMeshModel.setEntityName((2,2), 'MySurface2') 37 | myMeshModel.setEntityName((2,3), 'Bottom_Face') 38 | myMeshModel.setEntityName((2,4), 'MySurface4') 39 | myMeshModel.setEntityName((2,5), 'MySurface5') 40 | myMeshModel.setEntityName((3,1), 'PartA') 41 | 42 | # Set the size of the mesh 43 | geomPoints = myMeshModel.getPointsFromVolume(1) 44 | myMeshModel.setMeshSize(geomPoints, 0.5) # MM 45 | 46 | # Generate the mesh 47 | myMeshModel.generateMesh() 48 | 49 | # Obtain the surface faces (normals facing outwards) for each surface 50 | surfFaces2 = myMeshModel.getSurfaceFacesFromSurfId(1) # MySurface1 51 | bottomFaces = myMeshModel.getSurfaceFacesFromSurfId(3) # 'Bottom_Face' 52 | 53 | # Obtain all nodes associated with each surface 54 | surface1Nodes = myMeshModel.getNodesFromEntity((2,1)) # MySurface1 55 | surface2Nodes = myMeshModel.getNodesFromEntity((2,2)) # MySurface2 56 | surface4Nodes = myMeshModel.getNodesFromEntity((2,4)) # MySurface4 57 | surface6Nodes = myMeshModel.getNodesFromEntity((2,6)) # MySurface6 58 | 59 | # An alternative method is to get nodes via the surface 60 | bottomFaceNodes = myMeshModel.getNodesFromSurfaceByName('Bottom_Face') 61 | 62 | # or via general query 63 | surface5Nodes = myMeshModel.getNodesByEntityName('MySurface5') # MySurface5 64 | 65 | # Obtain nodes from the volume 66 | volumeNodes = myMeshModel.getNodesFromVolumeByName('PartA') 67 | 68 | # The generated mesh can be interactively viewed natively within gmsh by calling the following 69 | #myMeshModel.showGui() 70 | 71 | """ Create the analysis""" 72 | # Set the number of simulation threads to be used by the Calculix solver across all analyses 73 | 74 | Simulation.setNumThreads(4) 75 | analysis = Simulation(myMeshModel) 76 | 77 | # Optionally set the base working directory 78 | analysis.setWorkingDirectory('.') 79 | 80 |
print('Calculix version: {:d}. {:d}'.format(*analysis.version())) 81 | 82 | # Add the Node Sets For Attaching Boundary Conditions # 83 | # Note: a unique name must be provided 84 | surface1NodeSet = NodeSet('surface1Nodes', surface1Nodes) 85 | surface2NodeSet = NodeSet('surface2Nodes', surface2Nodes) 86 | surface4NodeSet = NodeSet('surface4Nodes', surface4Nodes) 87 | surface5NodeSet = NodeSet('surface5Nodes', surface5Nodes) 88 | surface6NodeSet = NodeSet('surface6Nodes', surface6Nodes) 89 | bottomFaceNodeSet = NodeSet('bottomFaceNodes', bottomFaceNodes) 90 | volNodeSet = NodeSet('VolumeNodeSet', volumeNodes) 91 | 92 | # Create an element set using a concise approach 93 | partElSet = ElementSet('PartAElSet', myMeshModel.getElements((3,1))) 94 | 95 | # Create a surface set 96 | bottomFaceSet = SurfaceSet('bottomSurface', bottomFaces) 97 | 98 | # Add the user-defined node sets to the analysis 99 | analysis.nodeSets = [surface1NodeSet, surface2NodeSet,surface4NodeSet, surface5NodeSet, surface6NodeSet, 100 | bottomFaceNodeSet, volNodeSet] 101 | 102 | 103 | # =============== Initial Conditions =============== # 104 | 105 | analysis.initialConditions.append({'type': 'temperature', 'set': 'VolumeNodeSet', 'value': 0.0}) 106 | 107 | # =============== Thermal Load Cases =============== # 108 | 109 | # Create a thermal load case and set the time settings 110 | thermalLoadCase = LoadCase('Thermal Load Case') 111 | 112 | # Set the loadcase type to thermal - eventually this will be individual classes 113 | thermalLoadCase.setLoadCaseType(LoadCaseType.THERMAL) 114 | 115 | # Set the thermal analysis to be a steady-state simulation 116 | thermalLoadCase.isSteadyState = True 117 | thermalLoadCase.setTimeStep(5.0, 5.0, 5.0) 118 | 119 | # Attach the nodal and element result options to each loadcase 120 | # Set the nodal and element variables to record in the results (.frd) file 121 | nodeThermalPostResult = NodalResult(volNodeSet) 122 | nodeThermalPostResult.useNodalTemperatures = True 123 | 124 | elThermalPostResult = ElementResult(partElSet) 125 | elThermalPostResult.useHeatFlux = True 126 | 127 | thermalLoadCase.resultSet = [nodeThermalPostResult, elThermalPostResult] 128 | 129 | 130 | # Set thermal boundary conditions for the loadcase 131 | thermalLoadCase.boundaryConditions = [Fixed(surface6NodeSet, [DOF.T], [60.0]), 132 | Fixed(surface1NodeSet, dof=[DOF.T], values = [20.0]), 133 | HeatFlux(bottomFaceSet,flux=50.0)] 134 | 135 | # ====================== Material ====================== # 136 | # Add an elastic material and assign it to the volume. 137 | # Note: ensure that the units correctly correspond with the geometry length scales 138 | 139 | steelMat = ElastoPlasticMaterial('Steel') 140 | steelMat.E = 210000.
141 | steelMat.alpha_CTE = [25e-6, 23e-6, 24e-6] # Thermal Expansion Coefficient 142 | steelMat.density = 1.0 # Density 143 | steelMat.cp = 1.0 # Specific Heat 144 | steelMat.k = 1.0 # Thermal Conductivity 145 | 146 | analysis.materials.append(steelMat) 147 | 148 | # Assign the material the volume (use the part name set for geometry) 149 | analysis.materialAssignments = [('PartA', 'Steel')] 150 | 151 | # Set the loadcases used in sequential order 152 | analysis.loadCases = [thermalLoadCase] 153 | 154 | # ====================== Analysis Run ====================== # 155 | 156 | # Run the analysis 157 | analysis.run() 158 | 159 | # Open the results file ('input') is currently the file that is generated by PyCCX 160 | results = analysis.results() 161 | 162 | # The call to read must be done to load all loadcases and timesteps from the results file 163 | results.read() 164 | 165 | # Obtain the nodal temperatures 166 | nodalTemp = results.lastIncrement()['temp'][:, 1] 167 | 168 | # Obtain the nodal coordinates and elements for further p 169 | tetEls = myMeshModel.getElementsByType(ElementType.TET4) -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import sys, os 2 | import mock 3 | 4 | sys.path.insert(0, os.path.abspath('../pyccx')) 5 | sys.path.insert(0, os.path.abspath('..')) 6 | 7 | from pyccx.version import __version__ 8 | 9 | 10 | # If extensions (or modules to document with autodoc) are in another directory, 11 | # add these directories to sys.path here. If the directory is relative to the 12 | # documentation root, use os.path.abspath to make it absolute, like shown here. 13 | 14 | # -- General configuration ----------------------------------------------------- 15 | 16 | # If your documentation needs a minimal Sphinx version, state it here. 17 | needs_sphinx = '1.8.1' 18 | 19 | # Add any Sphinx extension module names here, as strings. They can be extensions 20 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 21 | extensions = ['sphinx.ext.autodoc', 22 | 'sphinx_automodapi.automodapi', 23 | 'sphinx.ext.autosummary', 24 | 'autodocsumm', 25 | 'sphinx_autodoc_typehints', 26 | 'm2r2', 27 | 'sphinx.ext.coverage'] 28 | # See options here for audodocsumm https://readthedocs.org/projects/sphinx-automodapi/downloads/pdf/latest/ 29 | 30 | autodoc_default_options = { 31 | 'autosummary': True, 32 | } 33 | 34 | # Add any paths that contain templates here, relative to this directory. 35 | templates_path = ['_templates'] 36 | 37 | # The suffix of source filenames. 38 | source_suffix = '.rst' 39 | 40 | # The encoding of source files. 41 | #source_encoding = 'utf-8-sig' 42 | 43 | # The master toctree document. 44 | master_doc = 'index' 45 | 46 | # General information about the project. 47 | project = u'pyccx' 48 | copyright = u'2025, Luktug Ltd.' 49 | author = 'Luktug Ltd.' 50 | 51 | autodoc_default_options = { 52 | 'autosummary': True, 53 | 'automodapi_inheritance_diagram': False 54 | } 55 | 56 | 57 | # The version info for the project you're documenting, acts as replacement for 58 | # |version| and |release|, also used in various other places throughout the 59 | # built documents. 60 | # 61 | # The short X.Y version. 62 | version = __version__ 63 | # The full version, including alpha/beta/rc tags. 64 | release = __version__ 65 | 66 | # The language for content autogenerated by Sphinx. Refer to documentation 67 | # for a list of supported languages. 
68 | #language = None 69 | 70 | # There are two options for replacing |today|: either, you set today to some 71 | # non-false value, then it is used: 72 | #today = '' 73 | # Else, today_fmt is used as the format for a strftime call. 74 | #today_fmt = '%B %d, %Y' 75 | 76 | # List of patterns, relative to source directory, that match files and 77 | # directories to ignore when looking for source files. 78 | exclude_patterns = ['_build'] 79 | 80 | # The reST default role (used for this markup: `text`) to use for all documents. 81 | #default_role = None 82 | 83 | # If true, '()' will be appended to :func: etc. cross-reference text. 84 | #add_function_parentheses = True 85 | 86 | # If true, the current module name will be prepended to all description 87 | # unit titles (such as .. function::). 88 | #add_module_names = True 89 | 90 | # If true, sectionauthor and moduleauthor directives will be shown in the 91 | # output. They are ignored by default. 92 | #show_authors = False 93 | 94 | # The name of the Pygments (syntax highlighting) style to use. 95 | 96 | # A list of ignored prefixes for module index sorting. 97 | #modindex_common_prefix = [] 98 | 99 | 100 | # -- Options for HTML output --------------------------------------------------- 101 | 102 | # The theme to use for HTML and HTML Help pages. See the documentation for 103 | # a list of builtin themes. 104 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 105 | 106 | if not on_rtd: # only import and set the theme if we're building docs locally 107 | import sphinx_rtd_theme 108 | 109 | html_theme = "furo" 110 | 111 | html_theme_options = { 112 | "light_css_variables": { 113 | }, 114 | } 115 | 116 | # Theme options are theme-specific and customize the look and feel of a theme 117 | # further. For a list of options available for each theme, see the 118 | # documentation. 119 | #html_theme_options = {} 120 | 121 | # Add any paths that contain custom themes here, relative to this directory. 122 | #html_theme_path = [] 123 | 124 | # The name for this set of Sphinx documents. If None, it defaults to 125 | # " v documentation". 126 | #html_title = None 127 | 128 | # A shorter title for the navigation bar. Default is the same as html_title. 129 | #html_short_title = None 130 | 131 | # The name of an image file (relative to this directory) to place at the top 132 | # of the sidebar. 133 | #html_logo = None 134 | 135 | # The name of an image file (within the static path) to use as favicon of the 136 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 137 | # pixels large. 138 | #html_favicon = None 139 | 140 | # Add any paths that contain custom static files (such as style sheets) here, 141 | # relative to this directory. They are copied after the builtin static files, 142 | # so a file named "default.css" will overwrite the builtin "default.css". 143 | html_static_path = ['_static'] 144 | 145 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 146 | # using the given strftime format. 147 | #html_last_updated_fmt = '%b %d, %Y' 148 | 149 | # If true, SmartyPants will be used to convert quotes and dashes to 150 | # typographically correct entities. 151 | #html_use_smartypants = True 152 | 153 | # Custom sidebar templates, maps document names to template names. 154 | #html_sidebars = {} 155 | 156 | # Additional templates that should be rendered to pages, maps page names to 157 | # template names. 158 | #html_additional_pages = {} 159 | 160 | # If false, no module index is generated. 
161 | #html_domain_indices = True 162 | 163 | # If false, no index is generated. 164 | #html_use_index = True 165 | 166 | # If true, the index is split into individual pages for each letter. 167 | #html_split_index = False 168 | 169 | # If true, links to the reST sources are added to the pages. 170 | #html_show_sourcelink = True 171 | 172 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 173 | #html_show_sphinx = True 174 | 175 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 176 | #html_show_copyright = True 177 | 178 | # If true, an OpenSearch description file will be output, and all pages will 179 | # contain a tag referring to it. The value of this option must be the 180 | # base URL from which the finished HTML is served. 181 | #html_use_opensearch = '' 182 | 183 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 184 | #html_file_suffix = None 185 | 186 | # Output file base name for HTML help builder. 187 | htmlhelp_basename = 'sampledoc' 188 | 189 | 190 | # -- Options for LaTeX output -------------------------------------------------- 191 | 192 | latex_elements = { 193 | # The paper size ('letterpaper' or 'a4paper'). 194 | #'papersize': 'letterpaper', 195 | 196 | # The font size ('10pt', '11pt' or '12pt'). 197 | #'pointsize': '10pt', 198 | 199 | # Additional stuff for the LaTeX preamble. 200 | #'preamble': '', 201 | } 202 | 203 | # Grouping the document tree into LaTeX files. List of tuples 204 | # (source start file, target name, title, author, documentclass [howto/manual]). 205 | latex_documents = [ 206 | ('index', 'sample.tex', u'PyCCX Documentation', 207 | u'Luke Parry', 'manual'), 208 | ] 209 | 210 | # The name of an image file (relative to this directory) to place at the top of 211 | # the title page. 212 | #latex_logo = None 213 | 214 | # For "manual" documents, if this is true, then toplevel headings are parts, 215 | # not chapters. 216 | #latex_use_parts = False 217 | 218 | # If true, show page references after internal links. 219 | #latex_show_pagerefs = False 220 | 221 | # If true, show URL addresses after external links. 222 | #latex_show_urls = False 223 | 224 | # Documents to append as an appendix to all manuals. 225 | #latex_appendices = [] 226 | 227 | # If false, no module index is generated. 228 | #latex_domain_indices = True 229 | 230 | 231 | # -- Options for manual page output -------------------------------------------- 232 | 233 | # One entry per manual page. List of tuples 234 | # (source start file, name, description, authors, manual section). 235 | man_pages = [ 236 | ('index', 'pyccx', u'PyCCX Documentation', 237 | [u'Luke Parry'], 1) 238 | ] 239 | 240 | # If true, show URL addresses after external links. 241 | #man_show_urls = False 242 | 243 | 244 | # -- Options for Texinfo output ------------------------------------------------ 245 | 246 | # Grouping the document tree into Texinfo files. List of tuples 247 | # (source start file, target name, title, author, 248 | # dir menu entry, description, category) 249 | texinfo_documents = [ 250 | ('index', 'project', u'PyCCX Documentation', 251 | u'Luktug Ltd.', 'project', 'One line description of project.', 252 | 'Miscellaneous'), 253 | ] 254 | 255 | # Documents to append as an appendix to all manuals. 256 | #texinfo_appendices = [] 257 | 258 | # If false, no module index is generated. 259 | #texinfo_domain_indices = True 260 | 261 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 
262 | #texinfo_show_urls = 'footnote' 263 | -------------------------------------------------------------------------------- /pyccx/core.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Iterable, List, Optional, Tuple, Union 2 | from enum import IntEnum 3 | 4 | import numpy as np 5 | 6 | class ModelObject: 7 | 8 | def __init__(self, name: str, label: str = '') -> None: 9 | 10 | self._name = '' 11 | self._label = label 12 | 13 | self.setName(name) 14 | 15 | @property 16 | def label(self) -> str: 17 | return self._label 18 | 19 | @label.setter 20 | def label(self, label: str) -> None: 21 | self._label = label 22 | 23 | @property 24 | def name(self) -> str: 25 | return self._name 26 | 27 | @name.setter 28 | def name(self, name: str) -> None: 29 | self.setName(name) 30 | 31 | def setName(self, name: str) -> None: 32 | 33 | if not name.isascii(): 34 | raise ValueError(f"Name provided ({name}) must be alpha-numeric") 35 | 36 | if ' ' in name: 37 | raise ValueError(f"Name provided ({name}) must not contain spaces") 38 | 39 | if '*' in name: 40 | raise ValueError(f"Name provide ({name}) contains invalid character (*)") 41 | 42 | self._name = name 43 | 44 | 45 | class Amplitude(ModelObject): 46 | 47 | def __init__(self, name: str, profile = None): 48 | 49 | super().__init__(name) 50 | 51 | self._profile = profile 52 | 53 | @property 54 | def profile(self): 55 | return self._profile 56 | 57 | @profile.setter 58 | def profile(self, profile): 59 | 60 | profile = np.asanyarray(profile) 61 | 62 | if not (profile.ndim == 2 and profile.shape[1] == 2): 63 | raise ValueError('Invalid profile passed to Amplitude') 64 | 65 | self._profile = profile 66 | 67 | def writeInput(self) -> str: 68 | 69 | out = '*AMPLITUDE, NAME={:s}\n'.format(self.name) 70 | 71 | for row in self.profile: 72 | time, amplitude = row 73 | out += '{:.5f}, {:.5f}\n'.format(time, amplitude) 74 | 75 | return out 76 | 77 | 78 | class MeshSet(ModelObject): 79 | """ 80 | The Mesh set is a basic entity for storing node and element set lists that are used for creating sets across 81 | both node and element types. 82 | """ 83 | def __init__(self, name: str): 84 | super().__init__(name) 85 | 86 | 87 | class NodeSet(MeshSet): 88 | """ 89 | A NodeSet is basic entity for storing a list of Node Ids. The set remains constant or fixed without 90 | any dynamic referencing to any underlying geometric entities. 91 | """ 92 | def __init__(self, name, nodes: Iterable): 93 | super().__init__(name) 94 | self._nodes = np.unique(np.asanyarray(nodes, dtype=np.int64)) 95 | 96 | @property 97 | def nodes(self): 98 | """ 99 | Nodes contains the list of Node IDs 100 | """ 101 | return self._nodes 102 | 103 | @nodes.setter 104 | def nodes(self, nodes: Iterable) -> None: 105 | self._nodes = np.unique(np.asanyarray(nodes, dtype=np.int64)) 106 | 107 | def writeInput(self) -> str: 108 | out = '*NSET, NSET={:s}\n'.format(self.name) 109 | for i in range(0, self.nodes.shape[0], 16): 110 | out += ', '.join(['{0:6d}'.format(val) for val in self.nodes[i:i+16]]) 111 | out += '\n' 112 | return out 113 | 114 | 115 | class ElementSet(MeshSet): 116 | """ 117 | An element set is basic entity for storing a list of element ids as part of a referencable set, typically 118 | used amongst boundary conditions and assignments .The set remains constant without any dynamic 119 | referencing to any underlying geometric entities. 
120 | """ 121 | def __init__(self, name: str, elIds: Iterable): 122 | 123 | super().__init__(name) 124 | self.els = elIds 125 | 126 | @property 127 | def els(self): 128 | """ 129 | Elements contains the list of element IDs 130 | """ 131 | return self._els 132 | 133 | @els.setter 134 | def els(self, elIds: Iterable): 135 | 136 | self._els = np.unique(np.asanyarray(elIds, dtype=np.int64)) 137 | 138 | def writeInput(self) -> str: 139 | 140 | out = '*ELSET, ELSET={:s}\n'.format(self.name) 141 | 142 | for i in range(0, self._els.shape[0], 16): 143 | out += ', '.join(['{0:6d}'.format(val) for val in self._els[i:i+16]]) 144 | out += '\n' 145 | 146 | return out 147 | 148 | 149 | class SurfaceNodeSet(MeshSet): 150 | """ 151 | A surface-node set is a basic entity for storing element face lists, typically for setting directional fluxes onto 152 | surface elements based on the element ordering. The set remains constant without any dynamic referencing 153 | to any underlying geometric entities. This approach requires explicitly assigning the list of nodal ids that 154 | define the surface. 155 | """ 156 | def __init__(self, name, nodalSet: Iterable): 157 | 158 | super().__init__(name) 159 | self._surfaceNodes = np.asanyarray(nodalSet) 160 | 161 | @property 162 | def surfacePairs(self) -> np.array: 163 | """ 164 | Elements with the associated face orientations are specified as Nx2 numpy array, with the first column being 165 | the element Id, and the second column the chosen face orientation 166 | """ 167 | return self._elSurfacePairs 168 | 169 | @surfacePairs.setter 170 | def surfacePairs(self, surfacePairs) -> None: 171 | self._elSurfacePairs = np.asanyarray(surfacePairs, dtype=np.int64) 172 | 173 | def writeInput(self) -> str: 174 | 175 | out = '*SURFACE,NAME={:s}, TYPE=NODE\n'.format(self.name) 176 | 177 | for i in range(self._elSurfacePairs.shape[0]): 178 | out += '{:d},S{:d}\n'.format(self._elSurfacePairs[i, 0], self._elSurfacePairs[i, 1]) 179 | 180 | return out 181 | 182 | 183 | class SurfaceSet(MeshSet): 184 | """ 185 | A surface-set is a basic entity for storing element face lists, typically for setting directional fluxes onto 186 | surface elements based on the element ordering. The set remains constant without any dynamic referencing 187 | to any underlying geometric entities. 188 | """ 189 | def __init__(self, name, surfacePairs): 190 | 191 | super().__init__(name) 192 | self._elSurfacePairs = np.asanyarray(surfacePairs, dtype=np.int64) 193 | 194 | @property 195 | def surfacePairs(self) -> np.ndarray: 196 | """ 197 | Elements with the associated face orientations are specified as Nx2 numpy array, with the first column being 198 | the element Id, and the second column the chosen face orientation 199 | """ 200 | return self._elSurfacePairs 201 | 202 | @surfacePairs.setter 203 | def surfacePairs(self, surfacePairs): 204 | self._elSurfacePairs = np.asanyarray(surfacePairs, dtype=np.int64) 205 | 206 | def writeInput(self) -> str: 207 | 208 | out = '*SURFACE,NAME={:s}\n'.format(self.name) 209 | 210 | for i in range(self._elSurfacePairs.shape[0]): 211 | out += '{:d},S{:d}\n'.format(self._elSurfacePairs[i, 0], self._elSurfacePairs[i, 1]) 212 | 213 | return out 214 | 215 | 216 | class Connector(ModelObject): 217 | """ 218 | A Connector is a rigid connector between a set of nodes and an (optional) reference node. 
219 | """ 220 | def __init__(self, name: str , nodeset: Optional[NodeSet] = None, refNode = None): 221 | 222 | super().__init__(name) 223 | 224 | self._refNode = refNode 225 | self._nodeset = nodeset 226 | 227 | @property 228 | def refNode(self): 229 | """ 230 | Reference Node ID 231 | """ 232 | return self._refNode 233 | 234 | @refNode.setter 235 | def refNode(self, node): 236 | self._refNode = node 237 | 238 | @property 239 | def nodeset(self) -> NodeSet: 240 | """ 241 | Nodes contains the list of Node IDs 242 | """ 243 | return self._nodeset 244 | 245 | @nodeset.setter 246 | def nodeset(self, nodes: Union[Iterable, NodeSet]): 247 | 248 | if isinstance(nodes, list) or isinstance(nodes, np.ndarray): 249 | self._nodeset = NodeSet(f"Connecter_{self.name}", np.array(nodes)) 250 | elif isinstance(nodes, NodeSet): 251 | self._nodeset = nodes 252 | else: 253 | raise ValueError('Invalid type for nodes passed to Connector()') 254 | 255 | def writeInput(self) -> str: 256 | # A nodeset is automatically created from the name of the connector 257 | strOut = '*RIGIDBODY, NSET={:s}'.format(self.nodeset.name) 258 | 259 | # A reference node is optional 260 | if isinstance(self.refNode, int): 261 | strOut += ',REF NODE={:d}\n'.format(self.refNode) 262 | else: 263 | strOut += '\n' 264 | 265 | return strOut 266 | 267 | 268 | class DOF(IntEnum): 269 | """ 270 | Provides a reference to the typical degrees-of-freedom (DOF) used for setting boundary conditions and displaying 271 | the required output in Calculix. 272 | """ 273 | 274 | UX = 1 275 | """ Translation in the X direction """ 276 | 277 | UY = 2 278 | """ Translation in the Y direction """ 279 | 280 | UZ = 3 281 | """ Translation in the Z direction """ 282 | 283 | RX = 4 284 | """ Rotation about the X-axis """ 285 | 286 | RY = 5 287 | """ Rotation about the Y-axis """ 288 | 289 | RZ = 6 290 | """ Rotation about the Z-axis """ 291 | 292 | T = 11 293 | """ Temperature """ 294 | 295 | -------------------------------------------------------------------------------- /pyccx/utils/exporters.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | import shutil 4 | from typing import Optional 5 | import xml.etree.ElementTree as ET 6 | import numpy as np 7 | 8 | from ..results import ResultProcessor, ResultsValue 9 | 10 | def exportToPVD(filename: str, results: ResultProcessor): 11 | """ 12 | Exports all the timestep increments to a pvd file for visualisation in Paraview 13 | 14 | :param filename: The root filename of the .pvd file 15 | :param results: The PyCCX results processor 16 | """ 17 | 18 | rootFilename = os.path.basename(filename).split('.')[0] 19 | rootDir = os.path.dirname(filename) 20 | 21 | if rootDir: 22 | rootDir = rootDir + '/' 23 | 24 | data = ET.Element('VTKFile', type="Collection", version="0.1", byte_order="LittleEndian") 25 | colEl = ET.SubElement(data, 'Collection') 26 | 27 | resultsFolder = '{:s}{:s}-data'.format(rootDir, rootFilename) 28 | 29 | """ Remove the previous directory """ 30 | try: 31 | shutil.rmtree(resultsFolder) 32 | except: 33 | pass 34 | 35 | os.mkdir(resultsFolder) 36 | 37 | colItems = [] 38 | for inc in results.increments: 39 | 40 | # iterData = results.increments[inc] 41 | 42 | incPath = '{:s}/{:s}'.format(resultsFolder, 'increment-{:d}.vtu'.format(inc)) 43 | 44 | """ Export the .vtu format to the data folder """ 45 | exportToVTK(incPath, results, inc) 46 | 47 | dataSetEl = ET.SubElement(colEl, 'DataSet', timestep="{:d}".format(inc), group="", part="0", file=incPath) 48 
| colItems.append(dataSetEl) 49 | 50 | b_xml = ET.tostring(data) 51 | with open('{:s}'.format(filename), 'wb') as f: 52 | f.write(b_xml) 53 | 54 | 55 | def exportToVTK(filename: str, results: ResultProcessor, inc: Optional[int] = -1): 56 | """ 57 | Exports a single time step result to the .vtu file in its xml format 58 | 59 | :param filename: filename to export to 60 | :param results: results object 61 | :param inc: selected time increment key to export 62 | """ 63 | vtkMap = { 64 | 1: 12, # 8 node brick 65 | 2: 13, # 6 node wedge 66 | 3: 10, # 4 node tet 67 | 4: 25, # 20 node brick 68 | 5: 13, # 15 node wedge 69 | 6: 24, # 10 node tet 70 | 7: 5, # 3 node shell 71 | 8: 22, # 6 node shell 72 | 9: 9, # 4 node shell 73 | 10: 23, # 8 node shell 74 | 11: 3, # 2 node beam 75 | 12: 21, # 3 node beam 76 | } 77 | 78 | """ 79 | Note: The node map is used to remap the nodes of the elements for special types in VTK 80 | This is for wedges and hex elements which have a different node ordering in VTK 81 | """ 82 | nodeMap = { 83 | 2: [0, 2, 1, 3, 5, 4], 84 | 4: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 12, 13, 14, 15], 85 | 5: [0, 2, 1, 3, 5, 4] 86 | } 87 | 88 | """ Select the result increment to export """ 89 | if inc == -1: 90 | """ Last increment """ 91 | resultIncrement = results.lastIncrement() 92 | else: 93 | """ Selected increment """ 94 | if results.increments.get(inc, None) is None: 95 | raise ValueError('Selected increment ({:d}) does not exist in the results'.format(inc)) 96 | 97 | resultIncrement = results.increments[inc] 98 | 99 | data = ET.Element('VTKFile', type="UnstructuredGrid") 100 | e1 = ET.SubElement(data, 'UnstructuredGrid') 101 | ePiece = ET.SubElement(e1, 'Piece', NumberOfPoints=str(len(results.nodes[0])), 102 | NumberOfCells=str(len(results.elements[0]))) 103 | 104 | ePointData = ET.SubElement(ePiece, 'PointData') 105 | 106 | """ Write the Node Displacement Data """ 107 | if len(resultIncrement[ResultsValue.DISP]) > 0: 108 | 109 | eDispArray = ET.SubElement(ePointData, 'DataArray', type="Float32", 110 | Name="Displacement", 111 | NumberOfComponents="3", Format="Ascii") 112 | nodeDispStr = '' 113 | 114 | for row in resultIncrement[ResultsValue.DISP]: 115 | nodeDispStr += ' '.join([str(val) for val in row[1:]]) + '\n' 116 | eDispArray.text = nodeDispStr 117 | 118 | """ Write the Nodal Reaction Force Data """ 119 | if len(resultIncrement[ResultsValue.FORCE]) > 0: 120 | eRFArray = ET.SubElement(ePointData, 'DataArray', type="Float32", 121 | Name="RF", NumberOfComponents="3", 122 | Format="Ascii") 123 | nodalRFStr = '' 124 | for row in resultIncrement[ResultsValue.FORCE]: 125 | nodalRFStr += ' '.join([str(val) for val in row[1:]]) + '\n' 126 | eRFArray.text = nodalRFStr 127 | 128 | """ Write the Nodal Temperature Data """ 129 | if len(resultIncrement[ResultsValue.TEMP]) > 0: 130 | eTempArray = ET.SubElement(ePointData, 'DataArray', type="Float32", 131 | Name="T", NumberOfComponents="1", 132 | Format="Ascii") 133 | nodeTempStr = '' 134 | for row in resultIncrement[ResultsValue.TEMP]: 135 | nodeTempStr += ' '.join([str(val) for val in row[1:]]) + '\n' 136 | 137 | eTempArray.text = nodeTempStr 138 | 139 | """ Write the Cauchy Stress Data """ 140 | if len(resultIncrement[ResultsValue.STRESS]) > 0: 141 | sigma = resultIncrement[ResultsValue.STRESS][:, 1:] 142 | eSigmaArray = ET.SubElement(ePointData, 'DataArray', type="Float32", 143 | Name="stress", NumberOfComponents=str(sigma.shape[1]), 144 | Format="Ascii") 145 | nodeSigmaStr = '' 146 | for row in sigma: 147 | nodeSigmaStr 
+= ' '.join([str(val) for val in row]) + '\n' 148 | eSigmaArray.text = nodeSigmaStr 149 | 150 | """ Write the Cauchy Stress Data """ 151 | if resultIncrement.get(ResultsValue.VMSTRESS, None) is not None: 152 | if len(resultIncrement[ResultsValue.VMSTRESS]) > 0: 153 | sigma = resultIncrement[ResultsValue.VMSTRESS][:, 1:] 154 | eSigmaVMArray = ET.SubElement(ePointData, 'DataArray', type="Float32", 155 | Name="stressVM", NumberOfComponents=str(sigma.shape[1]), 156 | Format="Ascii") 157 | nodeSigmaStr = '' 158 | for row in sigma: 159 | nodeSigmaStr += ' '.join([str(val) for val in row]) + '\n' 160 | 161 | eSigmaVMArray.text = nodeSigmaStr 162 | 163 | """ Write strain data """ 164 | if len(resultIncrement[ResultsValue.STRAIN]) > 0: 165 | eStrainArray = ET.SubElement(ePointData, 'DataArray', type="Float32", 166 | Name="strain", NumberOfComponents="6", 167 | Format="Ascii") 168 | nodeStrainStr = '' 169 | for row in resultIncrement[ResultsValue.STRAIN]: 170 | nodeStrainStr += ' '.join([str(val) for val in row[1:]]) + '\n' 171 | eStrainArray.text = nodeStrainStr 172 | 173 | """ Export the remaining geometrical element information to the .vtu format""" 174 | # eCellData = ET.SubElement(ePiece, 'CellData') 175 | 176 | ePoints = ET.SubElement(ePiece, 'Points') 177 | ePointsArray = ET.SubElement(ePoints, 'DataArray', type="Float32", 178 | Name="Points", NumberOfComponents="3", 179 | Format="Ascii") 180 | 181 | """ Write the Node Coordinate Data """ 182 | nodeStr = '' 183 | for row in results.nodes[1]: 184 | nodeStr += ' '.join([str(val) for val in row]) + '\n' 185 | 186 | ePointsArray.text = nodeStr 187 | 188 | eCells = ET.SubElement(ePiece, 'Cells') 189 | eConArray = ET.SubElement(eCells, 'DataArray', type="Int32", Name="connectivity", Format="Ascii") 190 | 191 | """ 192 | Write the Node Coordinate Data: 193 | Note: Row is the element id, element type, element nodes 194 | """ 195 | elConStr = '' 196 | 197 | elIds, elType, elCon = results.elements 198 | for i in range(len(elIds)): 199 | # Note (row[1]) is the element type 200 | 201 | if elType[i] in nodeMap: 202 | # Remap the nodes of the elements for special types in VTK 203 | elConIds = np.array(elCon[i]) 204 | elConIds = elConIds[np.array(nodeMap[elType[i]])] 205 | elConStr += ' '.join([str(val-1) for val in elConIds]) + '\n' 206 | else: 207 | # Write connectivity directly 208 | elConStr += ' '.join([str(val-1) for val in elCon[i]]) + '\n' 209 | 210 | eConArray.text = elConStr 211 | 212 | """ Write the element offset array """ 213 | eOffArray = ET.SubElement(eCells, 'DataArray', type="Int32", Name="offsets", Format="Ascii") 214 | elOffset = np.cumsum([len(row) for row in elCon]) 215 | eOffArray.text = ' '.join([str(int(val)) for val in elOffset]) + '\n' 216 | 217 | """ Write the element type array """ 218 | eTypeArray = ET.SubElement(eCells, 'DataArray', type="UInt8", Name="types", Format="Ascii") 219 | eTypes = [vtkMap[row] for row in elType] 220 | eTypeArray.text = ' '.join([str(int(val)) for val in eTypes]) + '\n' 221 | 222 | """ Write the binary string to the file """ 223 | b_xml = ET.tostring(data) 224 | with open(filename, 'wb') as f: 225 | f.write(b_xml) 226 | 227 | # Opening a file under the name 228 | -------------------------------------------------------------------------------- /examples/example_thermal.py: -------------------------------------------------------------------------------- 1 | """ 2 | This example demonstrates how to use PyCCX to perform a thermal analysis on a simple geometry. 
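The script walks through the typical PyCCX workflow: meshing a STEP geometry with GMSH, assigning Calculix element types, defining a transient thermal load case with its boundary conditions, running the Calculix solver, and exporting the results to VTK/PVD for post-processing.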
3 | """
4 | 
5 | import pyccx
6 | 
7 | # Import related to meshing
8 | from pyccx.mesh import Mesher, Ent
9 | import pyccx.mesh.elements as Elements
10 | 
11 | # Imports related to the FEA Analysis
12 | from pyccx.bc import Fixed, HeatFlux
13 | from pyccx.analysis import Simulation, SolidMaterialAssignment
14 | from pyccx.core import DOF, ElementSet, NodeSet, SurfaceSet
15 | from pyccx.results import ElementResult, NodalResult
16 | from pyccx.loadcase import LoadCase, LoadCaseType
17 | from pyccx.material import ElastoPlasticMaterial
18 | 
19 | """ ====================== Meshing ====================== """
20 | # Create a Mesher object to interface with GMSH. Provide a unique name. Multiple instances of this can be created.
21 | myMeshModel = Mesher('myModel')
22 | 
23 | # Set the number of threads to use for any multi-threaded meshing algorithms e.g. HXT
24 | myMeshModel.setNumThreads(4)
25 | myMeshModel.setOptimiseNetgen(True)
26 | 
27 | # Set the meshing algorithm (optional) to use globally.
28 | myMeshModel.set2DMeshingAlgorithm(pyccx.mesh.MeshingAlgorithm2D.DELAUNAY)
29 | myMeshModel.set3DMeshingAlgorithm(pyccx.mesh.MeshingAlgorithm3D.FRONTAL_DELAUNAY)
30 | 
31 | # Add the geometry and assign a physical name 'PartA' which can reference the elements generated for the volume
32 | myMeshModel.addGeometry('../models/cornerCube.step', 'PartA')
33 | 
34 | """
35 | Merges an assembly together. This is necessary when there are multiple bodies or volumes which share coincident faces. GMSH
36 | will automatically stitch these surfaces together and create a shared boundary, which is often useful when performing
37 | analyses where fields overlap (e.g. heat transfer). This should not be done if a contact analysis is performed.
38 | """
39 | 
40 | myMeshModel.mergeGeometry()
41 | 
42 | # Optionally set the names of boundaries using the GMSH geometry identities
43 | myMeshModel.setEntityName((Ent.Surface,1), 'MySurface1')
44 | myMeshModel.setEntityName((Ent.Surface,2), 'MySurface2')
45 | myMeshModel.setEntityName((Ent.Surface,3), 'Bottom_Face')
46 | myMeshModel.setEntityName((Ent.Surface,4), 'MySurface4')
47 | myMeshModel.setEntityName((Ent.Surface,5), 'MySurface5')
48 | myMeshModel.setEntityName((Ent.Volume,1), 'PartA')
49 | 
50 | """
51 | Set the average element size of the mesh. This requires using a mesh control which
52 | is applied to the geometry points available within the Volume. GMSH does not natively
53 | support applying seeds along edges like commercial software.
54 | """ 55 | geomPoints = myMeshModel.getPointsFromVolume(1) 56 | myMeshModel.setMeshSize(geomPoints, 0.5) # [MM] 57 | 58 | # Generate the mesh 59 | myMeshModel.generateMesh() 60 | 61 | # Obtain the surface faces (normals facing outwards) for surface 62 | surfFaces2 = myMeshModel.getSurfaceFacesFromSurfId(1) # MySurface1 63 | bottomFaces = myMeshModel.getSurfaceFacesFromSurfId(3) # Bottom_Face 64 | 65 | # Obtain all nodes associated with each surface 66 | surface1Nodes = myMeshModel.getNodesFromEntity((Ent.Surface,1)) # MySurface 67 | surface2Nodes = myMeshModel.getNodesFromEntity((Ent.Surface,2)) # MySurface2 68 | surface4Nodes = myMeshModel.getNodesFromEntity((Ent.Surface,4)) # MySurface4 69 | surface6Nodes = myMeshModel.getNodesFromEntity((Ent.Surface,6)) # MySurface6 70 | 71 | # An alternative method is to get nodes via the surface 72 | bottomFaceNodes = myMeshModel.getNodesFromSurfaceByName('Bottom_Face') 73 | 74 | # or via general query 75 | surface5Nodes = myMeshModel.getNodesByEntityName('MySurface5') # MySurface5 76 | 77 | # Obtain nodes from the volume 78 | volumeNodes = myMeshModel.getNodesFromVolumeByName('PartA') 79 | 80 | # The generated mesh can be interactively viewed natively within gmsh by calling the following 81 | #myMeshModel.showGui() 82 | 83 | """ Create the analysis""" 84 | # Set the number of simulation threads to be used by Calculix Solver across all analyses 85 | 86 | Simulation.setNumThreads(4) 87 | 88 | # Set the direct path to the Calculix executable 89 | Simulation.CALCULIX_PATH = '/opt/homebrew/Cellar/calculix-ccx/2.22/bin/ccx_2.22' 90 | 91 | # Create a Simulation object based on the supplied mesh model 92 | analysis = Simulation(myMeshModel) 93 | 94 | # Optionally set the working the base working directory 95 | analysis.setWorkingDirectory('.') 96 | 97 | print('Calculix version: {:d}. {:d}'.format(*analysis.version())) 98 | 99 | # Add the Node Sets For Attaching Boundary Conditions # 100 | # Note a unique name must be provided 101 | surface1NodeSet = NodeSet('surface1Nodes', surface1Nodes) 102 | surface2NodeSet = NodeSet('surface2Nodes', surface2Nodes) 103 | surface4NodeSet = NodeSet('surface4Nodes', surface4Nodes) 104 | surface5NodeSet = NodeSet('surface5Nodes', surface5Nodes) 105 | surface6NodeSet = NodeSet('surface6Nodes', surface6Nodes) 106 | bottomFaceNodeSet = NodeSet('bottomFaceNodes', bottomFaceNodes) 107 | volNodeSet = NodeSet('VolumeNodeSet', volumeNodes) 108 | 109 | # Create an element set using a concise approach 110 | 111 | partElSet = ElementSet('PartAElSet', myMeshModel.getElementIds((Ent.Volume,1))) 112 | 113 | # Create a surface set 114 | bottomFaceSet = SurfaceSet('bottomSurface', bottomFaces) 115 | 116 | # Add the userdefined nodesets to the analysis 117 | analysis.nodeSets = [surface1NodeSet, surface2NodeSet, surface4NodeSet, surface5NodeSet, surface6NodeSet, 118 | bottomFaceNodeSet, volNodeSet] 119 | 120 | 121 | """ 122 | Set the element types 123 | The element types can be set by the user to ensure that the correct element type is used for the analysis. 124 | """ 125 | 126 | modelElIds = myMeshModel.getElementIds((Ent.Volume, Ent.All)) 127 | myMeshModel.setMeshAssignmentsByType(modelElIds, Elements.TET4) 128 | 129 | # Check if there are any elements that have not been assigned an element type. 130 | if len(myMeshModel.identifyUnassignedElements()) > 0: 131 | raise Exception("identified unassigned elements") 132 | 133 | 134 | """ 135 | Initial Conditions 136 | ----------------- 137 | Initial conditions can be set for the analysis. 
This is used for setting the nodal temperature
138 | at the zeroth increment of the analysis.
139 | 
140 | When setting the initial condition using a NodeSet, either a NodeSet name or object can be
141 | specified, or alternatively an explicit list of nodes to apply the initial value upon.
142 | """
143 | 
144 | analysis.initialConditions.append({'type': 'temperature', 'set': 'VolumeNodeSet', 'value': 0.0})
145 | 
146 | """
147 | Thermal Load Cases
148 | -----------------
149 | In this example a thermal load case is defined to simulate a transient heat transfer problem in a
150 | simple geometry for a laser optic model.
151 | """
152 | 
153 | # Create a thermal load case and set the time settings
154 | thermalLoadCase = LoadCase('Thermal_Load_Case')
155 | 
156 | # Set the loadcase type to thermal - eventually this will be individual classes
157 | thermalLoadCase.setLoadCaseType(LoadCaseType.THERMAL)
158 | 
159 | # Set the thermal analysis to be a transient (not steady-state) simulation
160 | thermalLoadCase.steadyState = False
161 | thermalLoadCase.setTimeStep(0.5, 0.5, 5.0)
162 | 
163 | """
164 | Results Export
165 | -----------------
166 | Attach the nodal and element result options to each specific loadcase.
167 | The results will be exported to the .frd file
168 | """
169 | 
170 | # Set the nodal and element variables to record in the results
171 | # When specifying the result node set, if the node set is not specified, all nodes will be used
172 | nodeThermalPostResult = NodalResult()
173 | nodeThermalPostResult.temperature = True
174 | 
175 | elThermalPostResult = ElementResult(partElSet)
176 | elThermalPostResult.heatFlux = True
177 | 
178 | thermalLoadCase.resultSet = [nodeThermalPostResult, elThermalPostResult]
179 | 
180 | # Set thermal boundary conditions for the loadcase
181 | thermalLoadCase.boundaryConditions = [Fixed(surface6NodeSet, dof=[DOF.T], values = [60.0]),
182 | Fixed(surface1NodeSet, dof=[DOF.T], values = [20.0]),
183 | HeatFlux(bottomFaceSet, flux=50.0)]
184 | 
185 | """
186 | ====================== Materials ======================
187 | Materials should be defined before the material assignment. This can include additional
188 | thermal and mechanical properties that are used for each subsequent analysis load step.
189 | 
190 | - Add an elasto-plastic material and assign it to the volume.
191 | - Note: Ensure that the units correctly correspond with the geometry length scales
192 | """
193 | 
194 | steelMat = ElastoPlasticMaterial('Steel')
195 | steelMat.E = 210000.
# [MPa] Young's Modulus 196 | steelMat.alpha_CTE = [25e-6, 23e-6, 24e-6] # Thermal Expansion Coefficient 197 | steelMat.density = 1.0 # Density 198 | steelMat.cp = 1.0 # Specific Heat 199 | steelMat.k = 1.0 # Thermal Conductivity 200 | 201 | analysis.materials.append(steelMat) 202 | 203 | """ 204 | Material Assignment 205 | -------------------- 206 | The material is assigned to the element set created earlier for the part 207 | 208 | - SolidMaterialAssignment is used to assign a material to 3D continuum elements used in the model 209 | """ 210 | 211 | analysis.materialAssignments = [ 212 | SolidMaterialAssignment("solid_material", elementSet=partElSet, material=steelMat) 213 | ] 214 | 215 | # Set the loadcases used in sequential order 216 | analysis.loadCases = [thermalLoadCase] 217 | 218 | """ ====================== Analysis Run ====================== """ 219 | # Run the analysis 220 | try: 221 | analysis.run() 222 | except: 223 | print('Analysis failed on this design') 224 | 225 | """ ====================== Post-Processing ====================== """ 226 | # Open the results file ('input') is currently the file that is generated by PyCCX 227 | results = analysis.results() 228 | 229 | # The call to read must be done to load all loadcases and timesteps from the results file 230 | results.read() 231 | 232 | # Obtain the nodal temperatures 233 | nodalTemp = results.lastIncrement()['temp'][:, 1] 234 | 235 | # Obtain the nodal coordinates and elements for further p 236 | tetEls = myMeshModel.getElementsByType(Elements.TET4) 237 | 238 | import pyccx.utils.exporters as exporters 239 | 240 | # Export the results to VTK format as a significant timestep for post-processing 241 | exporters.exportToVTK('result.vtu', results, inc=-1) 242 | 243 | # Export the results to PVD format for visualization in Paraview - this includes all the timesteps during the analysis 244 | exporters.exportToPVD('results.pvd', results) 245 | -------------------------------------------------------------------------------- /pyccx/loadcase/loadcase.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import os 3 | 4 | from enum import IntEnum, auto 5 | from typing import List, Tuple, Type, Optional 6 | 7 | from ..bc import BoundaryCondition, BoundaryConditionType 8 | from ..core import ModelObject 9 | from ..results import Result 10 | 11 | 12 | class LoadCaseType(IntEnum): 13 | """ 14 | Enum Class specifies the Load Case type 15 | """ 16 | 17 | STATIC = auto() 18 | """Linear Static structural analysis""" 19 | 20 | THERMAL = auto() 21 | """Thermal analysis for performing heat transfer studies""" 22 | 23 | UNCOUPLEDTHERMOMECHANICAL = auto() 24 | """Coupled thermo-mechanical analysis""" 25 | 26 | BUCKLE = auto() 27 | """Buckling analysis of a structure""" 28 | 29 | MODAL = auto() 30 | """Modal analysis of a structure""" 31 | 32 | DYNAMIC = auto() 33 | """Dynamic analysis of a structure""" 34 | 35 | 36 | class LoadCase(ModelObject): 37 | """ 38 | A unique Load case defines a set of simulation analysis conditions and a set of boundary conditions to apply to 39 | the domain. The default and initial timestep provide an estimate for the solver should be specified along with 40 | the total duration of the load case using :meth:`setTimeStep`. The analysis type for the loadcase should be 41 | specified using :meth:`setLoadCaseType`. Depending on the analysis type the steady-state solution may instead be 42 | calculated. 
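A minimal configuration for a transient thermal study is sketched below (illustrative values only)::

    loadCase = LoadCase('Thermal_Load_Case', loadCaseType=LoadCaseType.THERMAL)
    loadCase.steadyState = False
    loadCase.setTimeStep(defaultTimestep=0.5, initialTimestep=0.5, totalTime=5.0)
    loadCase.boundaryConditions = [...]  # BoundaryCondition objects, e.g. Fixed, HeatFlux
    loadCase.resultSet = [...]           # NodalResult / ElementResult output requests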
43 | 44 | If the option :attr:`automaticIncrements` is set to False, the solver will use the initial timestep and the total 45 | time steps will be defined by the user when using the :attr:`nonlinear` option, which provides time-dependent 46 | behavior for the analysis. 47 | 48 | """ 49 | def __init__(self, name: str, 50 | loadCaseType: Optional[LoadCaseType] = None, 51 | resultSets: Optional[List[Result]] = None): 52 | 53 | super().__init__(name) 54 | 55 | # Internal output 56 | self._input = '' 57 | 58 | """ Analysis Types for the Load Case""" 59 | self._loadCaseType = None 60 | self._isSteadyState = False 61 | self._isNonlinear = False 62 | 63 | """ Time-stepping parameters """ 64 | self._automaticIncrements = True 65 | self._initialTimestep = 0.1 66 | self._defaultTimestep = 0.1 67 | self._minTimestep = 1e-6 68 | self._maxTimestep = 1.0 69 | self._totalTime = 1.0 70 | self._resultSet = [] 71 | self._boundaryConditions = [] 72 | 73 | if loadCaseType: 74 | if loadCaseType in LoadCaseType: 75 | self._loadCaseType = loadCaseType 76 | else: 77 | raise ValueError('Loadcase type must valid') 78 | 79 | if resultSets: 80 | self.resultSet = resultSets 81 | 82 | @property 83 | def loadCaseType(self) -> LoadCaseType: 84 | return self._loadCaseType 85 | 86 | @property 87 | def boundaryConditions(self) -> List[BoundaryCondition]: 88 | """ 89 | The list of boundary conditions to be applied during the load case 90 | """ 91 | return self._boundaryConditions 92 | 93 | @boundaryConditions.setter 94 | def boundaryConditions(self, bConds: List[BoundaryCondition]): 95 | self._boundaryConditions = bConds 96 | 97 | @property 98 | def resultSet(self) -> List[Result]: 99 | """ 100 | The result outputs (:class:`~pyccx.results.ElementResult`, :class:`~pyccx.results.NodeResult`) to generate 101 | the set of results from this load case. 102 | """ 103 | return self._resultSet 104 | 105 | @resultSet.setter 106 | def resultSet(self, rSets: List[Result]): 107 | if not any(isinstance(x, Result) for x in rSets): 108 | raise ValueError('Loadcase ResultSets must be derived from a Result class') 109 | else: 110 | self._resultSet = rSets 111 | 112 | @property 113 | def maxTimestep(self) -> float: 114 | """ 115 | The maximum timestep increment for the load case if the solver is using an adaptive time-stepping scheme 116 | which is used during a non-linear or incremental loading. 
117 | """ 118 | return self._maxTimestep 119 | 120 | @maxTimestep.setter 121 | def maxTimestep(self, timeInc: float): 122 | self._maxTimestep = timeInc 123 | 124 | @property 125 | def minTimestep(self) -> float: 126 | """ 127 | The minimum timestep increment for the load case if the solver is using an adaptive time-stepping scheme 128 | """ 129 | return self._minTimestep 130 | 131 | @minTimestep.setter 132 | def minTimestep(self, timeInc: float): 133 | self._minTimestep = timeInc 134 | 135 | @property 136 | def totalTime(self) -> float: 137 | """ 138 | The total duration time for the load case 139 | """ 140 | return self._totalTime 141 | 142 | @totalTime.setter 143 | def totalTime(self, time: float): 144 | self._totalTime = time 145 | 146 | @property 147 | def defaultTimestep(self) -> float: 148 | """ 149 | The default timestep to use throughout the load case 150 | """ 151 | return self._defaultTimestep 152 | 153 | @defaultTimestep.setter 154 | def defaultTimestep(self, timestep: float) -> None: 155 | self._defaultTimestep = timestep 156 | 157 | @property 158 | def initialTimestep(self) -> float: 159 | """ 160 | The initial timestep to use for the increment during the load case if the solver is using an 161 | adaptive time-stepping scheme 162 | """ 163 | return self._initialTimestep 164 | 165 | @initialTimestep.setter 166 | def initialTimestep(self, timeStep: float): 167 | self._initialTimestep = timeStep 168 | 169 | @property 170 | def steadyState(self) -> bool: 171 | """ 172 | `True` if the loadcase is a steady-state analysis 173 | """ 174 | return self._isSteadyState 175 | 176 | @steadyState.setter 177 | def steadyState(self, state: bool) -> None: 178 | self._isSteadyState = state 179 | 180 | @property 181 | def automaticIncrements(self) -> bool: 182 | """ 183 | `True` if the solver is using adaptive time-stepping increments 184 | """ 185 | return self._automaticIncrements 186 | 187 | @automaticIncrements.setter 188 | def automaticIncrements(self, state: bool) -> None: 189 | self._automaticIncrements = state 190 | 191 | @property 192 | def nonlinear(self) -> bool: 193 | """ 194 | `True` if the load case is a non-linear analysis 195 | """ 196 | return self._isNonlinear 197 | 198 | @nonlinear.setter 199 | def nonlinear(self, state) -> None: 200 | self._isNonlinear = state 201 | 202 | def setTimeStep(self, 203 | defaultTimestep: float = 1.0, 204 | initialTimestep: Optional[float] = None, 205 | totalTime: Optional[float] = None) -> None: 206 | """ 207 | Set the time stepping values for the loadcase 208 | 209 | :param defaultTimestep: float: Default timestep to use throughout the load case 210 | :param initialTimestep: float: The initial timestep to use for the increment 211 | :param totalTime: float: The total time for the load case 212 | 213 | """ 214 | self._defaultTimestep = defaultTimestep 215 | 216 | if initialTimestep: 217 | self._initialTimestep = initialTimestep 218 | 219 | if totalTime: 220 | self._totalTime = totalTime 221 | 222 | def setLoadCaseType(self, loadCaseType: LoadCaseType) -> None: 223 | """ 224 | Set the load case type based on the analysis types available in :class:`~pyccx.loadcase.LoadCaseType`. 
225 | 226 | :param loadCaseType: Set the load case type using the enum :class:`~pyccx.loadcase.LoadCaseType` 227 | """ 228 | 229 | if isinstance(loadCaseType, LoadCaseType): 230 | self._loadCaseType = loadCaseType 231 | else: 232 | raise ValueError('The load case type is not supported') 233 | 234 | def writeBoundaryCondition(self) -> str: 235 | """ 236 | Generates the string for Boundary Conditions in self.boundaryConditions containing all the attached boundary 237 | conditions. Calculix cannot share existing boundary conditions and therefore has to be explicitly 238 | initialised and created per individual load case. 239 | 240 | :return: outStr 241 | """ 242 | bCondStr = '' 243 | 244 | for bcond in self.boundaryConditions: 245 | bCondStr += bcond.writeInput() 246 | bCondStr += '\n' 247 | 248 | return bCondStr 249 | 250 | def writeInput(self) -> str: 251 | 252 | outStr = '\n' 253 | outStr += '{:*^80}\n'.format(' LOAD CASE ({:s}) '.format(self.name)) 254 | outStr += '*STEP' 255 | 256 | if self._isNonlinear: 257 | outStr += ', NLGEOM=YES' 258 | 259 | outStr += '\n' 260 | 261 | # Write the analysis loadstep case 262 | if self._loadCaseType == LoadCaseType.STATIC: 263 | outStr += '*STATIC' 264 | elif self._loadCaseType == LoadCaseType.THERMAL: 265 | outStr += '*HEAT TRANSFER' 266 | elif self._loadCaseType == LoadCaseType.UNCOUPLEDTHERMOMECHANICAL: 267 | outStr += '*UNCOUPLED TEMPERATURE-DISPLACEMENT' 268 | else: 269 | raise ValueError('The type ({:s}) for Loadcase ({:s}) is not currently supported in PyCCX'.format(self._loadCaseType, self.name)) 270 | if self._isSteadyState: 271 | outStr += ', STEADY STATE' 272 | 273 | if not self._automaticIncrements: 274 | outStr += ', DIRECT' 275 | 276 | outStr += '\n' 277 | 278 | # Write the timestepping information 279 | outStr += '{:.7f}, {:.7f} ,{:.7f} , {:.7f}\n'.format(self._initialTimestep, 280 | self._totalTime, 281 | self._minTimestep, self._maxTimestep) 282 | 283 | outStr += '\n' 284 | # Write the individual boundary conditions associated with this loadcase 285 | outStr += self.writeBoundaryCondition() 286 | 287 | outStr += os.linesep 288 | for postResult in self.resultSet: 289 | outStr += postResult.writeInput() 290 | 291 | outStr += '*END STEP\n\n' 292 | 293 | return outStr 294 | -------------------------------------------------------------------------------- /pyccx/mesh/elements.py: -------------------------------------------------------------------------------- 1 | from types import MappingProxyType 2 | from enum import IntEnum 3 | from typing import List 4 | 5 | import numpy as np 6 | from .utils import classproperty 7 | 8 | class ElementFamilies(IntEnum): 9 | """ Element Family Types""" 10 | Pnt = 1 11 | Line = 2 12 | Tri = 3 13 | Quad = 4 14 | Tet = 5 15 | Pyramid = 6 16 | Prism = 7 17 | Hex = 8 18 | 19 | @classmethod 20 | def has_value(cls, value): 21 | return value in cls._value2member_map_ 22 | 23 | class ElementTypes(IntEnum): 24 | """ 25 | The Element Family Types available generally for Calculix and other FEA solvers. 
This includes common 1D (beam), 26 | 2D planar and axisymmetric elements and various 3D element formulations (3D continuum and 3D shell elements) 27 | """ 28 | Node = 1 29 | Line = 2 30 | Planar = 3 31 | Shell = 4 32 | Axisymmetric = 5 33 | Volume = 6 34 | 35 | @classmethod 36 | def has_value(cls, value): 37 | return value in cls._value2member_map_ 38 | 39 | 40 | class BaseElementType: 41 | """ 42 | Library of Base Element Types for Calculix, which includes both the element type and the element family and 43 | corresponding Calculix elemental name and nodal mappings for the element type when identifying surfaces or edges 44 | for application of flux boundary conditions. 45 | """ 46 | 47 | Type = None 48 | 49 | _Data = MappingProxyType({ 50 | 'NODE': {'id': 15, 'name': 'Node', 'nodes': 1, 'order': 0, 'family': ElementFamilies.Pnt, 51 | 'map': [1], 52 | 'faces': None, 'elementType': ElementTypes.Node}, 53 | 'BEAM2': {'id': 1, 'name': 'B31', 'nodes': 2, 'order': 1, 'family': ElementFamilies.Line, 54 | 'map': [1, 2], 55 | 'faces': [[1,2]], 'elementType': ElementTypes.Line}, 56 | 'BEAM3': {'id': 1, 'name': 'B32', 'nodes': 3, 'order': 2, 'family': ElementFamilies.Line, 57 | 'map': [1, 2, 3], 58 | 'faces': [[1,2], [2,3]], 'elementType': ElementTypes.Line}, 59 | 'TRI3': {'id': 2, 'name': 'CPS3', 'nodes': 3, 'order': 1, 'family': ElementFamilies.Tri, 60 | 'map': [1, 2, 3], 61 | 'faces': [[1,2],[2,3],[3,1]], 'elementType': ElementTypes.Planar}, 62 | 'TRI6': {'id': 9, 'name': 'CPS6', 'nodes': 6, 'order': 2, 'family': ElementFamilies.Tri, 63 | 'map': [1, 2, 3, 4, 5, 6], 64 | 'faces': [[1,2],[2,3],[3,1]], 'elementType': ElementTypes.Planar}, 65 | 'QUAD4': {'id': 3, 'name': 'CPS4', 'nodes': 4, 'order': 1, 'family': ElementFamilies.Quad, 66 | 'map': [1, 2, 3, 4], 67 | 'faces': [[1, 2], [2, 3], [3, 4], [4,1]], 'elementType': ElementTypes.Planar}, 68 | 'QUAD8': {'id': 16, 'name': 'CPS8', 'nodes': 8, 'order': 2, 'family': ElementFamilies.Quad, 69 | 'map': [1, 2, 3, 4, 5, 6, 7, 8], 70 | 'faces': [[1, 2], [2, 3], [3, 4], [4,1]],'elementType': ElementTypes.Planar}, 71 | 'SHELL3': {'id': 2, 'name': 'S3', 'nodes': 3, 'order': 1, 'family': ElementFamilies.Tri, 72 | 'map': [1, 2, 3], 73 | 'faces': [[1,2],[2,3],[3,1]], 'elementType': ElementTypes.Shell}, 74 | 'SHELL4': {'id': 3, 'name': 'S4', 'nodes': 4, 'order': 1, 'family': ElementFamilies.Quad, 75 | 'map': [1, 2, 3, 4], 76 | 'faces': [[1, 2], [2, 3], [3, 4], [4,1]], 'elementType': ElementTypes.Shell}, 77 | 'SHELL6': {'id': 9, 'name': 'S6', 'nodes': 6, 'order': 2, 'family': ElementFamilies.Tri, 78 | 'map': [1, 2, 3, 4, 5, 6], 79 | 'faces': [[1, 2], [2, 3], [3, 1]], 'elementType': ElementTypes.Shell}, 80 | 'SHELL8': {'id': 16, 'name': 'S8', 'nodes': 8, 'order': 2, 'family': ElementFamilies.Quad, 81 | 'map': [1, 2, 3, 4, 5, 6, 7, 8], 82 | 'faces': [[1, 8, 4, 7, 3, 6, 2, 5]], 'elementType': ElementTypes.Shell}, 83 | 'AX3': {'id': 2, 'name': 'CAX3', 'nodes': 3, 'order': 1, 'family': ElementFamilies.Tri, 84 | 'map': [1, 2, 3], 85 | 'faces': [[1, 2], [2, 3], [3, 1]], 'elementType': ElementTypes.Axisymmetric}, 86 | 'AX6': {'id': 9, 'name': 'CAX6', 'nodes': 6, 'order': 2, 'family': ElementFamilies.Tri, 87 | 'map': [1, 2, 3, 4, 5, 6], 88 | 'faces': [[1, 2], [2, 3], [3, 1]], 'elementType': ElementTypes.Axisymmetric}, 89 | 'AX4': {'id': 3, 'name': 'CAX4', 'nodes': 4, 'order': 1, 'family': ElementFamilies.Quad, 90 | 'faces': [[1, 2], [2, 3], [3, 4], [4,1]], 91 | 'map': [1,2,3,4], 'elementType': ElementTypes.Axisymmetric}, 92 | 'AX8': {'id': 16, 'name': 'S8', 'nodes': 8, 
'order': 2, 'family': ElementFamilies.Quad, 93 | 'map': [1, 2, 3, 4, 5, 6, 7, 8], 94 | 'faces': [[1, 8, 4, 7, 3, 6, 2, 5]], 'elementType': ElementTypes.Axisymmetric}, 95 | 'TET4': {'id': 4, 'name': 'C3D4', 'nodes': 4, 'order': 1, 'family': ElementFamilies.Tet, 96 | 'faces': [[1, 2, 3], [1, 4, 2], [2, 4, 3], [3, 4, 1]], 97 | 'map': [1, 2, 3, 4], 98 | 'elementType': ElementTypes.Volume}, 99 | 'TET10': {'id': 11, 'name': 'C3D10', 'nodes': 10, 'order': 2, 'family': ElementFamilies.Tet, 100 | 'faces': [[1, 2, 3], [1, 4, 2], [2, 4, 3], [3, 4, 1]], 101 | 'map': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 102 | 'elementType': ElementTypes.Volume}, 103 | 'HEX8': {'id': 5, 'name': 'C3D8', 'nodes': 8, 'order': 1, 'family': ElementFamilies.Hex, 104 | 'map': [1, 2, 3, 4, 5, 6, 7, 8], 105 | 'faces': [[1, 2, 3, 4], [5, 8, 7, 6], [1, 5, 6, 2], [2, 6, 7, 3], [3, 7, 8, 4], [4, 8, 5, 1]], 106 | 'elementType': ElementTypes.Volume}, 107 | 'HEX8R': {'id': 5, 'name': 'C3D8R', 'nodes': 8, 'order': 1, 'family': ElementFamilies.Hex, 108 | 'map': [1, 2, 3, 4, 5, 6, 7, 8], 109 | 'faces': [[1, 2, 3, 4], [5, 8, 7, 6], [1, 5, 6, 2], [2, 6, 7, 3], [3, 7, 8, 4], [4, 8, 5, 1]], 110 | 'elementType': ElementTypes.Volume}, 111 | 'HEX20': {'id': 17, 'name': 'C3D20', 'nodes': 20, 'order': 2, 'family': ElementFamilies.Hex, 112 | 'map': [1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 14, 10, 17, 19, 20, 18, 11, 13, 15, 16], 113 | 'faces': [[1, 2, 3, 4], [5, 8, 7, 6], [1, 5, 6, 2], [2, 6, 7, 3], [3, 7, 8, 4], [4, 8, 5, 1]], 114 | 'elementType': ElementTypes.Volume}, 115 | 'WEDGE6': {'id': 6, 'name': 'C3D6', 'nodes': 6, 'order': 1, 'family': ElementFamilies.Prism, 116 | 'faces': [[1, 2, 3], [4, 5, 6], [1, 2, 5, 4], [2, 3, 6, 5], [3, 1, 4, 6]], 117 | 'map': [1, 2, 3, 4, 5, 6], 118 | 'elementType': ElementTypes.Volume}, 119 | }) 120 | 121 | def __init__(self) -> None: 122 | return 123 | 124 | @classproperty 125 | def name(cls): 126 | return cls._Data[cls.Type]['name'] 127 | 128 | @classproperty 129 | def id(cls): 130 | return cls._Data[cls.Type]['id'] 131 | 132 | @classproperty 133 | def nodes(cls) -> int: 134 | return cls._Data[cls.Type]['nodes'] 135 | 136 | @classproperty 137 | def order(cls) -> int: 138 | return cls._Data[cls.Type]['order'] 139 | 140 | @classproperty 141 | def faces(cls) -> List[List[int]]: 142 | return cls._Data[cls.Type]['faces'] 143 | 144 | @classproperty 145 | def map(cls) -> List[List[int]]: 146 | return cls._Data[cls.Type]['map'] 147 | 148 | @classproperty 149 | def elementType(cls) -> List[List[int]]: 150 | return cls._Data[cls.Type]['elementType'] 151 | 152 | @classproperty 153 | def faceMask(cls) -> List[List[int]]: 154 | 155 | nodeNum = cls._Data[cls.Type]['nodes'] 156 | faceIds = cls._Data[cls.Type]['faces'] 157 | mask = np.zeros([len(faceIds), nodeNum]) 158 | 159 | for i, faceId in enumerate(faceIds): 160 | mask[i, np.array(faceId)-1] = 1 161 | 162 | return mask 163 | 164 | @classproperty 165 | def family(cls) -> ElementFamilies: 166 | return cls._Data[cls.Type]['family'] 167 | 168 | @classmethod 169 | def data(cls): 170 | """ Data structure for the Element Type""" 171 | return cls._Data[cls.Type] 172 | 173 | class NODE(BaseElementType): 174 | """ A single node element""" 175 | Type = 'NODE' 176 | 177 | class BEAM2(BaseElementType): 178 | """ A Linear Beam Element """ 179 | Type = 'BEAM2' 180 | 181 | class BEAM3(BaseElementType): 182 | """ A Quadratic Beam Element """ 183 | Type = 'BEAM3' 184 | 185 | class TET4(BaseElementType): 186 | """ 1st Order Linear Tet Element (C3D4) """ 187 | Type = 'TET4' 188 | 189 | class 
TET10(BaseElementType): 190 | """ 2nd order Quadratic Tet Element (C3D10) consisting of 10 nodes """ 191 | Type = 'TET10' 192 | 193 | class HEX8(BaseElementType): 194 | """ 1st order Linear Hexahedral Element (C3D8) """ 195 | Type = 'HEX8' 196 | 197 | class HEX8R(BaseElementType): 198 | """ 199 | Linear Hex Element (C3D8I) with reformulation to reduce the effects of shear and 200 | volumetric locking and hourglass effects under some extreme situations 201 | """ 202 | Type = 'HEX8R' 203 | 204 | class HEX20(BaseElementType): 205 | """ 206 | Quadratic Hexahedral Element (C3D20) consisting of 20 Nodes 207 | """ 208 | Type = 'HEX20' 209 | 210 | class WEDGE6(BaseElementType): 211 | """ 1st order Wedge or Prism Element (C3D6) """ 212 | Type = 'WEDGE6' 213 | 214 | class TRI3(BaseElementType): 215 | """ 1st order Tri Planar Stress Element (CPS4) """ 216 | Type = 'TRI3' 217 | 218 | class TRI6(BaseElementType): 219 | """ 2nd order Tri Planar Stress Element (CPS4) """ 220 | Type = 'TRI6' 221 | 222 | class QUAD4(BaseElementType): 223 | """ 1st order Quad Planar Stress Element (CPS4) """ 224 | Type = 'QUAD4' 225 | 226 | class QUAD8(BaseElementType): 227 | """ 2nd order Quad Planar Stress Element (CPS4) """ 228 | Type = 'QUAD8' 229 | 230 | class SHELL3(BaseElementType): 231 | """ 1st order Tri Shell Element (S3) """ 232 | Type = 'SHELL3' 233 | 234 | class SHELL4(BaseElementType): 235 | """ 1st order Quad Shell Element (S4) """ 236 | Type = 'SHELL4' 237 | 238 | class SHELL6(BaseElementType): 239 | """ 2nd order Tri Shell Element (S6) """ 240 | Type = 'SHELL6' 241 | 242 | class SHELL8(BaseElementType): 243 | """ 2nd order Quad Shell Element (S8) """ 244 | Type = 'SHELL8' 245 | 246 | class AX3(BaseElementType): 247 | """ 1st order Axisymmetric Tri Element (CAX4) """ 248 | Type = 'AX3' 249 | 250 | class AX4(BaseElementType): 251 | """ 1st order Axisymmetric Quad Element (CAX4) """ 252 | Type = 'AX4' 253 | 254 | class AX6(BaseElementType): 255 | """ 2nd order Axisymmetric Tri Element (CAX4) """ 256 | Type = 'AX6' 257 | 258 | class AX8(BaseElementType): 259 | """ 2nd order Axisymmetric Quad Element (CAX4) """ 260 | Type = 'AX8' 261 | 262 | def elementTypes(): 263 | """ 264 | Returns the list of available element types available 265 | :return: 266 | """ 267 | availableElementTypes = [NODE, BEAM2, BEAM3, TRI3, TRI6, TET4, TET10, HEX8, HEX20, HEX8R, AX3, AX4, AX6, AX8, SHELL3, SHELL4, SHELL8, WEDGE6] 268 | 269 | return availableElementTypes 270 | 271 | def getElementById(elTypeId: int): 272 | """ 273 | Factory method for initialising an element class type 274 | 275 | :param elTypeId: 276 | :return: 277 | """ 278 | 279 | for eType in elementTypes(): 280 | if eType.id == elTypeId: 281 | return eType 282 | 283 | return None 284 | -------------------------------------------------------------------------------- /pyccx/material/material.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import abc 3 | from enum import Enum, auto 4 | from typing import Iterable, Optional, Union 5 | 6 | from ..core import ModelObject 7 | 8 | 9 | class Material(ModelObject): 10 | """ 11 | Base class for all material model definitions 12 | """ 13 | MATERIALMODEL = 'INVALID' 14 | 15 | def __init__(self, name: str): 16 | 17 | self._input = '' 18 | self._materialModel = '' 19 | 20 | super().__init__(name) 21 | 22 | @staticmethod 23 | def toNumpy(val): 24 | v = np.asanyarray(val) 25 | if v.ndim == 0: 26 | v = v.reshape(-1) 27 | return v 28 | 29 | @property 30 | def name(self) -> str: 
31 | return self._name 32 | 33 | def setName(self, matName: str) -> None: 34 | self._name = matName 35 | 36 | @property 37 | @abc.abstractmethod 38 | def materialModel(self): 39 | raise NotImplementedError() 40 | 41 | @abc.abstractmethod 42 | def writeInput(self): 43 | raise NotImplementedError() 44 | 45 | @abc.abstractmethod 46 | def isValid(self) -> bool: 47 | """ 48 | Abstract method: re-implement in material models to check parameters are correct by the user 49 | """ 50 | raise NotImplementedError() 51 | 52 | 53 | class ElastoPlasticMaterial(Material): 54 | """ 55 | Represents a generic non-linear elastic/plastic material which may be used in both structural, 56 | and thermal type analyses 57 | """ 58 | 59 | class WorkHardeningType(Enum): 60 | """ 61 | Work hardening mode selecting the hardening regime for the accumulation of plastic-strain 62 | """ 63 | 64 | NONE = auto() 65 | """ Prevents any plastic deformation """ 66 | 67 | ISOTROPIC = auto() 68 | """ Isotropic work hardening """ 69 | 70 | KINEMATIC = auto() 71 | """ Kinematic work hardening """ 72 | 73 | COMBINED = auto() 74 | """ Cyclic work hardening """ 75 | 76 | def __init__(self, name): 77 | 78 | super().__init__(name) 79 | 80 | self._E = Material.toNumpy(210e3) 81 | self._nu = Material.toNumpy(0.33) 82 | self._density = Material.toNumpy(7.85e-9) 83 | self._alpha_CTE = Material.toNumpy(12e-6) 84 | self._k = Material.toNumpy(15.0) 85 | self._cp = Material.toNumpy(420e-9) 86 | 87 | # Plastic Behavior 88 | self._workHardeningMode = ElastoPlasticMaterial.WorkHardeningType.NONE 89 | self._hardeningCurve = None 90 | 91 | @property 92 | def E(self) -> np.ndarray: 93 | """ 94 | Elastic Modulus :math:`E` 95 | 96 | The Young's Modulus :math:`E` can be both isotropic by setting as a scalar value, or orthotropic by 97 | setting to a (1x3) array corresponding to :math:`E_{ii}, E_{jj}, E_{kk}` for each direction. 98 | Temperature dependent Young's modulus can be set by providing a nx4 array, where the 1st column is 99 | the temperature :math:`T` and the remaining columns are the orthotropic values of :math:`E`. 100 | """ 101 | return self._E 102 | 103 | @E.setter 104 | def E(self, val: Union[float, Iterable]) -> None: 105 | 106 | self._E = Material.toNumpy(val) 107 | 108 | @property 109 | def nu(self): 110 | """Poisson's Ratio :math:`\\nu` """ 111 | return self._nu 112 | 113 | @nu.setter 114 | def nu(self, val: Union[float, Iterable]): 115 | 116 | self._nu = Material.toNumpy(val) 117 | 118 | @property 119 | def density(self) -> np.ndarray: 120 | """ 121 | The material density :math:`\\rho` 122 | """ 123 | return self._density 124 | 125 | @density.setter 126 | def density(self, val: Union[float, Iterable]): 127 | self._density = Material.toNumpy(val) 128 | 129 | @property 130 | def alpha_CTE(self) -> np.ndarray: 131 | """ 132 | Linear thermal expansion coefficient :math:`\\alpha_{cte}` 133 | 134 | The thermal conductivity :math:`alpha_{cte}` can be both isotropic by setting as a scalar value, or 135 | orthotropic by setting to a (1x3) array corresponding to :math:`\alpha_{cte}` for each direction. 
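For example, for an :class:`ElastoPlasticMaterial` instance ``material`` (illustrative values)::

    material.alpha_CTE = 12e-6                  # isotropic (scalar)
    material.alpha_CTE = [12e-6, 13e-6, 14e-6]  # orthotropic (1x3)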
136 | 
137 | Temperature dependent thermal expansion coefficient can be set by providing a nx4 array, where
138 | the 1st column is the temperature :math:`T` and the remaining columns are the orthotropic values
139 | of :math:`\alpha_{cte}`.
140 | """
141 | return self._alpha_CTE
142 | 
143 | @alpha_CTE.setter
144 | def alpha_CTE(self, val: Union[float, Iterable]) -> None:
145 | self._alpha_CTE = Material.toNumpy(val)
146 | 
147 | @property
148 | def k(self) -> np.ndarray:
149 | """
150 | Thermal conductivity :math:`k`
151 | 
152 | The thermal conductivity :math:`k` can be both isotropic by setting as a scalar value, or orthotropic by
153 | setting to a (1x3) array corresponding to :math:`k_{ii}, k_{jj}, k_{kk}` for each direction. Temperature
154 | dependent thermal conductivity can be set by providing a nx4 array, where the 1st column is the
155 | temperature :math:`T` and the remaining columns are the orthotropic values of :math:`k`.
156 | """
157 | return self._k
158 | 
159 | @k.setter
160 | def k(self, val: Union[float, Iterable]) -> None:
161 | self._k = Material.toNumpy(val)
162 | 
163 | @property
164 | def cp(self) -> np.ndarray:
165 | """
166 | Specific Heat :math:`c_p`
167 | 
168 | The specific heat :math:`c_p` can be both isotropic by setting as a scalar value, or orthotropic by setting
169 | to a (1x3) array corresponding to :math:`c_p` for each direction. Temperature dependent specific heat can be
170 | set by providing a nx4 array, where the 1st column is the temperature :math:`T` and the remaining columns
171 | are the orthotropic values of :math:`c_p`.
172 | """
173 | return self._cp
174 | 
175 | @cp.setter
176 | def cp(self, val: Union[float, Iterable]) -> None:
177 | self._cp = Material.toNumpy(val)
178 | 
179 | def isPlastic(self) -> bool:
180 | """
181 | Returns True if the material exhibits a plastic behaviour
182 | """
183 | return self._workHardeningMode is not ElastoPlasticMaterial.WorkHardeningType.NONE
184 | 
185 | @property
186 | def workHardeningMode(self) -> WorkHardeningType:
187 | """
188 | The work hardening mode of the material - if this is set, plastic behaviour will be assumed, requiring a
189 | work hardening curve to be provided
190 | """
191 | return self._workHardeningMode
192 | 
193 | @workHardeningMode.setter
194 | def workHardeningMode(self, mode: WorkHardeningType) -> None:
195 | if mode not in ElastoPlasticMaterial.WorkHardeningType:
196 | raise ValueError('Invalid work hardening mode supplied')
197 | 
198 | self._workHardeningMode = mode
199 | 
200 | @property
201 | def hardeningCurve(self) -> np.ndarray:
202 | """
203 | Sets the work hardening stress-strain curve as an nx3 array (curve), with each row entry consisting of
204 | stress :math:`\\sigma`, plastic strain :math:`\\varepsilon_p` and temperature :math:`T`. The first row
205 | of a temperature group describes the yield point :math:`\\sigma_y` for the onset of the plastic regime.
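For example, an isotropic hardening curve defined at two temperatures could be supplied as follows, assuming an :class:`ElastoPlasticMaterial` instance ``material`` and ``numpy`` imported as ``np`` (illustrative values only)::

    material.workHardeningMode = ElastoPlasticMaterial.WorkHardeningType.ISOTROPIC
    material.hardeningCurve = np.array([[250.0, 0.00,  20.0],
                                        [300.0, 0.05,  20.0],
                                        [200.0, 0.00, 500.0],
                                        [240.0, 0.05, 500.0]])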
206 | """
207 | return self._hardeningCurve
208 | 
209 | @hardeningCurve.setter
210 | def hardeningCurve(self, curve):
211 | 
212 | if not isinstance(curve, np.ndarray) or curve.shape[1] != 3:
213 | raise ValueError('Work hardening curve should be an nx3 numpy array')
214 | 
215 | self._hardeningCurve = curve
216 | 
217 | @property
218 | def materialModel(self):
219 | """ The Material Model Calculix keyword """
220 | return 'elastic'
221 | 
222 | def _writeElasticProp(self) -> str:
223 | 
224 | lineStr = '*elastic'
225 | nu = self.nu
226 | E = self.E
227 | 
228 | if nu.ndim != E.ndim:
229 | raise ValueError("Both Poisson's ratio and Young's modulus must be temperature dependent or constant")
230 | 
231 | if nu.shape[0] == 1:
232 | if nu.shape[0] != E.shape[0]:
233 | raise ValueError("Same number of entries must exist for Poisson's ratio and Young's modulus")
234 | 
235 | lineStr += ',type=iso\n'
236 | if nu.ndim == 1:
237 | lineStr += '{:e},{:e}\n'.format(E[0], nu[0])
238 | elif nu.ndim == 2:
239 | for i in range(nu.shape[0]):
240 | lineStr += '{:e},{:e},{:e}\n'.format(E[i, 1], nu[i, 1], E[i, 0])
241 | else:
242 | raise ValueError('Not currently supported elastic tensor description')
243 | 
244 | return lineStr
245 | 
246 | def _writePlasticProp(self):
247 | 
248 | if not self.isPlastic():
249 | return ''
250 | 
251 | if self.isPlastic() and self.hardeningCurve is None:
252 | raise ValueError('Plasticity requires a work hardening curve to be defined')
253 | 
254 | lineStr = ''
255 | if self._workHardeningMode is ElastoPlasticMaterial.WorkHardeningType.ISOTROPIC:
256 | lineStr += '*plastic HARDENING=ISOTROPIC\n'
257 | elif self._workHardeningMode is ElastoPlasticMaterial.WorkHardeningType.KINEMATIC:
258 | lineStr += '*plastic HARDENING=KINEMATIC\n'
259 | elif self._workHardeningMode is ElastoPlasticMaterial.WorkHardeningType.COMBINED:
260 | lineStr += '*cyclic hardening HARDENING=COMBINED\n'
261 | 
262 | for i in range(self.hardeningCurve.shape[0]):
263 | lineStr += '{:e},{:e},{:e}\n'.format(self._hardeningCurve[i, 0], # Stress
264 | self._hardeningCurve[i, 1], # Plastic Strain
265 | self._hardeningCurve[i, 2]) # Temperature
266 | return lineStr
267 | def _writeMaterialProp(self, matPropName: str, tempVals: Iterable) -> str:
268 | """
269 | Helper method to write the material property name and formatted values, depending on the anisotropy
270 | of the material and whether temperature-dependent (non-linear) parameters are used.
271 | 
272 | :param matPropName: Material property
273 | :param tempVals: Values to assign material properties
274 | :return: str:
275 | """
276 | tempVal = np.asanyarray(tempVals)
277 | 
278 | lineStr = '*{:s}'.format(matPropName)
279 | 
280 | if (tempVal.ndim == 1 and tempVal.shape[0] == 1) or (tempVal.ndim == 2 and tempVal.shape[1] == 1):
281 | lineStr += '\n' # ',type=iso\n'
282 | elif (tempVal.ndim == 1 and tempVal.shape[0] == 3) or (tempVal.ndim == 2 and tempVal.shape[1] == 4):
283 | lineStr += ',type=ortho\n'
284 | else:
285 | raise ValueError('Invalid material property ({:s})'.format(matPropName))
286 | 
287 | if tempVal.ndim == 1:
288 | if tempVal.shape[0] == 1:
289 | lineStr += '{:e}\n'.format(tempVal[0])
290 | elif tempVal.shape[0] == 3:
291 | lineStr += '{:e},{:e},{:e}\n'.format(tempVal[0], tempVal[1], tempVal[2])
292 | 
293 | if tempVal.ndim == 2:
294 | for i in range(tempVal.shape[0]):
295 | if tempVal.shape[1] == 2:
296 | lineStr += '{:e},{:e}\n'.format(tempVal[i, 1], tempVal[i, 0])
297 | elif tempVal.shape[1] == 4:
298 | lineStr += '{:e},{:e},{:e},{:e}\n'.format(tempVal[i, 1], tempVal[i, 2], tempVal[i, 3], tempVal[i, 0])
299 | 
300 | return lineStr
301 | 
302 | def isValid(self) -> bool:
303 | return True
304 | 
305 | def writeInput(self) -> str:
306 | 
307 | inputStr = '*material, name={:s}\n'.format(self._name)
308 | 
309 | inputStr += self._writeElasticProp()
310 | 
311 | if self._density.any():
312 | inputStr += self._writeMaterialProp('density', self._density)
313 | 
314 | if self._cp.any():
315 | inputStr += self._writeMaterialProp('specific heat', self._cp)
316 | 
317 | if self._alpha_CTE.any():
318 | inputStr += self._writeMaterialProp('expansion', self._alpha_CTE)
319 | 
320 | if self._k.any():
321 | inputStr += self._writeMaterialProp('conductivity', self._k)
322 | 
323 | # Write the plastic mode
324 | inputStr += self._writePlasticProp()
325 | 
326 | return inputStr
327 | -------------------------------------------------------------------------------- /README.rst: --------------------------------------------------------------------------------
1 | PyCCX - Python Library for Calculix
2 | =======================================
3 | 
4 | .. image:: https://github.com/drlukeparry/pyccx/workflows/Python%20application/badge.svg
5 | :target: https://github.com/drlukeparry/pyccx/actions
6 | .. image:: https://readthedocs.org/projects/pyccx/badge/?version=latest
7 | :target: https://pyccx.readthedocs.io/en/latest/?badge=latest
8 | :alt: Documentation Status
9 | .. image:: https://badge.fury.io/py/PyCCX.svg
10 | :target: https://badge.fury.io
11 | .. image:: https://img.shields.io/badge/Made%20with-Python-1f425f.svg
12 | :target: https://www.python.org/
13 | .. image:: https://img.shields.io/pypi/l/pyccx.svg
14 | :target: https://pypi.python.org/pypi/pyccx/
15 | .. image:: https://img.shields.io/pypi/pyversions/pyccx.svg
16 | :target: https://pypi.python.org/pypi/pyccx/
17 | 
18 | PyCCX - a library for creating and running 3D FEA simulations using the open-source Calculix FEA package.
19 | 
20 | The aim of this project is to provide a framework for implementing 3D FEA analyses using the open-source
21 | `Calculix `_ solver. The analysis is complemented by the recent introduction of the
22 | `GMSH-SDK `_ , an extension to `GMSH `_ providing
23 | API bindings for different programming languages by the project authors, which enables sophisticated 3D FEA mesh
24 | generation outside of the GUI implementation.
25 | 
26 | This project aims to provide an integrated approach for generating full
27 | 2D and 3D structural and thermal FEA analyses for use in research, development and prototyping, all inside a
28 | Python environment. The functionality targets the delivery of automated, scripted approaches for performing FEA simulations,
29 | in particular for use in assessing the sensitivity of design and material inputs on the response of a system structure, which
30 | can be used as part of parametric optimisation studies.
31 | 
32 | This removes the requirement to set up each analysis manually using a GUI such as prepromax or GMSH.
33 | 
34 | Along with setting up and processing the analysis, numerous convenience functions are included to consistently interface
35 | between both the Calculix and GMSH functionality within a single Python environment.
36 | 
37 | Structure
38 | ##############
39 | 
40 | The PyCCX framework consists of classes for specifying the common components of the pre-processing stage, covering the following
41 | common FE workflow for performing a simulation:
42 | 
43 | * Generation of both 2D and 3D compatible analysis meshes for use with Calculix via GMSH
44 | * Creation and assignment of thermal and mechanical boundary conditions for use in analyses
45 | * Creation of multiple time (in)-dependent load cases
46 | * Creation and assignment of multiple material models and element types through a single analysis
47 | * Control and monitoring of the Calculix simulation execution
48 | * Processing and extraction of results obtained from Calculix
49 | 
50 | A meshing infrastructure provides an interface with GMSH for performing the meshing routines and for associating
51 | physical boundary conditions with the elements/faces generated from geometrical entities obtained from CAD models,
52 | typically by importing .step files.
53 | 
54 | The simulation class assembles the mesh and corresponding mesh identifier sets (Element, Nodal and Surfaces)
55 | in conjunction with the applied boundary conditions for each specified load-case within an analysis. The analysis
56 | is then exported as a Calculix input deck and executed by the Calculix solver. The simulation
57 | can be additionally monitored within the Python environment.
58 | 
59 | The results obtained upon completion of the analysis can be processed to extract the individual nodal and elemental quantities
60 | predicted in the analysis output. The results can also be exported to an unstructured VTK format for visualisation in
61 | Paraview.
62 | 
63 | Currently the analysis is unit-less, therefore the user should ensure that all constants, material parameters, boundary
64 | conditions, and geometric lengths are consistent - by default GMSH assumes 'mm' units when importing BRep CAD models.
65 | 
66 | Current Features
67 | ******************
68 | 
69 | Meshing:
70 | ---------
71 | Meshing is performed using the GMSH-SDK, which provides a Python interface to the GMSH meshing library. The features
72 | within pyccx provide higher-level functionality built on top of existing GMSH functionality. The library mainly
73 | facilitates setting up the analysis consistently within a single environment, such as mapping geometrical FE elements
74 | into compatible Calculix types with consistent nodal ordering.
Additional features available for meshing include: 75 | 76 | * Integration with GMSH for generation 3D FEA Meshes 77 | * Cleaning and merging of CAD assemblies using internal functionality provided by GMSH 78 | * Creation and assignment of NodeSet, ElementSet, SurfaceSet from mesh features applied for boundary conditions 79 | * Attachment of boundary conditions to geometrical CAD entities via GMSH (native .step import supported via OCC) 80 | 81 | FEA Capabilities: 82 | ------------------- 83 | 84 | * **Boundary Conditions**: (Acceleration, Convection, Fixed Displacements, Forces, Fluxes, Pressure, Radiation) 85 | * **Loadcase Types** (Structural Static, Thermal, Coupled Thermo-Mechanical) 86 | * **Materials** (Non-linear Elasto-Plastic Material) with user defined stress-strain curves and physical properties 87 | * **Results** (Selection of exported results for nodal and element data per loadcase) 88 | * **Analysis Types** configurable solver control (:auto-incrementing timestep, non-linear analysis) 89 | 90 | Results Processing: 91 | ---------------------- 92 | * Element and Nodal Results can be obtained across each timesteps 93 | * Results can be processed and visualised using the `pyccx.results` module 94 | * Extraction of node and element results directly from the Calculix .frd and datafile 95 | * Export of results to VTK file format for visualisation directly in Paraview 96 | 97 | 98 | Installation 99 | ************* 100 | PyCCX is multi-platform as a source based pythonpackage. This can be installed along with dependencies for GMSH automatically 101 | using the following commands: 102 | 103 | .. code:: bash 104 | 105 | pip install gmsh 106 | pip install pyccx 107 | 108 | alternatively, the package can be installed using the uv library: 109 | 110 | .. code:: bash 111 | 112 | uv pip install gmsh 113 | uv pip install pyccx 114 | 115 | Calculix Solver 116 | ***************** 117 | 118 | Depending on your environment, you will need to install the latest version of Calculix. This can be done through 119 | conda-forge `calculix package `_ in the Anaconda distribution, 120 | 121 | .. code:: bash 122 | 123 | conda install -c conda-forge calculix 124 | 125 | However, it is suggested that the most reliable mode is downloading the latest distribution of Calculix directly. 126 | 127 | **Windows:** 128 | 129 | The solver be separately obtained from within the distribution of `prepromax `_ 130 | 131 | **Linux:** 132 | 133 | The latest version of Calculix can be installed from the packages available within your linux distribution 134 | 135 | **Mac OS X:** 136 | 137 | Calculix can be installed using the `Homebrew `_ package manager. This requires the appropriate XCode 138 | compiler environment to be installed. Once this is done, Calculix can be installed using the following command: 139 | 140 | .. code:: bash 141 | 142 | brew tap costerwi/homebrew-calculix 143 | brew install calculix-ccx 144 | 145 | The path of the installed Calculix solver executable should be obtained, which is dependent on the configuration of the 146 | brew installation. 147 | 148 | Usage 149 | ************* 150 | 151 | The Calculix solver executable needs to be available in the system path, or the path to the executable needs to be manually 152 | specified. Across all platforms the direct path of the calculix solver executable needs to be initialised before any 153 | further use. 154 | 155 | .. 
code:: python 156 | 157 | from pyccx.core import Simulation 158 | 159 | # Set the path for Calculix in Windows 160 | Simulation.setCalculixPath('Path') 161 | 162 | 163 | The following code excerpt shows part of an example for creating and running a steady state thermal analysis of model 164 | using PyCCX of an existing mesh generated using the `pyccx.mesh.mesher` class. 165 | 166 | .. code:: python 167 | 168 | from pyccx.core import DOF, ElementSet, NodeSet, SurfaceSet, Simulation 169 | from pyccx.results import ElementResult, NodalResult, ResultProcessor 170 | from pyccx.loadcase import LoadCase, LoadCaseType 171 | from pyccx.material import ElastoPlasticMaterial 172 | 173 | # Set the path for Calculix in Windows 174 | Simulation.setCalculixPath('Path') 175 | 176 | # Create a Simulation object based on the supplied mesh model (defined separately) 177 | analysis = Simulation(myMeshModel) 178 | 179 | # Optionally set the working the base working directory 180 | analysis.setWorkingDirectory('.') 181 | 182 | 183 | # Create an ElementSet and NodeSet for the entire volume of named model ('PartA') 184 | myMeshModel.setEntityName((Ent.Volume, 1), 'PartA') # Set the name of the GMSH volume to 'PartA' 185 | volElSet = ElementSet('volElSet', myMeshModel.getElementIds((Ent.Volume,1))) 186 | volNodeSet = NodeSet('VolumeNodeSet', myMeshModel.getNodesFromVolumeByName('PartA')) 187 | 188 | analysis.initialConditions.append({'type': 'temperature', 'set': 'VolumeNodeSet', 'value': 0.0}) 189 | 190 | # Create a thermal load case and set the timesettings 191 | thermalLoadCase = LoadCase('Thermal_Load_Case') 192 | 193 | # Set the loadcase type to thermal - eventually this will be individual analysis classes with defaults 194 | thermalLoadCase.setLoadCaseType(LoadCaseType.THERMAL) 195 | 196 | # Set the thermal analysis to be a steady state simulation 197 | thermalLoadCase.isSteadyState = True 198 | thermalLoadCase.setTimeStep(0.5, 0.5, 5.0) 199 | 200 | # Attach the nodal and element result options to each loadcase 201 | # Set the nodal and element variables to record in the results (.frd) file 202 | nodeThermalPostResult = NodalResult('volNodeSet') 203 | nodeThermalPostResult.temperature = True 204 | 205 | elThermalPostResult = ElementResult('Volume1') 206 | elThermalPostResult.heatFlux = True 207 | 208 | # Add the result configurations to the loadcase 209 | thermalLoadCase.resultSet = [nodeThermalPostResult, elThermalPostResult] 210 | 211 | # Set thermal boundary conditions for the loadcase using specific NodeSets 212 | thermalLoadCase.boundaryConditions.append( 213 | {'type': 'fixed', 'nodes': 'surfaceNodesA', 'dof': [DOF.T], 'value': [60]}) 214 | 215 | thermalLoadCase.boundaryConditions.append( 216 | {'type': 'fixed', 'nodes': 'surfaceNodesB', 'dof': [DOF.T], 'value': [20]}) 217 | 218 | # Material 219 | # Add a elastic material and assign it to the volume. 220 | # Note ensure that the units correctly correspond with the geometry length scales 221 | steelMat = ElastoPlasticMaterial('Steel') 222 | steelMat.E = 210000. 
# [MPa] Young's Modulus 223 | steelMat.alpha_CTE = [25e-6, 23e-6, 24e-6] # Thermal Expansion Coefficient 224 | steelMat.density = 1.0 # Density 225 | steelMat.cp = 1.0 # Specific Heat 226 | steelMat.k = 1.0 # Thermal Conductivity 227 | 228 | 229 | # The material and material type is assigned to the elements across the part 230 | analysis.materialAssignments = [ 231 | SolidMaterialAssignment("solid_material", elementSet=volElSet, material=steelMat) 232 | ] 233 | 234 | # Set the loadcases used in sequential order 235 | analysis.loadCases = [thermalLoadCase] 236 | 237 | # Run the analysis # 238 | analysis.run() 239 | 240 | # Open the results file ('input') is currently the file that is generated by PyCCX 241 | results = analysis.results() 242 | results.load() 243 | 244 | # Export the results to VTK format as a significant timestep for post-processing 245 | import pyccx.utils.exporters as exporters 246 | exporters.exportToVTK('result.vtu', results, inc=-1) 247 | 248 | 249 | The basic usage is split between the meshing facilities provided by GMSH and analysing a problem using the Calculix 250 | Solver. Further documented examples are provided in `examples `_ . 251 | 252 | The current changelog is found in the `CHANGELOG `_ . -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | 2 | # Change Log 3 | All notable changes to this project will be documented in this file. 4 | 5 | ## [Unreleased] 6 | 7 | ### Added 8 | 9 | ### Fixed 10 | 11 | ### Changed 12 | 13 | # [0.2.0] - 2025-08-10 14 | 15 | ### Added 16 | 17 | **General Features:** 18 | 19 | - Add support for running PyCCX on Mac OSX platforms - [5900cec2a6004f4ea649c958e82fa0fc1a356b6c](https://github.com/drlukeparry/pyccx/commit/5900cec2a6004f4ea649c958e82fa0fc1a356b6c) 20 | 21 | **Analysis Features:** 22 | 23 | - Added `pyccx.core.ModelObject` as a base class for all objects in the simulation, which provides generic infrastructure for implementing feature - [8815a89275e252afd75dd76029e98ce58a10e23e](https://github.com/drlukeparry/pyccx/commit/) 24 | - Added `pyccx.core.Meshset` as the base class for all mesh sets in the simulation, which allows for more complex mesh set definitions and operations - [67a229ee8c69f2b02a869c4c1255f62570c662ca](https://github.com/drlukeparry/pyccx/commit/) 25 | - Added `pyccx.core.SurfaceNodeSet` class which is used for defining a flux or distributed load (pressure) BCS within an analysis - [ab264a8a3c9d4c9dcd463c882d0e22d032bf3da9](https://github.com/drlukeparry/pyccx/commit/ab264a8a3c9d4c9dcd463c882d0e22d032bf3da9) 26 | - Added `pyccx.mesh.Ent` enumeration class used to select and specifying elementary geometric BREP features within GMSH - [89c35fe9eb37d74ba568ec175fbd713c79c60f93](https://github.com/drlukeparry/pyccx/commit/89c35fe9eb37d74ba568ec175fbd713c79c60f93), [31c8896608be06d957149571c7d32b009bd0ecec](https://github.com/drlukeparry/pyccx/commit/31c8896608be06d957149571c7d32b009bd0ecec) 27 | - Added `pyccx.core.Amplitude` object for defining time-dependent BCs - [fadf31cb603bfcb23f06390cbae9bf0bc3b035d0](https://github.com/drlukeparry/pyccx/commit/), [a54e392d56784bce2ca35c522eb703aeeaf1fbde](https://github.com/drlukeparry/pyccx/commit/) 28 | - Includes definition of target mesh features and time delay - [897d0fcc0c25defcd563fe5192194f95f9ab762a](https://github.com/drlukeparry/pyccx/commit/) 29 | 30 | **Analysis Features:** 31 | 32 | - Simulation can be monitored during the 
solving phase using the `Simulation.monitor` method, which allows for real-time updates of the simulation status and results - [df0e83c3472c43afd292c49a66687615538703f2](https://github.com/drlukeparry/pyccx/commit/df0e83c3472c43afd292c49a66687615538703f2) 33 | - Calculix output stream is piped and data is extracted to obtain runtime information 34 | - Total elapsed runtime is calculated and available in the `pyccx.analysis.Simulation` class - [77766655f61fedd5cbed065f63276b4aa7314da2](https://github.com/drlukeparry/pyccx/commit/) 35 | - Checks added for verifying the Calculix executable path and compiled version - [15e25fc5b19bbb5850226bb3ea581f6ff51cd19c](https://github.com/drlukeparry/pyccx/commit/) 36 | 37 | **Load Case Features:** 38 | 39 | - Added several features in the `LoadCase` class to allow for more complex simulation scenarios, including 40 | - Steady State Analysis - [14e91a14c8c03e7e54e4083c3e36e736b02c6550](https://github.com/drlukeparry/pyccx/commit/) 41 | - Default, initial, and minimum and maximum time-stepping options 42 | - Total simulation duration 43 | - Enable automatic incrementation 44 | - Enable Nonlinear analysis [24f3d7918a2df6a37c09fa63650dc24d580f129b](https://github.com/drlukeparry/pyccx/commit/) 45 | - Added a method to reset boundary conditions with the sequential application of loadcases [13cbbee03d0c05d31ade55621d7aad34733e2c27](https://github.com/drlukeparry/pyccx/commit/) 46 | 47 | **Postprocessing Features:** 48 | 49 | This release includes significant improvements to the postprocessing features of PyCCX, including: 50 | - Refactored `Result` class to allow for more complex result extraction and storage 51 | - Added `ResultsValue` class which is used to specify requested simulation output features - [88c29ae537074a78cd28bf3c79a5f71aca171097](https://github.com/drlukeparry/pyccx/commit/) 52 | - Added a method to calculate the 2D and 3D von Mises stress from the Cauchy stresses - [d9c40817acf5777701a30584ec6225434b7f32cb](https://github.com/drlukeparry/pyccx/commit/) 53 | - Drastically improved parsing of Calculix output files (.frd, .dat) [b9a8c88dc73c55126c007d35b0f18a9db155cf87](https://github.com/drlukeparry/pyccx/commit/) 54 | - `ResultProcessor` extracts nodal and element results data from the output files, which was previously unavailable [dcb19e965a650a37241cb4ca372e5842ed461556](https://github.com/drlukeparry/pyccx/commit/) 55 | - Correctly stores the output generated during the solving phase of the Calculix simulation - [6c8d341e7cced4d99cdfa4cdd1f5522f51e6e472](https://github.com/drlukeparry/pyccx/commit/) 56 | - Added an option for Calculix to not expand shell elements when requested via `NodalResult.expandShellElements` - [f52309b907dfdd4484a2b32e2d61ef3cc11225d2](https://github.com/drlukeparry/pyccx/commit/) 57 | - Added method `ResultProcessor.clearResults` - [ed15b15d8fcbe7d422062e573a5e5a375e0462d6](https://github.com/drlukeparry/pyccx/commit/) 58 | - Added VTK Exporter for exporting results to VTK format, which can be used for visualisation in ParaView or other VTK-compatible software - [6c841317d977cace690a2c3b984b61686a5e4371](https://github.com/drlukeparry/pyccx/commit/6c841317d977cace690a2c3b984b61686a5e4371) 59 | - Added `utils.exportToVTK` method that is used to export the results to native VTK format 60 | - Added `utils.exportToPVD` method to export all timesteps to a _.PVD_ format - [0b944486591ba2dfe56efb7d9e62d7bdfea1cddd](https://github.com/drlukeparry/pyccx/commit/0b944486591ba2dfe56efb7d9e62d7bdfea1cddd) 61 | 62 | **Meshing Features:**
63 | - Added the methods `Mesher.setRecombineSurfaces` and `Mesher.recombineMesh`, offering functionality to recombine surface meshes and corresponding 3D volumes 64 | - Uses internal GMSH method for recombining surface meshes to 3D volumes - [5f2564430c51fa6aba746c7f4cbf8d575ce5bb5c](https://github.com/drlukeparry/pyccx/commit/5f2564430c51fa6aba746c7f4cbf8d575ce5bb5c) 65 | - Added `pyccx.mesh.RecombinationAlgorithm` enumeration class which is used to specify the recombination algorithm used in GMSH - [7b433b75c314313752da7e7578d43849246b37fa](https://github.com/drlukeparry/pyccx/commit/7b433b75c314313752da7e7578d43849246b37fa) 66 | - Usage is specified in `Mesher.recombinationAlgorithm` 67 | - Added several element families and types for matching between GMSH and Calculix native types, including: [14e91a14c8c03e7e54e4083c3e36e736b02c6550](https://github.com/drlukeparry/pyccx/commit/14e91a14c8c03e7e54e4083c3e36e736b02c6550) 68 | - `pyccx.mesh.elements.ElementType` enumeration class which is used to specify the element type (e.g. planar, shell, 3D continuum, etc.) 69 | - `pyccx.mesh.elements.ElementFamilies` enumeration class which is used to specify the base geometric element family used in the simulation 70 | - Element mappings between GMSH and Calculix native types - including face orders 71 | - Mask for specifying the mapping between element face and nodal orders 72 | - Added GMSH meshing algorithm enumeration class which is used to specify the meshing algorithm used in GMSH - [2f4417b095a165aa48846d826612f92f2a94b671](https://github.com/drlukeparry/pyccx/commit/2f4417b095a165aa48846d826612f92f2a94b671) 73 | - Includes GMSH 2D meshing algorithm options (`pyccx.mesh.MeshingAlgorithm2D`) and 3D meshing algorithm options (`pyccx.mesh.MeshingAlgorithm3D`) 74 | - Added mesh element assignments to ensure different element types are correctly mapped between GMSH and Calculix - [7cbaebed880c75b1a85d1376fb66466649155ccf](https://github.com/drlukeparry/pyccx/commit/7cbaebed880c75b1a85d1376fb66466649155ccf) 75 | - Specified assignments are stored in a `Mesh.meshAssignments` dictionary 76 | - Added method `Mesh.identifyUnassignedElements` to the `Mesh` class which identifies elements that are not assigned to a specific element type - [c52cbb11bad0af4116462b72bc66b555984ccebd](https://github.com/drlukeparry/pyccx/commit/c52cbb11bad0af4116462b72bc66b555984ccebd) 77 | - Added method `Mesher.open` to load an existing GMSH model file into the `Mesher` class - [da42c26fdc94f0cf8d65be98be3c9674ed47ce22](https://github.com/drlukeparry/pyccx/commit/da42c26fdc94f0cf8d65be98be3c9674ed47ce22) 78 | - Added method `Mesher.getAllPhysicalGroupElements` to the `Mesher` class which returns all physical group elements in the mesh - [a0cd63258e9e9f8ce4f53797d2a42cd3cf3ad7af](https://github.com/drlukeparry/pyccx/commit/a0cd63258e9e9f8ce4f53797d2a42cd3cf3ad7af) 79 | - Added method `Mesher.clearPhyiscalGroups` to remove all physical groups within the Mesh model - [ee712094168641e7c71d7f17945645f363826edf](https://github.com/drlukeparry/pyccx/commit/ee712094168641e7c71d7f17945645f363826edf) 80 | - Added method `Mesher.getElementType` and updated methods to correctly generate the mesh [c22d94a1651d4609384db1484c59a789f0440db5](https://github.com/drlukeparry/pyccx/commit/c22d94a1651d4609384db1484c59a789f0440db5) 81 | - Added method `Mesher.getFacesFromId` to obtain the current correctly orientated face lists for a given surface id -
[df0e83c3472c43afd292c49a66687615538703f2](https://github.com/drlukeparry/pyccx/commit/df0e83c3472c43afd292c49a66687615538703f2) 82 | - Mesher now directly exports the mesh to a Calculix input file using the `Mesher.writeMesh` method - [f57dcb9a7c7f23e4fd3022c91a8edaa0b8b1de0b](https://github.com/drlukeparry/pyccx/commit/f57dcb9a7c7f23e4fd3022c91a8edaa0b8b1de0b) 83 | - Resolves various issues with exporting from the GMSH .msh native format, and instead directly produces the correct format for Calculix 84 | 85 | ### Fixed 86 | 87 | - Update NodeSet and ElementSet to correctly export the node and element ids - [fe3e6b70fda75793cd9e70b19b9a3b5797d95204](https://github.com/drlukeparry/pyccx/commit/fe3e6b70fda75793cd9e70b19b9a3b5797d95204) 88 | - Fix maximum number of element ids per line for an ElementSet - [1cdb3d174d1d0df5366b59efa94defaf8bac3066](https://github.com/drlukeparry/pyccx/commit/1cdb3d174d1d0df5366b59efa94defaf8bac3066) 89 | - Correct exporting of decimal numbers to 5 decimal places in the input file - [c0c7f19b0d564b730fec520935123e8ec459113a](https://github.com/drlukeparry/pyccx/commit/c0c7f19b0d564b730fec520935123e8ec459113a) 90 | - Element sets are collected for the material assignments - [da81bb96cd1d1ebb3d4a45a5053d8b9a807c4436](https://github.com/drlukeparry/pyccx/commit/da81bb96cd1d1ebb3d4a45a5053d8b9a807c4436) 91 | - Bug Fix: Surface Sets are correctly exported in the Simulation class - [d4d4ff806dd9fbc18b786e31d4aa5b95c82ff66f](https://github.com/drlukeparry/pyccx/commit/d4d4ff806dd9fbc18b786e31d4aa5b95c82ff66f) 92 | - Bug Fix: Force components are correctly written to the input file - [3705feb7ac9b11340e5b444eddaf2195feff561a](https://github.com/drlukeparry/pyccx/commit/3705feb7ac9b11340e5b444eddaf2195feff561a) 93 | - Bug Fix: General improvements to consistency for identifying nodes within a GMSH model in the `pyccx.mesh.Mesher` class - [deaa0f286849babdf32e1a0a07c8a3cd2871a1fe](https://github.com/drlukeparry/pyccx/commit/deaa0f286849babdf32e1a0a07c8a3cd2871a1fe) 94 | - General updates to catch issues and raise program exceptions [9d436f3d56a1835a0fe3b879a21092a70f18b389](https://github.com/drlukeparry/pyccx/commit/9d436f3d56a1835a0fe3b879a21092a70f18b389), [c3f4e305413d6f0c9f8c129d0c6806c27def92c6](https://github.com/drlukeparry/pyccx/commit/c3f4e305413d6f0c9f8c129d0c6806c27def92c6) 95 | - `NodalResult` and `ElementResult` attributes renamed [b8d7eba9050b436afe442c61a0c599b282556855](https://github.com/drlukeparry/pyccx/commit/b8d7eba9050b436afe442c61a0c599b282556855), [bf06c16242523853ab69ab0bd59aecd9cc838788](https://github.com/drlukeparry/pyccx/commit/bf06c16242523853ab69ab0bd59aecd9cc838788) 96 | - Use the current 'scratch' directory when loading the Simulation's result files - [aa1976d9a01578ae1958f38fcccc382271b18b0b](https://github.com/drlukeparry/pyccx/commit/aa1976d9a01578ae1958f38fcccc382271b18b0b) 97 | 98 | ### Changed 99 | 100 | - Direct path for the Calculix solver executable on Windows is now used for consistency - [b48144e7407c1ff20bb43e6e9c9b1ed7e51b7023](https://github.com/drlukeparry/pyccx/commit/b48144e7407c1ff20bb43e6e9c9b1ed7e51b7023) 101 | - Updated build system to use hatchling for building the PyCCX package - [e27680ff587f53211bb8646d8194415bfe1dd41f](https://github.com/drlukeparry/pyccx/commit/e27680ff587f53211bb8646d8194415bfe1dd41f) 102 | - Migrated to `pyproject.toml` for Python package metadata and build configuration 103 | - Ruff linting now used for code formatting and linting -
[1aa76e4c5b4d4195574cd84cde42be3cebb30fbb](https://github.com/drlukeparry/pyccx/commit/1aa76e4c5b4d4195574cd84cde42be3cebb30fbb) 104 | - Fixed typo for emissivity attribute for `pyccx.bc.Radiation` BC - [5577091a97ae74689fb3aaafbf83de135cb174bf](https://github.com/drlukeparry/pyccx/commit/5577091a97ae74689fb3aaafbf83de135cb174bf) 105 | - Updated the `pyccx` module imports - [33fe5a9939b5cc2cfea7ab81d47602b194009e99](https://github.com/drlukeparry/pyccx/commit/33fe5a9939b5cc2cfea7ab81d47602b194009e99) 106 | - Updated project inline documentation - [03743c3ae2dfe2540de034b49f771f2235c1c417](https://github.com/drlukeparry/pyccx/commit/03743c3ae2dfe2540de034b49f771f2235c1c417), [3e12ceefe2c65d67308a2e4d05a2ccdd428a7705](https://github.com/drlukeparry/pyccx/commit/3e12ceefe2c65d67308a2e4d05a2ccdd428a7705), [d1cd854ff629fef6cae8e9651a0acf470deca273](https://github.com/drlukeparry/pyccx/commit/d1cd854ff629fef6cae8e9651a0acf470deca273) 107 | - Updated `pyccx.loadcase.LoadCaseType` to be derived from an `IntEnum` for unique IDs [156257dc65d1a2820ba5477ea428468297421adc](https://github.com/drlukeparry/pyccx/commit/156257dc65d1a2820ba5477ea428468297421adc) 108 | - Minimum version of Python is now 3.9, because 3.8 is now considered unmaintained 109 | - Minimum version of GMSH requires is 4.14 [6156c8339731a1f5359b9396e97a9707eb101320](https://github.com/drlukeparry/pyccx/commit/6156c8339731a1f5359b9396e97a9707eb101320) 110 | 111 | ## [0.1.2] - 2023-07-11 112 | 113 | The first release of PyCCX on PyPI. 114 | 115 | -------------------------------------------------------------------------------- /models/cornerCube.step: -------------------------------------------------------------------------------- 1 | ISO-10303-21; 2 | HEADER; 3 | FILE_DESCRIPTION (( 'STEP AP203' ), 4 | '1' ); 5 | FILE_NAME ('Web Model 43296_43296.step', 6 | '2018-10-11T14:58:59', 7 | ( '' ), 8 | ( '' ), 9 | 'SwSTEP 2.0', 10 | 'SolidWorks 2016', 11 | '' ); 12 | FILE_SCHEMA (( 'CONFIG_CONTROL_DESIGN' )); 13 | ENDSEC; 14 | 15 | DATA; 16 | #1 = ORGANIZATION ( 'UNSPECIFIED', 'UNSPECIFIED', '' ) ; 17 | #2 = ORIENTED_EDGE ( 'NONE', *, *, #56, .F. ) ; 18 | #3 = AXIS2_PLACEMENT_3D ( 'NONE', #118, #63, #230 ) ; 19 | #4 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 30.00000000000000000, 0.0000000000000000000 ) ) ; 20 | #5 =( LENGTH_UNIT ( ) NAMED_UNIT ( * ) SI_UNIT ( .MILLI., .METRE. ) ); 21 | #6 = APPROVAL ( #134, 'UNSPECIFIED' ) ; 22 | #7 = PERSON_AND_ORGANIZATION_ROLE ( 'design_owner' ) ; 23 | #8 =( BOUNDED_CURVE ( ) B_SPLINE_CURVE ( 3, ( #74, #19, #95, #127 ), 24 | .UNSPECIFIED., .F., .T. ) 25 | B_SPLINE_CURVE_WITH_KNOTS ( ( 4, 4 ), 26 | ( 5.235987755982993300, 7.330382858376186300 ), 27 | .UNSPECIFIED. ) 28 | CURVE ( ) GEOMETRIC_REPRESENTATION_ITEM ( ) RATIONAL_B_SPLINE_CURVE ( ( 1.000000000000000000, 0.6666666666666674100, 0.6666666666666674100, 1.000000000000000000 ) ) 29 | REPRESENTATION_ITEM ( '' ) ); 30 | #9 = CALENDAR_DATE ( 2018, 11, 10 ) ; 31 | #10 = LOCAL_TIME ( 10, 58, 59.00000000000000000, #181 ) ; 32 | #11 = SECURITY_CLASSIFICATION ( '', '', #104 ) ; 33 | #12 = SHAPE_DEFINITION_REPRESENTATION ( #177, #195 ) ; 34 | #13 = ORIENTED_EDGE ( 'NONE', *, *, #122, .T. ) ; 35 | #14 = UNCERTAINTY_MEASURE_WITH_UNIT (LENGTH_MEASURE( 1.000000000000000100E-005 ), #5, 'distance_accuracy_value', 'NONE'); 36 | #15 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 2.383525667003057800, -6.349999999999999600 ) ) ; 37 | #16 = ORIENTED_EDGE ( 'NONE', *, *, #220, .F. ) ; 38 | #17 = EDGE_CURVE ( 'NONE', #131, #143, #30, .T. 
) ; 39 | #18 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 2.383525667003057800, -6.349999999999999600 ) ) ; 40 | #19 = CARTESIAN_POINT ( 'NONE', ( -7.937500000000003600, -1.064373873755038600, 2.749630657015562600 ) ) ; 41 | #20 = CIRCLE ( 'NONE', #129, 6.349999999999999600 ) ; 42 | #21 = DIRECTION ( 'NONE', ( -0.0000000000000000000, -1.000000000000000000, -0.0000000000000000000 ) ) ; 43 | #22 = PRODUCT_RELATED_PRODUCT_CATEGORY ( 'detail', '', ( #68 ) ) ; 44 | #23 = APPROVAL_PERSON_ORGANIZATION ( #77, #85, #232 ) ; 45 | #24 = CARTESIAN_POINT ( 'NONE', ( 6.349999999999975700, 1.114897253674673300, 3.719743878930865700 ) ) ; 46 | #25 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, -1.000000000000000000 ) ) ; 47 | #26 = APPLICATION_CONTEXT ( 'configuration controlled 3d designs of mechanical parts and assemblies' ) ; 48 | #27 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 49 | #28 = DIRECTION ( 'NONE', ( 1.000000000000000000, 0.0000000000000000000, 0.0000000000000000000 ) ) ; 50 | #29 = EDGE_LOOP ( 'NONE', ( #227, #237, #150, #112 ) ) ; 51 | #30 = LINE ( 'NONE', #90, #41 ) ; 52 | #31 = DIRECTION ( 'NONE', ( -0.0000000000000000000, -1.000000000000000000, -0.0000000000000000000 ) ) ; 53 | #32 = EDGE_LOOP ( 'NONE', ( #48, #213, #88 ) ) ; 54 | #33 = PLANE ( 'NONE', #76 ) ; 55 | #34 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 56 | #35 = ORIENTED_EDGE ( 'NONE', *, *, #36, .T. ) ; 57 | #36 = EDGE_CURVE ( 'NONE', #178, #182, #184, .T. ) ; 58 | #37 = CARTESIAN_POINT ( 'NONE', ( 2.602085213965210600E-015, 10.16000000000000000, 0.0000000000000000000 ) ) ; 59 | #38 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 60 | #39 = VERTEX_POINT ( 'NONE', #15 ) ; 61 | #40 = COORDINATED_UNIVERSAL_TIME_OFFSET ( 5, 0, .BEHIND. ) ; 62 | #41 = VECTOR ( 'NONE', #198, 999.9999999999998900 ) ; 63 | #42 = CARTESIAN_POINT ( 'NONE', ( 7.776507174585692200E-016, 2.383525667002992100, 6.349999999999999600 ) ) ; 64 | #43 = LOCAL_TIME ( 10, 58, 59.00000000000000000, #61 ) ; 65 | #44 = DESIGN_CONTEXT ( 'detailed design', #26, 'design' ) ; 66 | #45 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 0.0000000000000000000 ) ) ; 67 | #46 = AXIS2_PLACEMENT_3D ( 'NONE', #57, #221, #98 ) ; 68 | #47 = DATE_AND_TIME ( #105, #10 ) ; 69 | #48 = ORIENTED_EDGE ( 'NONE', *, *, #71, .T. ) ; 70 | #49 = EDGE_CURVE ( 'NONE', #39, #143, #59, .T. ) ; 71 | #50 = CC_DESIGN_PERSON_AND_ORGANIZATION_ASSIGNMENT ( #34, #191, ( #11 ) ) ; 72 | #51 = CARTESIAN_POINT ( 'NONE', ( 1.833190123778916500, 12.75230378972847800, -3.175178434318528500 ) ) ; 73 | #52 = ORIENTED_EDGE ( 'NONE', *, *, #201, .T. ) ; 74 | #53 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 2.383525667003057800, -6.349999999999999600 ) ) ; 75 | #54 = CARTESIAN_POINT ( 'NONE', ( -1.833190123778914100, 7.567696210271519100, -3.175178434318574700 ) ) ; 76 | #55 = EDGE_LOOP ( 'NONE', ( #165, #2, #194, #132 ) ) ; 77 | #56 = EDGE_CURVE ( 'NONE', #131, #133, #117, .T. ) ; 78 | #57 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 0.0000000000000000000 ) ) ; 79 | #58 = DIRECTION ( 'NONE', ( 0.5773827170626467700, 0.8164736358495328100, 0.0000000000000000000 ) ) ; 80 | #59 =( BOUNDED_CURVE ( ) B_SPLINE_CURVE ( 3, ( #53, #107, #164, #162 ), 81 | .UNSPECIFIED., .F., .T. ) 82 | B_SPLINE_CURVE_WITH_KNOTS ( ( 4, 4 ), 83 | ( 5.759586531581282600, 7.330382858376186300 ), 84 | .UNSPECIFIED. 
) 85 | CURVE ( ) GEOMETRIC_REPRESENTATION_ITEM ( ) RATIONAL_B_SPLINE_CURVE ( ( 1.000000000000000000, 0.8047378541243632700, 0.8047378541243632700, 1.000000000000000000 ) ) 86 | REPRESENTATION_ITEM ( '' ) ); 87 | #60 = ADVANCED_FACE ( 'NONE', ( #126 ), #203, .F. ) ; 88 | #61 = COORDINATED_UNIVERSAL_TIME_OFFSET ( 5, 0, .BEHIND. ) ; 89 | #62 = APPROVAL_ROLE ( '' ) ; 90 | #63 = DIRECTION ( 'NONE', ( -0.4082368179247598500, -0.5773827170626467700, -0.7070869101659441600 ) ) ; 91 | #64 = LOCAL_TIME ( 10, 58, 59.00000000000000000, #211 ) ; 92 | #65 = LINE ( 'NONE', #185, #170 ) ; 93 | #66 = DATE_TIME_ROLE ( 'creation_date' ) ; 94 | #67 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 1.000000000000000000, 0.0000000000000000000 ) ) ; 95 | #68 = PRODUCT ( 'Web Model 43296_43296', 'Web Model 43296_43296', '', ( #103 ) ) ; 96 | #69 = DATE_AND_TIME ( #115, #163 ) ; 97 | #70 = ORIENTED_EDGE ( 'NONE', *, *, #220, .T. ) ; 98 | #71 = EDGE_CURVE ( 'NONE', #93, #131, #229, .T. ) ; 99 | #72 = CIRCLE ( 'NONE', #46, 6.349999999999999600 ) ; 100 | #73 = DIRECTION ( 'NONE', ( -0.4082597620358407500, -0.5773178213166031400, -0.7071266505320594800 ) ) ; 101 | #74 = CARTESIAN_POINT ( 'NONE', ( -3.175000000000025600, 5.670250450497947600, 5.499261314031170600 ) ) ; 102 | #75 = CARTESIAN_POINT ( 'NONE', ( -3.175000000000025600, 5.670250450497947600, 5.499261314031170600 ) ) ; 103 | #76 = AXIS2_PLACEMENT_3D ( 'NONE', #78, #225, #58 ) ; 104 | #77 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 105 | #78 = CARTESIAN_POINT ( 'NONE', ( 2.602085213965210600E-015, 10.16000000000000000, -6.349999999999999600 ) ) ; 106 | #79 = CARTESIAN_POINT ( 'NONE', ( -1.121135126298618700, 3.176221088771583400, -6.349999999999999600 ) ) ; 107 | #80 = CC_DESIGN_DATE_AND_TIME_ASSIGNMENT ( #149, #192, ( #11 ) ) ; 108 | #81 = PERSON_AND_ORGANIZATION_ROLE ( 'creator' ) ; 109 | #82 = APPROVAL_STATUS ( 'not_yet_approved' ) ; 110 | #83 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 0.0000000000000000000 ) ) ; 111 | #84 = CALENDAR_DATE ( 2018, 11, 10 ) ; 112 | #85 = APPROVAL ( #82, 'UNSPECIFIED' ) ; 113 | #86 = PERSON_AND_ORGANIZATION_ROLE ( 'creator' ) ; 114 | #87 = CLOSED_SHELL ( 'NONE', ( #223, #205, #60, #146, #111, #240 ) ) ; 115 | #88 = ORIENTED_EDGE ( 'NONE', *, *, #122, .F. ) ; 116 | #89 = APPROVAL_DATE_TIME ( #188, #148 ) ; 117 | #90 = CARTESIAN_POINT ( 'NONE', ( 5.008368558097497900, 6.618847168904216500, 3.469446951953614200E-014 ) ) ; 118 | #91 = APPROVAL_PERSON_ORGANIZATION ( #242, #6, #62 ) ; 119 | #92 = VECTOR ( 'NONE', #73, 1000.000000000000000 ) ; 120 | #93 = VERTEX_POINT ( 'NONE', #200 ) ; 121 | #94 =( BOUNDED_CURVE ( ) B_SPLINE_CURVE ( 3, ( #147, #125, #144, #75 ), 122 | .UNSPECIFIED., .F., .T. ) 123 | B_SPLINE_CURVE_WITH_KNOTS ( ( 4, 4 ), 124 | ( 0.5235987755982894900, 1.047197551196593000 ), 125 | .UNSPECIFIED. ) 126 | CURVE ( ) GEOMETRIC_REPRESENTATION_ITEM ( ) RATIONAL_B_SPLINE_CURVE ( ( 1.000000000000000000, 0.9772838841927118400, 0.9772838841927118400, 1.000000000000000000 ) ) 127 | REPRESENTATION_ITEM ( '' ) ); 128 | #95 = CARTESIAN_POINT ( 'NONE', ( -7.937499999999986700, -1.064373873755012600, -2.749630657015616400 ) ) ; 129 | #96 = CARTESIAN_POINT ( 'NONE', ( 7.776507174585692200E-016, 2.383525667002992100, 6.349999999999999600 ) ) ; 130 | #97 = APPROVAL_PERSON_ORGANIZATION ( #142, #148, #139 ) ; 131 | #98 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 1.000000000000000000 ) ) ; 132 | #99 = ORIENTED_EDGE ( 'NONE', *, *, #175, .T. 
) ; 133 | #100 = VECTOR ( 'NONE', #31, 1000.000000000000000 ) ; 134 | #101 = DIRECTION ( 'NONE', ( -0.0000000000000000000, -1.000000000000000000, -0.0000000000000000000 ) ) ; 135 | #102 = ORIENTED_EDGE ( 'NONE', *, *, #175, .F. ) ; 136 | #103 = MECHANICAL_CONTEXT ( 'NONE', #215, 'mechanical' ) ; 137 | #104 = SECURITY_CLASSIFICATION_LEVEL ( 'unclassified' ) ; 138 | #105 = CALENDAR_DATE ( 2018, 11, 10 ) ; 139 | #106 = FACE_OUTER_BOUND ( 'NONE', #29, .T. ) ; 140 | #107 = CARTESIAN_POINT ( 'NONE', ( 3.719743878930865700, -0.2465087274367647500, -6.350000000000000500 ) ) ; 141 | #108 = CC_DESIGN_APPROVAL ( #148, ( #138 ) ) ; 142 | #109 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 30.00000000000000000, 0.0000000000000000000 ) ) ; 143 | #110 = APPROVAL_DATE_TIME ( #69, #85 ) ; 144 | #111 = ADVANCED_FACE ( 'NONE', ( #207 ), #183, .F. ) ; 145 | #112 = ORIENTED_EDGE ( 'NONE', *, *, #201, .F. ) ; 146 | #113 =( BOUNDED_CURVE ( ) B_SPLINE_CURVE ( 3, ( #168, #186, #79, #18 ), 147 | .UNSPECIFIED., .F., .T. ) 148 | B_SPLINE_CURVE_WITH_KNOTS ( ( 4, 4 ), 149 | ( 5.235987755982986200, 5.759586531581282600 ), 150 | .UNSPECIFIED. ) 151 | CURVE ( ) GEOMETRIC_REPRESENTATION_ITEM ( ) RATIONAL_B_SPLINE_CURVE ( ( 1.000000000000000000, 0.9772838841927123900, 0.9772838841927123900, 1.000000000000000000 ) ) 152 | REPRESENTATION_ITEM ( '' ) ); 153 | #114 = AXIS2_PLACEMENT_3D ( 'NONE', #45, #171, #28 ) ; 154 | #115 = CALENDAR_DATE ( 2018, 11, 10 ) ; 155 | #116 = ORIENTED_EDGE ( 'NONE', *, *, #49, .T. ) ; 156 | #117 = LINE ( 'NONE', #54, #92 ) ; 157 | #118 = CARTESIAN_POINT ( 'NONE', ( -7.512130657015614300, 10.16000000000000000, 4.337130657015509700 ) ) ; 158 | #119 = LOCAL_TIME ( 10, 58, 59.00000000000000000, #40 ) ; 159 | #120 = PRODUCT_DEFINITION_FORMATION_WITH_SPECIFIED_SOURCE ( 'ANY', '', #68, .NOT_KNOWN. ) ; 160 | #121 = FACE_OUTER_BOUND ( 'NONE', #196, .T. ) ; 161 | #122 = EDGE_CURVE ( 'NONE', #93, #133, #8, .T. ) ; 162 | #123 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 0.0000000000000000000 ) ) ; 163 | #124 = CARTESIAN_POINT ( 'NONE', ( 7.776507174585692200E-016, 30.00000000000000000, 6.349999999999999600 ) ) ; 164 | #125 = CARTESIAN_POINT ( 'NONE', ( -1.121135126298632000, 3.176221088771511900, 6.349999999999998800 ) ) ; 165 | #126 = FACE_OUTER_BOUND ( 'NONE', #244, .T. ) ; 166 | #127 = CARTESIAN_POINT ( 'NONE', ( -3.174999999999987800, 5.670250450498002700, -5.499261314031193700 ) ) ; 167 | #128 = CARTESIAN_POINT ( 'NONE', ( 7.776507174585692200E-016, 0.0000000000000000000, 6.349999999999999600 ) ) ; 168 | #129 = AXIS2_PLACEMENT_3D ( 'NONE', #83, #67, #136 ) ; 169 | #130 = EDGE_LOOP ( 'NONE', ( #52, #35, #218, #102, #116 ) ) ; 170 | #131 = VERTEX_POINT ( 'NONE', #37 ) ; 171 | #132 = ORIENTED_EDGE ( 'NONE', *, *, #49, .F. ) ; 172 | #133 = VERTEX_POINT ( 'NONE', #145 ) ; 173 | #134 = APPROVAL_STATUS ( 'not_yet_approved' ) ; 174 | #135 =( NAMED_UNIT ( * ) SI_UNIT ( $, .STERADIAN. ) SOLID_ANGLE_UNIT ( ) ); 175 | #136 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 1.000000000000000000 ) ) ; 176 | #137 = DIRECTION ( 'NONE', ( -0.0000000000000000000, -1.000000000000000000, -0.0000000000000000000 ) ) ; 177 | #138 = PRODUCT_DEFINITION ( 'UNKNOWN', '', #120, #44 ) ; 178 | #139 = APPROVAL_ROLE ( '' ) ; 179 | #140 = FACE_OUTER_BOUND ( 'NONE', #32, .T. 
) ; 180 | #141 = CC_DESIGN_PERSON_AND_ORGANIZATION_ASSIGNMENT ( #38, #86, ( #138 ) ) ; 181 | #142 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 182 | #143 = VERTEX_POINT ( 'NONE', #166 ) ; 183 | #144 = CARTESIAN_POINT ( 'NONE', ( -2.204068499550337000, 4.297261705067620100, 6.059828877180491300 ) ) ; 184 | #145 = CARTESIAN_POINT ( 'NONE', ( -3.174999999999987800, 5.670250450498002700, -5.499261314031193700 ) ) ; 185 | #146 = ADVANCED_FACE ( 'NONE', ( #140 ), #33, .F. ) ; 186 | #147 = CARTESIAN_POINT ( 'NONE', ( 7.776507174585692200E-016, 2.383525667002992100, 6.349999999999999600 ) ) ; 187 | #148 = APPROVAL ( #156, 'UNSPECIFIED' ) ; 188 | #149 = DATE_AND_TIME ( #9, #119 ) ; 189 | #150 = ORIENTED_EDGE ( 'NONE', *, *, #187, .F. ) ; 190 | #151 = CC_DESIGN_DATE_AND_TIME_ASSIGNMENT ( #47, #66, ( #138 ) ) ; 191 | #152 = APPLICATION_PROTOCOL_DEFINITION ( 'international standard', 'config_control_design', 1994, #26 ) ; 192 | #153 = APPLICATION_PROTOCOL_DEFINITION ( 'international standard', 'config_control_design', 1994, #215 ) ; 193 | #154 =( GEOMETRIC_REPRESENTATION_CONTEXT ( 3 ) GLOBAL_UNCERTAINTY_ASSIGNED_CONTEXT ( ( #14 ) ) GLOBAL_UNIT_ASSIGNED_CONTEXT ( ( #5, #173, #135 ) ) REPRESENTATION_CONTEXT ( 'NONE', 'WORKASPACE' ) ); 194 | #155 = PLANE ( 'NONE', #3 ) ; 195 | #156 = APPROVAL_STATUS ( 'not_yet_approved' ) ; 196 | #157 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, -6.349999999999999600 ) ) ; 197 | #158 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 198 | #159 = CYLINDRICAL_SURFACE ( 'NONE', #210, 6.349999999999999600 ) ; 199 | #160 = EDGE_CURVE ( 'NONE', #182, #208, #72, .T. ) ; 200 | #161 = DIRECTION ( 'NONE', ( 0.0000000000000000000, -0.0000000000000000000, 1.000000000000000000 ) ) ; 201 | #162 = CARTESIAN_POINT ( 'NONE', ( 6.349999999999994300, 5.670250450498004500, 4.396328926264539200E-014 ) ) ; 202 | #163 = LOCAL_TIME ( 10, 58, 59.00000000000000000, #233 ) ; 203 | #164 = CARTESIAN_POINT ( 'NONE', ( 6.350000000000026300, 1.114897253674619600, -3.719743878930823000 ) ) ; 204 | #165 = ORIENTED_EDGE ( 'NONE', *, *, #172, .F. ) ; 205 | #166 = CARTESIAN_POINT ( 'NONE', ( 6.349999999999994300, 5.670250450498004500, 4.396328926264539200E-014 ) ) ; 206 | #167 = CYLINDRICAL_SURFACE ( 'NONE', #180, 6.349999999999999600 ) ; 207 | #168 = CARTESIAN_POINT ( 'NONE', ( -3.174999999999987800, 5.670250450498002700, -5.499261314031193700 ) ) ; 208 | #169 =( BOUNDED_CURVE ( ) B_SPLINE_CURVE ( 3, ( #209, #24, #174, #42 ), 209 | .UNSPECIFIED., .F., .T. ) 210 | B_SPLINE_CURVE_WITH_KNOTS ( ( 4, 4 ), 211 | ( 5.235987755982986200, 6.806784082777875600 ), 212 | .UNSPECIFIED. ) 213 | CURVE ( ) GEOMETRIC_REPRESENTATION_ITEM ( ) RATIONAL_B_SPLINE_CURVE ( ( 1.000000000000000000, 0.8047378541243667100, 0.8047378541243667100, 1.000000000000000000 ) ) 214 | REPRESENTATION_ITEM ( '' ) ); 215 | #170 = VECTOR ( 'NONE', #21, 1000.000000000000000 ) ; 216 | #171 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, 1.000000000000000000 ) ) ; 217 | #172 = EDGE_CURVE ( 'NONE', #133, #39, #113, .T. ) ; 218 | #173 =( NAMED_UNIT ( * ) PLANE_ANGLE_UNIT ( ) SI_UNIT ( $, .RADIAN. ) ); 219 | #174 = CARTESIAN_POINT ( 'NONE', ( 3.719743878930825700, -0.2465087274367322700, 6.349999999999997000 ) ) ; 220 | #175 = EDGE_CURVE ( 'NONE', #39, #208, #65, .T. 
) ; 221 | #176 = CC_DESIGN_PERSON_AND_ORGANIZATION_ASSIGNMENT ( #193, #7, ( #68 ) ) ; 222 | #177 = PRODUCT_DEFINITION_SHAPE ( 'NONE', 'NONE', #138 ) ; 223 | #178 = VERTEX_POINT ( 'NONE', #96 ) ; 224 | #179 = DIRECTION ( 'NONE', ( 0.4082597620358440800, 0.5773178213166078000, -0.7071266505320538100 ) ) ; 225 | #180 = AXIS2_PLACEMENT_3D ( 'NONE', #4, #137, #25 ) ; 226 | #181 = COORDINATED_UNIVERSAL_TIME_OFFSET ( 5, 0, .BEHIND. ) ; 227 | #182 = VERTEX_POINT ( 'NONE', #128 ) ; 228 | #183 = PLANE ( 'NONE', #239 ) ; 229 | #184 = LINE ( 'NONE', #124, #100 ) ; 230 | #185 = CARTESIAN_POINT ( 'NONE', ( 0.0000000000000000000, 30.00000000000000000, -6.349999999999999600 ) ) ; 231 | #186 = CARTESIAN_POINT ( 'NONE', ( -2.204068499550309900, 4.297261705067688500, -6.059828877180497600 ) ) ; 232 | #187 = EDGE_CURVE ( 'NONE', #178, #93, #94, .T. ) ; 233 | #188 = DATE_AND_TIME ( #236, #43 ) ; 234 | #189 = ORIENTED_EDGE ( 'NONE', *, *, #187, .T. ) ; 235 | #190 = CC_DESIGN_APPROVAL ( #6, ( #120 ) ) ; 236 | #191 = PERSON_AND_ORGANIZATION_ROLE ( 'classification_officer' ) ; 237 | #192 = DATE_TIME_ROLE ( 'classification_date' ) ; 238 | #193 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 239 | #194 = ORIENTED_EDGE ( 'NONE', *, *, #17, .T. ) ; 240 | #195 = ADVANCED_BREP_SHAPE_REPRESENTATION ( 'Web Model 43296_43296', ( #197, #114 ), #154 ) ; 241 | #196 = EDGE_LOOP ( 'NONE', ( #217, #189, #13, #228, #99, #70 ) ) ; 242 | #197 = MANIFOLD_SOLID_BREP ( 'CirPattern1', #87 ) ; 243 | #198 = DIRECTION ( 'NONE', ( 0.8165195240716848200, -0.5773178213166032500, 5.641021855165955800E-015 ) ) ; 244 | #199 = VECTOR ( 'NONE', #179, 1000.000000000000000 ) ; 245 | #200 = CARTESIAN_POINT ( 'NONE', ( -3.175000000000025600, 5.670250450497947600, 5.499261314031170600 ) ) ; 246 | #201 = EDGE_CURVE ( 'NONE', #143, #178, #169, .T. ) ; 247 | #202 = CARTESIAN_POINT ( 'NONE', ( 7.512130657015579700, 10.16000000000000000, 4.337130657015634900 ) ) ; 248 | #203 = PLANE ( 'NONE', #226 ) ; 249 | #204 = CC_DESIGN_APPROVAL ( #85, ( #11 ) ) ; 250 | #205 = ADVANCED_FACE ( 'NONE', ( #121 ), #167, .T. ) ; 251 | #206 = DATE_AND_TIME ( #84, #64 ) ; 252 | #207 = FACE_OUTER_BOUND ( 'NONE', #55, .T. ) ; 253 | #208 = VERTEX_POINT ( 'NONE', #157 ) ; 254 | #209 = CARTESIAN_POINT ( 'NONE', ( 6.349999999999994300, 5.670250450498004500, 4.396328926264539200E-014 ) ) ; 255 | #210 = AXIS2_PLACEMENT_3D ( 'NONE', #109, #101, #212 ) ; 256 | #211 = COORDINATED_UNIVERSAL_TIME_OFFSET ( 5, 0, .BEHIND. ) ; 257 | #212 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 0.0000000000000000000, -1.000000000000000000 ) ) ; 258 | #213 = ORIENTED_EDGE ( 'NONE', *, *, #56, .T. ) ; 259 | #214 = CC_DESIGN_PERSON_AND_ORGANIZATION_ASSIGNMENT ( #27, #216, ( #120 ) ) ; 260 | #215 = APPLICATION_CONTEXT ( 'configuration controlled 3d designs of mechanical parts and assemblies' ) ; 261 | #216 = PERSON_AND_ORGANIZATION_ROLE ( 'design_supplier' ) ; 262 | #217 = ORIENTED_EDGE ( 'NONE', *, *, #36, .F. ) ; 263 | #218 = ORIENTED_EDGE ( 'NONE', *, *, #160, .T. ) ; 264 | #219 = FACE_OUTER_BOUND ( 'NONE', #130, .T. ) ; 265 | #220 = EDGE_CURVE ( 'NONE', #208, #182, #20, .T. ) ; 266 | #221 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 1.000000000000000000, 0.0000000000000000000 ) ) ; 267 | #222 = CC_DESIGN_PERSON_AND_ORGANIZATION_ASSIGNMENT ( #158, #81, ( #120 ) ) ; 268 | #223 = ADVANCED_FACE ( 'NONE', ( #219 ), #159, .T. 
) ; 269 | #224 = DIRECTION ( 'NONE', ( 0.0000000000000000000, -0.7745705483267489900, -0.6324875221415829900 ) ) ; 270 | #225 = DIRECTION ( 'NONE', ( 0.8164736358495328100, -0.5773827170626467700, 0.0000000000000000000 ) ) ; 271 | #226 = AXIS2_PLACEMENT_3D ( 'NONE', #123, #238, #161 ) ; 272 | #227 = ORIENTED_EDGE ( 'NONE', *, *, #17, .F. ) ; 273 | #228 = ORIENTED_EDGE ( 'NONE', *, *, #172, .T. ) ; 274 | #229 = LINE ( 'NONE', #51, #199 ) ; 275 | #230 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 0.7745705483267515400, -0.6324875221415798800 ) ) ; 276 | #231 = CC_DESIGN_SECURITY_CLASSIFICATION ( #11, ( #120 ) ) ; 277 | #232 = APPROVAL_ROLE ( '' ) ; 278 | #233 = COORDINATED_UNIVERSAL_TIME_OFFSET ( 5, 0, .BEHIND. ) ; 279 | #234 = ORIENTED_EDGE ( 'NONE', *, *, #160, .F. ) ; 280 | #235 = PERSON ( 'UNSPECIFIED', 'UNSPECIFIED', 'UNSPECIFIED', ('UNSPECIFIED'), ('UNSPECIFIED'), ('UNSPECIFIED') ) ; 281 | #236 = CALENDAR_DATE ( 2018, 11, 10 ) ; 282 | #237 = ORIENTED_EDGE ( 'NONE', *, *, #71, .F. ) ; 283 | #238 = DIRECTION ( 'NONE', ( 0.0000000000000000000, 1.000000000000000000, 0.0000000000000000000 ) ) ; 284 | #239 = AXIS2_PLACEMENT_3D ( 'NONE', #202, #243, #224 ) ; 285 | #240 = ADVANCED_FACE ( 'NONE', ( #106 ), #155, .F. ) ; 286 | #241 = APPROVAL_DATE_TIME ( #206, #6 ) ; 287 | #242 = PERSON_AND_ORGANIZATION ( #235, #1 ) ; 288 | #243 = DIRECTION ( 'NONE', ( -0.4082368179247697300, -0.5773827170626467700, 0.7070869101659385000 ) ) ; 289 | #244 = EDGE_LOOP ( 'NONE', ( #16, #234 ) ) ; 290 | ENDSEC; 291 | END-ISO-10303-21; 292 | -------------------------------------------------------------------------------- /pyccx/bc/boundarycondition.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from enum import auto, Flag 3 | from typing import Any, Iterable, List, Optional, Tuple, Union 4 | 5 | import numpy as np 6 | 7 | from ..core import Amplitude, ElementSet, ModelObject, NodeSet, SurfaceSet, DOF 8 | 9 | 10 | class BoundaryConditionType(Flag): 11 | """ 12 | Boundary condition type specifies which type of analyses the boundary condition may be applied to. Flags may be 13 | mixed when coupled analyses are performed (e.g. thermo-mechanical analysis: STRUCTURAL | THERMAL) 14 | """ 15 | 16 | ANY = auto() 17 | """ Boundary condition can be used in any analysis""" 18 | 19 | STRUCTURAL = auto() 20 | """ Boundary condition can be used in a structural analysis""" 21 | 22 | THERMAL = auto() 23 | """ Boundary condition can be used in a thermal analysis""" 24 | 25 | FLUID = auto() 26 | """ Boundary condition can be used in a fluid analysis""" 27 | 28 | 29 | class BoundaryCondition(ModelObject): 30 | """ 31 | Base class for all boundary conditions 32 | """ 33 | 34 | def __init__(self, name, target, amplitude: Amplitude = None, timeDelay: Optional[float] = None): 35 | 36 | self.init = True 37 | self._target = target 38 | 39 | if not name: 40 | name = '' 41 | 42 | self._resetBoundaryCondition = False 43 | self._amplitude = amplitude 44 | self._timeDelay = timeDelay 45 | 46 | super().__init__(name) 47 | 48 | @property 49 | def resetBoundaryCondition(self) -> bool: 50 | """ 51 | Reset the boundary condition so that previous conditions in the BC are reset/ignored. By default, this value is 52 | to `False` to match the behavior in Calculix. This is useful for applying different boundary conditions across 53 | multiple seperated loadcases. 
54 | """ 55 | return self._resetBoundaryCondition 56 | 57 | @resetBoundaryCondition.setter 58 | def resetBoundaryCondition(self, value: bool): 59 | self._resetBoundaryCondition = value 60 | 61 | @property 62 | def amplitude(self) -> Union[None, Amplitude]: 63 | """ 64 | Apply a single Amplitude (time based profile) for the boundary condition 65 | """ 66 | return self._amplitude 67 | 68 | @amplitude.setter 69 | def amplitude(self, amplitude: Amplitude): 70 | if not isinstance(amplitude, Amplitude): 71 | raise TypeError('Boundary condition\'s amplitude must be an Amplitude object') 72 | 73 | self._amplitude = amplitude 74 | 75 | @property 76 | def timeDelay(self) -> Union[None, float]: 77 | """ 78 | A time delay can be added before initiating an Amplitude profile on the boundary condition 79 | """ 80 | return self._timeDelay 81 | 82 | @timeDelay.setter 83 | def timeDelay(self, time): 84 | if time < 1e-8: 85 | self._timeDelay = None 86 | else: 87 | self._timeDelay = time 88 | 89 | @property 90 | def target(self): 91 | """ 92 | The target feature set of the boundary condition 93 | """ 94 | return self._target 95 | 96 | @target.setter 97 | def target(self, target): 98 | self._target = target 99 | 100 | def getTargetName(self) -> str: 101 | """ 102 | Returns the name of the target feature set 103 | """ 104 | return self._target.name 105 | 106 | def getBoundaryElements(self) -> Union[None, np.ndarray]: 107 | """ 108 | Returns the elements associated with the target ElementSet (if applicable) for the boundary condition 109 | """ 110 | if isinstance(self._target, ElementSet): 111 | return self._target.els 112 | 113 | return None 114 | 115 | def getBoundaryFaces(self): 116 | """ 117 | Returns the surface pairs with the target SurfaceSet (if applicable) for the boundary condition 118 | """ 119 | if isinstance(self._target, SurfaceSet): 120 | return self._target.surfacePairs 121 | 122 | return None 123 | 124 | def getBoundaryNodes(self) -> Union[None, np.ndarray]: 125 | """ 126 | Returns the nodes associated with the target NodeSet (if applicable) for the boundary condition 127 | """ 128 | if isinstance(self._target, NodeSet): 129 | return self._target.nodes 130 | 131 | return None 132 | 133 | @abc.abstractmethod 134 | def type(self) -> BoundaryConditionType: 135 | """ 136 | Returns the BC type so that they are only applied to suitable load cases 137 | """ 138 | raise NotImplementedError() 139 | 140 | @abc.abstractmethod 141 | def writeInput(self) -> str: 142 | raise NotImplementedError() 143 | 144 | 145 | class Film(BoundaryCondition): 146 | """ 147 | The film or convective heat transfer boundary condition applies the Newton's law of cooling :math:`q = h_{ 148 | c}\\left(T-T_{amb}\\right)` to specified faces of boundaries elements (correctly ordered according to Calculix's 149 | requirements). This BC may be used in thermal and coupled thermo-mechanical analyses. 
150 | """ 151 | 152 | def __init__(self, target, h: float = 0.0, TAmbient: float = 0.0, 153 | name: Optional[str] = None, amplitude: Optional[Amplitude] = None, timeDelay: Optional[float] = None): 154 | 155 | self.h = h 156 | self.T_amb = TAmbient 157 | 158 | if not isinstance(target, SurfaceSet): 159 | raise ValueError('A SurfaceSet must be used for a Film Boundary Condition') 160 | 161 | super().__init__(name, target, amplitude, timeDelay) 162 | 163 | def type(self) -> BoundaryConditionType: 164 | return BoundaryConditionType.THERMAL 165 | 166 | @property 167 | def heatTransferCoefficient(self) -> float: 168 | """ 169 | The heat transfer coefficient :math:`h_{c}` used for the Film boundary condition 170 | """ 171 | return self.h 172 | 173 | @heatTransferCoefficient.setter 174 | def heatTransferCoefficient(self, h: float) -> None: 175 | self.h = h 176 | 177 | @property 178 | def ambientTemperature(self) -> float: 179 | """ 180 | The ambient temperature :math:`T_{amb}` used for the Film boundary condition 181 | """ 182 | return self.T_amb 183 | 184 | @ambientTemperature.setter 185 | def ambientTemperature(self, Tamb: float) -> None: 186 | self.T_amb = Tamb 187 | 188 | def writeInput(self) -> str: 189 | bCondStr = '*FILM' 190 | 191 | if self._amplitude: 192 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 193 | 194 | if self._timeDelay: 195 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 196 | 197 | if self._resetBoundaryCondition: 198 | bCondStr += ', OP = NEW' 199 | 200 | bCondStr += '\n' 201 | 202 | bfaces = self.getBoundaryFaces() 203 | 204 | for i in len(bfaces): 205 | bCondStr += '{:d},F{:d}, {:e}, {:e}\n'.format(bfaces[i, 0], bfaces[i, 1], self.T_amb, self.h) 206 | 207 | return bCondStr 208 | 209 | 210 | class HeatFlux(BoundaryCondition): 211 | """ 212 | The flux boundary condition applies a uniform external heat flux :math:`q` to faces of surface boundaries 213 | elements (correctly ordered according to Calculix's requirements). This BC may be used in thermal and coupled 214 | thermo-mechanical analyses. 
215 | """ 216 | 217 | def __init__(self, target, flux: float = 0.0, 218 | name: Optional[str] = None, amplitude: Optional[Amplitude] = None, timeDelay: Optional[float] = None): 219 | 220 | self._flux = flux 221 | 222 | if not isinstance(target, SurfaceSet): 223 | raise ValueError('A SurfaceSet must be used for a Heat Flux Boundary Condition') 224 | 225 | super().__init__(name, target, amplitude, timeDelay) 226 | 227 | def type(self) -> BoundaryConditionType: 228 | return BoundaryConditionType.THERMAL 229 | 230 | @property 231 | def flux(self) -> float: 232 | """ 233 | The flux value :math:`q` used for the Heat Flux boundary condition 234 | """ 235 | return self._flux 236 | 237 | @flux.setter 238 | def flux(self, fluxVal: float) -> None: 239 | self._flux = fluxVal 240 | 241 | def writeInput(self) -> str: 242 | 243 | bCondStr = '*DFLUX' 244 | 245 | if self._amplitude: 246 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 247 | 248 | if self._timeDelay: 249 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 250 | 251 | if self._resetBoundaryCondition: 252 | bCondStr += ', OP = NEW' 253 | 254 | bCondStr += '\n' 255 | 256 | bfaces = self.getBoundaryFaces() 257 | 258 | for i in range(len(bfaces)): 259 | bCondStr += '{:d}, S{:d},{:e}\n'.format(bfaces[i, 0], bfaces[i, 1], self._flux) 260 | 261 | return bCondStr 262 | 263 | 264 | class Radiation(BoundaryCondition): 265 | """ 266 | The radiation boundary condition applies Black-body radiation using the Stefan-Boltzmann Law, :math:`q_{rad} = 267 | \\epsilon \\sigma_b\\left(T-T_{amb}\\right)^4`, which is imposed on the faces of boundary elements (correctly 268 | ordered according to Calculix's requirements). 269 | 270 | Ensure that the Stefan-Boltzmann constant :math:`\\sigma_b`, 271 | has consistent units, which is set in the :attr:`~pyccx.analysis.Simulation.SIGMAB`. This BC may be used in 272 | thermal and coupled thermo-mechanical analyses. 273 | """ 274 | 275 | def __init__(self, target: SurfaceSet, 276 | epsilon: Optional[float] = 1.0, TAmbient: Optional[float] = 0.0, 277 | name: Optional[str] = None, amplitude: Optional[Amplitude] = None, 278 | timeDelay: Optional[float] = None): 279 | 280 | self.T_amb = TAmbient 281 | self._epsilon = epsilon 282 | 283 | if not isinstance(target, SurfaceSet): 284 | raise TypeError('A SurfaceSet must be used for a Radiation Boundary Condition') 285 | 286 | super().__init__(name, target, amplitude, timeDelay) 287 | 288 | def type(self) -> BoundaryConditionType: 289 | return BoundaryConditionType.THERMAL 290 | 291 | @property 292 | def emissivity(self) -> float: 293 | """ 294 | The emissivity value :math:`\\epsilon` used for the Radiation boundary condition 295 | """ 296 | return self._epsilon 297 | 298 | @emissivity.setter 299 | def emissivity(self, val: float) -> None: 300 | self._epsilon = val 301 | 302 | @property 303 | def ambientTemperature(self) -> float: 304 | """ 305 | The ambient temperature :math:`T_{amb}`. 
used for the Radiation boundary condition 306 | """ 307 | return self.T_amb 308 | 309 | @ambientTemperature.setter 310 | def ambientTemperature(self, Tamb: float) -> None: 311 | self.T_amb = Tamb 312 | 313 | def writeInput(self) -> str: 314 | 315 | bCondStr = '*RADIATE' 316 | 317 | if self._amplitude: 318 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 319 | 320 | if self._timeDelay: 321 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 322 | 323 | if self._resetBoundaryCondition: 324 | bCondStr += ', OP = NEW' 325 | 326 | bCondStr += '\n' 327 | 328 | bfaces = self.getBoundaryFaces() 329 | # Radiation face labels in Calculix use the 'R' prefix (e.g. R1), followed by the sink temperature and emissivity 330 | for i in range(len(bfaces)): 331 | bCondStr += '{:d}, R{:d}, {:e}, {:e}\n'.format(bfaces[i, 0], bfaces[i, 1], self.T_amb, self._epsilon) 332 | 333 | return bCondStr 334 | 335 | 336 | class Fixed(BoundaryCondition): 337 | """ 338 | The fixed boundary condition removes or sets the DOF (e.g. displacement components, temperature) specifically on 339 | a :class:`NodeSet`. This BC may be used in thermal and coupled thermo-mechanical analyses provided the DOF is 340 | applicable to the analysis type. 341 | """ 342 | 343 | def __init__(self, target: Any, dof: Union[DOF, Iterable[DOF]] = (), 344 | values: Optional[Any] = None, 345 | name: Optional[str] = None, 346 | amplitude: Optional[Amplitude] = None, 347 | timeDelay: Optional[float] = None): 348 | 349 | if not isinstance(target, NodeSet): 350 | raise ValueError('The target for a Fixed Boundary Condition must be a NodeSet') 351 | 352 | if not isinstance(dof, list): 353 | dof = list(dof) # Convert DOF to list if singular 354 | 355 | self._dof = dof 356 | self._values = values 357 | 358 | super().__init__(name, target, amplitude, timeDelay) 359 | 360 | def type(self) -> BoundaryConditionType: 361 | return BoundaryConditionType.ANY 362 | 363 | @property 364 | def dof(self) -> List[DOF]: 365 | """ 366 | Degrees of freedom to be fixed 367 | """ 368 | return self._dof 369 | 370 | @dof.setter 371 | def dof(self, vals: List[DOF]): 372 | self._dof = vals 373 | 374 | @property 375 | def values(self) -> Any: 376 | """ 377 | Values to assign to the selected DOF to be fixed 378 | """ 379 | return self._values 380 | 381 | @values.setter 382 | def values(self, vals): 383 | self._values = vals 384 | 385 | def writeInput(self) -> str: 386 | 387 | bCondStr = '*BOUNDARY' 388 | 389 | if self._amplitude: 390 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 391 | 392 | if self._timeDelay: 393 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 394 | 395 | if self._resetBoundaryCondition: 396 | bCondStr += ', OP = NEW' 397 | 398 | bCondStr += '\n' 399 | 400 | nodesetName = self.getTargetName() 401 | # Prescribed values (if provided) must match the number of selected DOF 402 | if self._values is not None and len(self.dof) != len(self._values): 403 | raise ValueError('DOF and Prescribed DOF must have a matching size') 404 | 405 | # 1-3 U, 4-6, rotational DOF, 11 = Temp 406 | for i in range(len(self._dof)): 407 | if self._values: 408 | # Inhomogeneous boundary conditions 409 | bCondStr += '{:s},{:d},, {:e}\n'.format(nodesetName, self._dof[i], self._values[i]) 410 | else: 411 | # Fixed boundary condition 412 | bCondStr += '{:s},{:d}\n'.format(nodesetName, self._dof[i]) 413 | 414 | return bCondStr 415 | 416 | 417 | class Acceleration(BoundaryCondition): 418 | """ 419 | The Acceleration Boundary Condition applies an acceleration term across a Volume (i.e. Element Set) during a 420 | structural analysis. This is provided as a magnitude and direction of the acceleration applied to the body.
421 | """ 422 | 423 | def __init__(self, target: ElementSet, direction: Optional[Iterable] = None, mag: float = 1.0, 424 | name: Optional[str] = None, amplitude: Optional[Amplitude] = None, 425 | timeDelay: Optional[float] = None): 426 | 427 | self._mag = mag 428 | 429 | if not isinstance(target, NodeSet) or not isinstance(target, ElementSet): 430 | raise ValueError('The target for an Acceleration BC should be a node or element set.') 431 | 432 | if direction: 433 | self._dir = np.asanyarray(direction) 434 | else: 435 | self._dir = np.array([0.0, 0.0, 1.0]) 436 | 437 | super().__init__(name, target, amplitude, timeDelay) 438 | 439 | def type(self) -> BoundaryConditionType: 440 | return BoundaryConditionType.STRUCTURAL 441 | 442 | def setVector(self, v: Iterable) -> None: 443 | """ 444 | The acceleration of the body set by an acceleration vector 445 | 446 | :param v: The vector of the acceleration 447 | """ 448 | 449 | from numpy import linalg 450 | 451 | vec = np.asanyarray(v) 452 | mag = linalg.norm(vec) 453 | self._dir = vec / linalg.norm(vec) 454 | self._mag = mag 455 | 456 | @property 457 | def magnitude(self) -> float: 458 | """ 459 | The acceleration magnitude applied onto the body 460 | """ 461 | return self._mag 462 | 463 | @magnitude.setter 464 | def magnitude(self, magVal: float) -> None: 465 | self._mag = magVal 466 | 467 | @property 468 | def direction(self) -> np.ndarray: 469 | """ 470 | The acceleration direction (normalised vector) 471 | """ 472 | return self._dir 473 | 474 | @direction.setter 475 | def direction(self, v: Iterable) -> None: 476 | from numpy import linalg 477 | vec = np.asanyarray(v) 478 | self._dir = vec / linalg.norm(vec) 479 | 480 | def writeInput(self) -> str: 481 | 482 | bCondStr = '*DLOAD' 483 | 484 | if self._amplitude: 485 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 486 | 487 | if self._timeDelay: 488 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 489 | 490 | if self._resetBoundaryCondition: 491 | bCondStr += ', OP = NEW' 492 | 493 | bCondStr += '\n' 494 | 495 | bCondStr += '{:s},GRAV,{:.5f}, {:e},{:e},{:e}\n'.format(self.target.name, self._mag, *self._dir) 496 | return bCondStr 497 | 498 | 499 | class Pressure(BoundaryCondition): 500 | """ 501 | The Pressure boundary condition applies a uniform pressure applied to the faces across an element boundary. 
502 | """ 503 | 504 | def __init__(self, target: SurfaceSet, magnitude: Optional[float] = 0.0, 505 | name: Optional[str] = None, 506 | amplitude: Optional[Amplitude] = None, 507 | timeDelay: Optional[float] = None): 508 | 509 | self._mag = magnitude 510 | 511 | if not isinstance(target, SurfaceSet): 512 | raise ValueError('A surface set must be assigned to a Pressure boundary condition.') 513 | 514 | super().__init__(name, target, amplitude, timeDelay) 515 | 516 | def type(self) -> BoundaryConditionType: 517 | return BoundaryConditionType.STRUCTURAL 518 | 519 | @property 520 | def magnitude(self) -> float: 521 | """ 522 | The magnitude of pressure applied onto the surface 523 | """ 524 | return self._mag 525 | 526 | @magnitude.setter 527 | def magnitude(self, magVal: float) -> None: 528 | self._mag = magVal 529 | 530 | def writeInput(self) -> str: 531 | 532 | bCondStr = '*DLOAD' 533 | 534 | if self._amplitude: 535 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 536 | 537 | if self._timeDelay: 538 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 539 | 540 | if self._resetBoundaryCondition: 541 | bCondStr += ', OP = NEW' 542 | 543 | bCondStr += '\n' 544 | 545 | bfaces = self.getBoundaryFaces() 546 | 547 | for i in range(len(bfaces)): 548 | bCondStr += '{:6d}, P{:d}, {:e}\n'.format(bfaces[i, 0], bfaces[i, 1], self._mag) 549 | 550 | return bCondStr 551 | 552 | 553 | class Force(BoundaryCondition): 554 | """ 555 | The Force Boundary applies a uniform force directly to nodes. This BC may be used in thermal and 556 | coupled thermo-mechanical analyses provided the :class:`DOF` is applicable to the analysis type. 557 | """ 558 | 559 | def __init__(self, target, name: Optional[str] = None, amplitude: Optional[Amplitude] = None, 560 | timeDelay: Optional[float] = None): 561 | 562 | self._mag = 0.0 563 | self._dir = np.array([0.0, 0.0, 1.0]) 564 | 565 | super().__init__(name, target, amplitude, timeDelay) 566 | 567 | def type(self) -> BoundaryConditionType: 568 | return BoundaryConditionType.STRUCTURAL 569 | 570 | def setVector(self, v: Iterable) -> None: 571 | """ 572 | The applied force set by the vector. 573 | 574 | .. note:: 575 | 576 | The force vector is normalised to ensure that the direction is consistent. 
577 | 578 | :param v: The force vector 579 | """ 580 | from numpy import linalg 581 | 582 | vec = np.asanyarray(v) 583 | mag = linalg.norm(vec) 584 | 585 | self._dir = vec / mag 586 | self._mag = mag 587 | 588 | @property 589 | def magnitude(self) -> float: 590 | """ 591 | The magnitude of the force applied 592 | """ 593 | return self._mag 594 | 595 | @magnitude.setter 596 | def magnitude(self, magVal: float) -> None: 597 | self._mag = magVal 598 | 599 | @property 600 | def direction(self) -> np.ndarray: 601 | """ 602 | The normalised vector of the force direction 603 | """ 604 | return self._dir 605 | 606 | @direction.setter 607 | def direction(self, v: Iterable) -> None: 608 | from numpy import linalg 609 | 610 | vec = np.asanyarray(v) 611 | self._dir = vec / linalg.norm(vec) 612 | 613 | def writeInput(self) -> str: 614 | 615 | bCondStr = '*CLOAD' 616 | 617 | if self._amplitude: 618 | bCondStr += ', AMPLITUDE = {:s}'.format(self._amplitude.name) 619 | 620 | if self._timeDelay: 621 | bCondStr += ', TIMEDELAY = {:e}'.format(self._timeDelay) 622 | 623 | if self._resetBoundaryCondition: 624 | bCondStr += ', OP = NEW' 625 | 626 | bCondStr += '\n' 627 | 628 | nodesetName = self.getTargetName() 629 | 630 | for i in range(3): 631 | compMag = self._mag * self._dir[i] 632 | bCondStr += '{:s},{:d},{:e} \n'.format(nodesetName, i+1, compMag) 633 | 634 | return bCondStr 635 | -------------------------------------------------------------------------------- /pyccx/analysis/analysis.py: -------------------------------------------------------------------------------- 1 | import re 2 | import os 3 | import sys 4 | import subprocess 5 | import logging 6 | 7 | from enum import IntEnum, auto 8 | from typing import Any, List, Type, Optional 9 | 10 | import numpy as np 11 | 12 | from ..bc import BoundaryCondition 13 | from ..core import Amplitude, Connector, ModelObject, MeshSet, ElementSet, NodeSet, SurfaceSet 14 | from ..loadcase import LoadCase 15 | from ..material import Material 16 | from ..mesh import Mesher 17 | from ..results import ElementResult, NodalResult, ResultProcessor 18 | 19 | 20 | class AnalysisError(Exception): 21 | """Exception raised for errors generated during the analysis 22 | 23 | Attributes: 24 | expression -- input expression in which the error occurred 25 | message -- explanation of the error 26 | """ 27 | 28 | def __init__(self, expression, message): 29 | self.expression = expression 30 | self.message = message 31 | 32 | 33 | class AnalysisType(IntEnum): 34 | """ 35 | The analysis types available in Calculix that may be used for analyses 36 | """ 37 | 38 | STRUCTURAL = auto() 39 | """ Structural Analysis """ 40 | 41 | THERMAL = auto() 42 | """ Thermal Analysis """ 43 | 44 | FLUID = auto() 45 | """ Fluid Dynamics Analysis""" 46 | 47 | 48 | class MaterialAssignment(ModelObject): 49 | """ 50 | MaterialAssignment is a base class for defining the Element Types and :class:`~pyccx.material.Material` that are 51 | specified for an :class:`~pyccx.core.ElementSet` within the model. These are required to be set for all elements 52 | that exist within :class:`pyccx.mesh.Mesher` that are defined and exported for use in Calculix. 
53 | """ 54 | 55 | def __init__(self, name: str, elementSet: ElementSet, material: Material): 56 | 57 | self._elSet = elementSet 58 | self._material = material 59 | 60 | super().__init__(name) 61 | 62 | @property 63 | def material(self) -> Material: 64 | """ The Material model and parameters assigned to the Material Assignment """ 65 | return self._material 66 | 67 | @material.setter 68 | def material(self, material: Material) -> None: 69 | 70 | if not isinstance(material, Material): 71 | raise TypeError('Invalid material assignment provided to MaterialAssignment ({:s})'.format(self.name)) 72 | 73 | self._material = material 74 | 75 | @property 76 | def els(self) -> ElementSet: 77 | """ 78 | Elements contains the list of Node IDs 79 | """ 80 | return self._elSet 81 | 82 | @els.setter 83 | def els(self, elementSet: ElementSet): 84 | 85 | if not isinstance(elementSet, ElementSet): 86 | raise TypeError('Invalid element set type provided to MaterialAssignment ({:s}).'.format(self.name)) 87 | 88 | self._elSet = elementSet 89 | 90 | def writeInput(self) -> str: 91 | raise Exception('Not implemented') 92 | 93 | 94 | class SolidMaterialAssignment(MaterialAssignment): 95 | """ 96 | SolidMaterialAssignment designates elements as solid 3D continuum elements, for the selected elements in a provided 97 | :class:`~pyccx.core.ElementSet` with the given :class:`Material`. This option should be used for the following class of elements 98 | including assigning material properties to 3D, plane stress, plane strain and axisymmetric element types. For 99 | plane stress and plane strain elements the thickness parameter can be specified. 100 | """ 101 | def __init__(self, name, elementSet: ElementSet, material: Material, thickness: Optional[float] = None): 102 | 103 | self._thickness = thickness 104 | super().__init__(name, elementSet, material) 105 | 106 | @property 107 | def thickness(self) -> float: 108 | return self._thickness 109 | 110 | @thickness.setter 111 | def thickness(self, thickness: float): 112 | 113 | if thickness is None: 114 | self._thickness = None 115 | elif thickness < 1e-8: 116 | self._thickness = None 117 | else: 118 | self._thickness = thickness 119 | 120 | def writeInput(self) -> str: 121 | 122 | outStr = '*solid section, elset={:s}, material={:s}\n'.format(self._elSet.name, self._material.name) 123 | 124 | if self._thickness: 125 | outStr += '{:e}'.format(self._thickness) 126 | 127 | return outStr 128 | 129 | 130 | class ShellMaterialAssignment(MaterialAssignment): 131 | """ 132 | The ShellMaterialAssignment class is used to select shell elements for the selected elements in a provided 133 | :class:`~pyccx.core.ElementSet` with the given :class:`~pyccx.material.Material`. A thickness must be provided for 134 | the selected shell elements. 135 | """ 136 | 137 | def __init__(self, name, elementSet: ElementSet, material: Material, thickness: float): 138 | 139 | super().__init__(name, elementSet, material) 140 | 141 | self._thickness = thickness 142 | 143 | @property 144 | def thickness(self) -> float: 145 | """ 146 | The thickness of the shell elements 147 | 148 | .. warning:: 149 | The thickness of the shell type should be greater than zero and is required for shell elements. 150 | 151 | .. 
note:: 152 | The element thickness is constant for the shell assignment 153 | """ 154 | 155 | return self._thickness 156 | 157 | @thickness.setter 158 | def thickness(self, thickness: float): 159 | 160 | if thickness < 1e-8: 161 | raise ValueError('The thickness of the shell type should be greater than zero') 162 | 163 | self._thickness = thickness 164 | 165 | def writeInput(self) -> str: 166 | outStr = '*shell section, elset={:s}, material={:s}\n'.format(self._elSet.name, self._material.name) 167 | outStr += '{:e}\n'.format(self._thickness) 168 | return outStr 169 | 170 | 171 | class Simulation: 172 | """ 173 | Provides the class for running a Calculix Simulation 174 | """ 175 | 176 | NUMTHREADS: int = 1 177 | """ 178 | The total number of Threads used by the Calculix Solver 179 | """ 180 | 181 | CALCULIX_PATH: str = '' 182 | """ 183 | The calculix solver directory path used for Windows platforms. Within the solver directory the executable 184 | (ccx.exe) must exist and have execution permissions. 185 | 186 | .. note :: 187 | On Mac OS X, this is the complete path of the executable 188 | 189 | """ 190 | 191 | VERBOSE_OUTPUT: bool = True 192 | """ When enabled, the output during the analysis is redirected to the console""" 193 | 194 | def __init__(self, meshModel: Mesher): 195 | 196 | self._input = '' 197 | self._workingDirectory = '' 198 | self._analysisCompleted = False 199 | 200 | self._name = '' 201 | self.initialConditions = [] # 'dict of node set names, 202 | self._loadCases = [] 203 | self._mpcSets = [] 204 | self._connectors = [] 205 | self._materials = [] 206 | self._materialAssignments = [] 207 | self.model = meshModel 208 | 209 | self.initialTimeStep = 0.1 210 | self.defaultTimeStep = 0.1 211 | 212 | self.totalTime = 1.0 213 | self.useSteadyStateAnalysis = True 214 | 215 | self.TZERO = -273.15 216 | self.SIGMAB = 5.669E-8 217 | self._numThreads = 1 218 | 219 | # Private sets are used for the storage of additional user defined sets 220 | self._surfaceSets = [] 221 | self._nodeSets = [] 222 | self._elementSets = [] 223 | 224 | self.includes = [] 225 | self._runData = None 226 | 227 | def init(self): 228 | 229 | self._input = '' 230 | 231 | @classmethod 232 | def setNumThreads(cls, numThreads: int) -> None: 233 | """ 234 | Sets the number of simulation threads to use in Calculix 235 | 236 | :param numThreads: 237 | :return: 238 | """ 239 | cls.NUMTHREADS = numThreads 240 | 241 | @classmethod 242 | def getNumThreads(cls) -> int: 243 | """ 244 | Returns the number of threads used by Calculix and GMSH 245 | 246 | :return: int: 247 | """ 248 | return cls.NUMTHREADS 249 | 250 | @classmethod 251 | def setCalculixPath(cls, calculixPath: str) -> None: 252 | """ 253 | Sets the path for the Calculix executable. Necessary when using Windows where there is not a default 254 | installation procedure for Calculix 255 | 256 | :param calculixPath: Directory containing the Calculix Executable 257 | """ 258 | 259 | if os.path.isdir(calculixPath): 260 | cls.CALCULIX_PATH = calculixPath 261 | 262 | @classmethod 263 | def setVerboseOuput(cls, state: bool) -> None: 264 | """ 265 | Sets if the output from Calculix should be verbose i.e. printed to the console 266 | 267 | :param state: `True` if the output should be printed to the console 268 | """ 269 | 270 | cls.VERBOSE_OUTPUT = state 271 | 272 | def setWorkingDirectory(self, workDir: str) -> None: 273 | """ 274 | Sets the working directory used during the analysis. 
275 | 276 | :param workDir: An accessible working directory path 277 | 278 | """ 279 | if os.path.isdir(workDir) and os.access(workDir, os.W_OK): 280 | self._workingDirectory = workDir 281 | else: 282 | raise ValueError(f"Working directory ({workDir}) is not accessible or writable") 283 | 284 | @property 285 | def name(self) -> str: 286 | return self._name 287 | 288 | def getBoundaryConditions(self) -> List[BoundaryCondition]: 289 | """ 290 | Collects all unique :class:`~pyccx.bc.BoundaryCondition` which are attached 291 | to each :class:`~pyccx.loadcase.LoadCase` in the analysis 292 | 293 | :return: All the boundary conditions in the analysis 294 | """ 295 | bcs = [] 296 | for loadcase in self._loadCases: 297 | bcs += loadcase.boundaryConditions 298 | 299 | return bcs 300 | 301 | @property 302 | def loadCases(self) -> List[LoadCase]: 303 | """ 304 | A list of :class:`~pyccx.loadcase.LoadCase` that have been attached to the analysis 305 | """ 306 | return self._loadCases 307 | 308 | @loadCases.setter 309 | def loadCases(self, loadCases: List[LoadCase]) -> None: 310 | self._loadCases = loadCases 311 | 312 | @property 313 | def connectors(self) -> List[Connector]: 314 | """ 315 | List of :class:`~pyccx.core.Connector` used in the analysis 316 | """ 317 | return self._connectors 318 | 319 | @connectors.setter 320 | def connectors(self, connectors: List[Connector]) -> None: 321 | self._connectors = connectors 322 | 323 | @property 324 | def mpcSets(self): 325 | return self._mpcSets 326 | 327 | @mpcSets.setter 328 | def mpcSets(self, value): 329 | self._mpcSets = value 330 | 331 | @property 332 | def materials(self) -> List[Material]: 333 | """ 334 | User defined :class:`~pyccx.material.Material` used in the analysis 335 | """ 336 | return self._materials 337 | 338 | @materials.setter 339 | def materials(self, materials: List[Material]) -> None: 340 | self._materials = materials 341 | 342 | @property 343 | def materialAssignments(self) -> List[MaterialAssignment]: 344 | """ 345 | Material Assignment applied to a set of elements 346 | """ 347 | return self._materialAssignments 348 | 349 | @materialAssignments.setter 350 | def materialAssignments(self, matAssignments: List[MaterialAssignment]) -> None: 351 | self._materialAssignments = matAssignments 352 | 353 | def _collectAmplitudes(self) -> List[Amplitude]: 354 | """ 355 | Private function returns a unique set of Element, Nodal, Surface sets which are used by 356 | the analysis during writing. This reduces the need to explicitly attach them to an analysis. 357 | """ 358 | amps = {} 359 | 360 | for loadcase in self.loadCases: 361 | 362 | for bc in loadcase.boundaryConditions: 363 | if bc.amplitude: 364 | amps[bc.amplitude.name] = bc.amplitude 365 | 366 | return list(amps.values()) 367 | 368 | def _collectSets(self, setType: Optional[Type[MeshSet]] = None) -> Any: 369 | """ 370 | Private function returns a unique set of Element, Nodal, Surface sets which are used by 371 | the analysis during writing. This reduces the need to explicitly attach them to an analysis. 
372 | 373 | :param setType: The type of Mesh Set to collect 374 | :return: A list of unique MeshSets obtained for the analysis 375 | """ 376 | elementSets = {} 377 | nodeSets = {} 378 | surfaceSets = {} 379 | 380 | # Iterate through all user defined sets 381 | for elSet in self._elementSets: 382 | elementSets[elSet.name] = elSet 383 | 384 | for nodeSet in self._nodeSets: 385 | nodeSets[nodeSet.name] = nodeSet 386 | 387 | for surfSet in self._surfaceSets: 388 | surfaceSets[surfSet.name] = surfSet 389 | 390 | for materialAssignment in self.materialAssignments: 391 | elementSets[materialAssignment.els.name] = materialAssignment.els 392 | 393 | # Iterate through all loadcases and boundary conditions.and find unique values. This is greedy so will override 394 | # any with same name. 395 | for loadcase in self.loadCases: 396 | 397 | # Collect result sets node and element sets automatically 398 | for resultSet in loadcase.resultSet: 399 | if isinstance(resultSet, ElementResult): 400 | elementSets[resultSet.elementSet.name] = resultSet.elementSet 401 | elif isinstance(resultSet, NodalResult): 402 | if resultSet.nodeSet and isinstance(resultSet.nodeSet, NodeSet): 403 | nodeSets[resultSet.nodeSet.name] = resultSet.nodeSet 404 | 405 | for bc in loadcase.boundaryConditions: 406 | if isinstance(bc.target, ElementSet): 407 | elementSets[bc.target.name] = bc.target 408 | 409 | if isinstance(bc.target, NodeSet): 410 | nodeSets[bc.target.name] = bc.target 411 | 412 | if isinstance(bc.target, SurfaceSet): 413 | surfaceSets[bc.target.name] = bc.target 414 | 415 | for con in self.connectors: 416 | nodeSets[con.nodeset.name] = con.nodeset 417 | 418 | if setType is ElementSet: 419 | return list(elementSets.values()) 420 | elif setType is NodeSet: 421 | return list(nodeSets.values()) 422 | elif setType is SurfaceSet: 423 | return list(surfaceSets.values()) 424 | else: 425 | return list(elementSets.values()), list(nodeSets.values()), list(surfaceSets.values()) 426 | 427 | @property 428 | def elementSets(self) -> List[ElementSet]: 429 | """ 430 | User-defined :class:`~pyccx.core.ElementSet` manually added to the analysis 431 | """ 432 | return self._elementSets 433 | 434 | @elementSets.setter 435 | def elementSets(self, val: List[ElementSet]) -> None: 436 | self._elementSets = val 437 | 438 | @property 439 | def nodeSets(self) -> List[NodeSet]: 440 | """ 441 | User-defined :class:`~pyccx.core.NodeSet` manually added to the analysis 442 | """ 443 | return self._nodeSets 444 | 445 | @nodeSets.setter 446 | def nodeSets(self, val: List[NodeSet]) -> None: 447 | self._nodeSets = val 448 | 449 | @property 450 | def surfaceSets(self) -> List[SurfaceSet]: 451 | """ 452 | User-defined :class:`pyccx.core.SurfaceSet` manually added to the analysis 453 | """ 454 | return self._surfaceSets 455 | 456 | @surfaceSets.setter 457 | def surfaceSets(self, val: List[SurfaceSet]) -> None: 458 | self._surfaceSets = val 459 | 460 | def getElementSets(self) -> List[ElementSet]: 461 | """ 462 | Returns **all** the :class:`~pyccx.core.ElementSet` used and generated in the analysis 463 | """ 464 | return self._collectSets(setType=ElementSet) 465 | 466 | def getNodeSets(self) -> List[NodeSet]: 467 | """ 468 | Returns **all** the :class:`~pyccx.core.NodeSet` used and generated in the analysis 469 | """ 470 | return self._collectSets(setType=NodeSet) 471 | 472 | def getSurfaceSets(self) -> List[SurfaceSet]: 473 | """ 474 | Returns **all** the :class:`~pyccx.core.SurfaceSet` used and generated in the analysis 475 | """ 476 | return 
self._collectSets(setType=SurfaceSet) 477 | 478 | def getAmplitudes(self) -> List[Amplitude]: 479 | """ 480 | Returns *all** the :class:`pyccx.core.Amplitudes` used and generated in the analysis 481 | """ 482 | 483 | return self._collectAmplitudes() 484 | 485 | def writeInput(self) -> str: 486 | """ 487 | Writes the input deck for the simulation 488 | """ 489 | 490 | self.init() 491 | 492 | self._writeHeaders() 493 | 494 | self._writeMesh() 495 | logging.info('\t Analysis mesh written to file') 496 | self._writeNodeSets() 497 | self._writeElementSets() 498 | self._writeKinematicConnectors() 499 | self._writeMPCs() 500 | self._writeAmplitudes() 501 | self._writeMaterials() 502 | self._writeMaterialAssignments() 503 | self._writeInitialConditions() 504 | self._writeAnalysisConditions() 505 | self._writeLoadSteps() 506 | 507 | return self._input 508 | 509 | def _writeAmplitudes(self) -> None: 510 | 511 | amplitudes = self._collectAmplitudes() 512 | 513 | if len(amplitudes) == 0: 514 | return None 515 | 516 | self._input += '{:*^80}\n'.format(' AMPLITUDES ') 517 | 518 | for amp in amplitudes: 519 | self._input += amp.writeInput() 520 | self._input += os.linesep 521 | 522 | def _writeHeaders(self) -> None: 523 | 524 | self._input += '\n' 525 | self._input += '{:*^125}\n'.format(' INCLUDES ') 526 | 527 | for filename in self.includes: 528 | self._input += '*include,input={:s}'.format(filename) 529 | 530 | def _writeElementSets(self) -> None: 531 | 532 | # Collect all sets 533 | elementSets = self._collectSets(setType=ElementSet) 534 | 535 | if len(elementSets) == 0: 536 | return 537 | 538 | self._input += os.linesep 539 | self._input += '{:*^125}\n'.format(' ELEMENT SETS ') 540 | 541 | for elSet in elementSets: 542 | self._input += os.linesep 543 | self._input += elSet.writeInput() 544 | 545 | def _writeNodeSets(self) -> None: 546 | 547 | # Collect all sets 548 | nodeSets = self._collectSets(setType=NodeSet) 549 | 550 | if len(nodeSets) == 0: 551 | return 552 | 553 | self._input += os.linesep 554 | self._input += '{:*^125}\n'.format(' NODE SETS ') 555 | 556 | for nodeSet in nodeSets: 557 | self._input += os.linesep 558 | self._input += nodeSet.writeInput() 559 | 560 | def _writeKinematicConnectors(self): 561 | 562 | if len(self.connectors) < 1: 563 | return 564 | 565 | self._input += os.linesep 566 | self._input += '{:*^125}\n'.format(' KINEMATIC CONNECTORS ') 567 | 568 | for connector in self.connectors: 569 | 570 | # A nodeset is automatically created from the name of the connector 571 | self._input += connector.writeInput() 572 | 573 | def _writeMPCs(self): 574 | 575 | if len(self.mpcSets) < 1: 576 | return 577 | 578 | self._input += os.linesep 579 | self._input += '{:*^125}\n'.format(' MPCS ') 580 | 581 | for mpcSet in self.mpcSets: 582 | self._input += '*EQUATION\n' 583 | self._input += '{:d}\n'.format(len(mpcSet['numTerms'])) # Assume each line constrains two nodes and one dof 584 | for mpc in mpcSet['equations']: 585 | for i in range(len(mpc['eqn'])): 586 | self._input += '{:d},{:d},{:d}'.format(mpc['node'][i], mpc['dof'][i], mpc['eqn'][i]) 587 | 588 | self._input += os.linesep 589 | 590 | # *EQUATION 591 | # 2 # number of terms in equation # typically two 592 | # 28,2,1.,22,2,-1. 
# node a id, dof, node b id, dof b 593 | 594 | def _writeMaterialAssignments(self) -> None: 595 | self._input += os.linesep 596 | self._input += '{:*^80}\n'.format(' MATERIAL ASSIGNMENTS ') 597 | 598 | for matAssignment in self.materialAssignments: 599 | self._input += matAssignment.writeInput() 600 | 601 | def _writeMaterials(self) -> None: 602 | self._input += os.linesep 603 | self._input += '{:*^80}\n'.format(' MATERIALS ') 604 | for material in self.materials: 605 | self._input += material.writeInput() 606 | 607 | def _writeInitialConditions(self) -> None: 608 | self._input += os.linesep 609 | self._input += '{:*^80}\n'.format(' INITIAL CONDITIONS ') 610 | 611 | for initCond in self.initialConditions: 612 | self._input += '*INITIAL CONDITIONS,TYPE={:s}\n'.format(initCond['type'].upper()) 613 | self._input += '{:s},{:e}\n'.format(initCond['set'], initCond['value']) 614 | self._input += os.linesep 615 | 616 | # Write the Physical Constants 617 | self._input += '*PHYSICAL CONSTANTS,ABSOLUTE ZERO={:e},STEFAN BOLTZMANN={:e}\n'.format(self.TZERO, self.SIGMAB) 618 | 619 | def _writeAnalysisConditions(self) -> None: 620 | 621 | self._input += os.linesep 622 | self._input += '{:*^80}\n'.format(' ANALYSIS CONDITIONS ') 623 | 624 | def _writeLoadSteps(self) -> None: 625 | 626 | self._input += os.linesep 627 | self._input += '{:*^80}\n'.format(' LOAD STEPS ') 628 | 629 | for loadCase in self.loadCases: 630 | self._input += loadCase.writeInput() 631 | 632 | def _writeMesh(self) -> None: 633 | 634 | # TODO make a unique auto-generated name for the mesh 635 | meshFilename = 'mesh.inp' 636 | meshPath = os.path.join(self._workingDirectory, meshFilename) 637 | 638 | self.model.writeMesh(meshPath) 639 | self._input += '*include,input={:s}\n'.format(meshFilename) 640 | 641 | def checkAnalysis(self) -> bool: 642 | """ 643 | Routine checks that the analysis has been correctly generated 644 | 645 | :return: bool: True if no analysis error occur 646 | :raise: AnalysisError: Analysis error that occurred 647 | """ 648 | 649 | if len(self.materials) == 0: 650 | raise AnalysisError(self, 'No material models have been assigned to the analysis') 651 | 652 | if len(self.materialAssignments) == 0: 653 | raise AnalysisError(self, 'No material assignment has been assigned to the analysis') 654 | 655 | for material in self.materials: 656 | if not material.isValid(): 657 | raise AnalysisError(self, f"Material ({material.name}) is not valid") 658 | 659 | if len(self.model.identifyUnassignedElements()) > 0: 660 | raise AnalysisError(self, 'Mesh model has unassigned element types') 661 | 662 | return True 663 | 664 | @staticmethod 665 | def version(): 666 | 667 | if sys.platform == 'win32': 668 | cmdPath = Simulation.CALCULIX_PATH 669 | 670 | # Check executable can be opened and has permissions to be executable 671 | if not os.path.isfile(cmdPath): 672 | raise FileNotFoundError(f"Calculix executable not found at path: {cmdPath}") 673 | 674 | # check if the executable is executable 675 | if not os.access(cmdPath, os.X_OK): 676 | raise PermissionError(f"Calculix executable at path: {cmdPath} is not executable") 677 | 678 | p = subprocess.Popen([cmdPath, '-v'], stdout=subprocess.PIPE, universal_newlines=True) 679 | stdout, stderr = p.communicate() 680 | version = re.search(r"(\d+).(\d+)", stdout) 681 | return int(version.group(1)), int(version.group(2)) 682 | 683 | elif sys.platform == 'linux': 684 | 685 | p = subprocess.Popen(['ccx', '-v'], stdout=subprocess.PIPE, universal_newlines=True) 686 | stdout, stderr = 
p.communicate() 687 | version = re.search(r"(\d+).(\d+)", stdout) 688 | return int(version.group(1)), int(version.group(2)) 689 | 690 | elif sys.platform == 'darwin': 691 | 692 | # Check executable can be opened and has permissions to be executable 693 | if not os.path.isfile(Simulation.CALCULIX_PATH): 694 | raise FileNotFoundError(f"Calculix executable not found at path: {Simulation.CALCULIX_PATH}") 695 | 696 | # check if the executable is executable 697 | if not os.access(Simulation.CALCULIX_PATH, os.X_OK): 698 | raise PermissionError(f"Calculix executable at path: {Simulation.CALCULIX_PATH} is not executable") 699 | 700 | p = subprocess.Popen([Simulation.CALCULIX_PATH, '-v'], stdout=subprocess.PIPE, universal_newlines=True) 701 | stdout, stderr = p.communicate() 702 | version = re.search(r"(\d+).(\d+)", stdout) 703 | return int(version.group(1)), int(version.group(2)) 704 | else: 705 | raise NotImplementedError(' Platform is not currently supported') 706 | 707 | def results(self) -> ResultProcessor: 708 | """ 709 | The results obtained after running an analysis 710 | """ 711 | 712 | workingResultsPath = os.path.join(self._workingDirectory, 'input') 713 | 714 | if self.isAnalysisCompleted(): 715 | return ResultProcessor(workingResultsPath) 716 | else: 717 | raise RuntimeError('Results were not available') 718 | 719 | def isAnalysisCompleted(self) -> bool: 720 | """ Returns ``True`` if the analysis was completed successfully """ 721 | return self._analysisCompleted 722 | 723 | def clearAnalysis(self, includeResults: Optional[bool] = False) -> None: 724 | """ 725 | Clears any previous files generated from the analysis 726 | 727 | :param includeResults: If set `True` will also delete the result files generated from the analysis 728 | """ 729 | 730 | filename = 'input' # Base filename for the analysis 731 | 732 | files = [filename + '.inp', 733 | filename + '.cvg', 734 | filename + '.sta'] 735 | 736 | if includeResults: 737 | files.append(filename + '.frd') 738 | files.append(filename + '.dat') 739 | 740 | try: 741 | for file in files: 742 | filePath = os.path.join(self._workingDirectory, file) 743 | os.remove(filePath) 744 | except: 745 | pass 746 | 747 | def monitor(self, filename: str): 748 | 749 | # load the .sta file for convegence monitoring 750 | 751 | staFilename = '{:s}.sta'.format(filename) 752 | 753 | """ 754 | Note: 755 | Format of each row in the .sta file corresponds with 756 | 0 STEP 757 | 1 INC 758 | 2 ATT 759 | 3 ITRS 760 | 4 TOT TIME 761 | 5 STEP TIME 762 | 6 INC TIME 763 | """ 764 | with open(staFilename, 'r') as f: 765 | 766 | # check the first two lines of the .sta file are correct 767 | line1 = f.readline() 768 | line2 = f.readline() 769 | 770 | if not('SUMMARY OF JOB INFORMATION' in line1 and 'STEP' in line2): 771 | raise Exception('Invalid .sta file generated') 772 | 773 | line = f.readline() 774 | 775 | convergenceOutput = [] 776 | 777 | while line: 778 | out = re.search(r"\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\S+)\s+(\S+)\s+(\S+)*", line) 779 | 780 | if out: 781 | out = [float(val) for val in out.groups()] 782 | convergenceOutput.append(out) 783 | 784 | line = f.readline() 785 | 786 | convergenceOutput = np.array(convergenceOutput) 787 | 788 | cvgFilename = f"{filename}.cvg" 789 | 790 | """ 791 | Note: 792 | 793 | Format of the CVF format consists of the following parameters 794 | 0 STEP 795 | 1 INC 796 | 2 ATT 797 | 3 ITER 798 | 4 CONT EL 799 | 5 RESID FORCE 800 | 6 CORR DISP 801 | 7 RESID FLUX 802 | 8 CORR TEMP 803 | """ 804 | with open(cvgFilename, 'r') as f: 805 | 
806 | # check the first two lines of the .sta file are correct 807 | line1 = f.readline() 808 | line2 = f.readline() 809 | line3 = f.readline() 810 | line4 = f.readline() 811 | 812 | if not ('SUMMARY OF C0NVERGENCE INFORMATION' in line1 and 813 | 'STEP' in line2): 814 | raise Exception('Invalid .cvg file generated') 815 | 816 | line = f.readline() 817 | 818 | convergenceOutput2 = [] 819 | 820 | while line: 821 | out = re.search(r"\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)*", line) 822 | 823 | if out: 824 | out = [float(val) for val in out.groups()] 825 | convergenceOutput2.append(out) 826 | 827 | line = f.readline() 828 | 829 | convergenceOutput2 = np.array(convergenceOutput2) 830 | 831 | return convergenceOutput, convergenceOutput2 832 | 833 | def checkLine(self, line): 834 | 835 | self._runData = {} 836 | 837 | if 'Total CalculiX Time:' in line: 838 | runTime = re.search(r"Total CalculiX Time: (\S*)", line)[1] 839 | runTime = float(runTime) 840 | 841 | self._runData['runTime'] = runTime 842 | 843 | def run(self): 844 | """ 845 | Performs pre-analysis checks on the model and submits the job for Calculix to perform. 846 | """ 847 | 848 | # Reset analysis status 849 | self._analysisCompleted = False 850 | 851 | logging.info('{:=^60}'.format(' RUNNING PRE-ANALYSIS CHECKS ')) 852 | if self.checkAnalysis(): 853 | logging.info('\t Analysis checks were successfully completed') 854 | 855 | logging.info('{:=^60}'.format(' WRITING ANALYSIS INPUT FILE ')) 856 | inputDeckContents = self.writeInput() 857 | 858 | logging.info('\t Analysis input file has been generated') 859 | inputDeckPath = os.path.join(self._workingDirectory, 'input.inp') 860 | 861 | with open(inputDeckPath, "w") as text_file: 862 | text_file.write(inputDeckContents) 863 | 864 | logging.info('\t Analysis input file ({:s}) has been written to file'.format(inputDeckPath)) 865 | 866 | # Set environment variables for performing multi-threaded 867 | os.environ["CCX_NPROC_STIFFNESS"] = '{:d}'.format(Simulation.NUMTHREADS) 868 | os.environ["CCX_NPROC_EQUATION_SOLVER"] = '{:d}'.format(Simulation.NUMTHREADS) 869 | os.environ["NUMBER_OF_PROCESSORS"] = '{:d}'.format(Simulation.NUMTHREADS) 870 | os.environ["OMP_NUM_THREADS"] = '{:d}'.format(Simulation.NUMTHREADS) 871 | 872 | logging.info('{:=^60}'.format(' RUNNING CALCULIX ')) 873 | 874 | if sys.platform == 'win32': 875 | cmdPath = self.CALCULIX_PATH 876 | 877 | # Check executable can be opened and has permissions to be executable 878 | if not os.path.isfile(cmdPath): 879 | raise FileNotFoundError(f"Calculix executable not found at path: {cmdPath}") 880 | 881 | # check if the executable is executable 882 | if not os.access(cmdPath, os.X_OK): 883 | raise PermissionError(f"Calculix executable at path: {cmdPath} is not executable") 884 | 885 | arguments = '-i input' 886 | 887 | cmd = cmdPath + arguments 888 | 889 | popen = subprocess.Popen(cmd, cwd=self._workingDirectory, stdout=subprocess.PIPE, universal_newlines=True) 890 | 891 | for stdout_line in iter(popen.stdout.readline, ""): 892 | 893 | if not stdout_line or stdout_line == '\n': 894 | continue 895 | 896 | if "Using up to " in stdout_line: 897 | continue 898 | 899 | if Simulation.VERBOSE_OUTPUT: 900 | print(stdout_line, end='') 901 | 902 | self.checkLine(stdout_line) 903 | 904 | popen.stdout.close() 905 | return_code = popen.wait() 906 | if return_code: 907 | raise subprocess.CalledProcessError(return_code, cmd) 908 | 909 | # Analysis was completed successfully 910 | self._analysisCompleted = True 911 | 912 | 
elif sys.platform == 'linux': 913 | 914 | filename = 'input' 915 | 916 | cmdSt = ['ccx', '-i', filename] 917 | 918 | popen = subprocess.Popen(cmdSt, cwd=self._workingDirectory, 919 | stdout=subprocess.PIPE, 920 | universal_newlines=True) 921 | 922 | for stdout_line in iter(popen.stdout.readline, ""): 923 | 924 | if not stdout_line or stdout_line == '\n': 925 | continue 926 | 927 | if "Using up to " in stdout_line: 928 | continue 929 | 930 | if Simulation.VERBOSE_OUTPUT: 931 | print(stdout_line, end='') 932 | self.checkLine(stdout_line) 933 | 934 | popen.stdout.close() 935 | return_code = popen.wait() 936 | if return_code: 937 | raise subprocess.CalledProcessError(return_code, cmdSt) 938 | 939 | # Analysis was completed successfully 940 | self._analysisCompleted = True 941 | 942 | elif sys.platform == 'darwin': 943 | 944 | filename = 'input' 945 | 946 | # Check executable can be opened and has permissions to be executable 947 | if not os.path.isfile(Simulation.CALCULIX_PATH): 948 | raise FileNotFoundError(f"Calculix executable not found at path: {Simulation.CALCULIX_PATH}") 949 | 950 | # check if the executable is executable 951 | if not os.access(Simulation.CALCULIX_PATH, os.X_OK): 952 | raise PermissionError(f"Calculix executable at path: {Simulation.CALCULIX_PATH} is not executable") 953 | 954 | cmdSt = [self.CALCULIX_PATH, '-i', filename] 955 | 956 | popen = subprocess.Popen(cmdSt, cwd=self._workingDirectory, 957 | stdout=subprocess.PIPE, 958 | universal_newlines=True) 959 | 960 | for stdout_line in iter(popen.stdout.readline, ""): 961 | 962 | if not stdout_line or stdout_line == '\n': 963 | continue 964 | 965 | if "Using up to " in stdout_line: 966 | continue 967 | 968 | if Simulation.VERBOSE_OUTPUT: 969 | print(stdout_line, end='') 970 | 971 | self.checkLine(stdout_line) 972 | 973 | popen.stdout.close() 974 | return_code = popen.wait() 975 | 976 | if return_code: 977 | raise subprocess.CalledProcessError(return_code, cmdSt) 978 | 979 | # Analysis was completed successfully 980 | self._analysisCompleted = True 981 | 982 | else: 983 | raise NotImplementedError(' Platform is not currently supported') 984 | --------------------------------------------------------------------------------
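The `Simulation` class above is driven almost entirely by attribute assignment followed by `run()`. The sketch below is a minimal, illustrative workflow assembled only from the signatures visible in this file; the mesh, material, element-set and load-case objects are assumed placeholders (their constructors live in `pyccx.mesh`, `pyccx.material`, `pyccx.core` and `pyccx.loadcase` and are not reproduced here), so treat this as a template rather than a runnable script.

import sys

from pyccx.analysis import Simulation, SolidMaterialAssignment

# Assumed placeholders: build these with the project's own mesh/material/
# load-case APIs. The mesher must have element types assigned, and every
# material must pass material.isValid() for checkAnalysis() to succeed.
myMesher = ...        # pyccx.mesh.Mesher instance
steel = ...           # pyccx.material.Material instance
partElSet = ...       # pyccx.core.ElementSet covering the meshed part
loadCase = ...        # pyccx.loadcase.LoadCase with boundary conditions and result sets

# Class-level solver configuration
Simulation.setNumThreads(4)
if sys.platform == 'win32':
    # Directory containing ccx.exe on Windows (the full executable path on macOS)
    Simulation.setCalculixPath('C:/calculix')

sim = Simulation(myMesher)
sim.setWorkingDirectory('.')   # must be an existing, writable directory

sim.materials = [steel]
sim.materialAssignments = [SolidMaterialAssignment('part', partElSet, steel)]
sim.loadCases = [loadCase]

sim.checkAnalysis()            # raises AnalysisError on an incomplete setup
sim.run()                      # writes input.inp to the working directory and launches ccx

if sim.isAnalysisCompleted():
    results = sim.results()    # ResultProcessor for the 'input' job files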
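`_writeInitialConditions` consumes plain dictionaries from `self.initialConditions`; the keys below are inferred from how that method indexes each entry, and the set name is hypothetical.

# Each entry becomes '*INITIAL CONDITIONS,TYPE=<type>' followed by 'set,value'
sim.initialConditions = [
    {'type': 'temperature',   # upper-cased by _writeInitialConditions
     'set': 'partNodes',      # name of a node set defined in the model (hypothetical)
     'value': 20.0},          # written with an exponent format, so use a float
]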
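`_writeMPCs` likewise assumes a particular layout for `mpcSets` that is not documented elsewhere in this file. The sketch below reconstructs it from the way the keys are accessed; the node numbers are hypothetical, and note that the writer currently uses the length of `numTerms` as the term count printed on the `*EQUATION` card.

# Intended to emit a card of the form shown in the inline note above, e.g.
#   *EQUATION
#   2
#   28,2,1.,22,2,-1.
tieNodes = {
    'numTerms': [2, 2],        # the length (not the values) is written as the number of terms
    'equations': [
        {'node': [28, 22],     # node ids taking part in the constraint
         'dof':  [2, 2],       # degree of freedom per node
         'eqn':  [1, -1]},     # integer coefficient per term
    ],
}
sim.mpcSets = [tieNodes]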
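`monitor()` returns two NumPy arrays whose columns follow the layouts listed in its inline notes (.sta: STEP, INC, ATT, ITRS, TOT TIME, STEP TIME, INC TIME; .cvg: STEP, INC, ATT, ITER, CONT EL, RESID FORCE, CORR DISP, RESID FLUX, CORR TEMP). A small post-processing sketch follows; matplotlib is an assumed extra dependency used purely for illustration.

import matplotlib.pyplot as plt   # assumed, not a pyccx requirement

# Pass the job base name; prefix it with the working directory if that is not
# the current directory. monitor() appends '.sta' and '.cvg' itself.
staData, cvgData = sim.monitor('input')

residualForce = cvgData[:, 5]     # RESID FORCE column of the .cvg file

plt.semilogy(residualForce)
plt.xlabel('Equilibrium iteration')
plt.ylabel('Residual force')
plt.show()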