├── .clang-format
├── .cmake-format
├── .gitattributes
├── .github
├── dependabot.yml
├── pull_request_template.md
└── workflows
│ ├── build_and_test.yaml
│ ├── build_and_test_mac.yaml
│ ├── docker.yaml
│ ├── run-checks.yml
│ └── test_pyfans.yaml
├── .gitignore
├── .markdown-link-check-config.json
├── .markdownlint.json
├── .pre-commit-config.yaml
├── CHANGELOG.md
├── CITATION.cff
├── CMakeLists.txt
├── CONTRIBUTING.md
├── FANS_Dashboard
├── FANS_Dashboard.ipynb
├── README.md
├── fans_dashboard
│ ├── __init__.py
│ ├── core
│ │ ├── __init__.py
│ │ ├── postprocessing.py
│ │ ├── tensortools.py
│ │ └── utils.py
│ └── plotting
│ │ ├── PlotYoungsModulus.py
│ │ ├── __init__.py
│ │ ├── h52xdmf.py
│ │ └── plotting.py
└── pyproject.toml
├── LICENSE
├── README.md
├── cmake
├── FANSConfig.cmake.in
├── modules
│ └── FindFFTW3.cmake
└── packaging
│ └── CMakeLists.txt
├── docker
├── Dockerfile
├── Dockerfile_user_env_entrypoint.sh
└── README.md
├── docs
├── ReleaseGuide.md
└── images
│ └── FANS_example.png
├── include
├── general.h
├── json.hpp
├── material_models
│ ├── GBDiffusion.h
│ ├── J2Plasticity.h
│ ├── LinearElastic.h
│ ├── LinearThermal.h
│ └── PseudoPlastic.h
├── matmodel.h
├── mixedBCs.h
├── reader.h
├── setup.h
├── solver.h
├── solverCG.h
├── solverFP.h
└── version.h.in
├── pixi.lock
├── pixi.toml
├── pyfans
├── CMakeLists.txt
├── README.md
├── micro.cpp
└── micro.hpp
├── src
├── main.cpp
└── reader.cpp
└── test
├── CMakeLists.txt
├── README.md
├── input_files
├── test_J2Plasticity.json
├── test_LinearElastic.json
├── test_LinearThermal.json
├── test_MixedBCs.json
└── test_PseudoPlastic.json
├── microstructures
└── sphere32.h5
├── pytest
├── test_displacement_averaging.py
├── test_homogenization_consistency.py
├── test_homogenized_tangent_spd.py
├── test_homogenized_tangent_within_VRbounds.py
├── test_loading_to_strain_average.py
├── test_strain_stress_averaging.py
└── test_tensortools.py
├── run_tests.sh
└── test_pyfans
├── README.md
├── input.json
├── macro-cube.py
├── micro-manager-config.json
└── precice-config.xml
/.clang-format:
--------------------------------------------------------------------------------
1 | ---
2 | Language: Cpp
3 | AccessModifierOffset: -2
4 | AlignAfterOpenBracket: Align
5 | AlignConsecutiveAssignments: true
6 | AlignConsecutiveDeclarations: true
7 | AlignEscapedNewlines: Right
8 | AlignOperands: true
9 | AlignTrailingComments: true
10 | AllowAllParametersOfDeclarationOnNextLine: true
11 | AllowShortBlocksOnASingleLine: false
12 | AllowShortCaseLabelsOnASingleLine: false
13 | AllowShortFunctionsOnASingleLine: Empty
14 | AllowShortIfStatementsOnASingleLine: false
15 | AllowShortLoopsOnASingleLine: false
16 | AlwaysBreakAfterDefinitionReturnType: None
17 | AlwaysBreakAfterReturnType: None
18 | AlwaysBreakBeforeMultilineStrings: false
19 | AlwaysBreakTemplateDeclarations: MultiLine
20 | BinPackArguments: true
21 | BinPackParameters: true
22 | BraceWrapping:
23 | AfterClass: false
24 | AfterControlStatement: false
25 | AfterEnum: false
26 | AfterFunction: true
27 | AfterNamespace: false
28 | AfterObjCDeclaration: false
29 | AfterStruct: false
30 | AfterUnion: false
31 | AfterExternBlock: false
32 | BeforeCatch: false
33 | BeforeElse: false
34 | IndentBraces: false
35 | SplitEmptyFunction: true
36 | SplitEmptyRecord: true
37 | SplitEmptyNamespace: true
38 | BreakBeforeBinaryOperators: None
39 | BreakBeforeBraces: Custom
40 | BreakBeforeInheritanceComma: false
41 | BreakInheritanceList: BeforeColon
42 | BreakBeforeTernaryOperators: true
43 | BreakConstructorInitializersBeforeComma: false
44 | BreakConstructorInitializers: BeforeColon
45 | BreakAfterJavaFieldAnnotations: false
46 | BreakStringLiterals: true
47 | ColumnLimit: 0
48 | CommentPragmas: '^ IWYU pragma:'
49 | CompactNamespaces: false
50 | ConstructorInitializerAllOnOneLineOrOnePerLine: false
51 | ConstructorInitializerIndentWidth: 4
52 | ContinuationIndentWidth: 4
53 | Cpp11BracedListStyle: true
54 | DerivePointerAlignment: false
55 | DisableFormat: false
56 | ExperimentalAutoDetectBinPacking: false
57 | FixNamespaceComments: true
58 | ForEachMacros:
59 | - foreach
60 | - Q_FOREACH
61 | - BOOST_FOREACH
62 | IncludeBlocks: Preserve
63 | IncludeCategories:
64 | - Regex: '^(<|"(gtest|isl|json)/)'
65 | Priority: 1
66 | - Regex: '.*'
67 | Priority: 2
68 | # NOTE: a second catch-all category was removed here — a '.*' entry after the
69 | # '.*' / Priority: 2 entry above can never match and only causes confusion.
70 | IncludeIsMainRegex: '$'
71 | IndentCaseLabels: false
72 | IndentPPDirectives: None
73 | IndentWidth: 4
74 | IndentWrappedFunctionNames: false
75 | JavaScriptQuotes: Leave
76 | JavaScriptWrapImports: true
77 | KeepEmptyLinesAtTheStartOfBlocks: true
78 | MacroBlockBegin: ''
79 | MacroBlockEnd: ''
80 | MaxEmptyLinesToKeep: 1
81 | NamespaceIndentation: None
82 | ObjCBinPackProtocolList: Auto
83 | ObjCBlockIndentWidth: 4
84 | ObjCSpaceAfterProperty: false
85 | ObjCSpaceBeforeProtocolList: true
86 | PenaltyBreakAssignment: 2
87 | PenaltyBreakBeforeFirstCallParameter: 19
88 | PenaltyBreakComment: 300
89 | PenaltyBreakFirstLessLess: 120
90 | PenaltyBreakString: 1000
91 | PenaltyBreakTemplateDeclaration: 10
92 | PenaltyExcessCharacter: 1000000
93 | PenaltyReturnTypeOnItsOwnLine: 100
94 | PointerAlignment: Right
95 | ReflowComments: true
96 | SortIncludes: false
97 | SortUsingDeclarations: true
98 | SpaceAfterCStyleCast: true
99 | SpaceAfterTemplateKeyword: true
100 | SpaceBeforeAssignmentOperators: true
101 | SpaceBeforeCpp11BracedList: false
102 | SpaceBeforeCtorInitializerColon: true
103 | SpaceBeforeInheritanceColon: true
104 | SpaceBeforeParens: ControlStatements
105 | SpaceBeforeRangeBasedForLoopColon: true
106 | SpaceInEmptyParentheses: false
107 | SpacesBeforeTrailingComments: 1
108 | SpacesInAngles: false
109 | SpacesInContainerLiterals: true
110 | SpacesInCStyleCastParentheses: false
111 | SpacesInParentheses: false
112 | SpacesInSquareBrackets: false
113 | Standard: Cpp11
114 | StatementMacros:
115 | - Q_UNUSED
116 | - QT_REQUIRE_VERSION
117 | TabWidth: 4
118 | UseTab: Never
119 | ...
120 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # SCM syntax highlighting & preventing 3-way merges
2 | pixi.lock merge=binary linguist-language=YAML linguist-generated=true
3 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions"
9 | directory: "/"
10 | schedule:
11 | # Check for updates to GitHub Actions every week
12 | interval: "weekly"
13 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | Checklist:
4 |
5 | - [ ] I made sure that the CI passed before I ask for a review.
6 | - [ ] I added a summary of the changes (compared to the last release) in the `CHANGELOG.md`.
7 | - [ ] If necessary, I made changes to the documentation and/or added new content.
8 | - [ ] I will remember to squash-and-merge, providing a useful summary of the changes of this PR.
9 |
--------------------------------------------------------------------------------
/.github/workflows/build_and_test.yaml:
--------------------------------------------------------------------------------
1 | name: Build and Test
2 | # Builds FANS inside various docker containers and runs the tests.
3 |
4 | on:
5 | push:
6 | branches:
7 | - main
8 | - develop
9 | pull_request:
10 | workflow_dispatch:
11 |
12 | concurrency:
13 | group: ${{ github.event_name }}-${{ github.workflow }}-${{ github.ref }}
14 | cancel-in-progress: ${{github.event_name == 'pull_request'}}
15 |
16 | jobs:
17 | build:
18 | name: ${{ format('Ubuntu {0}', matrix.UBUNTU_VERSION) }}
19 | runs-on: ubuntu-latest
20 | container: unistuttgartdae/fans-ci:${{ matrix.UBUNTU_VERSION }}
21 | defaults:
22 | run:
23 | shell: "bash --login -eo pipefail {0}"
24 | env:
25 | FANS_BUILD_DIR: build
26 | FANS_MPI_USER: fans
27 | strategy:
28 | fail-fast: false
29 | matrix:
30 | UBUNTU_VERSION: [noble, jammy]
31 | steps:
32 | - name: Checkout code
33 | uses: actions/checkout@v4
34 |
35 | - name: Set up pixi
36 | uses: prefix-dev/setup-pixi@v0.8.8
37 |
38 | - name: Generate build directory
39 | run: mkdir -p ${{ env.FANS_BUILD_DIR }}
40 |
41 | - name: Configure
42 | working-directory: ${{ env.FANS_BUILD_DIR }}
43 | run: |
44 | cmake --version
45 | cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_EXPORT_COMPILE_COMMANDS=ON ..
46 |
47 | - uses: actions/upload-artifact@v4
48 | if: failure()
49 | with:
50 | name: ${{ format('Ubuntu {0}', matrix.UBUNTU_VERSION) }} CMakeCache
51 | path: ${{ env.FANS_BUILD_DIR }}/CMakeCache.txt
52 | - uses: actions/upload-artifact@v4
53 | if: failure()
54 | with:
55 | name: ${{ format('Ubuntu {0}', matrix.UBUNTU_VERSION) }} CMakeLogs
56 | path: '${{ env.FANS_BUILD_DIR }}/CMakeFiles/*.log'
57 | - uses: actions/upload-artifact@v4
58 | if: failure()
59 | with:
60 | name: ${{ format('Ubuntu {0}', matrix.UBUNTU_VERSION) }} CompileCommands
61 | path: ${{ env.FANS_BUILD_DIR }}/compile_commands.json
62 |
63 | - name: Compile
64 | working-directory: ${{ env.FANS_BUILD_DIR }}
65 | run:
66 | cmake --build . -j $(nproc) || cmake --build . -j1
67 |
68 | - name: Adjust user rights
69 | run: chown -R ${{ env.FANS_MPI_USER }} ${{ env.FANS_BUILD_DIR }}
70 |
71 | - name: Tests
72 | working-directory: ${{ env.FANS_BUILD_DIR }}
73 | run: |
74 | su -c "ctest" ${{ env.FANS_MPI_USER }}
75 |
76 | - uses: actions/upload-artifact@v4
77 | if: failure()
78 | with:
79 | name: ${{ format('Ubuntu {0}', matrix.UBUNTU_VERSION) }} CTest logs
80 | path: ${{ env.FANS_BUILD_DIR }}/Testing/Temporary/LastTest.log
81 |
82 | # ────────────────────────────────────────────────────────────────
83 | # Pytest checks
84 | # ────────────────────────────────────────────────────────────────
85 | - name: Install Pixi Python deps
86 | run: |
87 | pixi --version
88 | pixi install
89 |
90 | - name: Run pytest checks on HDF5 output
91 | run: pixi run test
92 |
93 | - uses: actions/upload-artifact@v4
94 | if: failure()
95 | with:
96 | name: ${{ format('Ubuntu {0}', matrix.UBUNTU_VERSION) }} PyTest logs
97 | path: |
98 | **/pytest*.xml
99 | **/.pytest_cache
100 |
--------------------------------------------------------------------------------
/.github/workflows/build_and_test_mac.yaml:
--------------------------------------------------------------------------------
1 | name: Build and test macOS 15
2 | # Builds FANS for macOS 15 on Apple Silicon CPU and runs the tests.
3 |
4 | on:
5 | push:
6 | branches:
7 | - main
8 | - develop
9 | pull_request:
10 | workflow_dispatch:
11 |
12 | concurrency:
13 | group: ${{ github.event_name }}-${{ github.workflow }}-${{ github.ref }}
14 | cancel-in-progress: ${{github.event_name == 'pull_request'}}
15 |
16 | jobs:
17 | build-macos:
18 | name: macOS 15
19 | runs-on: macos-15
20 | env:
21 | FANS_BUILD_DIR: build
22 | strategy:
23 | fail-fast: false
24 | steps:
25 | - name: Checkout code
26 | uses: actions/checkout@v4
27 |
28 | - name: Install FANS dependencies
29 | run: |
30 | brew install gnu-time cmake gcc@14
31 | brew install open-mpi --build-from-source --cc=gcc-14
32 | brew install hdf5-mpi --build-from-source --cc=gcc-14
33 | brew install fftw eigen
34 |
35 | - name: Set up pixi
36 | uses: prefix-dev/setup-pixi@v0.8.8
37 |
38 | - name: Generate build directory
39 | run: mkdir -p ${{ env.FANS_BUILD_DIR }}
40 |
41 | - name: Configure
42 | working-directory: ${{ env.FANS_BUILD_DIR }}
43 | env:
44 | CC: gcc-14
45 | CXX: g++-14
46 | MPICC: mpicc
47 | MPICXX: mpicxx
48 | run: |
49 | cmake --version
50 | cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_EXPORT_COMPILE_COMMANDS=ON ..
51 |
52 | - uses: actions/upload-artifact@v4
53 | if: failure()
54 | with:
55 | name: macOS 15 CMakeCache
56 | path: ${{ env.FANS_BUILD_DIR }}/CMakeCache.txt
57 | - uses: actions/upload-artifact@v4
58 | if: failure()
59 | with:
60 | name: macOS 15 CMakeLogs
61 | path: '${{ env.FANS_BUILD_DIR }}/CMakeFiles/*.log'
62 | - uses: actions/upload-artifact@v4
63 | if: failure()
64 | with:
65 | name: macOS 15 CompileCommands
66 | path: ${{ env.FANS_BUILD_DIR }}/compile_commands.json
67 |
68 | - name: Compile
69 | working-directory: ${{ env.FANS_BUILD_DIR }}
70 | run:
71 | cmake --build . -j $(sysctl -n hw.ncpu) || cmake --build . -j1
72 |
73 | - name: Tests
74 | working-directory: ${{ env.FANS_BUILD_DIR }}
75 | env:
76 | CTEST_OUTPUT_ON_FAILURE: 1
77 | run: ctest
78 |
79 | - uses: actions/upload-artifact@v4
80 | if: failure()
81 | with:
82 | name: macOS 15 CTest logs
83 | path: ${{ env.FANS_BUILD_DIR }}/Testing/Temporary/LastTest.log
84 |
85 | # ────────────────────────────────────────────────────────────────
86 | # Pytest checks
87 | # ────────────────────────────────────────────────────────────────
88 | - name: Install Pixi Python deps
89 | run: |
90 | pixi --version
91 | pixi install
92 |
93 | - name: Run pytest checks on HDF5 output
94 | run: pixi run test
95 |
96 | - uses: actions/upload-artifact@v4
97 | if: failure()
98 | with:
99 | name: macOS 15 PyTest logs
100 | path: |
101 | **/pytest*.xml
102 | **/.pytest_cache
103 |
--------------------------------------------------------------------------------
/.github/workflows/docker.yaml:
--------------------------------------------------------------------------------
1 | name: Build and push docker images
2 |
3 | on: workflow_dispatch
4 |
5 | jobs:
6 | build-and-push:
7 | strategy:
8 | matrix:
9 | ubuntu-version: [noble, jammy]
10 |
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - name: Checkout code
15 | uses: actions/checkout@v4
16 |
17 | - name: Set up QEMU
18 | uses: docker/setup-qemu-action@v3
19 |
20 | - name: Set up Docker Buildx
21 | uses: docker/setup-buildx-action@v3
22 |
23 | - name: Login to DockerHub
24 | uses: docker/login-action@v3
25 | with:
26 | username: ${{ secrets.DOCKERHUB_USERNAME }}
27 | password: ${{ secrets.DOCKERHUB_TOKEN }}
28 |
29 | - name: Build and push fans-ci image
30 | uses: docker/build-push-action@v6
31 | with:
32 | context: ${{ github.workspace }}
33 | file: docker/Dockerfile
34 | platforms: |
35 | linux/amd64
36 | linux/arm64
37 | push: true
38 | tags: |
39 | unistuttgartdae/fans-ci:${{ matrix.ubuntu-version }}
40 | ${{ matrix.ubuntu-version == 'noble' && format('unistuttgartdae/fans-ci:latest') || '' }}
41 | target: fans_ci
42 | build-args: UBUNTU_VERSION=${{ matrix.ubuntu-version }}
43 |
44 | - name: Build and push fans-dev image
45 | uses: docker/build-push-action@v6
46 | with:
47 | context: ${{ github.workspace }}
48 | file: docker/Dockerfile
49 | platforms: |
50 | linux/amd64
51 | linux/arm64
52 | push: true
53 | tags: |
54 | unistuttgartdae/fans-dev:${{ matrix.ubuntu-version }}
55 | ${{ matrix.ubuntu-version == 'noble' && format('unistuttgartdae/fans-dev:latest') || '' }}
56 | target: fans_dev
57 | build-args: UBUNTU_VERSION=${{ matrix.ubuntu-version }}
58 |
--------------------------------------------------------------------------------
/.github/workflows/run-checks.yml:
--------------------------------------------------------------------------------
1 | name: Run checks for markdown, links, and pre-commit
2 | on:
3 | push:
4 | branches:
5 | - main
6 | - develop
7 | pull_request:
8 | branches:
9 | - "*"
10 | jobs:
11 | check_md:
12 | name: Lint markdown files
13 | runs-on: ubuntu-latest
14 | steps:
15 | - name: Check out repository
16 | uses: actions/checkout@v4
17 | - name: Lint markdown files (markdownlint)
18 | uses: articulate/actions-markdownlint@v1
19 | with:
20 | config: .markdownlint.json
21 | files: '.'
22 |
23 | check_links:
24 | name: Check links in markdown files
25 | runs-on: ubuntu-latest
26 | steps:
27 | - name: Check out repository
28 | uses: actions/checkout@v4
29 | - name: Check links in markdown files (markdown-link-check)
30 | uses: gaurav-nelson/github-action-markdown-link-check@v1
31 | with:
32 | use-quiet-mode: 'yes'
33 | use-verbose-mode: 'no'
34 | config-file: '.markdown-link-check-config.json'
35 |
36 | precommit:
37 | name: pre-commit checks
38 | runs-on: ubuntu-latest
39 | steps:
40 | - uses: actions/checkout@v4
41 | - name: Setup python
42 | uses: actions/setup-python@v5
43 | with:
44 | python-version: '3.10'
45 | check-latest: true
46 | - name: Install pre-commit
47 | run: pip install pre-commit
48 | - name: Run checks
49 | run: pre-commit run -a -v
50 | - name: Git status
51 | if: always()
52 | run: git status
53 | - name: Full diff
54 | if: always()
55 | run: git diff
56 |
--------------------------------------------------------------------------------
/.github/workflows/test_pyfans.yaml:
--------------------------------------------------------------------------------
1 | name: Test PyFans
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - develop
8 | pull_request:
9 | branches:
10 | - "*"
11 |
12 | jobs:
13 | test-pyfans:
14 | runs-on: ubuntu-latest
15 | container: unistuttgartdae/fans-ci:noble
16 | defaults:
17 | run:
18 | shell: "bash --login -eo pipefail {0}"
19 | env:
20 | FANS_BUILD_DIR: build
21 | FANS_MPI_USER: fans
22 | steps:
23 |
24 | - name: Checkout repository
25 | uses: actions/checkout@v4
26 |
27 | - name: Generate build directory
28 | run: mkdir -p ${{ env.FANS_BUILD_DIR }}
29 |
30 | - name: Install dependencies
31 | run: |
32 | apt update
33 | apt install -y wget python3-venv
34 |
35 | - name: Install preCICE
36 | run: |
37 | wget https://github.com/precice/precice/releases/download/v3.2.0/libprecice3_3.2.0_noble.deb
38 | apt install -y ./libprecice3_3.2.0_noble.deb
39 |
40 | - name: Install the Micro Manager
41 | run: |
42 | python3 -m venv .venv
43 | . .venv/bin/activate
44 | pip install micro-manager-precice
45 |
46 | - name: Configure
47 | working-directory: ${{ env.FANS_BUILD_DIR }}
48 | run: |
49 | cmake .. -DFANS_LIBRARY_FOR_MICRO_MANAGER=ON
50 | make
51 |
52 | - name: Run a dummy macro-micro coupling test
53 | run: |
54 | . .venv/bin/activate
55 | cd test/test_pyfans
56 | python3 macro-cube.py & micro-manager-precice micro-manager-config.json
57 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore build directories
2 | build/
3 | bin/
4 | lib/
5 | lib64/
6 | */build*/
7 |
8 | # Ignore CMake generated files
9 | CMakeFiles/
10 | CMakeCache.txt
11 | cmake_install.cmake
12 | Makefile
13 | CMakeLists.txt.user*
14 | .cmake/
15 |
16 | # Ignore compiled binaries and executables
17 | *.exe
18 | *.out
19 | *.app
20 | *.so
21 | *.dylib
22 |
23 | # Ignore object files and libraries
24 | *.o
25 | *.a
26 | *.lib
27 |
28 | # Ignore editor-specific files
29 | .vscode/
30 | .idea/
31 | *.vs/
32 | *.suo
33 | *.ntvs*
34 | *.njsproj
35 | *.sln
36 | *.suo
37 | *.swp
38 | *.sln.docstates
39 | *.user
40 | *.userosscache
41 | *.suo
42 | *.tsserver.log.*
43 | *.dbmdl
44 | *.dbproj
45 | *.jfm
46 | *.pfx
47 | *.publishsettings
48 | node_modules/
49 | bower_components/
50 |
51 | # Ignore OS generated files
52 | .DS_Store
53 | .DS_Store?
54 | ._*
55 | .Spotlight-V100
56 | .Trashes
57 | ehthumbs.db
58 | Thumbs.db
59 |
60 | # Ignore package manager directories
61 | pip-wheel-metadata/
62 |
63 | !fans_input.grid.json
64 | !fans_input.slides.json
65 |
66 | # Byte-compiled / optimized / DLL files
67 | __pycache__/
68 | *.py[cod]
69 | *$py.class
70 |
71 | # C extensions
72 | *.so
73 |
74 | # Distribution / packaging
75 | .Python
76 | build/
77 | develop-eggs/
78 | dist/
79 | downloads/
80 | eggs/
81 | .eggs/
82 | lib/
83 | lib64/
84 | parts/
85 | sdist/
86 | var/
87 | wheels/
88 | *.egg-info/
89 | .installed.cfg
90 | *.egg
91 |
92 | # PyInstaller
93 | # Usually these files are written by a python script from a template
94 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
95 | *.manifest
96 | *.spec
97 |
98 | # Installer logs
99 | pip-log.txt
100 | pip-delete-this-directory.txt
101 |
102 | # Unit test / coverage reports
103 | htmlcov/
104 | .tox/
105 | .nox/
106 | .coverage
107 | .cache
108 | nosetests.xml
109 | coverage.xml
110 | *.cover
111 | *.py,cover
112 | .hypothesis/
113 | .pytest_cache/
114 | cover/
115 |
116 | # Translations
117 | *.mo
118 | *.pot
119 |
120 | # Django stuff:
121 | *.log
122 | local_settings.py
123 | db.sqlite3
124 | db.sqlite3-journal
125 |
126 | # Flask stuff:
127 | instance/
128 | .webassets-cache
129 |
130 | # Scrapy stuff:
131 | .scrapy
132 |
133 | # Sphinx documentation
134 | docs/_build/
135 | docs/_static/
136 | docs/_autosummary/
137 |
138 | # Jupyter Notebook
139 | .ipynb_checkpoints
140 |
141 | # IPython
142 | profile_default/
143 | ipython_config.py
144 |
145 | # pyenv
146 | .python-version
147 |
148 | # celery beat schedule file
149 | celerybeat-schedule
150 |
151 | # SageMath parsed files
152 | *.sage.py
153 |
154 | # Environments
155 | .env
156 | .venv
157 | env/
158 | venv/
159 | ENV/
160 | env.bak/
161 | venv.bak/
162 |
163 | # Spyder project settings
164 | .spyderproject
165 | .spyproject
166 |
167 | # Rope project settings
168 | .ropeproject
169 |
170 | # MkDocs documentation
171 | /site
172 |
173 | # mypy
174 | .mypy_cache/
175 | .dmypy.json
176 | dmypy.json
177 |
178 | # Pyre type checker
179 | .pyre/
180 |
181 | # pyright type checker
182 | .pyright/
183 |
184 | # End of standard Python ignores
185 |
186 |
187 | # Extra
188 | *.xdmf
189 | FANS
190 | *.log
191 | run.sh
192 | *.h5
193 | data_gen/
194 | test/input_files/*.json
195 | test/input_files/**/*.json
196 |
197 | # Dedicated folder for personal projects
198 | **/scratch/
199 |
200 | # Test microstructure files
201 | !sphere32.h5
202 |
203 | # Test input files
204 | !test_LinearElastic.json
205 | !test_LinearThermal.json
206 | !test_PseudoPlastic.json
207 | !test_J2Plasticity.json
208 | !test_MixedBCs.json
209 |
210 | # pixi environments
211 | .pixi
212 | *.egg-info
213 |
--------------------------------------------------------------------------------
/.markdown-link-check-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "aliveStatusCodes": [429, 200],
3 | "ignorePatterns": [
4 | {
5 | "pattern": "\\.html$"
6 | }
7 | ]
8 | }
9 |
--------------------------------------------------------------------------------
/.markdownlint.json:
--------------------------------------------------------------------------------
1 | {
2 | "MD013": false,
3 | "MD033": false,
4 | "MD034": false
5 | }
6 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | # Official repo for the clang-format hook
3 | # (duplicate 'repos:' key removed — all hook repos belong to the single 'repos:' list above)
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v2.3.0
6 | hooks:
7 | - id: check-xml
8 | - id: check-merge-conflict
9 | - id: mixed-line-ending
10 | - id: end-of-file-fixer
11 | - id: trailing-whitespace
12 | # black repo for python formatting
13 | - repo: https://github.com/ambv/black
14 | rev: 22.12.0
15 | hooks:
16 | - id: black
17 | # clang-format for C/C++ formatting
18 | - repo: https://github.com/pre-commit/mirrors-clang-format
19 | rev: v19.1.2
20 | hooks:
21 | - id: clang-format
22 | args: ['--style=file']
23 | exclude: "include/json.hpp"
24 | types_or: [c++]
25 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # FANS Changelog
2 |
3 | ## latest
4 |
5 | - Add pixi task `h52xdmf` to generate XDMF from H5 files directly as `pixi run h52xdmf {h5filepath}`
6 |
7 | ## v0.4.1
8 |
9 | - remove std::sqrt from constexpr - failed on Clang https://github.com/DataAnalyticsEngineering/FANS/pull/64
10 |
11 | ## v0.4.0
12 |
13 | - Support compilation on MacOS X via conda-forge https://github.com/DataAnalyticsEngineering/FANS/pull/59
14 | - Add support for macroscale mixed stress-strain boundary conditions https://github.com/DataAnalyticsEngineering/FANS/pull/58
15 | - Add grain boundary diffusion material model for polycrystals https://github.com/DataAnalyticsEngineering/FANS/pull/52
16 | - Add a pixi environment for the FANS_dashboard and some tests https://github.com/DataAnalyticsEngineering/FANS/pull/55
17 | - Remove MPI initialization from pyFANS and add an integration test for it https://github.com/DataAnalyticsEngineering/FANS/pull/46
18 | - Native support for MacOS https://github.com/DataAnalyticsEngineering/FANS/pull/25
19 | - Remove Ubuntu 20.04 from testing and Docker support https://github.com/DataAnalyticsEngineering/FANS/pull/51
20 | - Add support for `--version` command line argument for checking the version of FANS
21 | - Modify way to provide micro structure in JSON input https://github.com/DataAnalyticsEngineering/FANS/pull/43
22 | - Add conda package for FANS https://github.com/DataAnalyticsEngineering/FANS/pull/39
23 | - Introduce system for checking compiler flags: `avx2` & `fma` https://github.com/DataAnalyticsEngineering/FANS/pull/34
24 | - Add `results_prefix` field in the JSON input https://github.com/DataAnalyticsEngineering/FANS/pull/36
25 | - Build FANS as a library to be coupled to a macro-scale simulation via preCICE and the Micro Manager https://github.com/DataAnalyticsEngineering/FANS/pull/23
26 |
27 | ## v0.3.0
28 |
29 | - Added Linear thermal and mechanical triclinic material models https://github.com/DataAnalyticsEngineering/FANS/pull/32
30 | - Added API to get homogenized stress and homogenized tangent https://github.com/DataAnalyticsEngineering/FANS/pull/31
31 |
32 | ## v0.2.0
33 |
34 | - Add integration tests https://github.com/DataAnalyticsEngineering/FANS/pull/20
35 | - Add GitHub Action workflow to build and test FANS https://github.com/DataAnalyticsEngineering/FANS/pull/19
36 |
37 | ## v0.1.2
38 |
39 | - Update TIK GitHub links in the documentation to public GitHub links https://github.com/DataAnalyticsEngineering/FANS/pull/13
40 |
41 | ## v0.1.1
42 |
43 | - Disable sorting of includes in clang-format https://github.com/DataAnalyticsEngineering/FANS/pull/7
44 |
45 | ## v0.1.0
46 |
47 | - Add release guide and a Changelog file https://github.com/DataAnalyticsEngineering/FANS/pull/4
48 | - Add clang-format check and format all relevant files https://github.com/DataAnalyticsEngineering/FANS/pull/1
49 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: "1.2.0"
2 | authors:
3 | - family-names: Leuschner
4 | given-names: Matthias
5 | orcid: "https://orcid.org/0000-0003-0477-3441"
6 | - family-names: Fritzen
7 | given-names: Felix
8 | orcid: "https://orcid.org/0000-0003-4926-0068"
9 | preferred-citation:
10 | authors:
11 | - family-names: Leuschner
12 | given-names: Matthias
13 | orcid: "https://orcid.org/0000-0003-0477-3441"
14 | - family-names: Fritzen
15 | given-names: Felix
16 | orcid: "https://orcid.org/0000-0003-4926-0068"
17 | date-published: 2017-11-30
18 | doi: 10.1007/s00466-017-1501-5
19 | issn: 1432-0924
20 | issue: 3
21 | journal: Computational Mechanics
22 | publisher:
23 | name: Springer
24 | title: "Fourier-Accelerated Nodal Solvers (FANS) for homogenization problems"
25 | type: article
26 | url: "https://link.springer.com/article/10.1007/s00466-017-1501-5"
27 | volume: 62
28 | title: "Fourier-Accelerated Nodal Solvers (FANS) for homogenization problems"
29 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to FANS
2 |
3 | Contributions to FANS are most welcome! Please refer to the steps below for more details.
4 |
5 | ## Changelog
6 |
7 | We maintain a `CHANGELOG.md` where all major changes and contributions are entered.
8 |
9 | ## How to contribute
10 |
11 | 1. **Fork and Clone**: Fork the repository on GitHub and clone your fork locally.
12 |
13 | ```bash
14 | git clone https://github.com/your-username/FANS.git
15 | cd FANS
16 | ```
17 |
18 | 2. **Create a Branch**: Create a branch for your work, using a descriptive name.
19 |
20 | ```bash
21 | git checkout -b feature/my-feature
22 | ```
23 |
24 | 3. **Make Changes**: Implement your changes, adhering to the [Code Style Guidelines](#code-style-guidelines).
25 |
26 | 4. **Write Tests**: Ensure new features or bug fixes are covered by tests.
27 |
28 | 5. **Commit and Push**: Commit your changes with a clear message, then push to your fork.
29 |
30 | ```bash
31 | git add .
32 | git commit -m "Describe your changes"
33 | git push origin feature/my-feature
34 | ```
35 |
36 | 6. **Create a Pull Request**: Open a pull request to the `develop` branch. Include relevant details, such as the issue being fixed or the feature being added.
37 |
38 | ### Code Style Guidelines
39 |
40 | - **C++ Standard**: Use C++17 or later.
41 | - **Indentation**: 4 spaces, no tabs.
42 | - **Naming**:
43 | - Functions: `camelCase`
44 | - Classes: `PascalCase`
45 | - Variables: `snake_case`
46 | - Constants: `ALL_CAPS`
47 | - **Documentation**: Use Doxygen-style comments.
48 |
49 | ### Branching and Merging
50 |
51 | - **`main`**: Latest stable release.
52 | - **`develop`**: Active development. Base your feature branches off `develop`.
53 | - **Feature branches**: Branch off `develop` and submit pull requests back to `develop`.
54 | - **Release branches**: Merged into `main` for new releases.
55 |
--------------------------------------------------------------------------------
/FANS_Dashboard/README.md:
--------------------------------------------------------------------------------
1 | # FANS Dashboard
2 |
3 | The FANS Dashboard is a comprehensive tool designed to streamline the post-processing, interpretation, and visualization of results generated by the FANS solver. This Jupyter notebook provides a user-friendly environment to work with complex simulation data stored in HDF5 format, offering a step-by-step workflow that covers data extraction, postprocessing, visualization, and preparation for 3D visualization in tools like ParaView.
4 |
5 | For further details follow along [`FANS_Dashboard.ipynb`](FANS_Dashboard.ipynb)
6 |
--------------------------------------------------------------------------------
/FANS_Dashboard/fans_dashboard/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DataAnalyticsEngineering/FANS/3c681507289fe30d459fe6b378b2ccb6687414ab/FANS_Dashboard/fans_dashboard/__init__.py
--------------------------------------------------------------------------------
/FANS_Dashboard/fans_dashboard/core/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DataAnalyticsEngineering/FANS/3c681507289fe30d459fe6b378b2ccb6687414ab/FANS_Dashboard/fans_dashboard/core/__init__.py
--------------------------------------------------------------------------------
/FANS_Dashboard/fans_dashboard/core/postprocessing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
def compute_rank2tensor_measures(tensor_matrix, measures_to_compute=None):
    """
    Computes various tensor measures from a given stress or strain tensor given as
    6-component vectors. Supports input tensors with arbitrary leading dimensions,
    as long as the last dimension is 6.

    Based on : https://doc.comsol.com/5.5/doc/com.comsol.help.sme/sme_ug_theory.06.16.html

    Parameters:
    - tensor_matrix: numpy array, tensor (stress or strain) with shape (..., 6).
                     The tensor should be organized as follows:
                     [s11, s22, s33, s12, s13, s23] for stress or
                     [e11, e22, e33, e12, e13, e23] for strain.
    - measures_to_compute: list of strings, optional, specifying which measures to compute.
                           If not provided, default measures ['von_mises', 'hydrostatic', 'deviatoric'] will be computed.
                           Available options include:
                           - 'von_mises': von Mises stress/strain.
                           - 'hydrostatic': hydrostatic stress/strain.
                           - 'deviatoric': deviatoric stress/strain.
                           - 'principal': principal stresses/strains (eigenvalues).
                           - 'max_shear': maximum shear stress/strain.
                           - 'I_invariants': I1, I2, I3 invariants.
                           - 'J_invariants': J1, J2, J3 invariants of the deviatoric tensor.
                           - 'eigenvalues': eigenvalues of the stress/strain tensor.
                           - 'eigenvectors': eigenvectors of the stress/strain tensor.
                           - 'lode_angle': Lode angle, useful in advanced plasticity models.

    Returns:
    - result: dictionary, keys are the requested measure names and values are the computed measures.
              Each measure has the same leading dimensions as tensor_matrix, with the last
              dimension adjusted per measure (e.g. eigenvalues gain a trailing axis of 3).

    NOTE(review): the von Mises and J2 formulas below weight the shear components with
    factors 6 and 2, which is the formula for *unscaled* physical components, while
    mandel_to_matrix (used for determinants/eigenvalues) divides the shear components by
    sqrt(2), i.e. assumes Mandel scaling. Both conventions cannot be correct for the same
    input — confirm which convention callers actually use.
    """
    if measures_to_compute is None:
        measures_to_compute = ["von_mises", "hydrostatic", "deviatoric"]

    original_shape = tensor_matrix.shape[:-1]  # All dimensions except the last one
    tensor_matrix = tensor_matrix.reshape(-1, 6)  # Flatten to (N, 6) for processing

    result = {}

    # Hydrostatic stress/strain (mean of the diagonal components)
    hydrostatic = np.mean(tensor_matrix[:, :3], axis=1)
    if "hydrostatic" in measures_to_compute:
        result["hydrostatic"] = hydrostatic.reshape(original_shape)

    # Deviatoric part: subtract the hydrostatic part from the normal components;
    # the shear components are unaffected by the volumetric split.
    deviatoric = tensor_matrix[:, :3] - hydrostatic[:, np.newaxis]
    deviatoric_shear = tensor_matrix[:, 3:6]
    deviatoric_tensor = np.hstack([deviatoric, deviatoric_shear])
    if "deviatoric" in measures_to_compute:
        result["deviatoric"] = deviatoric_tensor.reshape(original_shape + (6,))

    if "von_mises" in measures_to_compute:
        von_mises = np.sqrt(
            0.5
            * (
                (deviatoric[:, 0] - deviatoric[:, 1]) ** 2
                + (deviatoric[:, 1] - deviatoric[:, 2]) ** 2
                + (deviatoric[:, 2] - deviatoric[:, 0]) ** 2
                + 6
                * (
                    deviatoric_shear[:, 0] ** 2
                    + deviatoric_shear[:, 1] ** 2
                    + deviatoric_shear[:, 2] ** 2
                )
            )
        )
        result["von_mises"] = von_mises.reshape(original_shape)

    # Compute I1, I2, I3 invariants if requested
    if "I_invariants" in measures_to_compute:
        I1 = np.sum(tensor_matrix[:, :3], axis=1)
        I2 = (
            tensor_matrix[:, 0] * tensor_matrix[:, 1]
            + tensor_matrix[:, 1] * tensor_matrix[:, 2]
            + tensor_matrix[:, 2] * tensor_matrix[:, 0]
            - tensor_matrix[:, 3] ** 2
            - tensor_matrix[:, 4] ** 2
            - tensor_matrix[:, 5] ** 2
        )
        # The original guarded this with `if "full_tensor" not in locals()`, which
        # was always true at this point; compute the 3x3 form directly instead.
        I3 = np.linalg.det(mandel_to_matrix(tensor_matrix))
        result["I_invariants"] = np.stack([I1, I2, I3], axis=-1).reshape(
            original_shape + (3,)
        )

    # J2/J3 are shared between the J_invariants and lode_angle measures, so
    # compute them once if either is requested (replaces the fragile
    # `locals()`-based caching of the original).
    J2 = J3 = None
    if "J_invariants" in measures_to_compute or "lode_angle" in measures_to_compute:
        J2 = 0.5 * np.sum(deviatoric**2 + 2 * deviatoric_shear**2, axis=1)
        J3 = np.linalg.det(mandel_to_matrix(deviatoric_tensor))
        if "J_invariants" in measures_to_compute:
            # J1 is the trace of the deviatoric tensor (zero up to round-off).
            # Bug fix: the original also stored 'J_invariants' in the result when
            # only 'lode_angle' was requested; now it is stored only on request.
            J1 = np.sum(deviatoric_tensor[:, :3], axis=1)
            result["J_invariants"] = np.stack([J1, J2, J3], axis=-1).reshape(
                original_shape + (3,)
            )

    # Principal stresses/strains, maximum shear, eigenvalues, and eigenvectors
    # all share a single symmetric eigen-decomposition.
    if any(
        measure in measures_to_compute
        for measure in ["principal", "max_shear", "eigenvalues", "eigenvectors"]
    ):
        eigenvalues, eigenvectors = np.linalg.eigh(mandel_to_matrix(tensor_matrix))
        if "principal" in measures_to_compute:
            result["principal"] = eigenvalues.reshape(original_shape + (3,))
        if "max_shear" in measures_to_compute:
            # eigh returns eigenvalues in ascending order, so this is
            # half the spread between the largest and smallest.
            max_shear = 0.5 * (eigenvalues[:, 2] - eigenvalues[:, 0])
            result["max_shear"] = max_shear.reshape(original_shape)
        if "eigenvalues" in measures_to_compute:
            result["eigenvalues"] = eigenvalues.reshape(original_shape + (3,))
        if "eigenvectors" in measures_to_compute:
            result["eigenvectors"] = eigenvectors.reshape(original_shape + (3, 3))

    # Lode angle calculation (J2 and J3 are guaranteed computed above).
    if "lode_angle" in measures_to_compute:
        # Handle very small J2 values to prevent division by zero
        safe_J2 = np.where(J2 > 1e-12, J2, 1e-12)
        sqrt_3_3 = (3 * np.sqrt(3)) / 2
        cos_3theta = np.clip(sqrt_3_3 * (J3 / safe_J2 ** (3 / 2)), -1, 1)
        lode_angle = (1.0 / 3.0) * np.arccos(cos_3theta)
        result["lode_angle"] = lode_angle.reshape(original_shape)

    return result
133 |
134 |
def mandel_to_matrix(mandel_tensor):
    """
    Convert a batch of tensors from Mandel notation to full 3x3 matrix form.

    Parameters:
    - mandel_tensor: numpy array, tensor in Mandel notation with shape (n_steps, 6),
                     organized as [s11, s22, s33, s12, s13, s23] for stress or
                     [e11, e22, e33, e12, e13, e23] for strain.

    Returns:
    - full_tensor: numpy array of shape (n_steps, 3, 3), the symmetric 3x3 form.
    """
    scale = 1.0 / np.sqrt(2)
    n_steps = mandel_tensor.shape[0]
    full_tensor = np.zeros((n_steps, 3, 3))

    # Normal components map onto the diagonal unchanged.
    for k in range(3):
        full_tensor[:, k, k] = mandel_tensor[:, k]

    # Shear components are scaled back by 1/sqrt(2) and mirrored across the
    # diagonal to produce a symmetric matrix.
    for col, (i, j) in enumerate(((0, 1), (0, 2), (1, 2)), start=3):
        value = mandel_tensor[:, col] * scale
        full_tensor[:, i, j] = value
        full_tensor[:, j, i] = value

    return full_tensor
162 |
163 |
def matrix_to_mandel(full_tensor, tolerance=1e-8):
    """
    Convert a batch of full 3x3 symmetric tensors to Mandel notation.

    Parameters:
    - full_tensor: numpy array, tensor in full 3x3 matrix form with shape (n_steps, 3, 3).
    - tolerance: float, optional, tolerance for the symmetry check. Default is 1e-8.

    Returns:
    - mandel_tensor: numpy array of shape (n_steps, 6), organized as
                     [s11, s22, s33, s12, s13, s23] for stress or
                     [e11, e22, e33, e12, e13, e23] for strain.

    Raises:
    - ValueError: if any tensor in the batch is not symmetric within `tolerance`.
    """
    # Reject batches containing non-symmetric tensors.
    if not np.allclose(full_tensor, full_tensor.transpose(0, 2, 1), atol=tolerance):
        raise ValueError(
            "One or more tensors are not symmetric within the specified tolerance."
        )

    root2 = np.sqrt(2)
    # Diagonal entries pass through unchanged; off-diagonal (shear) entries
    # pick up the Mandel sqrt(2) factor.
    columns = (
        full_tensor[:, 0, 0],
        full_tensor[:, 1, 1],
        full_tensor[:, 2, 2],
        root2 * full_tensor[:, 0, 1],
        root2 * full_tensor[:, 0, 2],
        root2 * full_tensor[:, 1, 2],
    )
    return np.stack(columns, axis=1)
197 |
--------------------------------------------------------------------------------
/FANS_Dashboard/fans_dashboard/plotting/PlotYoungsModulus.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import plotly.graph_objs as go
3 | import meshio
4 |
5 |
def compute_YoungsModulus3D(C_batch):
    """
    Compute the directional Young's modulus over the full unit sphere for a
    batch of stiffness tensors.

    Args:
        C_batch (ndarray): Batch of stiffness tensors in Mandel notation, shape (n, 6, 6).

    Returns:
        tuple: (X_batch, Y_batch, Z_batch, E_batch), each of shape (n, n_theta, n_phi):
            - X_batch, Y_batch, Z_batch: Cartesian coordinates of the modulus surface.
            - E_batch: Young's modulus in every sampled direction.
    """
    n_batch = C_batch.shape[0]
    n_theta, n_phi = 180, 360

    # Sample the sphere on a regular (theta, phi) grid.
    theta_grid, phi_grid = np.meshgrid(
        np.linspace(0, np.pi, n_theta),
        np.linspace(0, 2 * np.pi, n_phi),
        indexing="ij",
    )

    sin_theta = np.sin(theta_grid)
    d_x = sin_theta * np.cos(phi_grid)  # Shape (n_theta, n_phi)
    d_y = sin_theta * np.sin(phi_grid)
    d_z = np.cos(theta_grid)

    # Direction dyad d (x) d expressed as a Mandel 6-vector per grid point.
    root2 = np.sqrt(2)
    N_flat = np.stack(
        (
            d_x * d_x,
            d_y * d_y,
            d_z * d_z,
            root2 * d_x * d_y,
            root2 * d_x * d_z,
            root2 * d_y * d_z,
        ),
        axis=-1,
    ).reshape(-1, 6)  # Shape (n_points, 6)

    # Compliance tensors: S = C^{-1}.
    S_batch = np.linalg.inv(C_batch)  # Shape (n, 6, 6)

    # E(d) = 1 / (N : S : N) for every direction and every tensor.
    NSN = np.einsum("pi,nij,pj->np", N_flat, S_batch, N_flat)
    E_batch = (1.0 / NSN).reshape(n_batch, n_theta, n_phi)

    # Scale the unit directions by E to obtain the modulus surface.
    return E_batch * d_x, E_batch * d_y, E_batch * d_z, E_batch
61 |
62 |
def plot_YoungsModulus3D(C, title="Young's Modulus Surface"):
    """
    Render an interactive 3D surface of the directional Young's modulus.

    Args:
        C (ndarray): Stiffness tensor in Mandel notation, either a single (6,6)
            tensor or a batch of exactly one tensor with shape (1,6,6).
        title (str): Title of the plot.

    Raises:
        ValueError: If C is not of shape (6,6) or (1,6,6).
    """
    # Normalize to a one-element batch; anything else is rejected.
    if C.shape == (6, 6):
        C_batch = C[np.newaxis, :, :]
    elif C.shape == (1, 6, 6):
        C_batch = C
    else:
        raise ValueError(
            "C must be either a (6,6) tensor or a batch with one tensor of shape (1,6,6)."
        )

    # Compute the modulus surface and drop the batch axis for plotting.
    X, Y, Z, E = (arr[0] for arr in compute_YoungsModulus3D(C_batch))

    fig = go.Figure(
        data=[go.Surface(x=X, y=Y, z=Z, surfacecolor=E, colorscale="Viridis")],
        layout=go.Layout(
            title=title,
            scene=dict(
                xaxis=dict(title="X"),
                yaxis=dict(title="Y"),
                zaxis=dict(title="Z"),
                aspectmode="auto",
            ),
        ),
    )
    fig.show()
99 |
100 |
def export_YoungsModulus3D_to_vtk(C, prefix="youngs_modulus_surface"):
    """
    Export the computed Young's modulus surfaces to VTK files for Paraview visualization.

    Args:
        C (ndarray): Stiffness tensor in Mandel notation. Can be a single tensor of shape (6,6) or a batch of tensors of shape (n,6,6).
        prefix (str): Prefix for the output files; tensor k is written to "<prefix>_<k>.vtk".

    Returns:
        None
    """
    # Bug fix: the docstring promises (6,6) input, but compute_YoungsModulus3D
    # expects a batch — promote a single tensor to a one-element batch.
    if C.shape == (6, 6):
        C = C[np.newaxis, :, :]

    X_batch, Y_batch, Z_batch, E_batch = compute_YoungsModulus3D(C)
    n, n_theta, n_phi = X_batch.shape

    for k in range(n):
        # Flatten the (n_theta, n_phi) grid into a point cloud.
        points = np.vstack(
            (X_batch[k].ravel(), Y_batch[k].ravel(), Z_batch[k].ravel())
        ).T
        # Connect neighboring grid points into quad cells; indices follow the
        # row-major flattening used for `points` above.
        cells = [
            (
                "quad",
                np.array(
                    [
                        [
                            i * n_phi + j,
                            (i + 1) * n_phi + j,
                            (i + 1) * n_phi + (j + 1),
                            i * n_phi + (j + 1),
                        ]
                        for i in range(n_theta - 1)
                        for j in range(n_phi - 1)
                    ],
                    dtype=np.int32,
                ),
            )
        ]
        mesh = meshio.Mesh(
            points=points,
            cells=cells,
            point_data={"Youngs_Modulus": E_batch[k].ravel()},
        )
        filename = f"{prefix}_{k}.vtk"
        meshio.write(filename, mesh)
        # Bug fix: the original printed a literal placeholder instead of the
        # actual file name.
        print(f"Exported {filename}")
145 |
146 |
def demoCubic():
    """
    Demonstrates the Young's modulus surface plotting routine for a cubic material (Copper).

    Builds the stiffness tensor of copper from the three orthogonal projectors
    of cubic symmetry and plots the resulting 3D Young's modulus surface.

    Args:
        None

    Returns:
        None
    """
    # Orthogonal projectors for cubic symmetry in Mandel notation.
    volumetric = np.zeros((6, 6))
    volumetric[:3, :3] = 1.0 / 3.0
    normal_selector = np.diag([1, 1, 1, 0, 0, 0])
    deviatoric_normal = normal_selector - volumetric
    shear = np.eye(6) - normal_selector

    # Stiffness eigenvalues for copper.
    l1, l2, l3 = 136.67, 46, 150
    C = 3 * l1 * volumetric + l2 * deviatoric_normal + l3 * shear

    # Show the 3D Young's modulus plot for copper.
    plot_YoungsModulus3D(C, title="Young's Modulus Surface for Copper")
172 |
--------------------------------------------------------------------------------
/FANS_Dashboard/fans_dashboard/plotting/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DataAnalyticsEngineering/FANS/3c681507289fe30d459fe6b378b2ccb6687414ab/FANS_Dashboard/fans_dashboard/plotting/__init__.py
--------------------------------------------------------------------------------
/FANS_Dashboard/fans_dashboard/plotting/plotting.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import plotly.graph_objects as go
3 | from plotly.subplots import make_subplots
4 |
5 |
def plot_subplots(
    data1,
    data2,
    labels_x=None,
    labels_y=None,
    subplot_titles=None,
    title="",
    nrows=None,
    ncols=None,
    linewidth=1,
    markersize=4,
    linecolor=None,
    markercolor=None,
    fontsize=12,
    fig=None,
):
    """
    Plot a grid of subplots using Plotly, handling both single-component (scalar vs scalar) and multi-component data.

    Parameters:
    - data1: numpy array, first set of data to plot (e.g., strain, time) with shape (n_datapoints, n_plots)
    - data2: numpy array, second set of data to plot (e.g., stress) with shape (n_datapoints, n_plots)
    - labels_x: list of strings, labels for the x axes of each subplot (optional, default=None)
    - labels_y: list of strings, labels for the y axes of each subplot (optional, default=None)
    - subplot_titles: list of strings, titles for each subplot (optional, default=None)
    - title: string, title of the overall plot
    - nrows: int, number of rows in the subplot grid (optional; derived from the data if omitted)
    - ncols: int, number of columns in the subplot grid (optional; derived from the data if omitted)
    - linewidth: int, line width for the plots (optional, default=1)
    - markersize: int, size of the markers (optional, default=4)
    - linecolor: list of strings, colors of the lines for each subplot (optional, default=None, all blue)
    - markercolor: list of strings, colors of the markers for each subplot (optional, default=None, all blue)
    - fontsize: int, font size for axis labels, subplot titles, and tick labels (optional, default=12)
    - fig: existing Plotly figure to overlay the new subplots (optional, default=None, creates a new figure)

    Returns:
    - fig: the Plotly figure, for further customization or overlaying

    Raises:
    - ValueError: if the inputs are not numpy arrays, if their shapes disagree,
      or if any per-subplot list has a length other than the number of components.
    """
    # Validate data shapes
    if not isinstance(data1, np.ndarray) or not isinstance(data2, np.ndarray):
        raise ValueError("data1 and data2 must be numpy arrays.")

    if data1.shape[0] != data2.shape[0]:
        raise ValueError(
            "data1 and data2 must have the same number of data points (rows)."
        )

    if data1.shape[1] != data2.shape[1]:
        raise ValueError(
            "data1 and data2 must have the same number of components (columns)."
        )

    # Set the number of components based on data shape
    n_components = data1.shape[1]

    # Initialize linecolor and markercolor lists if not provided
    if linecolor is None:
        linecolor = ["blue"] * n_components
    elif len(linecolor) != n_components:
        raise ValueError(
            f"The length of linecolor must match the number of components ({n_components})."
        )

    if markercolor is None:
        markercolor = ["blue"] * n_components
    elif len(markercolor) != n_components:
        raise ValueError(
            f"The length of markercolor must match the number of components ({n_components})."
        )

    # Derive any unspecified grid dimension from the number of components.
    # Bug fix: previously a single user-supplied nrows or ncols was silently
    # overwritten; now it is respected and only the missing value is derived.
    if nrows is None and ncols is None:
        nrows = int(np.ceil(np.sqrt(n_components)))
        ncols = int(np.ceil(n_components / nrows))
    elif nrows is None:
        nrows = int(np.ceil(n_components / ncols))
    elif ncols is None:
        ncols = int(np.ceil(n_components / nrows))

    # Handle subplot titles
    if subplot_titles is None:
        subplot_titles = [f"Component {i+1}" for i in range(n_components)]
    elif len(subplot_titles) != n_components:
        raise ValueError(
            f"The length of subplot_titles must match the number of components ({n_components})."
        )

    # Handle labels_x and labels_y
    if labels_x is None:
        labels_x = [""] * n_components
    elif len(labels_x) != n_components:
        raise ValueError(
            f"The length of labels_x must match the number of components ({n_components})."
        )

    if labels_y is None:
        labels_y = [""] * n_components
    elif len(labels_y) != n_components:
        raise ValueError(
            f"The length of labels_y must match the number of components ({n_components})."
        )

    # Create the subplot figure if not provided
    if fig is None:
        fig = make_subplots(rows=nrows, cols=ncols, subplot_titles=subplot_titles)

    # Add traces for each component, filling the grid row by row.
    for i in range(n_components):
        row = i // ncols + 1
        col = i % ncols + 1
        fig.add_trace(
            go.Scatter(
                x=data1[:, i],
                y=data2[:, i],
                mode="lines+markers",
                marker=dict(symbol="x", size=markersize, color=markercolor[i]),
                line=dict(width=linewidth, color=linecolor[i]),
                name=f"Component {i+1}",
            ),
            row=row,
            col=col,
        )

        # Update axes with text labels
        fig.update_xaxes(
            title_text=labels_x[i],
            row=row,
            col=col,
            showgrid=True,
            mirror=True,
            ticks="inside",
            tickwidth=2,
            ticklen=6,
            title_font=dict(size=fontsize),
            tickfont=dict(size=fontsize),
            automargin=True,
        )
        fig.update_yaxes(
            title_text=labels_y[i],
            row=row,
            col=col,
            showgrid=True,
            mirror=True,
            ticks="inside",
            tickwidth=2,
            ticklen=6,
            title_font=dict(size=fontsize),
            tickfont=dict(size=fontsize),
            automargin=True,
        )

    # Update layout with the overall plot title and styling
    fig.update_layout(
        height=1000,
        width=1600,
        title_text=title,
        title_font=dict(size=fontsize),
        showlegend=False,  # Legends removed
        template="plotly_white",
        margin=dict(l=50, r=50, t=50, b=50),  # Adjust margins to prevent overlap
        title_x=0.5,
        autosize=False,
    )

    # Add a box outline around all subplots
    for i in range(1, nrows * ncols + 1):
        fig.update_xaxes(
            showline=True,
            linewidth=2,
            linecolor="black",
            row=(i - 1) // ncols + 1,
            col=(i - 1) % ncols + 1,
        )
        fig.update_yaxes(
            showline=True,
            linewidth=2,
            linecolor="black",
            row=(i - 1) // ncols + 1,
            col=(i - 1) % ncols + 1,
        )

    # Update subplot titles with the specified fontsize
    for annotation in fig["layout"]["annotations"]:
        annotation["font"] = dict(size=fontsize)

    # Return the figure for further customization or overlaying
    return fig
186 |
--------------------------------------------------------------------------------
/FANS_Dashboard/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 |
5 | [project]
6 | name = "fans-dashboard"
7 | version = "0.4.1"
8 | requires-python = ">=3.13"
9 | dependencies = [
10 | "numpy>=2.2.5,<3",
11 | "h5py>=3.13.0,<4",
12 | "plotly>=6.0.1,<7",
13 | "lxml>=5.4.0,<6",
14 | "nbformat>=5.10.4,<6",
15 | "matplotlib>=3.10.1,<4",
16 | "scipy>=1.15.2,<2",
17 | "meshio>=5.3.5,<6",
18 | "ipykernel>=6.29.5,<7",
19 | "nbclient>=0.10.2,<0.11",
20 | ]
21 |
22 | [tool.hatch.build.targets.wheel]
23 | packages = ["fans_dashboard"]
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/cmake/FANSConfig.cmake.in:
--------------------------------------------------------------------------------
@PACKAGE_INIT@

# Make the modules shipped with this package (e.g. FindFFTW3) visible to the
# find_dependency() calls below; the caller's module path is restored afterwards.
set(CMAKE_MODULE_PATH_save "${CMAKE_MODULE_PATH}")
list(INSERT CMAKE_MODULE_PATH 0 "${CMAKE_CURRENT_LIST_DIR}/modules")

# SETVARS_COMPLETED is set by Intel OneAPI's setvars script; warn because that
# environment can interfere with MPI discovery (see message below).
if ("$ENV{SETVARS_COMPLETED}" STREQUAL "1")
message(
WARNING
"Intel OneAPI environment is active, which might lead to issues with MPI discovery."
)
endif ()

include(CMakeFindDependencyMacro)
# Request a parallel (MPI-enabled) HDF5 build and fail hard if only a serial
# implementation is found.
set(HDF5_ENABLE_PARALLEL ON)
set(HDF5_PREFER_PARALLEL ON)
find_dependency(HDF5 REQUIRED COMPONENTS C CXX)
if (NOT HDF5_C_IS_PARALLEL)
message(FATAL_ERROR "Parallel HDF5 implementation (mpi) required but not found!")
endif()
find_dependency(Eigen3)
find_dependency(MPI)
# Resolved via the bundled FindFFTW3 module prepended to CMAKE_MODULE_PATH above.
find_dependency(FFTW3 COMPONENTS DOUBLE MPI)

# Restore the caller's original module path.
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH_save}")
unset(CMAKE_MODULE_PATH_save)

include(${CMAKE_CURRENT_LIST_DIR}/FANSTargets.cmake)

# Report the location of the imported FANS target and the packaged version.
get_target_property(FANS_LOCATION FANS::FANS LOCATION)
message(STATUS "Found FANS: ${FANS_LOCATION} (found version \"@PROJECT_VERSION@\")")
31 |
--------------------------------------------------------------------------------
/cmake/modules/FindFFTW3.cmake:
--------------------------------------------------------------------------------
1 | # ###########################################################################################
2 | # copied from: https://github.com/UCL/GreatCMakeCookOff/blob/master/modules/FindFFTW3.cmake #
3 | # ###########################################################################################
4 |
5 | # - Try to find FFTW
6 | #
7 | # By default, it will look only for the serial libraries with single, double,
8 | # and long double precision. Any combination of precision (SINGLE, DOUBLE,
9 | # LONGDOUBLE) and library type (SERIAL, [THREADS|OPENMP], MPI) is possible by
10 | # using the COMPONENTS keyword. For example,
11 | #
12 | # find_package(FFTW3 COMPONENTS SINGLE DOUBLE OPENMP MPI)
13 | #
14 | # Once done this will define
15 | # FFTW3_FOUND - System has FFTW3
16 | # FFTW3_INCLUDE_DIRS - The FFTW3 include directories
17 | # FFTW3_LIBRARIES - The libraries needed to use FFTW3
18 | # FFTW3_DEFINITIONS - Compiler switches required for using FFTW3
19 | # FFTW3_$KIND_$PARALLEL_FOUND- Set if FFTW3 exists in KIND precision format for PARALLEL mode.
20 | # where KIND can be: SINGLE, DOUBLE, LONGDOUBLE
21 | # and PARALLEL: SERIAL, OPENMP, MPI, THREADS.
22 | # FFTW3_$KIND_$PARALLEL_LIBRARY - The libraries needed to use.
23 | # FFTW3_INCLUDE_DIR_PARALLEL - The FFTW3 include directories for parallel mode.
24 |
25 | cmake_policy(SET CMP0054 NEW)
26 |
27 | if(FFTW3_FOUND)
28 | return()
29 | endif()
30 |
31 | if(FFTW3_INCLUDE_DIR AND FFTW3_LIBRARIES)
32 | set(FFTW3_FOUND TRUE)
33 | foreach(component ${FFTW3_FIND_COMPONENTS})
34 | if("${FFTW3_${component}_LIBRARY}" STREQUAL "")
35 | set(FFTW3_${component}_LIBRARY "${FFTW3_LIBRARIES}")
36 | endif()
37 | endforeach()
38 | return()
39 | endif()
40 |
41 | macro(find_specific_libraries KIND PARALLEL)
42 | list(APPEND FFTW3_FIND_COMPONENTS ${KIND}_${PARALLEL})
43 | if(NOT (${PARALLEL} STREQUAL "SERIAL") AND NOT ${PARALLEL}_FOUND)
44 | message(FATAL_ERROR "Please, find ${PARALLEL} libraries before FFTW")
45 | endif()
46 |
47 | find_library(FFTW3_${KIND}_${PARALLEL}_LIBRARY NAMES
48 | fftw3${SUFFIX_${KIND}}${SUFFIX_${PARALLEL}}${SUFFIX_FINAL} HINTS ${HINT_DIRS})
49 | if(FFTW3_${KIND}_${PARALLEL}_LIBRARY MATCHES fftw3)
50 | list(APPEND FFTW3_LIBRARIES ${FFTW3_${KIND}_${PARALLEL}_LIBRARY})
51 | set(FFTW3_${KIND}_${PARALLEL}_FOUND TRUE)
52 |
53 | STRING(TOLOWER "${KIND}" kind)
54 | STRING(TOLOWER "${PARALLEL}" parallel)
55 | if(FFTW3_${kind}_${parallel}_LIBRARY MATCHES "\\.a$")
56 | add_library(fftw3::${kind}::${parallel} STATIC IMPORTED GLOBAL)
57 | else()
58 | add_library(fftw3::${kind}::${parallel} SHARED IMPORTED GLOBAL)
59 | endif()
60 |
61 | # MPI Has a different included library than the others
62 |     # FFTW3_INCLUDE_DIR_PARALLEL will change depending on which one is used.
63 | set(FFTW3_INCLUDE_DIR_PARALLEL ${FFTW3_INCLUDE_DIR} )
64 | if(PARALLEL STREQUAL "MPI")
65 | set(FFTW3_INCLUDE_DIR_PARALLEL ${FFTW3_${PARALLEL}_INCLUDE_DIR})
66 | endif()
67 |
68 | set_target_properties(fftw3::${kind}::${parallel} PROPERTIES
69 | IMPORTED_LOCATION "${FFTW3_${KIND}_${PARALLEL}_LIBRARY}"
70 | INTERFACE_INCLUDE_DIRECTORIES "${FFTW3_INCLUDE_DIR_PARALLEL}")
71 |
72 | # adding target properties to the different cases
73 | ## MPI
74 | if(PARALLEL STREQUAL "MPI")
75 | if(MPI_C_LIBRARIES)
76 | set_target_properties(fftw3::${kind}::mpi PROPERTIES
77 | IMPORTED_LOCATION "${FFTW3_${KIND}_${PARALLEL}_LIBRARY}"
78 | INTERFACE_INCLUDE_DIRECTORIES "${FFTW3_INCLUDE_DIR_PARALLEL}"
79 | IMPORTED_LINK_INTERFACE_LIBRARIES ${MPI_C_LIBRARIES})
80 | endif()
81 | endif()
82 | ## OpenMP
83 | if(PARALLEL STREQUAL "OPENMP")
84 | if(OPENMP_C_FLAGS)
85 | set_target_properties(fftw3::${kind}::${parallel} PROPERTIES
86 | IMPORTED_LOCATION "${FFTW3_${KIND}_${PARALLEL}_LIBRARY}"
87 | INTERFACE_INCLUDE_DIRECTORIES "${FFTW3_INCLUDE_DIR_PARALLEL}"
88 | INTERFACE_COMPILE_OPTIONS "${OPENMP_C_FLAGS}")
89 | endif()
90 | endif()
91 | ## THREADS
92 | if(PARALLEL STREQUAL "THREADS")
93 | if(CMAKE_THREAD_LIBS_INIT) # TODO: this is not running
94 | set_target_properties(fftw3::${kind}::${parallel} PROPERTIES
95 | IMPORTED_LOCATION "${FFTW3_${KIND}_${PARALLEL}_LIBRARY}"
96 | INTERFACE_INCLUDE_DIRECTORIES "${FFTW3_INCLUDE_DIR_PARALLEL}"
97 | INTERFACE_COMPILE_OPTIONS "${CMAKE_THREAD_LIBS_INIT}")
98 | endif()
99 | endif()
100 | endif()
101 | endmacro()
102 |
103 |
104 |
105 |
106 | if(NOT FFTW3_FIND_COMPONENTS)
107 | set(FFTW3_FIND_COMPONENTS SINGLE DOUBLE LONGDOUBLE SERIAL)
108 | endif()
109 |
110 | string(TOUPPER "${FFTW3_FIND_COMPONENTS}" FFTW3_FIND_COMPONENTS)
111 |
112 | list(FIND FFTW3_FIND_COMPONENTS SINGLE LOOK_FOR_SINGLE)
113 | list(FIND FFTW3_FIND_COMPONENTS DOUBLE LOOK_FOR_DOUBLE)
114 | list(FIND FFTW3_FIND_COMPONENTS LONGDOUBLE LOOK_FOR_LONGDOUBLE)
115 | list(FIND FFTW3_FIND_COMPONENTS THREADS LOOK_FOR_THREADS)
116 | list(FIND FFTW3_FIND_COMPONENTS OPENMP LOOK_FOR_OPENMP)
117 | list(FIND FFTW3_FIND_COMPONENTS MPI LOOK_FOR_MPI)
118 | list(FIND FFTW3_FIND_COMPONENTS SERIAL LOOK_FOR_SERIAL)
119 |
120 | # FIXME - This may fail in computers without serial
121 | # Default serial to obtain version number
122 | set(LOOK_FOR_SERIAL 1)
123 |
124 | # set serial as default if none parallel component has been set
125 | if((LOOK_FOR_THREADS LESS 0) AND (LOOK_FOR_MPI LESS 0) AND
126 | (LOOK_FOR_OPENMP LESS 0))
127 | set(LOOK_FOR_SERIAL 1)
128 | endif()
129 |
130 | if(MPI_C_FOUND)
131 | set(MPI_FOUND ${MPI_C_FOUND})
132 | endif()
133 | unset(FFTW3_FIND_COMPONENTS)
134 |
135 |
136 |
137 |
138 | if(WIN32)
139 |   set(HINT_DIRS ${FFTW3_DIRECTORY} $ENV{FFTW3_DIRECTORY})
140 | else()
141 |   find_package(PkgConfig)
142 |   if(PKG_CONFIG_FOUND)
143 |     # Prefix must be PC_FFTW3 so the PC_FFTW3_* variables read below (and the
144 |     # PC_FFTW3_LIBDIR/PC_FFTW3_LIBRARY_DIRS hints used later) actually get set.
145 |     pkg_check_modules(PC_FFTW3 QUIET fftw3)
146 |     set(FFTW3_DEFINITIONS ${PC_FFTW3_CFLAGS_OTHER})
147 |   endif()
148 |   set(HINT_DIRS ${PC_FFTW3_INCLUDEDIR} ${PC_FFTW3_INCLUDE_DIRS}
149 |     ${FFTW3_INCLUDE_DIR} $ENV{FFTW3_INCLUDE_DIR} )
150 | endif()
149 |
150 | find_path(FFTW3_INCLUDE_DIR NAMES fftw3.h HINTS ${HINT_DIRS})
151 | # list(FIND) stores -1 when MPI was not requested; -1 is truthy in CMake, so
152 | # a bare if(LOOK_FOR_MPI) was inverted (false at index 0, true when absent).
153 | if (NOT LOOK_FOR_MPI LESS 0) # Probably is going to be the same as fftw3.h
154 |   find_path(FFTW3_MPI_INCLUDE_DIR NAMES fftw3-mpi.h HINTS ${HINT_DIRS})
155 | endif()
154 |
155 | function(find_version OUTVAR LIBRARY SUFFIX)
156 | file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/fftw${SUFFIX}/main.c
157 | # TODO: do we need to add include for mpi headers?
158 |     "#include <fftw3.h>
159 | #include <stdio.h>
160 | int main(int nargs, char const *argv[]) {
161 | printf(\"%s\", fftw${SUFFIX}_version);
162 | return 0;
163 | }"
164 | )
165 | if(NOT CMAKE_CROSSCOMPILING)
166 | try_run(RUN_RESULT COMPILE_RESULT
167 | "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/fftw${SUFFIX}/"
168 | "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/fftw${SUFFIX}/main.c"
169 | CMAKE_FLAGS
170 | -DLINK_LIBRARIES=${LIBRARY}
171 | -DINCLUDE_DIRECTORIES=${FFTW3_INCLUDE_DIR}
172 | RUN_OUTPUT_VARIABLE OUTPUT
173 | COMPILE_OUTPUT_VARIABLE COUTPUT
174 | )
175 | endif()
176 | if(RUN_RESULT EQUAL 0)
177 | string(REGEX REPLACE
178 | ".*([0-9]+\\.[0-9]+\\.[0-9]+).*"
179 | "\\1" VERSION_STRING "${OUTPUT}"
180 | )
181 | set(${OUTVAR} ${VERSION_STRING} PARENT_SCOPE)
182 | endif()
183 | endfunction()
184 |
185 | set(SUFFIX_DOUBLE "")
186 | set(SUFFIX_SINGLE "f")
187 | set(SUFFIX_LONGDOUBLE "l")
188 | set(SUFFIX_SERIAL "")
189 | set(SUFFIX_OPENMP "_omp")
190 | set(SUFFIX_MPI "_mpi")
191 | set(SUFFIX_THREADS "_threads")
192 | set(SUFFIX_FINAL "")
193 |
194 | if(WIN32)
195 | set(SUFFIX_FINAL "-3")
196 | else()
197 | set(HINT_DIRS ${PC_FFTW3_LIBDIR} ${PC_FFTW3_LIBRARY_DIRS}
198 | $ENV{FFTW3_LIBRARY_DIR} ${FFTW3_LIBRARY_DIR} )
199 | endif(WIN32)
200 |
201 | unset(FFTW3_LIBRARIES)
202 | set(FFTW3_INCLUDE_DIRS ${FFTW3_INCLUDE_DIR} ) # TODO what's for?
203 | set(FFTW3_FLAGS_C "")
204 | foreach(KIND SINGLE DOUBLE LONGDOUBLE)
205 | if(LOOK_FOR_${KIND} LESS 0)
206 | continue()
207 | endif()
208 | foreach(PARALLEL SERIAL MPI OPENMP THREADS)
209 | if(LOOK_FOR_${PARALLEL} LESS 0)
210 | continue()
211 | endif()
212 | find_specific_libraries(${KIND} ${PARALLEL})
213 | endforeach()
214 | endforeach()
215 |
216 | if(FFTW3_INCLUDE_DIR)
217 | list(GET FFTW3_FIND_COMPONENTS 0 smallerrun)
218 | string(REPLACE "_" ";" RUNLIST ${smallerrun})
219 | list(GET RUNLIST 0 KIND)
220 | list(GET RUNLIST 1 PARALLEL)
221 | unset(smallerrun)
222 | unset(RUNLIST)
223 | # suffix is quoted so it pass empty in the case of double as it's empty
224 | find_version(FFTW3_VERSION_STRING ${FFTW3_${KIND}_${PARALLEL}_LIBRARY}
225 | "${SUFFIX_${KIND}}")
226 | endif()
227 |
228 | # FIXME: fails if use REQUIRED.
229 | include(FindPackageHandleStandardArgs)
230 | # handle the QUIETLY and REQUIRED arguments and set FFTW3_FOUND to TRUE
231 | # if all listed variables are TRUE
232 | find_package_handle_standard_args(FFTW3
233 | REQUIRED_VARS FFTW3_LIBRARIES FFTW3_INCLUDE_DIR
234 | VERSION_VAR FFTW3_VERSION_STRING
235 | HANDLE_COMPONENTS
236 | )
237 |
--------------------------------------------------------------------------------
/cmake/packaging/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # ##############################################################################
2 | # PACKAGING
3 | # ##############################################################################
4 |
5 | set(CPACK_OUTPUT_FILE_PREFIX "${CMAKE_BINARY_DIR}/packages")
6 |
7 | set(CPACK_PACKAGE_NAME "${PROJECT_NAME}")
8 | set(CPACK_PACKAGE_VENDOR "MIB DAE Stuttgart")
9 | set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "FANS - Fourier Accelerated Nodal Solver" CACHE STRING "Extended summary.")
10 | set(CPACK_PACKAGE_HOMEPAGE_URL "https://github.com/DataAnalyticsEngineering/FANS")
11 | set(CPACK_DEBIAN_PACKAGE_MAINTAINER "MIB DAE Stuttgart")
12 |
13 | set(CPACK_PACKAGE_INSTALL_DIRECTORY ${CPACK_PACKAGE_NAME})
14 | set(CPACK_PACKAGE_VERSION_MAJOR ${PROJECT_VERSION_MAJOR})
15 | set(CPACK_PACKAGE_VERSION_MINOR ${PROJECT_VERSION_MINOR})
16 | set(CPACK_PACKAGE_VERSION_PATCH ${PROJECT_VERSION_PATCH})
17 | set(CPACK_VERBATIM_VARIABLES YES)
18 | set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT)
19 |
20 | # set(CPACK_PACKAGE_DESCRIPTION_FILE ${CMAKE_CURRENT_LIST_DIR}/Description.txt)
21 | # set(CPACK_RESOURCE_FILE_WELCOME ${CMAKE_CURRENT_LIST_DIR}/Welcome.txt)
22 | # set(CPACK_RESOURCE_FILE_LICENSE ${CMAKE_CURRENT_LIST_DIR}/License.txt)
23 | # set(CPACK_RESOURCE_FILE_README ${CMAKE_CURRENT_LIST_DIR}/Readme.txt)
24 |
25 | set(CPACK_DEB_COMPONENT_INSTALL ON)
26 | set(CPACK_DEBIAN_ENABLE_COMPONENT_DEPENDS ON)
27 |
28 | set(CPACK_DEBIAN_FANS_RUNTIME_PACKAGE_NAME "fans")
29 | set(CPACK_DEBIAN_FANS_DEVELOPMENT_PACKAGE_NAME "fans-dev")
30 |
31 | # this option automatically computes the dependencies of shared libraries (by looking at the libs they are themselves
32 | # linked to). Requires 'dpkg-shlibdeps' to be available.
33 | set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
34 |
35 | # this package is not autodetected but is required for mpi to function properly
36 | set(CPACK_DEBIAN_FANS_RUNTIME_PACKAGE_DEPENDS "openmpi-bin")
37 |
38 | # add header packages of dependencies as recommended (they are required to build from the FANS headers).
39 | set(CPACK_DEBIAN_FANS_DEVELOPMENT_PACKAGE_DEPENDS "libhdf5-dev, libopenmpi-dev, libeigen3-dev, libfftw3-dev, libfftw3-mpi-dev")
40 |
41 | include(CPack)
42 |
43 | cpack_add_component(FANS_Runtime
44 | DISPLAY_NAME "FANS Runtime"
45 | DESCRIPTION "FANS shared library and executable"
46 | REQUIRED
47 | INSTALL_TYPES Full Developer Minimal
48 | )
49 | cpack_add_component(FANS_Development
50 | DISPLAY_NAME "FANS Development"
51 | DESCRIPTION "FANS headers and CMake files"
52 | DEPENDS FANS_Runtime
53 | INSTALL_TYPES Full Developer
54 | )
55 | cpack_add_install_type(Full)
56 | cpack_add_install_type(Minimal)
57 | cpack_add_install_type(Developer)
58 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | # During build time, don't ask for user input (has to be included in every stage
2 | # to take effect)
3 | ARG DEBIAN_FRONTEND=noninteractive
4 | ARG UBUNTU_VERSION=noble
5 | ARG USER=fans
6 |
7 | ################################################################################
8 |
9 | FROM ubuntu:${UBUNTU_VERSION} AS fans_base
10 | ARG DEBIAN_FRONTEND
11 | ARG USER
12 |
13 | # Context: https://askubuntu.com/questions/1513927/ubuntu-24-04-docker-images-now-includes-user-ubuntu-with-uid-gid-1000
14 | RUN bash -c 'if id "ubuntu" &>/dev/null; then \
15 | touch /var/mail/ubuntu && \
16 | chown ubuntu /var/mail/ubuntu && \
17 | userdel -r ubuntu && \
18 | echo "Deleted user ubuntu."; \
19 | fi'
20 |
21 | # Create a non-root user
22 | RUN useradd -m -s /bin/bash ${USER}
23 |
24 | ################################################################################
25 |
26 | FROM fans_base AS fans_ci
27 | ARG DEBIAN_FRONTEND
28 |
29 | RUN apt-get update -qq && apt-get install -y --no-install-recommends \
30 | # Build basics
31 | software-properties-common \
32 | build-essential \
33 | # CMake + git for FetchContent + file for CPack
34 | cmake \
35 | git \
36 | file \
37 | # FANS dependencies \
38 | libhdf5-dev \
39 | libopenmpi-dev \
40 | libeigen3-dev \
41 | libfftw3-dev \
42 | libfftw3-mpi-dev \
43 | # Required for preCICE Micro Manager Python bindings
44 | python3-dev \
45 | # Clean up
46 | && apt-get clean \
47 | && apt-get autoremove --purge -y \
48 | && rm -rf /var/lib/apt/lists/*
49 |
50 | ################################################################################
51 |
52 | FROM fans_ci AS fans_dev
53 | ARG DEBIAN_FRONTEND
54 | ARG USER
55 | ARG FANS_venv=FANS_venv
56 |
57 | RUN apt-get update -qq && apt-get install -y --no-install-recommends \
58 | # Packages required for setting up the non-root user
59 | sudo \
60 | gosu \
61 | # Some additional packages for convenience
62 | time \
63 | htop \
64 | vim \
65 | python3-pip \
66 | python3-venv \
67 | python-is-python3 \
68 | # Clean up
69 | && apt-get clean \
70 | && apt-get autoremove --purge -y \
71 | && rm -rf /var/lib/apt/lists/*
72 |
73 | # Create a python venv for test/h52xdmf.py script
74 | USER ${USER}
75 |
76 | RUN python -m venv /home/${USER}/venvs/${FANS_venv} && \
77 | echo "\nsource /home/${USER}/venvs/${FANS_venv}/bin/activate\n" >> /home/${USER}/.bashrc && \
78 | . /home/${USER}/venvs/${FANS_venv}/bin/activate && \
79 | python -m pip install --no-cache-dir h5py lxml
80 |
81 | USER root
82 |
83 | # Add fans user to sudoers
84 | RUN echo ${USER} ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/${USER} \
85 | && chmod 440 /etc/sudoers.d/${USER}
86 |
87 | # Entrypoint script changes UID and GID to match given host UID and GID
88 | COPY --chmod=755 docker/Dockerfile_user_env_entrypoint.sh /entrypoint.sh
89 | ENTRYPOINT ["/entrypoint.sh"]
90 |
91 | CMD ["bash"]
92 |
--------------------------------------------------------------------------------
/docker/Dockerfile_user_env_entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash --login
2 |
3 | # Abort script at first error, when a command exits with non-zero status (except in until or while loops, if-tests, list constructs)
4 | set -e
5 |
6 | ### workaround to fix permissions in mounted volumes ###
7 | # This is necessary because the user in the container has a different UID and GID than the user on the host.
8 | # USAGE: docker run -e HOST_UID=$(id -u) -e HOST_GID=$(id -g) ...
9 | # open issue on this topic: https://github.com/docker/roadmap/issues/398
10 | hostgroup="hostgroup"
11 | container_user="fans"
12 |
13 | if [ "$(id -u -n)" = "root" ]; then
14 | if [ -n "$HOST_UID" ] && [ -n "$HOST_GID" ]; then
15 | echo "Setting UID and GID to match provided host UID and GID..."
16 | # echo "'id' before changes: $(id $container_user)"
17 |
18 | if ! getent group $hostgroup >/dev/null; then
19 | groupadd -o -g $HOST_GID $hostgroup
20 | fi
21 |
22 | old_group=$(id -g -n $container_user)
23 |
24 | if ! id -nG $container_user | grep -qw $hostgroup; then
25 | usermod -g $hostgroup $container_user
26 | fi
27 |
28 | if ! id -nG $container_user | grep -qw $old_group; then
29 | usermod -a -G $old_group $container_user
30 | fi
31 |
32 | if [ "$(id -u $container_user)" != "$HOST_UID" ]; then
33 | usermod -u $HOST_UID $container_user
34 | fi
35 |
36 | # echo "'id' after changes: $(id $container_user)"
37 | else
38 | echo "WARNING: Please provide HOST_UID and HOST_GID as environment variables (docker run -e)! UID and GID will not be changed. This will probably lead to permission issues with mounted volumes."
39 | fi
40 | else
41 | echo "WARNING: Can't change UID and GID to given host UID and GID. entrypoint.sh must run as root! UID and GID will not be changed. This will probably lead to permission issues with mounted volumes."
42 | fi
43 |
44 | # drop privileges and execute given commands as the user $container_user
45 | exec gosu $container_user "$@"
46 |
--------------------------------------------------------------------------------
/docker/README.md:
--------------------------------------------------------------------------------
1 | # Docker
2 |
3 | We provide a set of docker images for different use cases on our [Dockerhub profile](https://hub.docker.com/u/unistuttgartdae):
4 |
5 | - **fans-ci**: Contains the minimum tools to build FANS (including dev packages of dependencies with the required headers), but does not include FANS itself. Meant for a CI workflow.
6 | - **fans-dev**: Based upon fans-ci, but offers a non-root user (`fans`) and handling of UID and GID to not mess up permissions when volume mounting into the container. Meant as a quick-to-set-up build environment for FANS.
7 |
8 | Both images are built for linux/amd64 and linux/arm64 as well as for the two most recent Ubuntu LTS versions (jammy and noble). The Ubuntu version can be selected through tags, e.g. `fans-dev:jammy`; `noble` is equivalent to the `latest` tag. The architecture is selected automatically depending on your host platform.
9 |
10 | ## Set up a Container
11 |
12 | Set up a development container with your current working directory (in there, use `git clone` to obtain the latest FANS version) mounted into it. You need to have [Docker Desktop](https://www.docker.com/products/docker-desktop/) installed on your machine.
13 |
14 | First, clone FANS:
15 |
16 | ```bash
17 | git clone https://github.com/DataAnalyticsEngineering/FANS.git
18 | cd FANS
19 | ```
20 |
21 | Then we create the container using our `fans-dev` image.
22 |
23 | ### In a Linux, MacOS or Windows Subsystem for Linux (WSL) Shell
24 |
25 | ```bash
26 | docker create --name fans-dev -it \
27 | -e HOST_UID=$(id -u) \
28 | -e HOST_GID=$(id -g) \
29 | -v /etc/localtime:/etc/localtime:ro \
30 | -v /etc/timezone:/etc/timezone:ro \
31 | -v $PWD/:/FANS/ \
32 | unistuttgartdae/fans-dev:latest
33 | ```
34 |
35 | The `-e` options provide the entrypoint script of the container with your host user ID and GID, such that the user ID and GID inside the container can be adapted to match yours. This is done to not mess up file permissions in the mounted volumes. The two volume mounts of `/etc/localtime` and `/etc/timezone` are required to have the host date and time inside the container.
36 |
37 | ### In Windows PowerShell
38 |
39 | Using PowerShell is not recommended since it only has limited support of file permissions and completely ignores file ownership in the WSL->Container direction.
40 |
41 | ```bash
42 | docker create --name fans-dev -it `
43 | --env HOST_UID=1000 `
44 | --env HOST_GID=1000 `
45 | --env TZ=Europe/Berlin `
46 | --volume ${PWD}:/FANS/ `
47 | unistuttgartdae/fans-dev
48 | ```
49 |
50 | ## Working with the container
51 |
52 | The following workflow is suggested: You would work on the code as usual on your host; and only to build and run FANS you would attach to the container:
53 |
54 | ```bash
55 | docker start fans-dev
56 | docker attach fans-dev
57 |
58 | cd /FANS
59 | mkdir build
60 | cd build
61 | cmake ..
62 | cmake --build . -j
63 |
64 | cd ../test
65 | ./FANS
66 | ./run_tests.sh
67 | cat nohup_test_*.log
68 | ```
69 |
70 | For convenience we added some basic utilities to our `fans-dev` image including `htop`, `vim` and `python`.
71 |
72 | ### Attaching Visual Studio Code
73 |
74 | You can attach VS Code to the newly created container in order to actually work inside the container. This has the benefit that IntelliSense and other static analysis tools have access to all the headers of FANS' dependencies which would not be possible when developing on the host and only using the container for building FANS.
75 |
76 | To attach VS Code you need to install the `Remote Development Extension Pack` and the `Docker` Extension. Then open the Docker menu, right click our newly created `fans-dev` container and select "Start" (if not running already) and then "Attach Visual Studio Code".
77 |
78 | After attaching VS Code you unfortunately are user `root` in VS Code due to the way the UID and GID mapping is implemented: The container starts as root, executes the entrypoint script which changes UID and GID and only then drops privileges using `gosu`. VS Code though skips the entrypoint script and thus doesn't switch to the non-root user `fans`. You however can do so manually by typing `gosu fans bash` in your terminal sessions inside VS Code.
79 |
80 | For further reading and alternative approaches like a full DevContainer setup have a look at
81 |
82 | - [Developing inside a Container](https://code.visualstudio.com/docs/devcontainers/containers)
83 | - [Attach to a running Container](https://code.visualstudio.com/docs/devcontainers/attach-container)
84 | - [Specifying the default container user](https://code.visualstudio.com/remote/advancedcontainers/add-nonroot-user#_specifying-the-default-container-user)
85 |
86 | ### Calling Containerized FANS from the Host
87 |
88 | By building inside the container, FANS is linked against the container's libs and therefore must run inside the container. After attaching to the container you can then continue to use FANS as described in the main [README](../README.md#usage). Just remember that any input and output files need to be visible to the container and thus must lie somewhere inside the mounted volumes.
89 |
90 | Special care has to be taken if you need to use FANS within scripts on the host, as Docker's interactive mode (`-i`) is not suitable in this case. Instead you need to use `docker exec`. One basically replaces the original `FANS` call by `docker exec -u fans -w /FANS/test fans-dev [original call]`. For example in conjunction with nohup:
91 |
92 | ```bash
93 | docker start fans-dev
94 | nohup /usr/bin/time -v docker exec -u fans -w /FANS/test fans-dev [original call] &
95 | docker stop fans-dev
96 | ```
97 |
--------------------------------------------------------------------------------
/docs/ReleaseGuide.md:
--------------------------------------------------------------------------------
1 | # Guide to release new version of FANS
2 |
3 | The developer who is releasing a new version of FANS is expected to follow this workflow:
4 |
5 | The release of the `FANS` repository is made directly from a release branch called `FANS-v1.2.3`. This branch is mainly needed to help other developers with testing.
6 |
7 | 1. Create a branch called `FANS-v1.2.3` from the latest commit of the `develop` branch.
8 |
9 | 2. Bump the version in the `CHANGELOG.md`, the base `CMakeLists.txt`, and in the file `FANS_Dashboard/pyproject.toml` on the branch `FANS-v1.2.3`.
10 |
11 | 3. Assuming you have pixi installed, run the command `pixi lock` in the base directory to update the version of the FANS dashboard in the pixi lock file.
12 |
13 | 4. If it is a real release, [open a Pull Request `main` <-- `FANS-v1.2.3`](https://github.com/DataAnalyticsEngineering/FANS/compare/main...FANS-v1.2.3) named after the version (i.e. `Release v1.2.3`) and briefly describe the new features of the release in the PR description.
14 |
15 | 5. [Draft a new release](https://github.com/DataAnalyticsEngineering/FANS/releases/new) in the `Releases` section of the repository page in a web browser. The release tag needs to be the exact version number (i.e.`v1.2.3` or `v1.2.3rc1`, compare to [existing tags](https://github.com/DataAnalyticsEngineering/FANS/tags)). Use `@target:main`. Release title is also the version number (i.e. `v1.2.3` or `v1.2.3rc1`, compare to [existing releases](https://github.com/DataAnalyticsEngineering/FANS/tags)).
16 |
17 | * *Note:* If it is a pre-release then the option *This is a pre-release* needs to be selected at the bottom of the page. Use `@target:FANS-v1.2.3` for a pre-release, since we will never merge a pre-release into `main`.
18 | * Use the `Auto-generate release notes` feature.
19 |
20 | a) If a pre-release is made: Directly hit the "Publish release" button in your Release Draft.
21 |
22 | b) If this is a "real" release: As soon as one approving review is made, merge the release PR (from `FANS-v1.2.3`) into `main`.
23 |
24 | 6. Merge `main` into `develop` for synchronization of `develop`.
25 |
26 | 7. If everything is in order up to this point then the new version can be released by hitting the "Publish release" button in your Release Draft. This will create the corresponding tag.
27 |
--------------------------------------------------------------------------------
/docs/images/FANS_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DataAnalyticsEngineering/FANS/3c681507289fe30d459fe6b378b2ccb6687414ab/docs/images/FANS_example.png
--------------------------------------------------------------------------------
/include/general.h:
--------------------------------------------------------------------------------
1 |
2 |
3 | #ifndef GENERAL_H_
4 | #define GENERAL_H_
5 |
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include