├── .github ├── ISSUE_TEMPLATE │ ├── ✨-feature-request.md │ ├── 🌈-generic-ticket.md │ └── 🐛-bug-report.md └── workflows │ └── ci-cd.yml ├── .gitignore ├── .markdownlint.yml ├── .python-version ├── CONTRIBUTING.md ├── LICENSE.md ├── README.rst ├── Taskfile.yml ├── docs ├── changelog.md ├── contributing.md ├── css │ └── video.css ├── diffcrash │ ├── diffcrash_run.md │ ├── index.md │ └── run.md ├── dimred │ ├── DimredRun.md │ ├── KeywordTypes.md │ ├── clustering_betas.md │ ├── command_line_interface.md │ ├── index.md │ ├── plot_beta_clusters.md │ ├── pod_functions.md │ ├── subsampling_methods.md │ └── svd.md ├── dyna │ ├── ArrayType.md │ ├── Binout.md │ ├── D3plot.md │ ├── D3plotHeader.md │ ├── FilterType.md │ ├── index.md │ └── performance_info.md ├── femzip │ ├── FemzipAPI.md │ ├── FemzipAPIStatus.md │ ├── FemzipBufferInfo.md │ ├── FemzipError.md │ ├── FemzipFileMetadata.md │ ├── FemzipVariableCategory.md │ ├── VariableInfo.md │ └── index.md ├── icon-discord.png ├── icon-home.png ├── index.md ├── lasso-logo.png └── ls-dyna_database_Apr2020.pdf ├── mkdocs.yml ├── pyproject.toml ├── src └── lasso │ ├── __init__.py │ ├── diffcrash │ ├── __init__.py │ ├── diffcrash_run.py │ └── run.py │ ├── dimred │ ├── __init__.py │ ├── dimred_run.py │ ├── graph_laplacian.py │ ├── hashing.py │ ├── hashing_sphere.py │ ├── run.py │ ├── sphere │ │ ├── __init__.py │ │ └── algorithms.py │ └── svd │ │ ├── __init__.py │ │ ├── clustering_betas.py │ │ ├── html_str_eles.py │ │ ├── keyword_types.py │ │ ├── plot_beta_clusters.py │ │ ├── plotly_latest.min.js │ │ ├── pod_functions.py │ │ └── subsampling_methods.py │ ├── dyna │ ├── __init__.py │ ├── array_type.py │ ├── binout.py │ ├── d3plot.py │ ├── d3plot_header.py │ ├── femzip_mapper.py │ ├── filter_type.py │ └── lsda_py3.py │ ├── femzip │ ├── __init__.py │ ├── femzip_api.py │ ├── fz_config.py │ └── lib │ │ ├── linux │ │ ├── api_extended.so │ │ ├── libfemzip_a_dyna_sidact_generic.so │ │ ├── libfemzip_post_licgenerator_ext_flexlm.so │ │ ├── libimf.so │ │ ├── libintlc.so.5 │ │ ├── libiomp5.so │ │ ├── libirng.so │ │ └── libsvml.so │ │ └── windows │ │ ├── api_extended.dll │ │ ├── femzip_a_dyna_sidact_generic.dll │ │ ├── libfemzip_post_licgenerator_ext_flexlm.dll │ │ ├── libifcoremd.dll │ │ ├── libifportmd.dll │ │ ├── libiomp5md.dll │ │ └── libmmd.dll │ ├── io │ ├── __init__.py │ ├── binary_buffer.py │ └── files.py │ ├── logging.py │ ├── math │ ├── __init__.py │ ├── sampling.py │ └── stochastic.py │ ├── plotting │ ├── __init__.py │ ├── plot_shell_mesh.py │ └── resources │ │ ├── __init__.py │ │ ├── chroma.min.js │ │ ├── jquery.min.js │ │ ├── jszip.min.js │ │ ├── template.html │ │ ├── three.min.js │ │ └── three_latest.min.js │ └── utils │ ├── __init__.py │ ├── console_coloring.py │ ├── decorators.py │ ├── language.py │ └── rich_progress_bars.py ├── test ├── __init__.py ├── plot_creator_helper.py ├── test_data │ ├── DimredRunTest │ │ └── verificationFile.hdf5 │ ├── d3plot_beamip │ │ ├── d3plot │ │ └── d3plot01 │ ├── d3plot_node_temperature │ │ ├── d3plot │ │ └── d3plot01 │ ├── d3plot_solid_int │ │ ├── d3plot │ │ ├── d3plot01 │ │ ├── d3plot02 │ │ ├── d3plot03 │ │ ├── d3plot04 │ │ ├── d3plot05 │ │ ├── d3plot06 │ │ ├── d3plot07 │ │ ├── d3plot08 │ │ ├── d3plot09 │ │ ├── d3plot10 │ │ ├── d3plot11 │ │ ├── d3plot12 │ │ ├── d3plot13 │ │ ├── d3plot14 │ │ ├── d3plot15 │ │ ├── d3plot16 │ │ ├── d3plot17 │ │ ├── d3plot18 │ │ ├── d3plot19 │ │ ├── d3plot20 │ │ ├── d3plot21 │ │ └── d3plot22 │ ├── femzip │ │ ├── d3plot │ │ ├── d3plot.fz │ │ └── d3plot01 │ ├── io_test │ │ ├── file1.txt │ │ └── 
subfolder │ │ │ ├── file2.txt │ │ │ └── file3.yay │ ├── order_d3plot │ │ ├── d3plot │ │ ├── d3plot01 │ │ ├── d3plot02 │ │ ├── d3plot10 │ │ ├── d3plot100 │ │ ├── d3plot11 │ │ ├── d3plot12 │ │ └── d3plot22 │ └── simple_d3plot │ │ ├── d3plot │ │ └── d3plot01 └── unit_tests │ ├── __init__.py │ ├── dimred │ ├── __init__.py │ ├── svd │ │ ├── __init__.py │ │ ├── test_clustering_betas.py │ │ ├── test_plot_betas_clusters.py │ │ ├── test_pod_functions.py │ │ └── test_subsampling_methods.py │ └── test_dimred_run.py │ ├── dyna │ ├── __init__.py │ ├── test_d3plot.py │ ├── test_d3plot_header.py │ └── test_mapper.py │ ├── io │ ├── __init__.py │ ├── test_binary_buffer.py │ └── test_files.py │ └── math │ ├── __init__.py │ ├── test_sampling.py │ └── test_stochastic.py └── uv.lock /.github/ISSUE_TEMPLATE/✨-feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "✨ Feature request" 3 | about: Suggest an idea for this project. 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/🌈-generic-ticket.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U0001F308 Generic Ticket" 3 | about: Anything not being a bug report or a feature. 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/🐛-bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U0001F41B Bug report" 3 | about: Create a report to help us fix a software bug. 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **🐛 Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **🔢 To Reproduce** 14 | Steps to reproduce the behavior: 15 | 16 | 1. Use file '...' 17 | 2. Run '...' 18 | 3. See error 19 | 20 | **💘 Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **📷 Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **🖥️ Setup** 27 | - lasso-python version: [e.g. 1.5.1] 28 | - OS: [e.g. iOS] 29 | 30 | **ℹ️ Additional context** 31 | Add any other context about the problem here. 32 | -------------------------------------------------------------------------------- /.github/workflows/ci-cd.yml: -------------------------------------------------------------------------------- 1 | name: Python Linting, Test and Upload 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | 8 | jobs: 9 | # JOB 10 | # This job runs unit tests, linting and format checks 11 | tests: 12 | runs-on: ubuntu-latest 13 | 14 | strategy: 15 | # If either the tests for 3.9 or 3.12 fail, all workflows 16 | # are terminated to save computing resources. 
17 | fail-fast: true 18 | # To save runtime, the least and latest supported versions are 19 | # chosen. For more info see the pyproject.toml 20 | matrix: 21 | python-version: ["3.9", "3.12"] 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | 26 | - name: Set up Python ${{ matrix.python-version }} 27 | uses: actions/setup-python@v4 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | 31 | - name: Install Task 32 | run: | 33 | sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d 34 | 35 | # Cache dependencies to speed things up 36 | - name: Load cached venv 37 | id: cached-dependencies 38 | uses: actions/cache@v3 39 | with: 40 | path: .venv 41 | key: venv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} 42 | 43 | - name: Install and upgrade pip and uv 44 | run: python -m pip install --upgrade pip uv 45 | 46 | - name: Install Dependencies 47 | run: ./bin/task setup 48 | 49 | - name: Lint code 50 | run: ./bin/task lint 51 | 52 | - name: Test code 53 | run: ./bin/task test 54 | 55 | # JOB 56 | # This job publishes the package to Test PyPI. 57 | test-publish: 58 | # Will run after the job 'tests' 59 | needs: [tests] 60 | 61 | if: > 62 | startsWith(github.ref, 'refs/tags/') || 63 | startsWith(github.ref, 'refs/heads/release/') 64 | runs-on: ubuntu-latest 65 | # Required for installation of the test package in the 66 | # next job. 67 | outputs: 68 | version: ${{ steps.extract_version.outputs.version }} 69 | 70 | steps: 71 | - uses: actions/checkout@v3 72 | 73 | - name: Remember version 74 | id: extract_version 75 | run: | 76 | VERSION=$(cat pyproject.toml | grep -oE -m 1 "version = \"(.*)\"" | cut -f2 -d '"') 77 | echo "Version: ${VERSION}" 78 | echo "version=${VERSION}" >> $GITHUB_OUTPUT 79 | 80 | # For publishing any version will do 81 | - name: Set up Python 3.11 82 | uses: actions/setup-python@v4 83 | with: 84 | python-version: "3.11" 85 | 86 | - name: Install Task 87 | run: | 88 | sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d 89 | 90 | - name: Load cached venv 91 | id: cached-dependencies 92 | uses: actions/cache@v3 93 | with: 94 | path: .venv 95 | key: venv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} 96 | 97 | - name: Install Dependencies 98 | run: | 99 | python -m pip install --upgrade pip uv 100 | ./bin/task setup 101 | 102 | - name: Build packages for release 103 | run: ./bin/task build 104 | 105 | - name: Publish distribution to Test PyPI 106 | env: 107 | TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ 108 | TWINE_USERNAME: __token__ 109 | TWINE_NON_INTERACTIVE: 1 110 | TWINE_PASSWORD: ${{ secrets.TEST_PYPI_API_TOKEN }} 111 | run: uv run twine upload --skip-existing --verbose 'dist/*' 112 | 113 | # JOB 114 | # Test install from PyPI to see if we have any installation bugs. 
115 | test-install: 116 | needs: [test-publish] 117 | if: > 118 | startsWith(github.ref, 'refs/tags/') || 119 | startsWith(github.ref, 'refs/heads/release/') 120 | 121 | runs-on: ubuntu-latest 122 | 123 | # Use the version from the previous job 124 | env: 125 | VERSION: ${{ needs.test-publish.outputs.version }} 126 | 127 | steps: 128 | # Install python (be aware NO checkout action) 129 | - name: Set up Python 3.11 130 | uses: actions/setup-python@v4 131 | with: 132 | python-version: "3.11" 133 | 134 | # Check if it installs without errors 135 | - name: Install package 136 | run: | 137 | python -m pip install \ 138 | --index-url https://test.pypi.org/simple/ \ 139 | --extra-index-url https://pypi.org/simple \ 140 | lasso-python=="${VERSION}" 141 | 142 | # We import D3plot here since loading its C libraries is the most 143 | # delicate part of the package. 144 | - name: Test if the installed package works 145 | run: python -c 'from lasso.dyna import D3plot' 146 | 147 | # JOB 148 | # Finally publish the package to PyPI 149 | publish: 150 | needs: [test-install] 151 | if: startsWith(github.ref, 'refs/tags/') 152 | runs-on: ubuntu-latest 153 | 154 | steps: 155 | - uses: actions/checkout@v3 156 | # We need the entire git history for building the docs 157 | with: 158 | fetch-depth: 0 159 | 160 | - name: Set up Python 3.11 161 | uses: actions/setup-python@v4 162 | with: 163 | python-version: "3.11" 164 | 165 | - name: Install Task 166 | run: | 167 | sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d 168 | 169 | - name: Load cached venv 170 | id: cached-dependencies 171 | uses: actions/cache@v3 172 | with: 173 | path: .venv 174 | key: venv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} 175 | 176 | - name: Install Dependencies 177 | run: | 178 | python -m pip install --upgrade pip uv 179 | ./bin/task setup 180 | 181 | - name: Build packages for release 182 | run: ./bin/task build 183 | 184 | # Not required, but this saves the distribution files 185 | # with the package upload for debugging purposes. 186 | - name: Save packages as artifacts 187 | uses: actions/upload-artifact@v2 188 | with: 189 | name: dist 190 | path: dist 191 | if-no-files-found: error 192 | 193 | - name: Publish distribution to PyPI 194 | env: 195 | TWINE_USERNAME: __token__ 196 | TWINE_NON_INTERACTIVE: 1 197 | TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} 198 | run: uv run twine upload --skip-existing --verbose 'dist/*' 199 | 200 | - name: Upload new docs 201 | # We run a git pull first to ensure the runner has the latest pages 202 | # branch. This has failed in the past without it. 
203 | run: | 204 | git pull origin gh-pages --rebase 205 | ./bin/task docs:deploy 206 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # Distribution / packaging 7 | .Python 8 | ./build/ 9 | build/ 10 | develop-eggs/ 11 | dist/ 12 | downloads/ 13 | eggs/ 14 | .eggs/ 15 | lib64/ 16 | parts/ 17 | sdist/ 18 | var/ 19 | wheels/ 20 | *.egg-info/ 21 | .installed.cfg 22 | *.egg 23 | MANIFEST 24 | 25 | # Compiled Object files 26 | *.slo 27 | *.lo 28 | *.o 29 | *.obj 30 | 31 | # Compiled Dynamic libraries 32 | *.so 33 | *.dylib 34 | *.dll 35 | 36 | # Do not ignore the femzip shared libraries 37 | !**/lib/**/*.dll 38 | !**/lib/**/*.so 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # Environments 57 | .env 58 | .venv 59 | env/ 60 | venv/ 61 | ENV/ 62 | env.bak/ 63 | venv.bak/ 64 | 65 | # mkdocs documentation 66 | /site 67 | 68 | # Mac specific files 69 | .DS_Store 70 | 71 | # Notepad++ backup files 72 | *.bak 73 | 74 | # Vscode configurations 75 | .vscode 76 | 77 | # Ignore generated changelog 78 | CHANGELOG.md 79 | 80 | # Custom test file 81 | test/read_write_test.py 82 | -------------------------------------------------------------------------------- /.markdownlint.yml: -------------------------------------------------------------------------------- 1 | # There are different style types for markdown code blocks and strangely 2 | # indentation is the default. We change it here to the more often used 'fenced' 3 | # style denoted by ``` 4 | MD046: 5 | style: fenced 6 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.9 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Open LASSO Python Contribution Guide 2 | 3 | ## Roles 4 | 5 | There are roughly two roles in which you can contribute: 6 | 7 | - Contributors who just want to add changes from time to time 8 | - Maintainers who oversee the repo, code themselves and review contributions 9 | before they can be merged 10 | 11 | ## Community 12 | 13 | The community can be found on [discord]. 14 | Nothing beats a good discussion about existing features, new features or ideas, 15 | so jump right in. 16 | 17 | [discord]: https://discord.gg/jYUgTsEWtN 18 | 19 | ## Spirit 20 | 21 | We are all learners, some at an earlier stage, some at a later one. 22 | In a code review, we take the patience to figuratively sit down together and 23 | explain to other people why something is recommended differently or how things 24 | are usually done in software or Python. 25 | This often seems tedious at first, but growing together is important for any 26 | project that wants to grow. 27 | So do not fear a lack of experience, just bring your learning spirit. 28 | Likewise, experienced developers: have patience and explain things. 29 | Take the opportunity to sit down together on Discord. 30 | 31 | ## How to make a Contribution 32 | 33 | TL;DR: 34 | 35 | 1. 
Fork the open-lasso-python repository 36 | 2. Clone the repo to your filesystem 37 | 3. Install [task][task_install] 38 | 4. Check out the `develop` branch 39 | 5. Set up the repo for development through `task setup` 40 | 6. Create a new branch from `develop` with the naming pattern `feature/...` 41 | 7. Make changes, commit and push them 42 | 8. Create a Pull Request in your fork repo, targeting the original repo 43 | 9. Add `open-lasso-python/developers` as reviewer 44 | 10. Wait for review patiently but you may nudge us a bit 🫶 45 | 11. Perform a Squash Merge and give a reasonable commit message as 46 | `<branch_type>: <description>` where `branch_type` is one of the categories 47 | below. 48 | 49 | [task_install]:https://taskfile.dev/installation/ 50 | 51 | You can fork the repo (1) by clicking on fork in the top-right of the original 52 | repo. 53 | Cloning the repo (2) is of course traditionally done with git. 54 | Task is required (3) since it is used to store complex commands such as testing, 55 | linting, building docs, etc. 56 | (4) All development activities originate from the `develop` branch, into which 57 | all Pull Requests are finally merged again. 58 | To create a branch, choose a naming pattern following the angular 59 | scheme: `<branch_type>/<branch-name>`. 60 | Typical branch types are: 61 | 62 | - `feature` for new features or if you have no clue what else fits 63 | - `bugfix` for 🐛-fixes 64 | - `ci` for changes on the Continuous Integration pipeline 65 | - `docs` for documentation-related work 66 | - `refactor` if the PR just does cleanup 🧹 and improves the code 67 | - `test` for solely test-related work 68 | 69 | Don't take these too seriously, but they ought to provide rough categories. 70 | **They are also used in the commit message when you squash merge a PR, where it 71 | is important!** 72 | Install all dependencies, otherwise you obviously cannot code (5). 73 | After making changes, push your branch to your forked repo (7), then open a 74 | Pull Request whose target is not `develop` in your fork but `develop` in the 75 | original repo (8). 76 | If not done automatically, add the maintainer group as reviewers (9). 77 | Enjoy a healthy code review, but be a bit patient as people contribute 78 | voluntarily and may privately be occupied (10). 79 | After approval, perform a Squash Merge (11). 80 | A Squash Merge tosses away all the little, dirty commits we all do during 81 | development. 82 | What stays is the **final merge commit message, so please pay attention here** 83 | to format it right, e.g. `bugfix: correct the node ordering` (an illustrative example). 84 | Why is this important? 85 | This is needed to automatically generate a reasonable changelog during releases. 86 | Thanks for contributing at this point. 87 | Go wild and have fun 🥳 88 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright 2022 lasso-open-source 2 | 3 | Redistribution and use in source and binary forms, with or without modification, 4 | are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this 7 | list of conditions and the following disclaimer. 8 | 2. Redistributions in binary form must reproduce the above copyright notice, 9 | this list of conditions and the following disclaimer in the documentation 10 | and/or other materials provided with the distribution. 11 | 3. 
Neither the name of the copyright holder nor the names of its contributors 12 | may be used to endorse or promote products derived from this software without 13 | specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 19 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 22 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | 2 | |LASSO| Python Library 3 | ====================== 4 | 5 | |test-main| |test-dev| 6 | 7 | .. |test-main| image:: https://github.com/open-lasso-python/lasso-python/actions/workflows/ci-cd.yml/badge.svg?branch=main 8 | :target: https://github.com/open-lasso-python/lasso-python/actions/workflows/ci-cd.yml 9 | 10 | .. |test-dev| image:: https://github.com/open-lasso-python/lasso-python/actions/workflows/ci-cd.yml/badge.svg?branch=develop 11 | :target: https://github.com/open-lasso-python/lasso-python/actions/workflows/ci-cd.yml 12 | 13 | This Python library is designed for general purpose usage in the field of 14 | Computer Aided Engineering (CAE). 15 | Its name originates from the original initiator and donor of the project 16 | `LASSO GmbH`_. 17 | The library is now maintained by an open-source community. 18 | 19 | Module Overview: 20 | - `lasso.dyna`_ 21 | - `lasso.dimred`_ 22 | - `lasso.femzip`_ 23 | - `lasso.diffcrash`_ 24 | 25 | For further information please read the documentation: 26 | 27 | |DOCS| `Documentation`_ 28 | 29 | .. _LASSO GmbH: https://www.lasso.de/en 30 | .. _Documentation: https://open-lasso-python.github.io/lasso-python/ 31 | .. _lasso.dyna: https://open-lasso-python.github.io/lasso-python/dyna/ 32 | .. _lasso.dimred: https://open-lasso-python.github.io/lasso-python/dimred/ 33 | .. _lasso.femzip: https://open-lasso-python.github.io/lasso-python/femzip/ 34 | .. _lasso.diffcrash: https://open-lasso-python.github.io/lasso-python/diffcrash/ 35 | 36 | 37 | Installation 38 | ------------ 39 | 40 | .. code-block:: bash 41 | 42 | python -m pip install lasso-python 43 | 44 | 45 | Community 46 | --------- 47 | 48 | Join our open-source community on: 49 | 50 | |DISCORD| `Discord`_ 51 | 52 | .. _Discord: https://discord.gg/jYUgTsEWtN 53 | 54 | .. |LASSO| image:: ./docs/lasso-logo.png 55 | :target: https://open-lasso-python.github.io/lasso-python/build/html/index.html 56 | .. |DOCS| image:: ./docs/icon-home.png 57 | :target: https://open-lasso-python.github.io/lasso-python/build/html/index.html 58 | .. |DISCORD| image:: ./docs/icon-discord.png 59 | :target: https://discord.gg/jYUgTsEWtN 60 | 61 | 62 | Development 63 | ----------- 64 | 65 | For development install `uv`_ and `task`_: 66 | 67 | .. 
code-block:: bash 68 | 69 | python -m pip install uv 70 | sh -c "$(curl --location https://taskfile.dev/install.sh)" \ 71 | -- -d -b ~/.local/bin 72 | 73 | Then, by simply running the command ``task``, you can find a variety of available 74 | commands such as ``task setup`` to install all dependencies or ``task test`` to 75 | run the test suite. 76 | Happy Coding 🥳🎉 77 | 78 | .. _uv: https://docs.astral.sh/uv/ 79 | .. _task: https://taskfile.dev/ 80 | -------------------------------------------------------------------------------- /Taskfile.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | 3 | tasks: 4 | setup: 5 | desc: Setup the development environment. 6 | cmds: 7 | - uv sync 8 | 9 | lint: 10 | desc: Lints the code and reports on issues. 11 | cmds: 12 | - uv run ruff check . 13 | # - uv run ruff format . --check 14 | 15 | build: 16 | desc: Builds the python package 17 | cmds: 18 | - uv build 19 | 20 | test: 21 | desc: Runs tests on the code 22 | cmds: 23 | - > 24 | uv run pytest test 25 | --cov=lasso 26 | --cov-report=html 27 | 28 | docs:serve: 29 | desc: Serve the documentation locally for development and testing 30 | deps: 31 | - test 32 | - docs:changelog 33 | cmds: 34 | - uv run mkdocs serve 35 | 36 | docs:build: 37 | desc: Build the documentation 38 | deps: 39 | - test 40 | - docs:changelog 41 | cmds: 42 | - uv run mkdocs build 43 | 44 | docs:deploy: 45 | desc: Deploys the docs to GitHub (be careful!) 46 | deps: 47 | - test 48 | - docs:changelog 49 | cmds: 50 | - uv run mkdocs gh-deploy 51 | 52 | docs:changelog: 53 | desc: Generates the changelog 54 | cmds: 55 | - > 56 | uv run git-changelog . 57 | --output CHANGELOG.md 58 | --style angular 59 | --template angular 60 | -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | --8<-- "./CHANGELOG.md" 2 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | --8<-- "./CONTRIBUTING.md" 2 | -------------------------------------------------------------------------------- /docs/css/video.css: -------------------------------------------------------------------------------- 1 | .video-wrapper { 2 | position: relative; 3 | display: block; 4 | height: 0; 5 | padding: 0; 6 | overflow: hidden; 7 | padding-bottom: 56.25%; 8 | } 9 | .video-wrapper > iframe { 10 | position: absolute; 11 | top: 0; 12 | bottom: 0; 13 | left: 0; 14 | width: 100%; 15 | height: 100%; 16 | border: 0; 17 | } 18 | -------------------------------------------------------------------------------- /docs/diffcrash/diffcrash_run.md: -------------------------------------------------------------------------------- 1 | # DiffcrashRun 2 | 3 | This class can be used to automate Diffcrash runs in a 4 | convenient manner. 5 | 6 | ::: lasso.diffcrash.DiffcrashRun 7 | -------------------------------------------------------------------------------- /docs/diffcrash/index.md: -------------------------------------------------------------------------------- 1 | # lasso.diffcrash 2 | 3 | This is the diffcrash module, which consists of a command line utility 4 | as well as a class for writing scripts. 5 | [Diffcrash] is software from [Sidact] designed for robustness 6 | analysis of simulation runs. 
7 | It can be used as a set of independent executables or as a postprocessor plugin. 8 | [Diffcrash] itself must be licensed. 9 | Therefore, please contact [Sidact] directly. 10 | This module makes running a Diffcrash analysis much easier. 11 | 12 | - Command Line Utility 13 | - Code 14 | 15 | [diffcrash]: https://www.sidact.de/diffcrash 16 | [sidact]: https://www.sidact.de/ 17 | -------------------------------------------------------------------------------- /docs/diffcrash/run.md: -------------------------------------------------------------------------------- 1 | # Command Line Tool 2 | 3 | ## What is Diffcrash? 4 | 5 | [Diffcrash] is software from [Sidact] designed for robustness 6 | analysis of simulation runs. 7 | It can be used as a set of independent executables or as a postprocessor plugin. 8 | [Diffcrash] itself must be licensed. 9 | Therefore, please contact [Sidact] directly. 10 | This command line utility makes running a Diffcrash analysis much 11 | easier. 12 | 13 | [diffcrash]: https://www.sidact.de/diffcrash 14 | [sidact]: https://www.sidact.de/ 15 | 16 | ## How to use the utility? 17 | 18 | You can get the run info by performing: 19 | 20 | ```console 21 | $ diffcrash --help 22 | 23 | ==== D I F F C R A S H ==== 24 | 25 | an open lasso utility script 26 | 27 | usage: run.py [-h] --reference-run REFERENCE_RUN 28 | [--exclude-runs [EXCLUDE_RUNS [EXCLUDE_RUNS ...]]] --crash-code 29 | CRASH_CODE [--start-stage [START_STAGE]] 30 | [--end-stage [END_STAGE]] [--diffcrash-home [DIFFCRASH_HOME]] 31 | [--use-id-mapping [USE_ID_MAPPING]] 32 | [--project-dir [PROJECT_DIR]] [--config-file [CONFIG_FILE]] 33 | [--parameter-file [PARAMETER_FILE]] 34 | [--n-processes [N_PROCESSES]] 35 | [simulation_runs [simulation_runs ...]] 36 | 37 | Python utility script for Diffcrash. 38 | 39 | positional arguments: 40 | simulation_runs Simulation runs or patterns used to search for 41 | simulation runs. 42 | 43 | optional arguments: 44 | -h, --help show this help message and exit 45 | --reference-run REFERENCE_RUN 46 | filepath of the reference run. 47 | --exclude-runs [EXCLUDE_RUNS [EXCLUDE_RUNS ...]] 48 | Runs to exclude from the analysis. 49 | --crash-code CRASH_CODE 50 | Which crash code is used ('dyna', 'pam' or 'radioss'). 51 | --start-stage [START_STAGE] 52 | At which specific stage to start the analysis (SETUP, 53 | IMPORT, MATH, EXPORT, MATRIX, EIGEN, MERGE). 54 | --end-stage [END_STAGE] 55 | At which specific stage to stop the analysis (SETUP, 56 | IMPORT, MATH, EXPORT, MATRIX, EIGEN, MERGE). 57 | --diffcrash-home [DIFFCRASH_HOME] 58 | Home directory where Diffcrash is installed. Uses 59 | environment variable 'DIFFCRASHHOME' if unspecified. 60 | --use-id-mapping [USE_ID_MAPPING] 61 | Whether to use id-based mapping (default is nearest 62 | neighbour). 63 | --project-dir [PROJECT_DIR] 64 | Project dir to use for femzip. 65 | --config-file [CONFIG_FILE] 66 | Path to the config file. 67 | --parameter-file [PARAMETER_FILE] 68 | Path to the parameter file. 69 | --n-processes [N_PROCESSES] 70 | Number of processes to use (default: max-1). 71 | ``` 72 | 73 | It is important to specify a `--reference-run` for the 74 | analysis. If the reference run is contained within the rest of the 75 | `simulation_runs`, it is automatically removed from that 76 | list. `simulation_runs` can either be listed individually 77 | or specified with wildcard patterns for entire directories (e.g. '\*.fz') and 78 | subdirectories (e.g. '/\*\*/\*.fz'). 79 | 80 | !!! 
warning 81 | Every run clears the project directory entirely! 82 | 83 | ## Example 84 | 85 | ```console 86 | $ python -m lasso.diffcrash.run \ 87 | $ --reference-run ./runs/run_1.fz \ 88 | $ --crash-code dyna \ 89 | $ --project-dir diffcrash_project \ 90 | $ ./runs/*.fz 91 | 92 | 93 | ==== D I F F C R A S H ==== 94 | 95 | an open lasso utility script 96 | 97 | [/] diffcrash-home : /sw/Linux/diffcrash/V6.1.24 98 | [/] project-dir : test-example-project 99 | [/] crash-code : dyna 100 | [/] reference-run : bus/run_1.fz 101 | [/] use-id-mapping : False 102 | [/] # simul.-files : 37 103 | [/] # excluded files: 0 104 | [/] config-file : None 105 | [!] Config file missing. Consider specifying the path with the option '--config-file'. 106 | [/] parameter-file : None 107 | [!] Parameter file missing. Consider specifying the path with the option '--parameter-file'. 108 | [/] n-processes : 4 109 | 110 | ---- Running Routines ---- 111 | 112 | [✔] Running Setup ... done in 3.88s 113 | [✔] Running Imports ... done in 58.20s 114 | [✔] Running Math ... done in 56.22s 115 | [✔] Running Export ... done in 2.22s 116 | [✔] Running Matrix ... done in 9.78s 117 | [✔] Running Eigen ... done in 0.46s 118 | [✔] Running Merge ... done in 23.29s 119 | ``` 120 | -------------------------------------------------------------------------------- /docs/dimred/DimredRun.md: -------------------------------------------------------------------------------- 1 | # DimredRun 2 | 3 | This class can be used to comfortably represent multiple D3plot samples 4 | in a 3D graph through dimensionality reduction. 5 | In the 3D graph every simulation is a data point, and the closeness of the data 6 | points represents the similarity of the result field. 7 | It offers functions to: 8 | 9 | - Subsample simulations to reduce the computational effort and account for 10 | different meshing. 11 | - Reduce the order, which is the 'black magic' condensing complex field results 12 | into a few numbers which can then be plotted in 3D. 13 | - Cluster with sklearn to group simulations semi-automatically 14 | (mostly just a visual feature). 15 | - Output the 3D similarity plot as a webpage. 16 | 17 | For ease of use, check out the `Tool` section, which explains the command line 18 | tool for this dimensionality reduction feature. 19 | 20 | ::: lasso.dimred.dimred_run.DimredRun 21 | options: 22 | members: 23 | - __init__ 24 | - process_reference_run 25 | - subsample_to_reference_run 26 | - dimension_reduction_svd 27 | - clustering_results 28 | - visualize_results 29 | -------------------------------------------------------------------------------- /docs/dimred/KeywordTypes.md: -------------------------------------------------------------------------------- 1 | # Keyword Types 2 | 3 | Keywords for using the [clustering_betas](clustering_betas.md) 4 | functions. 5 | 6 | ::: lasso.dimred.svd.keyword_types 7 | -------------------------------------------------------------------------------- /docs/dimred/clustering_betas.md: -------------------------------------------------------------------------------- 1 | # Clustering Betas 2 | 3 | Methods that apply different clustering and outlier detection algorithms. 4 | Additional functions convert command-line arguments into variables 5 | for use in these functions. 6 | 7 | !!! 
warning 8 | These functions are optimized for use by the command-line tool. 9 | 10 | ::: lasso.dimred.svd.clustering_betas 11 | options: 12 | members: 13 | - group_betas 14 | - create_cluster_arg_dict 15 | - create_detector_arg_dict 16 | -------------------------------------------------------------------------------- /docs/dimred/index.md: -------------------------------------------------------------------------------- 1 | # lasso.dimred 2 | 3 | The dimred module consists of a command line utility as well as several 4 | classes to reduce the dimensionality of D3plots and visualize the 5 | results. 6 | -------------------------------------------------------------------------------- /docs/dimred/plot_beta_clusters.md: -------------------------------------------------------------------------------- 1 | # Plot beta clusters 2 | 3 | Functions that convert an input list of clusters and corresponding IDs into an 4 | HTML page visualizing the results as a 3D plot. 5 | 6 | ::: lasso.dimred.svd.plot_beta_clusters 7 | options: 8 | members: 9 | - __init__ 10 | - plot_clusters_js 11 | -------------------------------------------------------------------------------- /docs/dimred/pod_functions.md: -------------------------------------------------------------------------------- 1 | # POD Functions 2 | 3 | These functions are used for the proper orthogonal decomposition of 4 | samples. 5 | 6 | ::: lasso.dimred.svd.pod_functions 7 | -------------------------------------------------------------------------------- /docs/dimred/subsampling_methods.md: -------------------------------------------------------------------------------- 1 | # Subsampling Methods 2 | 3 | Methods used to perform subsampling of simulation runs. 4 | 5 | ::: lasso.dimred.svd.subsampling_methods 6 | -------------------------------------------------------------------------------- /docs/dimred/svd.md: -------------------------------------------------------------------------------- 1 | # Module `lasso.dimred.svd` 2 | 3 | Module containing the methods and functions used by DimredRun. 4 | 5 | - subsampling_methods 6 | - pod_functions 7 | - clustering_betas 8 | - plot_beta_clusters 9 | - KeywordTypes 10 | -------------------------------------------------------------------------------- /docs/dyna/ArrayType.md: -------------------------------------------------------------------------------- 1 | # ArrayType 2 | 3 | ::: lasso.dyna.array_type.ArrayType 4 | -------------------------------------------------------------------------------- /docs/dyna/Binout.md: -------------------------------------------------------------------------------- 1 | # Binout 2 | 3 | There is a YouTube tutorial available for this class: 4 | 5 | 
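In the meantime, a minimal usage sketch is given below. The file path is a placeholder, and the sketch assumes that `Binout` is exported from `lasso.dyna` (as in the D3plot example) and follows the `read` API documented further down this page:

```python
from lasso.dyna import Binout

# Open a binout; a glob pattern also matches split files
# such as binout0000, binout0001, ...
binout = Binout("path/to/binout*")  # placeholder path

# read() without arguments lists the available branches,
# e.g. ['matsum', 'nodout', ...], so the content can be
# explored level by level.
print(binout.read())
print(binout.read("matsum"))

# Drilling down to a leaf returns the data as a numpy array.
internal_energy = binout.read("matsum", "internal_energy")
```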
6 | 7 |
8 | 9 | ::: lasso.dyna.binout.Binout 10 | -------------------------------------------------------------------------------- /docs/dyna/D3plot.md: -------------------------------------------------------------------------------- 1 | # D3plot 2 | 3 | There is a YouTube tutorial available for this class: 4 | 5 | 
6 | 7 |
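In addition, a minimal, hedged sketch of round-tripping a file is shown below. The paths are placeholders, and reading plus writing back via `write_d3plot` corresponds to the "Read & Write" feature listed in the module overview:

```python
from lasso.dyna import D3plot

# Read an existing d3plot family (placeholder path).
d3plot = D3plot("path/to/d3plot")

# ... arrays in d3plot.arrays could be inspected or modified here ...

# Write the model back out as a new d3plot.
d3plot.write_d3plot("path/to/new/d3plot")
```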
8 | 9 | ::: lasso.dyna.d3plot.D3plot 10 | -------------------------------------------------------------------------------- /docs/dyna/D3plotHeader.md: -------------------------------------------------------------------------------- 1 | # D3plotHeader 2 | 3 | ::: lasso.dyna.d3plot_header.D3plotHeader 4 | options: 5 | members: 6 | - __init__ 7 | - load_file 8 | - compare 9 | -------------------------------------------------------------------------------- /docs/dyna/FilterType.md: -------------------------------------------------------------------------------- 1 | # FilterType 2 | 3 | ::: lasso.dyna.filter_type.FilterType 4 | -------------------------------------------------------------------------------- /docs/dyna/index.md: -------------------------------------------------------------------------------- 1 | # lasso.dyna 2 | 3 | The dyna module contains classes to read, write and display LS-Dyna result 4 | files. 5 | For a detailed list of features, see the following list: 6 | 7 | - D3plot 8 | - Read & Write 9 | - Beam, Shell, Solid, TShell Results 10 | - Element/Node deletion 11 | - Part Metadata 12 | - Material Type Data 13 | - Rigid Body Data 14 | - Rigid Road Data 15 | - SPH 16 | - Particles 17 | - Plot shell-based parts as 3D HTML 18 | - Binout 19 | - Read & Write 20 | 21 | !!! note 22 | 23 | The D3plot reader can process almost every structural result as well as 24 | results connected to it, such as airbag results. Multi-solver data and 25 | results connected to it, however, are not supported. 26 | 27 | ## D3plot Example 28 | 29 | ``` python 30 | >>> from lasso.dyna import D3plot, ArrayType, FilterType 31 | 32 | >>> # read a file (zero-copy reading of everything) 33 | >>> d3plot = D3plot("path/to/d3plot") 34 | 35 | >>> # read file 36 | >>> # - buffered (less memory usage) 37 | >>> # - only node displacements (saves memory) 38 | >>> # - read only first and last state 39 | >>> d3plot = D3plot("path/to/d3plot", 40 | >>> state_array_filter=["node_displacement"], 41 | >>> buffered_reading=True, 42 | >>> state_filter={0, -1}) 43 | 44 | >>> # and of course femzipped files 45 | >>> d3plot = D3plot("path/to/d3plot.fz") 46 | 47 | >>> # get arrays (see docs of ArrayType for shape info) 48 | >>> disp = d3plot.arrays["node_displacement"] 49 | >>> disp.shape 50 | (34, 51723, 3) 51 | >>> # this is safer and has auto-completion 52 | >>> disp = d3plot.arrays[ArrayType.node_displacement] 53 | 54 | >>> # filter elements for certain parts 55 | >>> pstrain = d3plot.arrays[ArrayType.element_shell_effective_plastic_strain] 56 | >>> pstrain.shape 57 | (34, 56372, 3) 58 | >>> mask = d3plot.get_part_filter(FilterType.SHELL, [44, 45]) 59 | >>> # filter elements with mask 60 | >>> pstrain[:, mask].shape 61 | (34, 17392, 3) 62 | 63 | >>> # create a standalone html plot 64 | >>> d3plot.plot() 65 | ``` 66 | -------------------------------------------------------------------------------- /docs/dyna/performance_info.md: -------------------------------------------------------------------------------- 1 | # Performance Info 2 | 3 | ## D3plot 4 | 5 | Benchmark: 6 | 7 | The d3plot reader is blazingly fast thanks to several memory tricks. 8 | While postprocessors build up an internal data structure during reading, this 9 | reader avoids that and simply references memory within the files. 10 | As a consequence, performance benchmarks show that the runtime of the code is 11 | solely dominated by pulling the files into memory. 
12 | 13 | ```text 14 | 2108 function calls (2099 primitive calls) in 43.017 seconds 15 | Ordered by: internal time 16 | ncalls tottime percall cumtime percall filename:lineno(function) 17 | 35 38.960 1.113 38.960 1.113 {method 'readinto' of '_io.BufferedReader' objects} 18 | 35 2.366 0.068 2.366 0.068 {built-in method io.open} 19 | 1 1.644 1.644 42.959 42.959 D3plot.py:2565(_read_state_bytebuffer) 20 | 71 0.043 0.001 0.043 0.001 {built-in method nt.stat} 21 | 2 0.002 0.001 0.057 0.028 BinaryBuffer.py:234(load) 22 | 70 0.000 0.000 0.001 0.000 ntpath.py:74(join) 23 | 142 0.000 0.000 0.000 0.000 ntpath.py:121(splitdrive) 24 | 1 0.000 0.000 0.019 0.019 D3plot.py:2738() 25 | 1 0.000 0.000 0.000 0.000 {built-in method nt.listdir} 26 | 36 0.000 0.000 0.000 0.000 {method 'match' of '_sre.SRE_Pattern' objects} 27 | 84 0.000 0.000 0.000 0.000 {built-in method numpy.core.multiarray.frombuffer} 28 | 1 0.000 0.000 42.959 42.959 D3plot.py:1304(_read_states) 29 | ... 30 | ``` 31 | 32 | In the table above, the three largest entries are all related 33 | to loading files into memory, accounting for 99.89% of the runtime. 34 | The routines reading the data arrays (deserialization) have an almost constant 35 | runtime of a few milliseconds and are independent of the file size (a small sketch of this zero-copy idea is given at the end of this page). 36 | 37 | ### Efficiency 38 | 39 | Note that writing such efficient and simple code in C or any other language is 40 | much more challenging than in Python; thus, perhaps surprisingly, this implementation can 41 | be expected to outperform most native codes and tools. 42 | Indeed, tests show that when reading all results of a d3plot, as a postprocessor does, 43 | this library is devastatingly fast. 44 | For reading single node fields this library can be slower though (see 45 | Improvements below). 46 | 47 | ### Array-based API 48 | 49 | Building objects is useful (e.g. neighbor search) but requires additional 50 | runtime overhead. 51 | Following the principle of "don't pay for what you don't use", it was decided to avoid 52 | object orientation, thus providing a mostly array-based API. 53 | This is perfectly suitable for data analysis as well as machine learning. 54 | 55 | ### Improvements 56 | 57 | A speedup could be achieved by using memory mapping, thus pulling into RAM only those 58 | fractions of the files which hold the required arrays. 59 | Postprocessors sometimes do this when reading certain fields, outperforming 60 | this library in such a case (e.g. node displacements). 61 | Since LS-Dyna scatters most arrays across the entire file though, this method was 62 | not considered worth the effort, and thus this library always reads everything. 63 | 64 | ## Binout 65 | 66 | The binout is used to store arbitrary binary data at a much higher frequency 67 | than the d3plot. 68 | As a result, the data is dumped into an internal 'state folder'. 69 | Since different results may be dumped at different frequencies, some state 70 | folders might contain more information than others. 71 | This inherently prevents efficient memory reading. 72 | The Python version used here is slower than the original C-based version, but 73 | in return one gains better portability across operating systems. 
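To illustrate the zero-copy referencing described in the D3plot section above, here is a minimal sketch. It is not the library's actual implementation, and the file path, `offset` and `count` values are made-up placeholders:

```python
import numpy as np

# Pull the entire file into memory once; as the profile above shows,
# this step dominates the total runtime.
with open("path/to/d3plot", "rb") as fd:  # placeholder path
    raw = fd.read()

# Deserialization then merely reinterprets slices of the existing buffer.
# np.frombuffer returns a (read-only) view referencing the same memory,
# so no copy is made and the runtime is nearly constant, independent of
# the file size.
offset = 64           # made-up byte offset of an array within the file
n_values = 3 * 100    # made-up element count, e.g. xyz for 100 nodes
coords = np.frombuffer(raw, dtype=np.float32, count=n_values, offset=offset)
coords = coords.reshape(-1, 3)  # shape (100, 3) without copying
```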
74 | -------------------------------------------------------------------------------- /docs/femzip/FemzipAPI.md: -------------------------------------------------------------------------------- 1 | # FemzipAPI 2 | 3 | ::: lasso.femzip.femzip_api.FemzipAPI 4 | -------------------------------------------------------------------------------- /docs/femzip/FemzipAPIStatus.md: -------------------------------------------------------------------------------- 1 | # FemzipAPIStatus 2 | 3 | ::: lasso.femzip.femzip_api.FemzipAPIStatus 4 | -------------------------------------------------------------------------------- /docs/femzip/FemzipBufferInfo.md: -------------------------------------------------------------------------------- 1 | # FemzipBufferInfo 2 | 3 | ::: lasso.femzip.femzip_api.FemzipBufferInfo 4 | -------------------------------------------------------------------------------- /docs/femzip/FemzipError.md: -------------------------------------------------------------------------------- 1 | # FemzipError 2 | 3 | ::: lasso.femzip.femzip_api.FemzipError 4 | -------------------------------------------------------------------------------- /docs/femzip/FemzipFileMetadata.md: -------------------------------------------------------------------------------- 1 | # FemzipFileMetadata 2 | 3 | ::: lasso.femzip.femzip_api.FemzipFileMetadata 4 | -------------------------------------------------------------------------------- /docs/femzip/FemzipVariableCategory.md: -------------------------------------------------------------------------------- 1 | # FemzipVariableCategory 2 | 3 | ::: lasso.femzip.fz_config.FemzipVariableCategory 4 | -------------------------------------------------------------------------------- /docs/femzip/VariableInfo.md: -------------------------------------------------------------------------------- 1 | # VariableInfo 2 | 3 | ::: lasso.femzip.femzip_api.VariableInfo 4 | -------------------------------------------------------------------------------- /docs/femzip/index.md: -------------------------------------------------------------------------------- 1 | 2 | # Femzip 3 | 4 | Documentation of the Femzip module. 5 | The D3plot reader uses this module to access femzip file data through the femzip 6 | API. 7 | It also allows decompressing files directly or using some additional utility 8 | functions, such as a femzip file check, if desired. 9 | 10 | ## Example 11 | 12 | ``` python 13 | >>> from lasso.femzip import FemzipAPI 14 | >>> from ctypes import * 15 | >>> filepath = "path/to/d3plot.fz" 16 | 17 | >>> # Initialize API 18 | >>> api = FemzipAPI() 19 | ``` 20 | 21 | Here we check if we can use the extended FEMZIP-API. 22 | The extended FEMZIP-API allows reading selected arrays, but requires a license 23 | with the feature "FEMUNZIPLIB-DYNA", which can be obtained from SIDACT or 24 | femzip distributors. 25 | 26 | ``` python 27 | >>> api.has_femunziplib_license() 28 | True 29 | ``` 30 | 31 | Check if a file is femzipped: 32 | 33 | ``` python 34 | >>> # check if file is femzipped 35 | >>> api.is_sidact_file(filepath) 36 | True 37 | ``` 38 | 39 | Check the file and library version. 40 | This is usually not necessary. 41 | 42 | ``` python 43 | >>> api.is_femunzip_version_ok(filepath) 44 | True 45 | ``` 46 | 47 | It is efficient to determine the memory demand for arrays beforehand and 48 | hand this memory info to other functions. 49 | This is often not mandatory though, merely a speedup. 
50 | 51 | ``` python 52 | >>> # read memory demand info first 53 | >>> buffer_info = api.get_buffer_info(filepath) 54 | >>> # buffer info is a c struct, but we can print it 55 | >>> api.struct_to_dict(buffer_info) 56 | {'n_timesteps': 12, 'timesteps': <LP_c_float object>, 'size_geometry': 537125, 'size_state': 1462902, 'size_displacement': 147716, 'size_activity': 47385, 'size_post': 1266356, 'size_titles': 1448} 57 | >>> for i_timestep in range(buffer_info.n_timesteps): 58 | >>> print(buffer_info.timesteps[i_timestep]) 59 | 0.0 60 | 0.9998100399971008 61 | 1.9998900890350342 62 | 2.9999701976776123 63 | 3.9997801780700684 64 | ``` 65 | 66 | Here we read the geometry buffer. 67 | The file is kept open so that we can read states afterwards. 68 | 69 | ``` python 70 | >>> mview = api.read_geometry(filepath, buffer_info, close_file=False) 71 | ``` 72 | 73 | Femzip can handle only one file per process. 74 | In case of issues, close the current file (shown later). 75 | We can check the API status as follows: 76 | 77 | ``` python 78 | >>> print(api.struct_to_dict(api.get_femzip_status())) 79 | {'is_file_open': 1, 'is_geometry_read': 1, 'is_states_open': 0, 'i_timestep_state': -1, 'i_timestep_activity': -1} 80 | ``` 81 | 82 | Get the memory of a single state. 83 | Reading must start at 0; femzip does not allow reading arbitrary states in between. 84 | 85 | ``` python 86 | >>> mview = api.read_single_state(i_timestep=0, buffer_info=buffer_info) 87 | ``` 88 | 89 | It is also possible to read the state memory directly into an already 90 | allocated buffer. 91 | 92 | ``` python 93 | >>> BufferType = c_float * (buffer_info.size_state) 94 | >>> mview = memoryview(BufferType()) 95 | >>> api.read_single_state(1, buffer_info=buffer_info, state_buffer=mview) 96 | ``` 97 | 98 | Let's close the file manually. 99 | This ensures that femzip resets its internal state. 100 | 101 | ``` python 102 | >>> api.close_current_file() 103 | ``` 104 | -------------------------------------------------------------------------------- /docs/icon-discord.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/docs/icon-discord.png -------------------------------------------------------------------------------- /docs/icon-home.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/docs/icon-home.png -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Open LASSO Python 2 | 3 | This Python library is designed for general purpose usage in the field of 4 | Computer Aided Engineering (CAE). 5 | Its name originates from the original initiator and donor of the project 6 | [Lasso GmbH](https://www.lasso.de/en). 7 | The library is now maintained by an open-source community. 
8 | 9 | ## Community 10 | 11 | Join our open-source community on: 12 | 13 | [![DISCORD](icon-discord.png)](https://discord.gg/YJNPRjHG) 14 | [Discord](https://discord.gg/YJNPRjHG) 15 | 16 | ## Installation 17 | 18 | ```console 19 | python -m pip install lasso-python 20 | ``` 21 | -------------------------------------------------------------------------------- /docs/lasso-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/docs/lasso-logo.png -------------------------------------------------------------------------------- /docs/ls-dyna_database_Apr2020.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/docs/ls-dyna_database_Apr2020.pdf -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: LASSO-Python 2 | site_url: https://open-lasso-python.github.io/lasso-python/ 3 | repo_url: https://github.com/open-lasso-python/lasso-python 4 | 5 | theme: 6 | name: material 7 | 8 | nav: 9 | - Home: index.md 10 | - Dyna: 11 | - Overview: dyna/index.md 12 | - D3plot: 13 | - D3plot: dyna/D3plot.md 14 | - ArrayType: dyna/ArrayType.md 15 | - FilterType: dyna/FilterType.md 16 | - D3plotHeader: dyna/D3plotHeader.md 17 | - Binout: dyna/Binout.md 18 | - Performance Info: dyna/performance_info.md 19 | - Femzip: 20 | - Overview: femzip/index.md 21 | - FemzipAPI: femzip/FemzipAPI.md 22 | - FemzipAPIStatus: femzip/FemzipAPIStatus.md 23 | - FemzipBufferInfo: femzip/FemzipBufferInfo.md 24 | - FemzipError: femzip/FemzipError.md 25 | - FemzipFileMetadata: femzip/FemzipFileMetadata.md 26 | - VariableInfo: femzip/VariableInfo.md 27 | - FemzipVariableCategory: femzip/FemzipVariableCategory.md 28 | - Dimensionality Reduction: 29 | - Overview: dimred/index.md 30 | - Command Line Tool: dimred/command_line_interface.md 31 | - Code: 32 | - DimredRun: dimred/DimredRun.md 33 | - SVD: 34 | - Description: dimred/svd.md 35 | - subsampling_methods: dimred/subsampling_methods.md 36 | - pod_functions: dimred/pod_functions.md 37 | - clustering_betas: dimred/clustering_betas.md 38 | - plot_beta_clusters: dimred/plot_beta_clusters.md 39 | - KeywordTypes: dimred/KeywordTypes.md 40 | - Diffcrash: 41 | - Overview: diffcrash/index.md 42 | - Command Line Tool: diffcrash/run.md 43 | - Code: diffcrash/diffcrash_run.md 44 | - Contribution Guide: contributing.md 45 | - Code Coverage Report: coverage.md 46 | - Changelog: changelog.md 47 | 48 | plugins: 49 | - search 50 | - mkdocstrings: 51 | handlers: 52 | python: 53 | options: 54 | docstring_style: numpy 55 | show_source: false 56 | - coverage 57 | 58 | extra_css: 59 | - css/video.css 60 | 61 | markdown_extensions: 62 | - admonition 63 | # Code highlighting 64 | - pymdownx.highlight: 65 | anchor_linenums: true 66 | - pymdownx.inlinehilite 67 | - pymdownx.superfences 68 | - pymdownx.snippets 69 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "lasso-python" 3 | description = "An open-source CAE and Machine Learning library." 
4 | license-files = ["LICENSE.md"] 5 | readme = "README.rst" 6 | dynamic = ["version"] 7 | authors = [{ name = "open-lasso-python", email = "open.lasso.python@gmail.com" }] 8 | classifiers = [ 9 | "Development Status :: 5 - Production/Stable", 10 | "Topic :: Scientific/Engineering", 11 | "Intended Audience :: Science/Research", 12 | "Topic :: Utilities", 13 | "Operating System :: Microsoft :: Windows", 14 | "Operating System :: POSIX :: Linux", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.9", 17 | "Programming Language :: Python :: 3.10", 18 | "Programming Language :: Python :: 3.11", 19 | "Programming Language :: Python :: 3.12", 20 | ] 21 | 22 | requires-python = ">=3.9" 23 | dependencies = [ 24 | "h5py==3.*", 25 | "scipy==1.*", 26 | "numpy==1.*", 27 | "plotly==5.*", 28 | "psutil==5.*", 29 | "rich==13.*", 30 | "pandas==2.*", 31 | "scikit_learn==1.*" 32 | ] 33 | 34 | [tool.setuptools] 35 | package-dir = {"" = "src"} 36 | 37 | [project.scripts] 38 | diffcrash = "lasso.diffcrash.run:main" 39 | dimred = "lasso.dimred.run:main" 40 | 41 | [dependency-groups] 42 | dev = [ 43 | "pytest==8.*", 44 | "pytest-cov==5.*", 45 | "ruff==0.11.*", 46 | "mkdocs==1.*", 47 | "mkdocs-material==9.*", 48 | "mkdocstrings[python]==0.*", 49 | "mkdocs-coverage==1.*", 50 | "git-changelog==2.*", 51 | "twine==5.*", 52 | "go-task-bin" 53 | ] 54 | 55 | [build-system] 56 | requires = ["setuptools>=78", "setuptools-scm>=8"] 57 | build-backend = "setuptools.build_meta" 58 | 59 | [tool.ruff] 60 | required-version = "==0.11.*" 61 | line-length = 100 62 | indent-width = 4 63 | preview = true 64 | 65 | # Output serialization format for violations. The default serialization 66 | # format is "full" [env: RUFF_OUTPUT_FORMAT=] [possible values: 67 | # concise, full, json, json-lines, junit, grouped, github, gitlab, 68 | # pylint, rdjson, azure, sarif] 69 | output-format = "grouped" 70 | 71 | [tool.ruff.lint] 72 | isort.lines-after-imports = 2 73 | select = [ 74 | "C", # Complexity checks (e.g., McCabe complexity, comprehensions) 75 | # "ANN001", "ANN201", "ANN401", # flake8-annotations (required strict type annotations for public functions) 76 | "S", # flake8-bandit (checks basic security issues in code) 77 | # "BLE", # flake8-blind-except (checks the except blocks that do not specify exception) 78 | # "FBT", # flake8-boolean-trap (ensure that boolean args can be used with kw only) 79 | "E", # pycodestyle errors (PEP 8 style guide violations) 80 | "W", # pycodestyle warnings (e.g., extra spaces, indentation issues) 81 | # "DOC", # pydoclint issues (e.g., extra or missing return, yield, warnings) 82 | "A", # flake8-buitins (check variable and function names to not shadow builtins) 83 | "N", # Naming convention checks (e.g., PEP 8 variable and function names) 84 | "F", # Pyflakes errors (e.g., unused imports, undefined variables) 85 | "I", # isort (Ensures imports are sorted properly) 86 | "B", # flake8-bugbear (Detects likely bugs and bad practices) 87 | "TID", # flake8-tidy-imports (Checks for banned or misplaced imports) 88 | "UP", # pyupgrade (Automatically updates old Python syntax) 89 | "YTT", # flake8-2020 (Detects outdated Python 2/3 compatibility issues) 90 | "FLY", # flynt (Converts old-style string formatting to f-strings) 91 | "PIE", # flake8-pie 92 | # "PL", # pylint 93 | # "RUF", # Ruff-specific rules (Additional optimizations and best practices) 94 | ] 95 | 96 | ignore = [ 97 | "C90", # [mccabe](https://docs.astral.sh/ruff/rules/#mccabe-c90) 98 | "PLR2004", # 
[magic-value-comparison](https://docs.astral.sh/ruff/rules/magic-value-comparison) 99 | "S311", # [suspicious-non-cryptographic-random-usage](https://docs.astral.sh/ruff/rules/suspicious-non-cryptographic-random-usage/) 100 | "S404", # [suspicious-subprocess-import](https://docs.astral.sh/ruff/rules/suspicious-subprocess-import/) 101 | "S603", # [subprocess-without-shell-equals-true](https://docs.astral.sh/ruff/rules/subprocess-without-shell-equals-true/) 102 | ] 103 | 104 | [tool.ruff.lint.per-file-ignores] 105 | # non-lowercase-variable-in-function (N806) 106 | "{femzip_api,femzip_mapper,d3plot}.py" = ["N806"] 107 | # error-suffix-on-exception-name (N818) 108 | "femzip_api.py" = ["N818"] 109 | 110 | [tool.ruff.format] 111 | docstring-code-format = true 112 | skip-magic-trailing-comma = true 113 | -------------------------------------------------------------------------------- /src/lasso/__init__.py: -------------------------------------------------------------------------------- 1 | from importlib.metadata import PackageNotFoundError, version 2 | 3 | 4 | try: 5 | __version__ = version("lasso-python") 6 | except PackageNotFoundError: 7 | # package is not installed 8 | pass 9 | -------------------------------------------------------------------------------- /src/lasso/diffcrash/__init__.py: -------------------------------------------------------------------------------- 1 | from .diffcrash_run import DiffcrashRun 2 | 3 | 4 | __all__ = ["DiffcrashRun"] 5 | -------------------------------------------------------------------------------- /src/lasso/diffcrash/run.py: -------------------------------------------------------------------------------- 1 | from concurrent import futures 2 | 3 | from lasso.diffcrash.diffcrash_run import ( 4 | DC_STAGE_EIGEN, 5 | DC_STAGE_EXPORT, 6 | DC_STAGE_IMPORT, 7 | DC_STAGE_MATH, 8 | DC_STAGE_MATRIX, 9 | DC_STAGE_MERGE, 10 | DC_STAGE_SETUP, 11 | DC_STAGES, 12 | DiffcrashRun, 13 | parse_diffcrash_args, 14 | ) 15 | from lasso.logging import str_error 16 | 17 | 18 | def _parse_stages(start_stage: str, end_stage: str): 19 | # check validity 20 | if start_stage not in DC_STAGES or end_stage not in DC_STAGES: 21 | raise ValueError( 22 | str_error(f"{start_stage} is not a valid stage. Try: {', '.join(DC_STAGES)}.") 23 | ) 24 | 25 | # get indexes 26 | start_stage_index = DC_STAGES.index(start_stage) 27 | end_stage_index = DC_STAGES.index(end_stage) 28 | 29 | # check if start and end are in correct order 30 | if start_stage_index > end_stage_index: 31 | raise ValueError( 32 | str_error( 33 | f"The specified end stage '{end_stage}' comes before " 34 | f"the start stage '{start_stage}'. 
" 35 | f"Try the order: {', '.join(DC_STAGES)}" 36 | ) 37 | ) 38 | 39 | return start_stage_index, end_stage_index 40 | 41 | 42 | def main(): 43 | """Main function for running diffcrash""" 44 | 45 | # parse command line stuff 46 | parser = parse_diffcrash_args() 47 | 48 | # parse settings from command line 49 | diffcrash_run = DiffcrashRun( 50 | project_dir=parser.project_dir, 51 | crash_code=parser.crash_code, 52 | reference_run=parser.reference_run, 53 | exclude_runs=parser.exclude_runs, 54 | simulation_runs=parser.simulation_runs, 55 | diffcrash_home=parser.diffcrash_home, 56 | use_id_mapping=parser.use_id_mapping, 57 | config_file=parser.config_file, 58 | parameter_file=parser.parameter_file, 59 | n_processes=parser.n_processes, 60 | ) 61 | 62 | # determine start and end stages 63 | start_stage_index, end_stage_index = _parse_stages(parser.start_stage, parser.end_stage) 64 | 65 | # remove old stuff 66 | if start_stage_index == 0: 67 | diffcrash_run.clear_project_dir() 68 | diffcrash_run.create_project_dirs() 69 | 70 | # do the thing 71 | print() 72 | print(" ---- Running Routines ---- ") 73 | print() 74 | 75 | # initiate threading pool for handling jobs 76 | with futures.ThreadPoolExecutor(max_workers=diffcrash_run.n_processes) as pool: 77 | # setup 78 | if start_stage_index <= DC_STAGES.index(DC_STAGE_SETUP) <= end_stage_index: 79 | diffcrash_run.run_setup(pool) 80 | 81 | # import 82 | if start_stage_index <= DC_STAGES.index(DC_STAGE_IMPORT) <= end_stage_index: 83 | diffcrash_run.run_import(pool) 84 | 85 | # math 86 | if start_stage_index <= DC_STAGES.index(DC_STAGE_MATH) <= end_stage_index: 87 | diffcrash_run.run_math(pool) 88 | 89 | # export 90 | if start_stage_index <= DC_STAGES.index(DC_STAGE_EXPORT) <= end_stage_index: 91 | diffcrash_run.run_export(pool) 92 | 93 | # matrix 94 | if start_stage_index <= DC_STAGES.index(DC_STAGE_MATRIX) <= end_stage_index: 95 | diffcrash_run.run_matrix(pool) 96 | 97 | # eigen 98 | if start_stage_index <= DC_STAGES.index(DC_STAGE_EIGEN) <= end_stage_index: 99 | diffcrash_run.run_eigen(pool) 100 | 101 | # merge 102 | if start_stage_index <= DC_STAGES.index(DC_STAGE_MERGE) <= end_stage_index: 103 | diffcrash_run.run_merge(pool) 104 | 105 | # final spacing 106 | print() 107 | 108 | 109 | if __name__ == "__main__": 110 | main() 111 | -------------------------------------------------------------------------------- /src/lasso/dimred/__init__.py: -------------------------------------------------------------------------------- 1 | from .dimred_run import DimredRun, DimredStage 2 | 3 | 4 | __all__ = ["DimredRun", "DimredStage"] 5 | -------------------------------------------------------------------------------- /src/lasso/dimred/graph_laplacian.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | import numpy as np 4 | from scipy.sparse import csgraph, dok_matrix 5 | from scipy.sparse.linalg import eigsh 6 | from sklearn.neighbors import KDTree 7 | 8 | 9 | def run_graph_laplacian( 10 | points: np.ndarray, 11 | n_eigenmodes: int = 5, 12 | min_neighbors: Union[int, None] = None, 13 | sigma: Union[float, None] = None, 14 | search_radius: Union[float, None] = None, 15 | ): 16 | """ 17 | Compute a graph laplacian. 18 | 19 | Parameters 20 | ---------- 21 | points : np.ndarray 22 | points with features 23 | n_eigenmodes : int 24 | number of eigenmodes to compute 25 | min_neighbors : int 26 | The minimum number of neighbors of a point to be considered for the laplacian 27 | weights. 
Can be used to avoid unconnected points.
28 |     sigma : float
29 |         The standard deviation of the gaussian normal distribution function used to
30 |         transform the distances for the inverse distance based weighting.
31 |     search_radius : float
32 |         radius for the nearest neighbor search; defaults to ``3 * sigma`` if not given
33 | 
34 |     Returns
35 |     -------
36 |     eigenvalues : np.ndarray
37 |         eigenvalues from the graph
38 |     eigenvectors : np.ndarray
39 |         eigenvectors with shape (n_points x n_eigenvectors)
40 |     """
41 |     import warnings  # stdlib module; ``np.warnings`` is gone from recent numpy
42 | 
43 |     # a message prefix suffices: ``filterwarnings`` anchors its regex at the start
44 |     regex_string = r"the matrix subclass is not the recommended way to represent matrices"
45 |     with warnings.catch_warnings():
46 |         warnings.filterwarnings("ignore", regex_string)
47 |         lapl = _laplacian_gauss_idw(points, min_neighbors, sigma, search_radius)
48 |         return _laplacian(lapl, n_eigenmodes)
49 | 
50 | 
51 | def _laplacian_gauss_idw(
52 |     points: np.ndarray,
53 |     min_neighbors: Union[int, None] = None,
54 |     sigma: Union[float, None] = None,
55 |     search_radius: Union[float, None] = None,
56 | ):
57 |     """
58 |     Calculates the laplacian matrix for the sample points of a manifold. The inverse
59 |     of the gauss-transformed distance is used as weighting of the neighbors.
60 | 
61 |     Parameters
62 |     ----------
63 |     points: array-like, shape (n_points, n_components)
64 |         The sampling points of a manifold.
65 |     min_neighbors: int
66 |         The minimum number of neighbors of a point to be considered for the laplacian
67 |         weights. Can be used to avoid unconnected points.
68 |     sigma: float
69 |         The standard deviation of the gaussian normal distribution function used to
70 |         transform the distances for the inverse distance based weighting.
71 |     search_radius : float
72 |         radius search parameter for nearest neighbors
73 | 
74 |     Returns
75 |     -------
76 |     L: array-like, shape (n_points, n_points)
77 |         The laplacian matrix for manifold given by its sampling `points`.
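    Examples
    --------
    Sketch of the intended use (illustrative only; a small random cloud with
    default parameters, names as defined in this module)::

        points = np.random.rand(50, 3)
        lapl = _laplacian_gauss_idw(points)  # sparse normalized laplacian, (50, 50)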
78 |     """
79 |     if points.ndim != 2:
80 |         raise TypeError("Only 2D arrays are supported.")
81 | 
82 |     if min_neighbors is None:
83 |         min_neighbors = points.shape[1]
84 | 
85 |     tree = KDTree(points)
86 | 
87 |     if sigma is None:
88 |         d, _ = tree.query(points, 2 + 2 * points.shape[1], return_distance=True)
89 |         sigma = np.sum(d[:, -2:])
90 |         sigma /= 3 * len(points)
91 | 
92 |     if search_radius is None:
93 |         search_radius = 3 * sigma
94 | 
95 |     graph = dok_matrix((len(points), len(points)), dtype=np.double)
96 | 
97 |     for i, (j, d, e, k) in enumerate(
98 |         zip(
99 |             *tree.query_radius(points, return_distance=True, r=search_radius),
100 |             *tree.query(points, return_distance=True, k=1 + min_neighbors),
101 |         )
102 |     ):
103 |         # Always search for k neighbors, this prevents strongly connected local areas
104 |         # a little, attracting the eigenfield
105 | 
106 |         d, j = e, k
107 |         k = j != i
108 |         d, j = d[k], j[k]
109 |         d **= 2
110 |         d /= -2 * sigma**2
111 |         graph[i, j] = d = np.exp(d)
112 |         graph[j, i] = d[:, np.newaxis]
113 | 
114 |     if not np.array_equal(graph, graph.T):
115 |         raise RuntimeError("graph matrix is not symmetric.")
116 | 
117 |     return csgraph.laplacian(graph, normed=True)
118 | 
119 | 
120 | def _laplacian(lapl: csgraph, n_eigenmodes: int = 5):
121 |     """
122 |     Compute the smallest nonzero eigenmodes of a graph laplacian
123 | 
124 |     Parameters
125 |     ----------
126 |     lapl : csgraph
127 |         sparse cs graph from scipy
128 |     n_eigenmodes : int
129 |         number of eigenmodes to compute
130 | 
131 |     Returns
132 |     -------
133 |     eigen_values : np.ndarray
134 |         eigenvalues of the graph
135 |     eigen_vecs : np.ndarray
136 |         eigenvectors with shape (n_points x n_eigenmodes)
137 |     """
138 | 
139 |     n_nonzero_eigenvalues = 0
140 |     n_eigenvalues = int(n_eigenmodes * 1.5)
141 | 
142 |     eigen_vals = np.empty((0,))
143 |     eigen_vecs = np.empty((0, 0))
144 | 
145 |     while n_nonzero_eigenvalues < n_eigenmodes:
146 |         eigen_vals, eigen_vecs = map(np.real, eigsh(lapl, n_eigenvalues, which="SA"))
147 | 
148 |         i_start = np.argmax(eigen_vals > 1e-7)
149 |         n_nonzero_eigenvalues = len(eigen_vals) - i_start
150 | 
151 |         if n_nonzero_eigenvalues >= n_eigenmodes:
152 |             eigen_vecs = eigen_vecs[:, i_start : i_start + n_eigenmodes]
153 |             eigen_vals = eigen_vals[i_start : i_start + n_eigenmodes]
154 | 
155 |         n_eigenvalues = int(n_eigenvalues * 1.5)
156 | 
157 |     return eigen_vals, eigen_vecs
--------------------------------------------------------------------------------
/src/lasso/dimred/hashing_sphere.py:
--------------------------------------------------------------------------------
1 | import os
2 | import warnings
3 | 
4 | import h5py
5 | import numpy as np
6 | 
7 | # scipy is C-code which causes invalid linter warning about ConvexHull not
8 | # being around.
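# Pipeline sketch (illustrative only, mirrors ``compute_hashes`` below): project
# a point cloud onto a bounding sphere, bin the two spherical angles, then
# average a field value per bin:
#
#     bins_a, bins_b = _create_sphere_mesh(diameter)  # diameter as np.float32
#     alpha, beta = _project_to_sphere(points, centroid, axis="Z")
#     histo = binned_statistic_2d(alpha, beta, None, "count",
#                                 bins=[bins_a, bins_b], expand_binnumbers=True)
#     hashes = sphere_hashing(histo.binnumber, histo.statistic, field)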
9 | # pylint: disable = no-name-in-module
10 | from scipy.spatial import ConvexHull
11 | from scipy.stats import binned_statistic_2d
12 | from sklearn.preprocessing import normalize
13 | 
14 | 
15 | warnings.simplefilter(action="ignore", category=FutureWarning)
16 | 
17 | 
18 | def _create_sphere_mesh(diameter: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
19 |     """Compute the alpha and beta increments for a
20 |     meshed sphere for binning the projected values
21 | 
22 |     Parameters
23 |     ----------
24 |     diameter : np.ndarray
25 |         sphere diameter
26 | 
27 |     Returns
28 |     -------
29 |     bin_alpha : np.ndarray
30 |         alpha bin boundaries
31 |     bin_beta : np.ndarray
32 |         beta bin boundaries
33 |     """
34 | 
35 |     if diameter.dtype != np.float32:
36 |         raise TypeError("diameter array must be of type `np.float32`.")
37 | 
38 |     # number of alpha bin boundaries (144 bins around the equator)
39 |     n_alpha = 145
40 | 
41 |     # sphere radius
42 |     r = diameter / 2
43 | 
44 |     # sphere area
45 |     a_sphere = 4 * np.pi * r**2
46 | 
47 |     # number of elements
48 |     n_ele = 144**2
49 | 
50 |     # area of one element
51 |     a_ele = a_sphere / n_ele
52 | 
53 |     # bin values for alpha and the increment
54 |     bin_alpha, delt_alpha = np.linspace(0, 2 * np.pi, n_alpha, retstep=True)
55 | 
56 |     # for beta axis binning
57 |     count = np.linspace(0.0, 144.0, 145)
58 |     # compute required bin boundaries to ensure area of each element is the same
59 |     tmp = count * a_ele
60 |     tmp /= r**2 * delt_alpha
61 |     bin_beta = 1 - tmp
62 | 
63 |     # In case of trailing floats (-1.00000004 for example)
64 |     bin_beta[-1] = max(bin_beta[-1], -1)
65 | 
66 |     bin_beta = np.arccos(bin_beta)
67 | 
68 |     return bin_alpha, bin_beta
69 | 
70 | 
71 | def _project_to_sphere(
72 |     points: np.ndarray, centroid: np.ndarray, axis: str = "Z"
73 | ) -> tuple[np.ndarray, np.ndarray]:
74 |     """compute the projection vectors of centroid to each point in terms of spherical coordinates
75 | 
76 |     Parameters
77 |     ----------
78 |     points : np.ndarray
79 |         coordinates of the point cloud
80 |     centroid : np.ndarray
81 |         centroid of the point cloud
82 |     axis : str
83 |         global axis the sphere axis is aligned with
84 | 
85 |     Returns
86 |     -------
87 |     proj_alpha : np.ndarray
88 |         alpha angles of all points
89 |     proj_beta : np.ndarray
90 |         beta angle of all points
91 |     """
92 |     # standard global axis
93 |     indexes = [0, 1, 2]
94 | 
95 |     # correct the indexes based on user input
96 |     if axis == "Z":
97 |         indexes = [0, 1, 2]  # z axis aligned with global z-axis
98 |     elif axis == "Y":
99 |         indexes = [0, 2, 1]  # z axis aligned with global y-axis
100 |     elif axis == "X":
101 |         indexes = [2, 1, 0]  # z axis aligned with global x-axis
102 | 
103 |     # projection
104 |     vec = points - centroid
105 | 
106 |     # normalize
107 |     vec = normalize(vec, axis=1, norm="l2")
108 | 
109 |     # alpha based on sphere axis alignment
110 |     ang = np.arctan2(vec[:, indexes[1]], vec[:, indexes[0]])
111 | 
112 |     # atan2 returns neg angles for values greater than 180
113 |     neg_indexes = np.where(ang < 0)
114 |     ang[neg_indexes] += 2 * np.pi
115 | 
116 |     proj_alpha = ang
117 |     proj_beta = np.arccos(vec[:, indexes[2]])
118 | 
119 |     return proj_alpha, proj_beta
120 | 
121 | 
122 | def sphere_hashing(
123 |     bin_numbers: np.ndarray, bin_counts: np.ndarray, field: np.ndarray
124 | ) -> np.ndarray:
125 |     """Compute average field values for all the binned values
126 | 
127 |     Parameters
128 |     ----------
129 |     bin_numbers : np.ndarray
130 |         bin numbers for the respective index for the x and y-axis
131 |     bin_counts : np.ndarray
132 |         number of points that fall into each bin
133 |     field : np.ndarray
134 |         field values per point (e.g. plastic strain or velocity)
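        For example, with two by two bins and three points, where the two
        points falling into the first bin get averaged (tiny fabricated input):

        >>> bin_numbers = np.array([[1, 1, 2], [1, 1, 2]])
        >>> bin_counts = np.zeros((2, 2))
        >>> field = np.array([1.0, 3.0, 5.0])
        >>> sphere_hashing(bin_numbers, bin_counts, field)
        array([2., 0., 0., 5.])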
135 | 
136 |     Returns
137 |     -------
138 |     binned_field : np.ndarray
139 |         the averaged field values for each bin
140 |     """
141 |     # bin_numbers holds the bin_number for its respective index and must have
142 |     # same length as the number of points
143 |     if len(bin_numbers[0]) != len(field):
144 |         raise AssertionError(
145 |             "bin_numbers holds the bin_number for its respective index and "
146 |             "must have same length as the number of points."
147 |         )
148 |     # check data types
149 |     if not np.issubdtype(bin_numbers.dtype, np.integer):
150 |         raise TypeError(f"Expected int dtype got {bin_numbers.dtype}")
151 |     if not np.issubdtype(bin_counts.dtype, np.floating):
152 |         raise TypeError(f"Expected float dtype got {bin_counts.dtype}.")
153 | 
154 |     n_rows = bin_counts.shape[0]
155 |     n_cols = bin_counts.shape[1]
156 | 
157 |     # bin x and y indexes for each point in field
158 |     binx = np.asarray(bin_numbers[0]) - 1
159 |     biny = np.asarray(bin_numbers[1]) - 1
160 | 
161 |     # bin count for averaging
162 |     bin_count = np.zeros((n_rows, n_cols))
163 | 
164 |     # averaged result to return
165 |     binned_field = np.zeros((n_rows, n_cols))
166 | 
167 |     # accumulate the field values per bin (np.add.at also adds up duplicate indices)
168 |     np.add.at(binned_field, (binx, biny), field)
169 |     # count the points per bin
170 |     np.add.at(bin_count, (binx, biny), 1)
171 | 
172 |     binned_field = binned_field.flatten()
173 |     bin_count = bin_count.flatten()
174 | 
175 |     # exclude all zero entries
176 |     nonzero_inds = np.where(bin_count != 0)
177 |     # average the fields
178 |     binned_field[nonzero_inds] /= bin_count[nonzero_inds]
179 | 
180 |     return binned_field
181 | 
182 | 
183 | def compute_hashes(
184 |     source_path: str, target_path: str = None, n_files: int = None, ret_vals: bool = False
185 | ):
186 |     """Compute the hashes using spherical projection of the field values
187 | 
188 |     Parameters
189 |     ----------
190 |     source_path : str
191 |         path to source directory from which the displacements/strains are
192 |         loaded, this directory should contain HDF5 files of the data
193 |     target_path : str (optional)
194 |         directory in which the hashes are to be written to
195 |     n_files : int (optional)
196 |         number of files to process, useful for verification and quick visualization
197 |     ret_vals : bool (optional)
198 |         if True, return the hashes; be aware that the hash list can
199 |         take up a lot of RAM
200 | 
201 |     Returns
202 |     -------
203 |     hashes : np.ndarray
204 |         hashed field values
205 | 
206 |     Notes
207 |     -----
208 |     Key for node_displacements for all timesteps: 'xyz'
209 |     Key for field values for all timesteps: 'fields'
210 |     """
211 | 
212 |     # pylint: disable = too-many-locals
213 | 
214 |     node_displacement_key = "xyz"
215 |     fields_key = "fields"
216 |     file_name = "run_"
217 |     # output hash files are indexed like the input run files
218 | 
219 |     hashed_data = []
220 |     # if n_files is none get total number of files in directory
221 |     if n_files is None:
222 |         n_files = len(os.listdir(source_path))
223 | 
224 |     # load the displacements and compute the hashes for each run; the sphere is
225 |     # built from the first state and the field is taken from the last time step
226 |     for ii in range(n_files):
227 |         with h5py.File(source_path + file_name + str(ii) + ".h5", "r") as hf:
228 |             node_displacements = hf[node_displacement_key]
229 |             fields = hf[fields_key]
230 | 
231 |             xyz = node_displacements[:, 0, :]
232 | 
233 |             # centroid of point cloud
234 |             centroid = np.mean(xyz, axis=0)
235 | 
236 |             # convex hull of point cloud
237 |             hull = ConvexHull(xyz)
238 |             dist = np.linalg.norm(hull.max_bound - hull.min_bound).astype(np.float32)
239 | 
240 |             # compute the bin intervals for alpha and beta split into 144 elements
241 |             bins_a, bins_b = _create_sphere_mesh(dist)
242 | 
243 |             # compute the point projections
244 |             proj_alpha, proj_beta = _project_to_sphere(xyz, centroid, axis="Y")
245 | 
246 |             # bin the spherical coordinates in terms of alpha and beta
247 |             histo = binned_statistic_2d(
248 |                 proj_alpha, proj_beta, None, "count", bins=[bins_a, bins_b], expand_binnumbers=True
249 |             )
250 |             # get the field value
251 |             p_strains = fields[:, -1]
252 | 
253 |             # compute hashes
254 |             hashes = sphere_hashing(histo.binnumber, histo.statistic, p_strains)
255 | 
256 |             if target_path:
257 |                 # write the hashes of this run to its own file
258 |                 with h5py.File(target_path + "hashes_sphere_" + str(ii) + ".h5", "w") as hf_out:
259 |                     hf_out.create_dataset("hashes", data=hashes)
260 | 
261 |             if ret_vals:
262 |                 hashed_data.append(hashes)
263 | 
264 |     return np.asarray(hashed_data)
--------------------------------------------------------------------------------
/src/lasso/dimred/run.py:
--------------------------------------------------------------------------------
1 | from rich.console import Console
2 | from rich.theme import Theme
3 | 
4 | from lasso.dimred.dimred_run import (
5 |     DIMRED_STAGES,
6 |     DimredRun,
7 |     DimredRunError,
8 |     DimredStage,
9 |     parse_dimred_args,
10 | )
11 | 
12 | 
13 | def main():
14 |     """Runs the dimensionality reduction CLI"""
15 | 
16 |     # parse command line stuff
17 |     parser = parse_dimred_args()
18 |     log_theme = Theme({
19 |         "info": "royal_blue1",
20 |         "success": "green",
21 |         "warning": "dark_orange3",
22 |         "error": "bold red",
23 |     })
24 |     console = Console(theme=log_theme, record=True, highlight=False)
25 | 
26 |     try:
27 |         # parse settings from command line
28 |         dimred_run = DimredRun(
29 |             reference_run=parser.reference_run,
30 |             simulation_runs=parser.simulation_runs,
31 |             console=console,
32 |             exclude_runs=parser.exclude_runs,
33 |             project_dir=parser.project_dir,
34 |             html_name=parser.html_name,
35 |             html_set_timestamp=parser.html_timestamp,
36 |             img_path=parser.embedding_images,
37 |             logfile_filepath=parser.logfile_filepath,
38 |             n_processes=parser.n_processes,
39 |             part_id_filter=parser.part_ids,
40 |             start_stage=parser.start_stage,
41 |             end_stage=parser.end_stage,
42 |             timestep=parser.timestep,
43 |             cluster_args=parser.cluster_args,
44 |             outlier_args=parser.outlier_args,
45 |         )
46 | 
47 |         # do the thing
48 |         console.print()
49 |         console.print(" ---- Running Routines ---- ")
50 |         console.print()
51 | 
52 |         # initiate threading pool for handling jobs
53 |         with dimred_run:
54 |             # setup
55 |             if (
56 |                 dimred_run.start_stage_index
57 |                 <= DIMRED_STAGES.index(DimredStage.REFERENCE_RUN.value)
58 |                 <= dimred_run.end_stage_index
59 |             ):
60 |                 dimred_run.process_reference_run()
61 | 
62 |             # import
63 |             if (
64 |                 dimred_run.start_stage_index
65 |                 <= DIMRED_STAGES.index(DimredStage.IMPORT_RUNS.value)
66 |                 <= dimred_run.end_stage_index
67 |             ):
68 |                 dimred_run.subsample_to_reference_run()
69 | 
70 |             # math
71 |             if (
72 |                 dimred_run.start_stage_index
73 |                 <= DIMRED_STAGES.index(DimredStage.REDUCTION.value)
74 |                 <= dimred_run.end_stage_index
75 |             ):
76 |                 dimred_run.dimension_reduction_svd()
77 | 
78 |             # clustering
79 |             if (
80 |                 dimred_run.start_stage_index
81 |                 <= DIMRED_STAGES.index(DimredStage.CLUSTERING.value)
82 |                 <= dimred_run.end_stage_index
83 |             ):
84 |                 dimred_run.clustering_results()
85 | 
86 |             # export
87 |             if (
88 |                 dimred_run.start_stage_index
89 |                 <= DIMRED_STAGES.index(DimredStage.EXPORT_PLOT.value)
90 |                 <= dimred_run.end_stage_index
91 |             ):
92 |                 dimred_run.visualize_results()
93 | 
94 |         # print logfile
95 |         console.save_html(dimred_run.logfile_filepath)
96 | 
97 |     # catch and report a DimredRunError
98 |     except DimredRunError as err:
99 |         print(err)
100 | 
101 | 
102 | if __name__ == "__main__":
103 |     main()
104 | 
--------------------------------------------------------------------------------
/src/lasso/dimred/sphere/__init__.py:
--------------------------------------------------------------------------------
1 | # from .algorithms import *
--------------------------------------------------------------------------------
/src/lasso/dimred/sphere/algorithms.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | # scipy is C-code which causes invalid linter warning about ConvexHull not
4 | # being around.
5 | # pylint: disable = no-name-in-module
6 | from scipy.spatial import ConvexHull
7 | from scipy.stats import binned_statistic_2d
8 | from scipy.stats._binned_statistic import BinnedStatistic2dResult
9 | from sklearn.preprocessing import normalize
10 | 
11 | 
12 | def to_spherical_coordinates(points: np.ndarray, centroid: np.ndarray, axis: str = "Z"):
13 |     """Converts the points to spherical coordinates.
14 | 
15 |     Parameters
16 |     ----------
17 |     points: np.ndarray
18 |         The point cloud to be sphered.
19 |     centroid: np.ndarray
20 |         Centroid of the point cloud.
21 |     axis: str
22 |         Sphere axis in the global coordinate system.
23 | 
24 |     Returns
25 |     -------
26 |     az : np.ndarray
27 |         Azimuthal angle vector.
28 |     po: np.ndarray
29 |         Polar angle vector.
30 | 
31 |     Notes
32 |     -----
33 |     The local x-axis is set as the zero marker for azimuthal angles.
34 |     """
35 |     indexes = [0, 1, 2]
36 |     # set the correct indexes for swapping if the sphere
37 |     # axis is not aligned with the global z axis
38 |     if axis == "Y":
39 |         indexes = [0, 2, 1]  # sphere z axis aligned with global y-axis
40 |     elif axis == "X":
41 |         indexes = [2, 1, 0]  # sphere z axis aligned with global x-axis
42 | 
43 |     # vectors from centroid to points
44 |     vec = points - centroid
45 |     vec = normalize(vec, axis=1, norm="l2")
46 | 
47 |     # azimuthal angles on the local xy plane
48 |     # x-axis is the zero marker, and we correct
49 |     # all negative angles
50 |     az = np.arctan2(vec[:, indexes[1]], vec[:, indexes[0]])
51 |     neg_indexes = np.where(az < 0)
52 |     az[neg_indexes] += 2 * np.pi
53 | 
54 |     # polar angles
55 |     po = np.arccos(vec[:, indexes[2]])
56 | 
57 |     return az, po
58 | 
59 | 
60 | def sphere_hashing(histo: BinnedStatistic2dResult, field: np.ndarray):
61 |     """Compute the hash of each bucket in the histogram by mapping
62 |     the bin numbers to the field values and averaging the field
63 |     values by the number of points in each bin.
64 | 
65 |     Parameters
66 |     ----------
67 |     histo: BinnedStatistic2dResult
68 |         2D histogram containing the indexes of all points of a simulation
69 |         mapped to their projected bins.
70 |     field: ndarray
71 |         Field values of all points (e.g. plastic strain).
72 |     Returns
73 |     -------
74 |     hashes: np.ndarray
75 |         The hashing result of all points mapped to an embedding space.
76 | 
77 |     """
78 |     bin_n = histo.binnumber
79 | 
80 |     if len(bin_n[0]) != len(field):
81 |         raise AssertionError(
82 |             "bin_numbers holds the bin_number for its respective index and "
83 |             "must have same length as the number of points."
84 | ) 85 | 86 | # get dims of the embedding space 87 | n_rows = histo.statistic.shape[0] 88 | n_cols = histo.statistic.shape[1] 89 | 90 | # bin stores the indexes of the points 91 | # index 0 stores the azimuthal angles 92 | # index 1 stores the polar angles 93 | # we want zero indexing 94 | binx = np.asarray(bin_n[0]) - 1 95 | biny = np.asarray(bin_n[1]) - 1 96 | 97 | # allocate arrays 98 | bin_count = np.zeros((n_rows, n_cols)) 99 | hashes = np.zeros((n_rows, n_cols)) 100 | 101 | # sum all the field values to each bin 102 | hashes[binx[:], biny[:]] += field[:] 103 | bin_count[binx[:], biny[:]] += 1 104 | 105 | hashes = hashes.flatten() 106 | bin_count = bin_count.flatten() 107 | 108 | # exclude all zero entries 109 | nonzero_inds = np.where(bin_count != 0) 110 | 111 | # average the fields 112 | hashes[nonzero_inds] /= bin_count[nonzero_inds] 113 | 114 | return hashes 115 | 116 | 117 | def create_sphere(diameter: float): 118 | """Creates two vectors along the alpha and beta axis of a sphere. Alpha represents 119 | the angle from the sphere axis to the equator. Beta between vectors from the 120 | center of the sphere to one of the poles and the equator. 121 | 122 | Parameters 123 | ---------- 124 | diameter: 125 | Diameter of the sphere. 126 | 127 | Returns 128 | ------- 129 | bin_beta: np.ndarray 130 | Bin bounds for the beta angles. 131 | 132 | bin_alpha: np.ndarray 133 | Bin bounds for the alpha angles. 134 | 135 | """ 136 | # number of partitions for equator 137 | n_alpha = 145 138 | # number of partitions for longitude 139 | n_beta = 144 140 | 141 | r = diameter / 2.0 142 | 143 | # area of sphere 144 | a_sphere = 4 * np.pi * r**2 145 | n_ele = n_beta**2 146 | a_ele = a_sphere / n_ele 147 | 148 | # alpha angles around the equator and the size of one step 149 | bin_alpha, delt_alpha = np.linspace(0, 2 * np.pi, n_alpha, retstep=True) 150 | 151 | # bins for beta axis in terms of axis coorindates between -1 and 1 152 | count = np.linspace(0.0, float(n_beta), 145) 153 | tmp = count * a_ele 154 | tmp /= r**2 * delt_alpha 155 | bin_beta = 1 - tmp 156 | bin_beta[-1] = max(bin_beta[-1], -1) 157 | 158 | bin_beta = np.arccos(bin_beta) 159 | return bin_alpha, bin_beta 160 | 161 | 162 | def compute_similarity(embeddings: np.ndarray) -> np.ndarray: 163 | """Computes the similarity of each embedding. 164 | 165 | Parameters 166 | ---------- 167 | embeddings: np.ndarray 168 | Model embeddings. 169 | 170 | Returns 171 | ------- 172 | smatrix: np.ndarray 173 | Similarity matrix. 174 | """ 175 | 176 | n_runs = len(embeddings) 177 | smatrix = np.empty((n_runs, n_runs), dtype=np.float32) 178 | for ii in range(n_runs): 179 | for jj in range(n_runs): 180 | smatrix[ii, jj] = np.dot(embeddings[ii], embeddings[jj]) / np.sqrt( 181 | np.dot(embeddings[ii], embeddings[ii]) * np.dot(embeddings[jj], embeddings[jj]) 182 | ) 183 | 184 | return smatrix 185 | 186 | 187 | def create_histogram( 188 | cloud: np.ndarray, sphere_axis: str = "Z", planar: bool = False 189 | ) -> BinnedStatistic2dResult: 190 | """Builds a histogram using the blocks of a sphered globe and returns a 191 | binned statistics result for two dimensions. 192 | 193 | Parameters 194 | ---------- 195 | cloud: np.ndarray 196 | Point cloud around which we create an embedding. 197 | sphere_axis: str 198 | Axis of the sphere. This is aligned with the global axis system. 199 | planar: bool 200 | Set to true for planar point clouds and false for higher dimensions. 
201 | 202 | Returns 203 | ------- 204 | stats: BinnedStatistic2dResult 205 | Returns a 2D histogram of the sphere with bin numbers and bin statistics. 206 | """ 207 | # casting to array because of typing 208 | centroid = np.array(np.mean(cloud, axis=0)) 209 | 210 | qhull_options = "" 211 | if planar: 212 | qhull_options = "QJ" 213 | 214 | hull = ConvexHull(cloud, qhull_options=qhull_options) 215 | 216 | # we need to determine the largest distance in this point 217 | # cloud, so we can give the sphere a dimension 218 | # we can also create a sphere of random size but this could 219 | # skew the results 220 | dist = np.linalg.norm(hull.max_bound - hull.min_bound) 221 | 222 | bins_a, bins_b = create_sphere(dist) 223 | 224 | cloud_alpha, cloud_beta = to_spherical_coordinates(cloud, centroid, axis=sphere_axis) 225 | 226 | return binned_statistic_2d( 227 | cloud_alpha, cloud_beta, None, "count", bins=[bins_a, bins_b], expand_binnumbers=True 228 | ) 229 | -------------------------------------------------------------------------------- /src/lasso/dimred/svd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/dimred/svd/__init__.py -------------------------------------------------------------------------------- /src/lasso/dimred/svd/keyword_types.py: -------------------------------------------------------------------------------- 1 | class ClusterType: 2 | """Specifies names of specific clustering algorithms 3 | 4 | Attributes 5 | ---------- 6 | OPTICS: str 7 | OPTICS 8 | DBSCAN: str 9 | DBSCAN 10 | KMeans: str 11 | KMeans 12 | SpectralClustering: str 13 | SpectralClustering 14 | """ 15 | 16 | OPTICS = "OPTICS" 17 | DBSCAN = "DBSCAN" 18 | KMeans = "KMeans" 19 | SpectralClustering = "SpectralClustering" 20 | 21 | @staticmethod 22 | def get_cluster_type_name() -> list[str]: 23 | """Get the name of the clustering algorithms""" 24 | return [ 25 | ClusterType.OPTICS, 26 | ClusterType.DBSCAN, 27 | ClusterType.KMeans, 28 | ClusterType.SpectralClustering, 29 | ] 30 | 31 | 32 | class DetectorType: 33 | """Specifies names of different outlier detector algorithms 34 | 35 | Attributes 36 | ---------- 37 | IsolationForest: str 38 | IsolationForest 39 | OneClassSVM: str 40 | OneClassSVM 41 | LocalOutlierFactor: str 42 | LocalOutlierFactor 43 | """ 44 | 45 | IsolationForest = "IsolationForest" 46 | OneClassSVM = "OneClassSVM" 47 | LocalOutlierFactor = "LocalOutlierFactor" 48 | # Experimental = "Experimental" 49 | 50 | @staticmethod 51 | def get_detector_type_name() -> list[str]: 52 | """Get the name of the detector algorithms""" 53 | return [ 54 | DetectorType.IsolationForest, 55 | DetectorType.OneClassSVM, 56 | DetectorType.LocalOutlierFactor, 57 | ] 58 | -------------------------------------------------------------------------------- /src/lasso/dimred/svd/plot_beta_clusters.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import time 4 | import webbrowser 5 | from collections.abc import Sequence 6 | from typing import Union 7 | 8 | import numpy as np 9 | 10 | from lasso.dimred.svd.html_str_eles import ( 11 | CONST_STRING, 12 | OVERHEAD_STRING, 13 | SCRIPT_STRING, 14 | TRACE_STRING, 15 | ) 16 | from lasso.plotting.plot_shell_mesh import _read_file 17 | 18 | 19 | def timestamp() -> str: 20 | """ 21 | Creates a timestamp string of format yymmdd_hhmmss_ 22 | """ 23 | 24 | def add_zero(in_str) 
-> str:
25 |         if len(in_str) == 1:
26 |             return "0" + in_str
27 |         return in_str
28 | 
29 |     year, month, day, hour, minute, second, _, _, _ = time.localtime()
30 |     y_str = str(year)[2:]
31 |     mo_str = add_zero(str(month))
32 |     d_str = add_zero(str(day))
33 |     h_str = add_zero(str(hour))
34 |     mi_str = add_zero(str(minute))
35 |     s_str = add_zero(str(second))
36 |     t_str = y_str + mo_str + d_str + "_" + h_str + mi_str + s_str + "_"
37 |     return t_str
38 | 
39 | 
40 | # pylint: disable = inconsistent-return-statements
41 | def plot_clusters_js(
42 |     beta_cluster: Sequence,
43 |     id_cluster: Union[np.ndarray, Sequence],
44 |     save_path: str,
45 |     img_path: Union[None, str] = None,
46 |     mark_outliers: bool = False,
47 |     mark_timestamp: bool = True,
48 |     filename: str = "3d_beta_plot",
49 |     write: bool = True,
50 |     show_res: bool = True,
51 | ) -> Union[None, str]:
52 |     """
53 |     Creates a .html visualization of input data
54 | 
55 |     Parameters
56 |     ----------
57 |     beta_cluster: np.ndarray
58 |         Numpy array containing beta clusters
59 |     id_cluster: Union[np.ndarray, Sequence]
60 |         Numpy array or sequence containing the ids of samples in clusters.
61 |         Must be of same structure as beta_clusters
62 |     save_path: str
63 |         Where to save the .html visualization
64 |     img_path: Union[None, str], default: None
65 |         Path to images of samples
66 |     mark_outliers: bool, default: False
67 |         Set to True if the first entry in beta_cluster contains the outliers
68 |     mark_timestamp: bool, default: True
69 |         Set to True if name of visualization shall contain time of creation.
70 |         If set to False, visualization will override previous file
71 |     filename: str, default "3d_beta_plot"
72 |         Name of .html file
73 |     write: bool, default: True
74 |         Set to False to not write .html file and return as string instead
75 |     show_res: bool, default: True
76 |         Set to False to not open resulting page in webbrowser
77 | 
78 |     Returns
79 |     -------
80 |     html_str_formatted: str
81 |         If **write=False** returns .html file as string, else None
82 |     """
83 | 
84 |     # pylint: disable = too-many-arguments, too-many-locals
85 | 
86 |     if not isinstance(img_path, str):
87 |         img_path = ""
88 | 
89 |     colorlist = [
90 |         "#1f77b4",
91 |         "#ff7f0e",
92 |         "#2ca02c",
93 |         "#d62728",
94 |         "#9467bd",
95 |         "#8c564b",
96 |         "#e377c2",
97 |         "#7f7f7f",
98 |         "#bcbd22",
99 |         "#17becf",
100 |     ]
101 |     tracelist = []
102 | 
103 |     # rescaling betas to better fit in viz
104 |     scale_multiplier = 300
105 |     max_val = (
106 |         max(*[max(np.max(cluster), abs(np.min(cluster))) for cluster in beta_cluster])
107 |         if len(beta_cluster) > 1
108 |         else max(np.max(beta_cluster[0]), abs(np.min(beta_cluster[0])))
109 |     )
110 | 
111 |     beta_cluster = [cluster / max_val * scale_multiplier for cluster in beta_cluster]
112 | 
113 |     id_nr = []
114 |     for group in id_cluster:
115 |         id_group = []
116 |         for entry in group:
117 |             nr = re.findall(r"\d+", entry)[0]
118 |             id_group.append(nr)
119 |         id_nr.append(id_group)
120 | 
121 |     # pylint: disable = consider-using-f-string
122 |     _three_min_ = '<script type="text/javascript">{0}</script>'.format(
123 |         _read_file(
124 |             os.path.join(
125 |                 # move path to "~/lasso/"
126 |                 os.path.split(os.path.split(os.path.dirname(__file__))[0])[0],
127 |                 "plotting/resources/three_latest.min.js",
128 |             )
129 |         )
130 |     )
131 | 
132 |     html_str_formatted = OVERHEAD_STRING + CONST_STRING.format(
133 |         _three_min_=_three_min_, _path_str_=img_path, _runIdEntries_=id_nr
134 |     )
135 |     for index, cluster in enumerate(beta_cluster):
136 |         name = "Error, my bad"
137 |         color = "pink"
138 |         if (index == 0) and mark_outliers:
139 |             name = "outliers"
140 | 
color = "black" 141 | else: 142 | name = f"cluster {index}" 143 | color = colorlist[(index - 1) % 10] 144 | formatted_trace = TRACE_STRING.format( 145 | _traceNr_=f"trace{index}", 146 | _name_=name, 147 | _color_=color, 148 | _runIDs_=id_cluster[index].tolist(), 149 | _x_=np.around(cluster[:, 0], decimals=5).tolist(), 150 | _y_=np.around(cluster[:, 1], decimals=5).tolist(), 151 | _z_=np.around(cluster[:, 2], decimals=5).tolist(), 152 | ) 153 | tracelist.append(f"trace{index}") 154 | html_str_formatted += formatted_trace 155 | trace_list_string = " traceList = [" 156 | for trace in tracelist: 157 | trace_list_string += trace + ", " 158 | trace_list_string += "]" 159 | html_str_formatted += trace_list_string 160 | html_str_formatted += SCRIPT_STRING 161 | 162 | if write: 163 | os.makedirs(save_path, exist_ok=True) 164 | 165 | # Timestamp for differentiating different viz / not override previous viz 166 | stamp = timestamp() if mark_timestamp else "" 167 | 168 | output_filepath = os.path.join(save_path, stamp + filename + ".html") 169 | with open(output_filepath, "w", encoding="utf-8") as f: 170 | f.write(html_str_formatted) 171 | if show_res: 172 | webbrowser.open("file://" + os.path.realpath(output_filepath)) 173 | else: 174 | # only needed for testcases 175 | return html_str_formatted 176 | -------------------------------------------------------------------------------- /src/lasso/dimred/svd/pod_functions.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | import numpy as np 4 | from rich.progress import Progress, TaskID 5 | from scipy.sparse import csc_matrix 6 | from scipy.sparse.linalg import svds 7 | 8 | from lasso.utils.rich_progress_bars import PlaceHolderBar 9 | 10 | 11 | def svd_step_and_dim(s_mat: np.ndarray, k=10) -> np.ndarray: 12 | """ 13 | Performs a svds operation on the two dimensional s_mat 14 | 15 | Parameters 16 | ---------- 17 | s_mat: ndarray 18 | 2D array on which the svds operation shall be performed 19 | k: int, 10, optional. 
20 |         Size of the POD (defaults to 10)
21 | 
22 |     Returns
23 |     -------
24 |     v: ndarray
25 |         Array containing the right reduced order basis
26 |     """
27 |     small_mat = csc_matrix(s_mat.astype(np.float64))
28 | 
29 |     _, _, v = svds(small_mat, k=k)
30 | 
31 |     v = v[::-1, :]
32 | 
33 |     return v
34 | 
35 | 
36 | def calculate_v_and_betas(
37 |     stacked_sub_displ: np.ndarray,
38 |     progress_bar: Union[None, Progress, PlaceHolderBar] = None,
39 |     task_id: Union[None, TaskID] = None,
40 | ) -> Union[str, tuple[np.ndarray, np.ndarray]]:
41 |     """
42 |     Calculates the right reduced order basis V and up to 10 eigenvalues of the subsamples
43 | 
44 |     Parameters
45 |     ----------
46 |     stacked_sub_displ: np.ndarray
47 |         np.ndarray containing all subsampled displacements
48 |         shape must be (samples, timesteps, nodes, dims)
49 | 
50 |     Returns
51 |     -------
52 |     v_big: np.ndarray
53 |         Reduced order basis to transform betas back into subsamples
54 |     betas: np.ndarray
55 |         Projected simulation runs
56 |     err_msg: str
57 |         Error message if not enough samples were provided
58 |     """
59 | 
60 |     big_mat = stacked_sub_displ.reshape((
61 |         stacked_sub_displ.shape[0],
62 |         stacked_sub_displ.shape[1],
63 |         stacked_sub_displ.shape[2] * stacked_sub_displ.shape[3],
64 |     ))
65 | 
66 |     # baseline displacement: the first timestep of each sample; slicing with
67 |     # ``0:1`` keeps the time axis so the subtraction broadcasts over all steps
68 |     diff_mat = big_mat[:, 0:1, :]
69 | 
70 |     # We only want the difference in displacement
71 |     big_mat = big_mat - diff_mat
72 | 
73 |     k = min(10, big_mat.shape[0] - 1)
74 |     if k < 1:
75 |         return "Must provide more than 1 sample"
76 | 
77 |     if task_id is None and progress_bar:
78 |         return "Progress requires a task ID"
79 | 
80 |     v_big = np.zeros((k, big_mat.shape[1], big_mat.shape[2]))
81 |     if progress_bar:
82 |         progress_bar.advance(task_id)  # type: ignore
83 |         for step in range(big_mat.shape[1] - 1):
84 |             v_big[:, step + 1] = svd_step_and_dim(big_mat[:, step + 1], k)
85 |             progress_bar.advance(task_id)  # type: ignore
86 |     else:
87 |         for step in range(big_mat.shape[1] - 1):
88 |             v_big[:, step + 1] = svd_step_and_dim(big_mat[:, step + 1], k)
89 | 
90 |     betas_big = np.einsum("stn, ktn -> stk", big_mat, v_big)
91 | 
92 |     return v_big, betas_big
--------------------------------------------------------------------------------
/src/lasso/dimred/svd/subsampling_methods.py:
--------------------------------------------------------------------------------
1 | import os
2 | import random
3 | import time
4 | from collections.abc import Sequence
5 | from typing import Union
6 | 
7 | import numpy as np
8 | from sklearn.neighbors import NearestNeighbors
9 | 
10 | from lasso.dyna import ArrayType, D3plot
11 | 
12 | 
13 | def _mark_dead_eles(node_indexes: np.ndarray, alive_shells: np.ndarray) -> np.ndarray:
14 |     """
15 |     Collects the node indexes of elements marked as 'not alive'
16 | 
17 |     Parameters
18 |     ----------
19 |     node_indexes: ndarray
20 |         Array containing node indexes
21 |     alive_shells: ndarray
22 |         Array containing float values representing whether an element is alive.
23 |         Expected to be D3plot.arrays[ArrayType.element_shell_is_alive] or the equivalent for beams etc.
24 | 
25 |     Returns
26 |     -------
27 |     dead_nodes: np.ndarray
28 |         Array containing the indexes of nodes attached to dead elements.
29 |         These indexes can be masked out of the node coordinates.
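    For example (tiny fabricated arrays: two shells over six nodes, where the
    second shell was deleted):

    >>> import numpy as np
    >>> node_indexes = np.array([[0, 1, 2, 3], [2, 3, 4, 5]])
    >>> alive = np.array([[1.0, 0.0]])  # one timestep, second shell dead
    >>> _mark_dead_eles(node_indexes, alive)
    array([2, 3, 4, 5])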
30 | 31 | See Also 32 | -------- 33 | bury_the_dead(), also removes dead beam nodes 34 | """ 35 | 36 | dead_eles_shell = np.unique(np.where(alive_shells == 0)[1]) 37 | 38 | ele_filter = np.zeros(node_indexes.shape[0]) 39 | ele_filter[dead_eles_shell] = 1 40 | ele_filter_bool = ele_filter == 1 41 | 42 | dead_nodes = np.unique(node_indexes[ele_filter_bool]) 43 | 44 | return dead_nodes 45 | 46 | 47 | def _extract_shell_parts( 48 | part_list: Sequence[int], d3plot: D3plot 49 | ) -> Union[tuple[np.ndarray, np.ndarray], str]: 50 | """ 51 | Extracts a shell part defined by its part ID out of the given d3plot. 52 | Returns a new node index, relevant coordinates and displacement 53 | 54 | Parameters 55 | ---------- 56 | part_list: list 57 | List of part IDs of the parts that shall be extracted 58 | d3plot: D3plot 59 | D3plot the part shall be extracted from 60 | 61 | Returns 62 | ------- 63 | node_coordinates: ndarray 64 | Numpy array containing the node coordinates of the extracted part 65 | node_displacement: ndarray 66 | Numpy array containing the node displacement of the extracted part 67 | err_msg: str 68 | If an error occurs, a string containing the error msg is returned instead 69 | """ 70 | 71 | # pylint: disable = too-many-locals, too-many-statements 72 | 73 | # convert into list 74 | part_list = list(part_list) 75 | 76 | shell_node_indexes = d3plot.arrays[ArrayType.element_shell_node_indexes] 77 | shell_part_indexes = d3plot.arrays[ArrayType.element_shell_part_indexes] 78 | beam_node_indexes = d3plot.arrays[ArrayType.element_beam_node_indexes] 79 | beam_part_indexes = d3plot.arrays[ArrayType.element_beam_part_indexes] 80 | solid_node_indexes = d3plot.arrays[ArrayType.element_solid_node_indexes] 81 | solid_part_indexes = d3plot.arrays[ArrayType.element_solid_part_indexes] 82 | tshell_node_indexes = d3plot.arrays[ArrayType.element_tshell_node_indexes] 83 | tshell_part_indexes = d3plot.arrays[ArrayType.element_tshell_part_indexes] 84 | 85 | node_coordinates = d3plot.arrays[ArrayType.node_coordinates] 86 | node_displacement = d3plot.arrays[ArrayType.node_displacement] 87 | 88 | alive_mask = np.full((node_coordinates.shape[0]), True) 89 | 90 | if ArrayType.element_shell_is_alive in d3plot.arrays: 91 | dead_shell_mask = _mark_dead_eles( 92 | shell_node_indexes, d3plot.arrays[ArrayType.element_shell_is_alive] 93 | ) 94 | alive_mask[dead_shell_mask] = False 95 | if ArrayType.element_beam_is_alive in d3plot.arrays: 96 | dead_beam_mask = _mark_dead_eles( 97 | beam_node_indexes, d3plot.arrays[ArrayType.element_beam_is_alive] 98 | ) 99 | alive_mask[dead_beam_mask] = False 100 | if ArrayType.element_solid_is_alive in d3plot.arrays: 101 | dead_solid_mask = _mark_dead_eles( 102 | solid_node_indexes, d3plot.arrays[ArrayType.element_solid_is_alive] 103 | ) 104 | alive_mask[dead_solid_mask] = False 105 | if ArrayType.element_tshell_is_alive in d3plot.arrays: 106 | dead_tshell_mask = _mark_dead_eles( 107 | tshell_node_indexes, d3plot.arrays[ArrayType.element_tshell_is_alive] 108 | ) 109 | alive_mask[dead_tshell_mask] = False 110 | 111 | if len(part_list) > 0: 112 | try: 113 | part_ids = d3plot.arrays[ArrayType.part_ids] 114 | except KeyError: 115 | err_msg = "KeyError: Loaded plot has no parts" 116 | return err_msg 117 | part_ids_as_list = part_ids.tolist() 118 | # check if parts exist 119 | for part in part_list: 120 | try: 121 | part_ids_as_list.index(int(part)) 122 | except ValueError: 123 | err_msg = "ValueError: Could not find part: {0}" 124 | return err_msg.format(part) 125 | 126 | def mask_parts( 127 
|             part_list2: list[int], element_part_index: np.ndarray, element_node_index: np.ndarray
128 |         ) -> np.ndarray:
129 |             element_part_filter = np.full(element_part_index.shape, False)
130 |             proc_parts = []
131 | 
132 |             for pid in part_list2:
133 |                 part_index = part_ids_as_list.index(int(pid))
134 |                 locs = np.where(element_part_index == part_index)[0]
135 |                 if locs.shape != (0,):
136 |                     proc_parts.append(pid)
137 |                     element_part_filter[locs] = True
138 | 
139 |             for part in proc_parts:
140 |                 part_list2.pop(part_list2.index(part))
141 | 
142 |             unique_element_node_indexes = np.unique(element_node_index[element_part_filter])
143 | 
144 |             return unique_element_node_indexes
145 | 
146 |         # shells:
147 |         unique_shell_node_indexes = mask_parts(part_list, shell_part_indexes, shell_node_indexes)
148 | 
149 |         # beams
150 |         unique_beam_node_indexes = mask_parts(part_list, beam_part_indexes, beam_node_indexes)
151 | 
152 |         # solids:
153 |         unique_solide_node_indexes = mask_parts(part_list, solid_part_indexes, solid_node_indexes)
154 | 
155 |         # tshells
156 |         unique_tshell_node_indexes = mask_parts(part_list, tshell_part_indexes, tshell_node_indexes)
157 | 
158 |         # this check may seem redundant, but also verifies that our masking of parts works
159 |         if not len(part_list) == 0:
160 |             err_msg = "ValueError: Could not find parts: " + str(part_list)
161 |             return err_msg
162 | 
163 |         # New coordinate mask
164 |         coord_mask = np.full((node_coordinates.shape[0]), False)
165 |         coord_mask[unique_shell_node_indexes] = True
166 |         coord_mask[unique_solide_node_indexes] = True
167 |         coord_mask[unique_beam_node_indexes] = True
168 |         coord_mask[unique_tshell_node_indexes] = True
169 | 
170 |         inv_alive_mask = np.logical_not(alive_mask)
171 |         coord_mask[inv_alive_mask] = False
172 | 
173 |         node_coordinates = node_coordinates[coord_mask]
174 |         node_displacement = node_displacement[:, coord_mask]
175 |     else:
176 |         node_coordinates = node_coordinates[alive_mask]
177 |         node_displacement = node_displacement[:, alive_mask]
178 | 
179 |     return node_coordinates, node_displacement
180 | 
181 | 
182 | def create_reference_subsample(
183 |     load_path: str, parts: Sequence[int], nr_samples=2000
184 | ) -> Union[tuple[np.ndarray, float, float], str]:
185 |     """
186 |     Loads the D3plot at load_path, extracts the node coordinates of the given parts,
187 |     and returns a random subsample of these nodes
188 | 
189 |     Parameters
190 |     ----------
191 |     load_path: str
192 |         Filepath of the D3plot
193 |     parts: Sequence[int]
194 |         List of parts to be extracted
195 |     nr_samples: int
196 |         How many nodes are subsampled
197 | 
198 |     Returns
199 |     -------
200 |     reference_sample: np.array
201 |         Numpy array containing the reference sample
202 |     t_total: float
203 |         Total time required for subsampling
204 |     t_load: float
205 |         Time required to load plot
206 |     err_msg: str
207 |         If an error occurs, a string containing the error is returned instead
208 |     """
209 |     t_null = time.time()
210 |     try:
211 |         plot = D3plot(
212 |             load_path,
213 |             state_array_filter=[ArrayType.node_displacement, ArrayType.element_shell_is_alive],
214 |         )
215 |     except Exception:
216 |         err_msg = (
217 |             f"Failed to load {load_path}! Please make sure it is a D3plot file. 
" 218 | f"This might be due to {os.path.split(load_path)[1]} being a timestep of a plot" 219 | ) 220 | return err_msg 221 | 222 | t_load = time.time() - t_null 223 | result = _extract_shell_parts(parts, plot) 224 | if isinstance(result, str): 225 | return result 226 | 227 | coordinates = result[0] 228 | if coordinates.shape[0] < nr_samples: 229 | err_msg = "Number of nodes is lower than desired samplesize" 230 | return err_msg 231 | 232 | random.seed("seed") 233 | samples = random.sample(range(len(coordinates)), nr_samples) 234 | 235 | reference_sample = coordinates[samples] 236 | t_total = time.time() - t_null 237 | return reference_sample, t_total, t_load 238 | 239 | 240 | def remap_random_subsample( 241 | load_path: str, parts: list, reference_subsample: np.ndarray 242 | ) -> Union[tuple[np.ndarray, float, float], str]: 243 | """ 244 | Remaps the specified sample onto a new mesh provided by reference subsampl, using knn matching 245 | 246 | Parameters 247 | ---------- 248 | load_path: str 249 | Filepath of the desired D3plot 250 | parts: list of int 251 | Which parts shall be extracted from the D3plot 252 | reference_subsample: np.array 253 | Numpy array containing the reference nodes 254 | 255 | Returns 256 | ------- 257 | subsampled_displacement: np.ndarray 258 | Subsampled displacement of provided sample 259 | t_total: float 260 | Total time required to perform subsampling 261 | t_load: float 262 | Time required to load D3plot 263 | err_msg: str 264 | If an error occurred, a string is returned instead containing the error 265 | """ 266 | t_null = time.time() 267 | try: 268 | plot = D3plot( 269 | load_path, 270 | state_array_filter=[ArrayType.node_displacement, ArrayType.element_shell_is_alive], 271 | ) 272 | except Exception: 273 | err_msg = ( 274 | f"Failed to load {load_path}! Please make sure it is a D3plot file. 
" 275 | f"This might be due to {os.path.split(load_path)[1]} being a timestep of a plot" 276 | ) 277 | return err_msg 278 | 279 | t_load = time.time() - t_null 280 | result = _extract_shell_parts(parts, plot) 281 | if isinstance(result, str): 282 | return result 283 | 284 | coordinates, displacement = result[0], result[1] 285 | 286 | quarantine_zone = NearestNeighbors(n_neighbors=1, n_jobs=4).fit(coordinates) 287 | _, quarantined_index = quarantine_zone.kneighbors(reference_subsample) 288 | 289 | subsampled_displacement = displacement[:, quarantined_index[:, 0]] 290 | 291 | return subsampled_displacement, time.time() - t_null, t_load 292 | -------------------------------------------------------------------------------- /src/lasso/dyna/__init__.py: -------------------------------------------------------------------------------- 1 | from .array_type import ArrayType 2 | from .binout import Binout 3 | from .d3plot import D3plot 4 | from .d3plot_header import D3plotFiletype, D3plotHeader 5 | from .filter_type import FilterType 6 | 7 | 8 | __all__ = ["Binout", "D3plot", "ArrayType", "FilterType", "D3plotHeader", "D3plotFiletype"] 9 | -------------------------------------------------------------------------------- /src/lasso/dyna/filter_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class FilterType(Enum): 5 | """Used for filtering d3plot arrays 6 | 7 | Attributes 8 | ---------- 9 | BEAM: str 10 | Filters for beam elements 11 | SHELL: str 12 | Filters for shell elements 13 | SOLID: str 14 | Filters for solid elements 15 | TSHELL: str 16 | Filters for thick shells elements 17 | PART: str 18 | Filters for parts 19 | NODE: str 20 | Filters for nodes 21 | 22 | Examples 23 | -------- 24 | >>> part_ids = [13, 14] 25 | >>> d3plot.get_part_filter(FilterType.SHELL, part_ids) 26 | """ 27 | 28 | BEAM = "beam" 29 | SHELL = "shell" 30 | SOLID = "solid" 31 | TSHELL = "tshell" 32 | PART = "part" 33 | NODE = "node" 34 | -------------------------------------------------------------------------------- /src/lasso/femzip/__init__.py: -------------------------------------------------------------------------------- 1 | from .femzip_api import FemzipAPI 2 | 3 | 4 | __all__ = ["FemzipAPI"] 5 | -------------------------------------------------------------------------------- /src/lasso/femzip/fz_config.py: -------------------------------------------------------------------------------- 1 | import enum 2 | from typing import Union 3 | 4 | 5 | def get_last_int_of_line(line: str) -> tuple[str, Union[None, int]]: 6 | """Searches an integer in the line 7 | 8 | Parameters 9 | ---------- 10 | line: str 11 | line to be searched 12 | 13 | Returns 14 | ------- 15 | rest_line: str 16 | rest of line before match 17 | number: Union[int, None] 18 | number or None if not found 19 | """ 20 | for entry in line.split(): 21 | if entry.isdigit(): 22 | return line[: line.rfind(entry)], int(entry) 23 | return line, None 24 | 25 | 26 | class FemzipVariableCategory(enum.Enum): 27 | """Enum for femzip variable categories 28 | 29 | Attributes 30 | ---------- 31 | GEOMETRY: int 32 | -5 33 | PART: int 34 | -2 35 | GLOBAL: int 36 | -1 37 | NODE: int 38 | 0 39 | SOLID: int 40 | 1 41 | THICK_SHELL: int 42 | 2 43 | BEAM: int 44 | 3 45 | TOOLS: int 46 | 4 47 | SHELL: int 48 | 5 49 | SPH: int 50 | 6 51 | FPM: int 52 | 7 53 | CFD: int 54 | 8 55 | CPM_FLOAT_VAR: int 56 | 9 57 | CPM_AIRBAG: int 58 | 10 59 | CPM_INT_VAR: int 60 | 11 61 | RADIOSS_STATE_DATA: int 62 | 12 63 | 
HEXA20: int 64 | 13 65 | """ 66 | 67 | GEOMETRY = -5 68 | # REST_OF_HEADER_AND_GEOMETRY_UNCOMPRESSED = -3 69 | # ALL_STATE_EXCEPT_GEOMETRY_POSITION = -2 70 | # REST_OF_HEADER_AND_GEOMETRY_COMPRESSED = -1 71 | # EXTERNAL_NODE_IDS = 1 72 | # NODE_COORDINATES = 2 73 | # SOLID_ELEMENT_IDS = 3 74 | # SOLID_NEIGHBORS = 4 75 | # SOLID_MATERIALS = 5 76 | # THICK_SHELLS = (6, 7, 8) 77 | # BEAMS = (9, 10, 11) 78 | # TOOL_ELEMENTS = (12, 13, 14) 79 | # SHELL_ELEMENTS = (15, 16, 17) 80 | # HEADER_AND_PART_TITLES = -4 81 | # TIME = -3 82 | PART = -2 83 | GLOBAL = -1 84 | NODE = 0 85 | SOLID = 1 86 | THICK_SHELL = 2 87 | BEAM = 3 88 | TOOLS = 4 89 | SHELL = 5 90 | SPH = 6 91 | FPM = 7 92 | CFD = 8 93 | CPM_FLOAT_VAR = 9 94 | CPM_AIRBAG = 10 95 | CPM_INT_VAR = 11 96 | RADIOSS_STATE_DATA = 12 97 | HEXA20 = 13 98 | 99 | @staticmethod 100 | def from_int(number: int) -> "FemzipVariableCategory": 101 | """Deserializes an integer into an enum 102 | 103 | Parameters 104 | ---------- 105 | number: int 106 | number to turn into an enum 107 | 108 | Returns 109 | ------- 110 | enum_value: FemzipVariableCategory 111 | """ 112 | if number not in FEMZIP_CATEGORY_TRANSL_DICT: 113 | err_msg = f"Error: Unknown femzip variable category: '{number}'" 114 | raise RuntimeError(err_msg) 115 | 116 | return FEMZIP_CATEGORY_TRANSL_DICT[number] 117 | 118 | 119 | FEMZIP_CATEGORY_TRANSL_DICT: dict[int, FemzipVariableCategory] = { 120 | entry.value: entry for entry in FemzipVariableCategory.__members__.values() 121 | } 122 | 123 | 124 | class FemzipArrayType(enum.Enum): 125 | """Enum for femzip array types""" 126 | 127 | GLOBAL_DATA = "global" 128 | PART_RESULTS = "Parts: Energies and others" 129 | # nodes 130 | NODE_DISPLACEMENT = "coordinates" 131 | NODE_TEMPERATURES = "temperatures" 132 | NODE_ACCELERATIONS = "accelerations" 133 | NODE_HEAT_FLUX = "heat_flux" 134 | NODE_MASS_SCALING = "mass_scaling" 135 | NODE_TEMPERATURE_GRADIENT = "dtdt" 136 | NODE_VELOCITIES = "velocities" 137 | 138 | # beam 139 | BEAM_S_SHEAR_RESULTANT = "s_shear_resultant" 140 | BEAM_T_SHEAR_RESULTANT = "t_shear_resultant" 141 | BEAM_S_BENDING_MOMENT = "s_bending_moment" 142 | BEAM_T_BENDING_MOMENT = "t_bending_moment" 143 | BEAM_AXIAL_FORCE = "axial_force" 144 | BEAM_TORSIONAL_MOMENT = "torsional_resultant" 145 | BEAM_AXIAL_STRESS = "axial_stress" 146 | BEAM_SHEAR_STRESS_RS = "RS_shear_stress" 147 | BEAM_SHEAR_STRESS_TR = "TR_shear_stress" 148 | BEAM_PLASTIC_STRAIN = "plastic_strain" 149 | BEAM_AXIAL_STRAIN = "axial_strain" 150 | 151 | # airbag 152 | AIRBAG_STATE_GEOM = "CPMs_state_geometry" 153 | AIRBAG_PARTICLE_POS_X = "Pos x" 154 | AIRBAG_PARTICLE_POS_Y = "Pos y" 155 | AIRBAG_PARTICLE_POS_Z = "Pos z" 156 | AIRBAG_PARTICLE_VEL_X = "Vel x" 157 | AIRBAG_PARTICLE_VEL_Y = "Vel y" 158 | AIRBAG_PARTICLE_VEL_Z = "Vel z" 159 | AIRBAG_PARTICLE_MASS = "Mass" 160 | AIRBAG_PARTICLE_RADIUS = "Radius" 161 | AIRBAG_PARTICLE_SPIN_ENERGY = "Spin En" 162 | AIRBAG_PARTICLE_TRAN_ENERGY = "Tran En" 163 | AIRBAG_PARTICLE_NEIGHBOR_DIST = "NS dist" 164 | AIRBAG_PARTICLE_GAS_CHAMBER_ID = "GasC ID" 165 | AIRBAG_PARTICLE_CHAMBER_ID = "Cham ID" 166 | AIRBAG_PARTICLE_LEAKAGE = "Leakage" 167 | 168 | STRESS_X = "Sigma-x" 169 | STRESS_Y = "Sigma-y" 170 | STRESS_Z = "Sigma-z" 171 | STRESS_XY = "Sigma-xy" 172 | STRESS_YZ = "Sigma-yz" 173 | STRESS_XZ = "Sigma-zx" 174 | EFF_PSTRAIN = "Effective plastic strain" 175 | HISTORY_VARS = "extra_value_per_element" 176 | BENDING_MOMENT_MX = "bending_moment Mx" 177 | BENDING_MOMENT_MY = "bending_moment My" 178 | BENDING_MOMENT_MXY = "bending_moment 
Mxy" 179 | SHEAR_FORCE_X = "shear_resultant Qx" 180 | SHEAR_FORCE_Y = "shear_resultant Qy" 181 | NORMAL_FORCE_X = "normal_resultant Nx" 182 | NORMAL_FORCE_Y = "normal_resultant Ny" 183 | NORMAL_FORCE_XY = "normal_resultant Nxy" 184 | THICKNESS = "thickness" 185 | UNKNOWN_1 = "element_dependent_variable_1" 186 | UNKNOWN_2 = "element_dependent_variable_2" 187 | STRAIN_INNER_X = "Epsilon-x (inner)" 188 | STRAIN_INNER_Y = "Epsilon-y (inner)" 189 | STRAIN_INNER_Z = "Epsilon-z (inner)" 190 | STRAIN_INNER_XY = "Epsilon-xy (inner)" 191 | STRAIN_INNER_YZ = "Epsilon-yz (inner)" 192 | STRAIN_INNER_XZ = "Epsilon-zx (inner)" 193 | STRAIN_OUTER_X = "Epsilon-x (outer)" 194 | STRAIN_OUTER_Y = "Epsilon-y (outer)" 195 | STRAIN_OUTER_Z = "Epsilon-z (outer)" 196 | STRAIN_OUTER_XY = "Epsilon-xy (outer)" 197 | STRAIN_OUTER_YZ = "Epsilon-yz (outer)" 198 | STRAIN_OUTER_XZ = "Epsilon-zx (outer)" 199 | INTERNAL_ENERGY = "internal_energy" 200 | 201 | STRAIN_X = "Epsilon-x (IP 1)" 202 | STRAIN_Y = "Epsilon-y (IP 1)" 203 | STRAIN_Z = "Epsilon-z (IP 1)" 204 | STRAIN_XY = "Epsilon-xy (IP 1)" 205 | STRAIN_YZ = "Epsilon-yz (IP 1)" 206 | STRAIN_XZ = "Epsilon-zx (IP 1)" 207 | 208 | @staticmethod 209 | def from_string(femzip_name: str) -> "FemzipArrayType": 210 | """Converts a variable name to an array type string 211 | 212 | Parameters 213 | ---------- 214 | femzip_name: str 215 | name of the variable given by femzip 216 | 217 | Returns 218 | ------- 219 | femzip_array_type: FemzipArrayType 220 | """ 221 | for fz_array_type in FemzipArrayType.__members__.values(): 222 | if fz_array_type.value in femzip_name.strip(): 223 | return fz_array_type 224 | 225 | err_msg = "Unknown femzip variable name: '{0}'" 226 | raise ValueError(err_msg.format(femzip_name)) 227 | -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/api_extended.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/api_extended.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libfemzip_a_dyna_sidact_generic.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libfemzip_a_dyna_sidact_generic.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libfemzip_post_licgenerator_ext_flexlm.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libfemzip_post_licgenerator_ext_flexlm.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libimf.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libimf.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libintlc.so.5: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libintlc.so.5 -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libiomp5.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libiomp5.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libirng.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libirng.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/linux/libsvml.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/linux/libsvml.so -------------------------------------------------------------------------------- /src/lasso/femzip/lib/windows/api_extended.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/api_extended.dll -------------------------------------------------------------------------------- /src/lasso/femzip/lib/windows/femzip_a_dyna_sidact_generic.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/femzip_a_dyna_sidact_generic.dll -------------------------------------------------------------------------------- /src/lasso/femzip/lib/windows/libfemzip_post_licgenerator_ext_flexlm.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/libfemzip_post_licgenerator_ext_flexlm.dll -------------------------------------------------------------------------------- /src/lasso/femzip/lib/windows/libifcoremd.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/libifcoremd.dll -------------------------------------------------------------------------------- /src/lasso/femzip/lib/windows/libifportmd.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/libifportmd.dll -------------------------------------------------------------------------------- /src/lasso/femzip/lib/windows/libiomp5md.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/libiomp5md.dll -------------------------------------------------------------------------------- 
/src/lasso/femzip/lib/windows/libmmd.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/femzip/lib/windows/libmmd.dll -------------------------------------------------------------------------------- /src/lasso/io/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/io/__init__.py -------------------------------------------------------------------------------- /src/lasso/io/binary_buffer.py: -------------------------------------------------------------------------------- 1 | import mmap 2 | import os 3 | from typing import Any, Union 4 | 5 | import numpy as np 6 | 7 | 8 | class BinaryBuffer: 9 | """This class is used to handle binary data""" 10 | 11 | def __init__(self, filepath: Union[str, None] = None, n_bytes: int = 0): 12 | """Buffer used to read binary files 13 | 14 | Parameters 15 | ---------- 16 | filepath: Union[str, None] 17 | path to a binary file 18 | n_bytes: int 19 | how many bytes to load (uses memory mapping) 20 | 21 | Returns 22 | ------- 23 | instance: BinaryBuffer 24 | """ 25 | self.filepath_ = None 26 | self.sizes_ = [] 27 | self.load(filepath, n_bytes) 28 | 29 | @property 30 | def memoryview(self) -> memoryview: 31 | """Get the underlying memoryview of the binary buffer 32 | 33 | Returns 34 | ------- 35 | mv_: memoryview 36 | memoryview used to store the data 37 | """ 38 | return self.mv_ 39 | 40 | @memoryview.setter 41 | def memoryview(self, new_mv): 42 | """Set the memoryview of the binary buffer manually 43 | 44 | Parameters 45 | ---------- 46 | new_mv: memoryview 47 | memoryview used to store the bytes 48 | """ 49 | if not isinstance(new_mv, memoryview): 50 | raise TypeError(f"new_mv must be a memoryview, got {type(new_mv)}") 51 | 52 | self.mv_ = new_mv 53 | self.sizes_ = [len(self.mv_)] 54 | 55 | def get_slice(self, start: int, end: Union[int, None] = None, step: int = 1) -> "BinaryBuffer": 56 | """Get a slice of the binary buffer 57 | 58 | Parameters 59 | ---------- 60 | start: int 61 | start position in bytes 62 | end: Union[int, None] 63 | end position (exclusive, defaults to the buffer length) 64 | step: int 65 | step for slicing (default 1) 66 | 67 | Returns 68 | ------- 69 | new_buffer: BinaryBuffer 70 | the slice as a new buffer 71 | """ 72 | 73 | if start >= len(self): 74 | raise IndexError(f"start index {start} out of range (length {len(self)})") 75 | 76 | if end is not None and end > len(self): 77 | raise IndexError(f"end index {end} out of range (length {len(self)})") 78 | 79 | end = len(self) if end is None else end 80 | 81 | new_binary_buffer = BinaryBuffer() 82 | new_binary_buffer.memoryview = self.mv_[start:end:step] 83 | 84 | return new_binary_buffer 85 | 86 | def __len__(self) -> int: 87 | """Get the length of the byte buffer 88 | 89 | Returns 90 | ------- 91 | len: int 92 | """ 93 | return len(self.mv_) 94 | 95 | @property 96 | def size(self) -> int: 97 | """Get the size of the byte buffer 98 | 99 | Returns 100 | ------- 101 | size: int 102 | size of buffer in bytes 103 | """ 104 | return len(self.mv_) 105 | 106 | @size.setter 107 | def size(self, size: int): 108 | """Set the length of the byte buffer 109 | 110 | Parameters 111 | ---------- 112 | size: int 113 | new size of the buffer 114 | """ 115 | 116 | if len(self.mv_) > size: 117 | self.mv_ = self.mv_[:size] 118 | elif len(self.mv_) < size:
119 | buffer = bytearray(self.mv_) + bytearray(size - len(self.mv_))  # bytearray(n) zero-fills the padding 120 | self.mv_ = memoryview(buffer) 121 | 122 | def read_number(self, start: int, dtype: np.dtype) -> Union[float, int]: 123 | """Read a number from the buffer 124 | 125 | Parameters 126 | ---------- 127 | start: int 128 | at which byte to start reading 129 | dtype: np.dtype 130 | type of the number to read 131 | 132 | Returns 133 | ------- 134 | number: np.dtype 135 | number with the type specified 136 | """ 137 | return np.frombuffer(self.mv_, dtype=dtype, count=1, offset=start)[0] 138 | 139 | def write_number(self, start: int, value: Any, dtype: np.dtype): 140 | """Write a number to the buffer 141 | 142 | Parameters 143 | ---------- 144 | start: int 145 | at which byte to start writing 146 | value: Any 147 | value to write 148 | dtype: np.dtype 149 | type of the number to write 150 | """ 151 | 152 | wrapper = np.frombuffer(self.mv_[start:], dtype=dtype) 153 | wrapper[0] = value 154 | 155 | def read_ndarray(self, start: int, length: int, step: int, dtype: np.dtype) -> np.ndarray: 156 | """Read a numpy array from the buffer 157 | 158 | Parameters 159 | ---------- 160 | start: int 161 | at which byte to start reading 162 | length: int 163 | length in bytes to read 164 | step: int 165 | byte step size (how many bytes to skip) 166 | dtype: np.dtype 167 | type of the number to read 168 | 169 | Returns 170 | ------- 171 | array: np.ndarray 172 | """ 173 | 174 | return np.frombuffer(self.mv_[start : start + length : step], dtype=dtype) 175 | 176 | def write_ndarray(self, array: np.ndarray, start: int, step: int): 177 | """Write a numpy array to the buffer 178 | 179 | Parameters 180 | ---------- 181 | array: np.ndarray 182 | array to save to the file 183 | start: int 184 | start in bytes 185 | step: int 186 | byte step size (how many bytes to skip) 187 | """ 188 | 189 | wrapper = np.frombuffer(self.mv_[start::step], dtype=array.dtype) 190 | 191 | np.copyto(wrapper[: array.size], array, casting="no") 192 | 193 | def read_text(self, start: int, length: int, step: int = 1, encoding: str = "utf8") -> str: 194 | """Read text from the binary buffer 195 | 196 | Parameters 197 | ---------- 198 | start: int 199 | start in bytes 200 | length: int 201 | length in bytes to read 202 | step: int 203 | byte step size 204 | encoding: str 205 | encoding used 206 | """ 207 | return self.mv_[start : start + length : step].tobytes().decode(encoding) 208 | 209 | def save(self, filepath: Union[str, None] = None): 210 | """Save the binary buffer to a file 211 | 212 | Parameters 213 | ---------- 214 | filepath: str 215 | path where to save the data 216 | 217 | Notes 218 | ----- 219 | Overwrites the original file if no filepath 220 | is specified. 221 | """ 222 | 223 | filepath_parsed = filepath if filepath else (self.filepath_[0] if self.filepath_ else None) 224 | 225 | if filepath_parsed is None: 226 | return 227 | 228 | with open(filepath_parsed, "wb") as fp: 229 | fp.write(self.mv_) 230 | 231 | self.filepath_ = filepath_parsed 232 | 233 | def load(self, filepath: Union[list[str], str, None] = None, n_bytes: int = 0): 234 | """Load a file or a list of files 235 | 236 | Parameters 237 | ---------- 238 | filepath: Union[list[str], str, None] 239 | path to the file to load 240 | n_bytes: int 241 | number of bytes to load (uses memory mapping if nonzero) 242 | 243 | Notes 244 | ----- 245 | If no filepath is specified, then the opened file is simply 246 | reloaded.
247 | """ 248 | 249 | filepath = filepath if filepath else self.filepath_ 250 | 251 | if not filepath: 252 | return 253 | 254 | # convert to a list if only a single file is given 255 | filepath_parsed = [filepath] if isinstance(filepath, str) else filepath 256 | 257 | # get size of all files 258 | sizes = [os.path.getsize(path) for path in filepath_parsed] 259 | 260 | # reduce memory if required 261 | sizes = [entry if n_bytes == 0 else min(n_bytes, entry) for entry in sizes] 262 | 263 | memorysize = sum(sizes) 264 | 265 | # allocate memory 266 | buffer = memoryview(bytearray(b"0" * memorysize)) 267 | 268 | # read files and concatenate them 269 | sizes_tmp = [0] + sizes 270 | for i_path, path in enumerate(filepath_parsed): 271 | with open(path, "br") as fp: 272 | if n_bytes: 273 | mm = mmap.mmap(fp.fileno(), sizes[i_path], access=mmap.ACCESS_READ) 274 | buffer[sizes_tmp[i_path] :] = mm[: sizes[i_path]] 275 | else: 276 | fp.readinto(buffer[sizes_tmp[i_path] :]) 277 | 278 | self.filepath_ = filepath_parsed 279 | self.sizes_ = sizes 280 | self.mv_ = buffer 281 | 282 | def append(self, binary_buffer: "BinaryBuffer"): 283 | """Append another binary buffer to this one 284 | 285 | Parameters 286 | ---------- 287 | binary_buffer: BinaryBuffer 288 | buffer to append 289 | """ 290 | 291 | if not isinstance(binary_buffer, BinaryBuffer): 292 | raise TypeError( 293 | f"binary_buffer must be an instance of BinaryBuffer, got {type(binary_buffer)}" 294 | ) 295 | 296 | self.mv_ = memoryview(bytearray(self.mv_) + bytearray(binary_buffer.mv_)) 297 | self.sizes_.append(len(binary_buffer)) 298 | -------------------------------------------------------------------------------- /src/lasso/io/files.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import glob 3 | import os 4 | import typing 5 | from collections.abc import Iterator 6 | from typing import Union 7 | 8 | 9 | @contextlib.contextmanager 10 | def open_file_or_filepath( 11 | path_or_file: Union[str, typing.BinaryIO], mode: str 12 | ) -> Iterator[typing.BinaryIO]: 13 | """This function accepts a file or filepath and handles closing correctly 14 | 15 | Parameters 16 | ---------- 17 | path_or_file: Union[str, typing.IO] 18 | path or file 19 | mode: str 20 | filemode 21 | 22 | Yields 23 | ------ 24 | f: file object 25 | """ 26 | if isinstance(path_or_file, str): 27 | # We open this file in binary mode anyway so no encoding is needed. 
28 | # pylint: disable = unspecified-encoding 29 | f = file_to_close = open(path_or_file, mode) 30 | else: 31 | f = path_or_file 32 | file_to_close = None 33 | try: 34 | yield f 35 | finally: 36 | if file_to_close: 37 | file_to_close.close() 38 | 39 | 40 | def collect_files( 41 | dirpath: Union[str, list[str]], patterns: Union[str, list[str]], recursive: bool = False 42 | ): 43 | """Collect files from directories 44 | 45 | Parameters 46 | ---------- 47 | dirpath: Union[str, List[str]] 48 | path to one or multiple directories to search through 49 | patterns: Union[str, List[str]] 50 | patterns to search for 51 | recursive: bool 52 | whether to also search subdirs 53 | 54 | Returns 55 | ------- 56 | found_files: Union[List[str], List[List[str]]] 57 | returns the list of files found for every pattern specified 58 | 59 | Examples 60 | -------- 61 | >>> png_images, jpeg_images = collect_files("./folder", ["*.png", "*.jpeg"]) 62 | """ 63 | 64 | if not isinstance(dirpath, (list, tuple)): 65 | dirpath = [dirpath] 66 | if not isinstance(patterns, (list, tuple)): 67 | patterns = [patterns] 68 | 69 | found_files = [] 70 | for pattern in patterns: 71 | files_with_pattern = [] 72 | for current_dir in dirpath: 73 | # files in root dir 74 | files_with_pattern += glob.glob(os.path.join(current_dir, pattern)) 75 | # subfolders 76 | if recursive: 77 | files_with_pattern += glob.glob(os.path.join(current_dir, "**", pattern)) 78 | 79 | found_files.append(sorted(files_with_pattern)) 80 | 81 | if len(found_files) == 1: 82 | return found_files[0] 83 | 84 | return found_files 85 | -------------------------------------------------------------------------------- /src/lasso/logging.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import platform 3 | 4 | from lasso.utils.console_coloring import ConsoleColoring 5 | 6 | 7 | # settings 8 | MARKER_INFO = "[/]" 9 | MARKER_RUNNING = "[~]" 10 | MARKER_WARNING = "[!]" 11 | MARKER_SUCCESS = "[Y]" if platform.system() == "Windows" else "[✔]" 12 | MARKER_ERROR = "[X]" if platform.system() == "Windows" else "[✘]" 13 | 14 | LOGGER_NAME = "lasso" 15 | 16 | 17 | def str_info(msg: str): 18 | """Format a message as an info message 19 | 20 | Parameters 21 | ---------- 22 | msg: str 23 | message to format 24 | 25 | Returns 26 | ------- 27 | msg_ret: str 28 | formatted message 29 | """ 30 | # return ConsoleColoring.blue("[/] {0}".format(msg), light=True) 31 | return f"{MARKER_INFO} {msg}" 32 | 33 | 34 | def str_running(msg: str): 35 | """Format a message as stuff is running 36 | 37 | Parameters 38 | ---------- 39 | msg: str 40 | message to format 41 | 42 | Returns 43 | ------- 44 | msg_ret: str 45 | formatted message 46 | """ 47 | return f"{MARKER_RUNNING} {msg}" 48 | 49 | 50 | def str_success(msg: str): 51 | """Format a message as successful 52 | 53 | Parameters 54 | ---------- 55 | msg: str 56 | message to format 57 | 58 | Returns 59 | ------- 60 | msg_ret: str 61 | formatted message 62 | """ 63 | return ConsoleColoring.green(f"{MARKER_SUCCESS} {msg}") 64 | 65 | 66 | def str_warn(msg: str): 67 | """Format a string as a warning 68 | 69 | Parameters 70 | ---------- 71 | msg: str 72 | message to format 73 | 74 | Returns 75 | ------- 76 | msg_ret: str 77 | formatted message 78 | """ 79 | return ConsoleColoring.yellow(f"{MARKER_WARNING} {msg}") 80 | 81 | 82 | def str_error(msg: str): 83 | """Format a string as an error 84 | 85 | Parameters 86 | ---------- 87 | msg: str 88 | message to format 89 | 90 | Returns 91 | ------- 92 |
msg_ret: str 93 | formatted message 94 | """ 95 | return ConsoleColoring.red(f"{MARKER_ERROR} {msg}") 96 | 97 | 98 | def get_logger(file_flag: str) -> logging.Logger: 99 | """Get the logger for the lasso module 100 | 101 | Returns 102 | ------- 103 | logger: logging.Logger 104 | logger for the lasso module 105 | """ 106 | logging.basicConfig( 107 | format="[%(levelname)s] %(message)s [%(pathname)s %(funcName)s %(lineno)d]" 108 | ) 109 | return logging.getLogger(file_flag) 110 | -------------------------------------------------------------------------------- /src/lasso/math/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/math/__init__.py -------------------------------------------------------------------------------- /src/lasso/math/sampling.py: -------------------------------------------------------------------------------- 1 | import random 2 | from typing import Union 3 | 4 | import numpy as np 5 | from sklearn.neighbors import KDTree 6 | 7 | 8 | def unique_subsamples(start: int, end: int, n_samples: int, seed=None) -> np.ndarray: 9 | """Retrieve unique subsample indexes 10 | 11 | Parameters 12 | ---------- 13 | start: int 14 | starting index of population 15 | end: int 16 | ending index of population (start <= end) 17 | n_samples: int 18 | number of samples to draw 19 | seed: int 20 | seed for random number generator 21 | 22 | Returns 23 | ------- 24 | indexes: np.ndarray 25 | unique sample indexes 26 | """ 27 | if start > end: 28 | raise ValueError( 29 | f"Invalid range: start ({start}) must be less than or equal to end ({end})" 30 | ) 31 | 32 | n_samples = min(n_samples, end - start) 33 | random.seed(seed) 34 | indexes = np.array(random.sample(range(start, end), n_samples), dtype=np.int64) 35 | random.seed() 36 | return indexes 37 | 38 | 39 | def homogenize_density( 40 | points: np.ndarray, 41 | dim: int = 2, 42 | target_distance: Union[float, None] = None, 43 | n_neighbors: int = 18, 44 | seed=None, 45 | ) -> np.ndarray: 46 | """homogenize a cloud density by probabilities 47 | 48 | Parameters 49 | ---------- 50 | points: np.ndarray 51 | point cloud 52 | dim: int 53 | intrinsic dimension of the data 54 | target_distance: float 55 | target distance to aim for 56 | n_neighbors: int 57 | neighbors used for computation of average neighborhood distance 58 | seed: int 59 | seed for random number generator 60 | 61 | Returns 62 | ------- 63 | is_selected: np.ndarray 64 | boolean array indicating which subsamples were selected 65 | """ 66 | n_neighbors = min(n_neighbors, len(points)) 67 | 68 | random.seed(seed) 69 | d, _ = KDTree(points).query(points, k=n_neighbors + 1) 70 | d_average = np.average(d[:, 1:], axis=1) 71 | if target_distance is None: 72 | target_distance = np.median(d_average) 73 | is_selected = np.array([ 74 | dist >= target_distance or random.random() < (dist / target_distance) ** dim 75 | for dist in d_average 76 | ]) 77 | random.seed() 78 | return is_selected 79 | -------------------------------------------------------------------------------- /src/lasso/math/stochastic.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import stats 3 | 4 | 5 | def jensen_shannon_entropy(p: np.ndarray, q: np.ndarray) -> float: 6 | """Jensen-Shannon divergence of two probability distributions 7 | 8 | Parameters 9 | ---------- 10 | p: np.ndarray 11 | first probability distribution 12 |
q: np.ndarray 13 | second probability distribution 14 | 15 | Returns 16 | ------- 17 | js_divergence: float 18 | Jensen-Shannon divergence 19 | """ 20 | p = np.asarray(p) 21 | q = np.asarray(q) 22 | # normalize 23 | p = p / p.sum() 24 | q = q / q.sum() 25 | m = (p + q) / 2 26 | return (stats.entropy(p, m) + stats.entropy(q, m)) / 2 27 | -------------------------------------------------------------------------------- /src/lasso/plotting/__init__.py: -------------------------------------------------------------------------------- 1 | from .plot_shell_mesh import plot_shell_mesh 2 | 3 | 4 | __all__ = ["plot_shell_mesh"] 5 | -------------------------------------------------------------------------------- /src/lasso/plotting/plot_shell_mesh.py: -------------------------------------------------------------------------------- 1 | import io 2 | import json 3 | import os 4 | import uuid 5 | from base64 import b64encode 6 | from typing import Union 7 | from zipfile import ZIP_DEFLATED, ZipFile 8 | 9 | import numpy as np 10 | 11 | 12 | def _read_file(filepath: str): 13 | """This function reads file as str 14 | 15 | Parameters 16 | ---------- 17 | filepath : str 18 | filepath of the file to read as string 19 | 20 | Returns 21 | ------- 22 | file_content : str 23 | """ 24 | 25 | with open(filepath, encoding="utf-8") as fp_filepath: 26 | return fp_filepath.read() 27 | 28 | 29 | def plot_shell_mesh( 30 | node_coordinates: np.ndarray, 31 | shell_node_indexes: np.ndarray, 32 | field: Union[np.ndarray, None] = None, 33 | is_element_field: bool = True, 34 | fringe_limits: Union[tuple[float, float], None] = None, 35 | ): 36 | """Plot a mesh 37 | 38 | Parameters 39 | ---------- 40 | node_coordinates : np.ndarray 41 | array of node coordinates for elements 42 | shell_node_indexes : np.ndarray 43 | node indexes of shells 44 | field : Union[np.ndarray, None] 45 | Array containing a field value for every element or node 46 | is_element_field : bool 47 | if the specified field is for elements or nodes 48 | fringe_limits : Union[Tuple[float, float], None] 49 | limits for the fringe bar. Set by default to min and max. 
50 | 51 | Returns 52 | ------- 53 | html : str 54 | html code for plotting as string 55 | """ 56 | 57 | # pylint: disable = too-many-locals, too-many-statements 58 | 59 | if getattr(node_coordinates, "ndim", None) != 2: 60 | raise ValueError( 61 | f"node_coordinates must be 2-dimensional, " 62 | f"got ndim={getattr(node_coordinates, 'ndim', 'unknown')}" 63 | ) 64 | 65 | if getattr(node_coordinates, "shape", (None, None))[1] != 3: 66 | raise ValueError( 67 | f"node_coordinates must have shape[1] == 3, " 68 | f"got shape={getattr(node_coordinates, 'shape', 'unknown')}" 69 | ) 70 | 71 | if getattr(shell_node_indexes, "ndim", None) != 2: 72 | raise ValueError( 73 | f"shell_node_indexes must be 2-dimensional, " 74 | f"got ndim={getattr(shell_node_indexes, 'ndim', 'unknown')}" 75 | ) 76 | 77 | shape_1 = getattr(shell_node_indexes, "shape", (None, None))[1] 78 | if shape_1 not in (3, 4): 79 | raise ValueError(f"shell_node_indexes must have shape[1] of 3 or 4, got shape[1]={shape_1}") 80 | 81 | if isinstance(field, np.ndarray): 82 | if getattr(field, "ndim", None) != 1: 83 | raise ValueError( 84 | f"field must be 1-dimensional, got ndim={getattr(field, 'ndim', 'unknown')}" 85 | ) 86 | 87 | if is_element_field: 88 | if ( 89 | getattr(field, "shape", (None,))[0] 90 | != getattr(shell_node_indexes, "shape", (None,))[0] 91 | ): 92 | raise ValueError( 93 | f"field length {getattr(field, 'shape', (None,))[0]} does not match " 94 | f"shell_node_indexes length {getattr(shell_node_indexes, 'shape', (None,))[0]}" 95 | ) 96 | elif getattr(field, "shape", (None,))[0] != getattr(node_coordinates, "shape", (None,))[0]: 97 | raise ValueError( 98 | f"field length {getattr(field, 'shape', (None,))[0]} does not match " 99 | f"node_coordinates length {getattr(node_coordinates, 'shape', (None,))[0]}" 100 | ) 101 | 102 | # cast types correctly 103 | # the types MUST be float32 104 | node_coordinates = node_coordinates.astype(np.float32) 105 | if isinstance(field, np.ndarray): 106 | field = field.astype(np.float32) 107 | 108 | # distinguish tria and quads 109 | is_quad = shell_node_indexes[:, 2] != shell_node_indexes[:, 3] 110 | is_tria = np.logical_not(is_quad) 111 | 112 | # separate tria and quads ... I know it's sad :( 113 | tria_node_indexes = shell_node_indexes[is_tria][:, :3] 114 | quad_node_indexes = shell_node_indexes[is_quad] 115 | 116 | # we can only plot tria, therefore we need to split quads 117 | # into two trias 118 | quad_node_indexes_tria1 = quad_node_indexes[:, :3] 119 | # quad_node_indexes_tria2 = quad_node_indexes[:, [True, False, True, True]] 120 | quad_node_indexes_tria2 = quad_node_indexes[:, [0, 2, 3]] 121 | 122 | # assemble elements for plotting 123 | # This takes a lot of memory, and deliberately so: to plot fringe 124 | # values we must output the element values at the 3 corner nodes. 125 | # Since elements share nodes, we cannot reuse the same nodes and 126 | # therefore create multiple nodes at the same position, each with 127 | # its own fringe value.
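# Illustrative sketch (editorial addition with assumed sizes, not part of
# the original source): a mesh with n_tria trias and n_quad quads yields
# n_tria + 2 * n_quad plot triangles, so the assembled arrays below hold
# 3 * (n_tria + 2 * n_quad) duplicated node entries. For example, a quad
# with node indexes [4, 5, 6, 7] splits via the two index selections above
# into the trias [4, 5, 6] and [4, 6, 7].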
128 | nodes_xyz = np.concatenate([ 129 | node_coordinates[tria_node_indexes].reshape((-1, 3)), 130 | node_coordinates[quad_node_indexes_tria1].reshape((-1, 3)), 131 | node_coordinates[quad_node_indexes_tria2].reshape((-1, 3)), 132 | ]) 133 | 134 | # fringe value and hover title 135 | if isinstance(field, np.ndarray): 136 | if is_element_field: 137 | n_shells = len(shell_node_indexes) 138 | n_tria = np.sum(is_tria) 139 | n_quads = n_shells - n_tria 140 | 141 | # split field according to elements 142 | field_tria = field[is_tria] 143 | field_quad = field[is_quad] 144 | 145 | # allocate fringe array 146 | node_fringe = np.zeros((len(field_tria) + 2 * len(field_quad), 3), dtype=np.float32) 147 | 148 | # set fringe values 149 | node_fringe[:n_tria, 0] = field_tria 150 | node_fringe[:n_tria, 1] = field_tria 151 | node_fringe[:n_tria, 2] = field_tria 152 | 153 | node_fringe[n_tria : n_tria + n_quads, 0] = field_quad 154 | node_fringe[n_tria : n_tria + n_quads, 1] = field_quad 155 | node_fringe[n_tria : n_tria + n_quads, 2] = field_quad 156 | 157 | node_fringe[n_tria + n_quads : n_tria + 2 * n_quads, 0] = field_quad 158 | node_fringe[n_tria + n_quads : n_tria + 2 * n_quads, 1] = field_quad 159 | node_fringe[n_tria + n_quads : n_tria + 2 * n_quads, 2] = field_quad 160 | 161 | # flatty paddy 162 | node_fringe = node_fringe.flatten() 163 | else: 164 | # copy & paste ftw 165 | node_fringe = np.concatenate([ 166 | field[tria_node_indexes].reshape((-1, 3)), 167 | field[quad_node_indexes_tria1].reshape((-1, 3)), 168 | field[quad_node_indexes_tria2].reshape((-1, 3)), 169 | ]) 170 | node_fringe = node_fringe.flatten() 171 | 172 | # element text 173 | node_txt = [str(entry) for entry in node_fringe.flatten()] 174 | else: 175 | node_fringe = np.zeros(len(nodes_xyz), dtype=np.float32) 176 | node_txt = [""] * len(nodes_xyz) 177 | 178 | # zip compression of data for HTML (reduces size) 179 | zip_data = io.BytesIO() 180 | with ZipFile(zip_data, "w", compression=ZIP_DEFLATED) as zipfile: 181 | zipfile.writestr("/intensities", node_fringe.tobytes())  # tobytes: tostring() was removed from NumPy 182 | zipfile.writestr("/positions", nodes_xyz.tobytes()) 183 | zipfile.writestr("/text", json.dumps(node_txt)) 184 | zip_data = b64encode(zip_data.getvalue()).decode("utf-8") 185 | 186 | # read html template 187 | _html_template = _read_file( 188 | os.path.join(os.path.dirname(__file__), "resources", "template.html") 189 | ) 190 | 191 | # format html template file 192 | min_value = 0 193 | max_value = 0 194 | if fringe_limits: 195 | min_value = fringe_limits[0] 196 | max_value = fringe_limits[1] 197 | elif isinstance(field, np.ndarray): 198 | min_value = field.min() 199 | max_value = field.max() 200 | 201 | _html_div = _html_template.format( 202 | div_id=uuid.uuid4(), lowIntensity=min_value, highIntensity=max_value, zdata=zip_data 203 | ) 204 | 205 | # wrap it up with all needed js libraries 206 | 207 | script_string_js = '<script type="text/javascript">{0}</script>' 208 | jszip_js_format = _read_file( 209 | os.path.join(os.path.dirname(__file__), "resources", "jszip.min.js") 210 | ) 211 | jszip_three_format = _read_file( 212 | os.path.join(os.path.dirname(__file__), "resources", "three.min.js") 213 | ) 214 | jszip_chroma_format = _read_file( 215 | os.path.join(os.path.dirname(__file__), "resources", "chroma.min.js") 216 | ) 217 | jszip_jquery_format = _read_file( 218 | os.path.join(os.path.dirname(__file__), "resources", "jquery.min.js") 219 | ) 220 | _html_jszip_js = script_string_js.format(jszip_js_format) 221 | _html_three_js = script_string_js.format(jszip_three_format) 222 | _html_chroma_js = 
script_string_js.format(jszip_chroma_format) 223 | _html_jquery_js = script_string_js.format(jszip_jquery_format) 224 | 225 | # pylint: disable = consider-using-f-string 226 | return f""" 227 | <!DOCTYPE html> 228 | <html> 229 | <head> 230 | 231 | {_html_jquery_js} 232 | {_html_jszip_js} 233 | {_html_three_js} 234 | {_html_chroma_js} 235 | </head> 236 | <body> 237 | {_html_div} 238 | </body></html> 239 | """ 240 | -------------------------------------------------------------------------------- /src/lasso/plotting/resources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/plotting/resources/__init__.py -------------------------------------------------------------------------------- /src/lasso/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/src/lasso/utils/__init__.py -------------------------------------------------------------------------------- /src/lasso/utils/console_coloring.py: -------------------------------------------------------------------------------- 1 | class ConsoleColoring: 2 | """Holds coloring escape sequences for command line shells""" 3 | 4 | # text coloring 5 | LIGHT_GREEN = "\033[92m" 6 | LIGHT_RED = "\033[91m" 7 | LIGHT_CYAN = "\033[96m" 8 | LIGHT_BLUE = "\033[94m" 9 | LIGHT_PURPLE = "\033[95m" 10 | LIGHT_YELLOW = "\033[93m" 11 | 12 | PURPLE = "\033[95m" 13 | RED = "\033[91m" 14 | GREEN = "\u001b[32m" 15 | CYAN = "\u001b[36m" 16 | WHITE = "\u001b[37m" 17 | BLACK = "\u001b[30m" 18 | BLUE = "\u001b[34m" 19 | ORANGE = "\u001b[33m" 20 | 21 | # special stuff 22 | BOLD = "\033[1m" 23 | UNDERLINE = "\033[4m" 24 | REVERSED = "\u001b[7m" 25 | 26 | # ends coloring 27 | RESET = "\033[0m" 28 | 29 | @staticmethod 30 | def purple(msg, light=False): 31 | """Format a string in purple 32 | 33 | Parameters 34 | ---------- 35 | msg : `str` 36 | string to format 37 | light : `bool` 38 | whether to use light coloring 39 | 40 | Returns 41 | ------- 42 | formatted_msg : `str` 43 | string colored for console output 44 | """ 45 | return ( 46 | ConsoleColoring.LIGHT_PURPLE + msg + ConsoleColoring.RESET 47 | if light 48 | else ConsoleColoring.PURPLE + msg + ConsoleColoring.RESET 49 | ) 50 | 51 | @staticmethod 52 | def yellow(msg, light=False): 53 | """Format a string in yellow 54 | 55 | Parameters 56 | ---------- 57 | msg : `str` 58 | string to format 59 | light : `bool` 60 | whether to use light coloring 61 | 62 | Returns 63 | ------- 64 | formatted_msg : `str` 65 | string colored for console output 66 | """ 67 | return ( 68 | ConsoleColoring.LIGHT_YELLOW + msg + ConsoleColoring.RESET 69 | if light 70 | else ConsoleColoring.ORANGE + msg + ConsoleColoring.RESET 71 | ) 72 | 73 | @staticmethod 74 | def red(msg, light=False): 75 | """Format a string in red 76 | 77 | Parameters 78 | ---------- 79 | msg : `str` 80 | string to format 81 | light : `bool` 82 | whether to use light coloring 83 | 84 | Returns 85 | ------- 86 | formatted_msg : `str` 87 | string colored for console output 88 | """ 89 | return ( 90 | ConsoleColoring.LIGHT_RED + msg + ConsoleColoring.RESET 91 | if light 92 | else ConsoleColoring.RED + msg + ConsoleColoring.RESET 93 | ) 94 | 95 | @staticmethod 96 | def green(msg, light=False): 97 | """Format a string in green 98 | 99 | Parameters 100 | ---------- 101 | msg : `str` 102 | string to format 103 | light : `bool` 104 | whether
to use light coloring 105 | 106 | Returns 107 | ------- 108 | formatted_msg : `str` 109 | string colored for console output 110 | """ 111 | return ( 112 | ConsoleColoring.LIGHT_GREEN + msg + ConsoleColoring.RESET 113 | if light 114 | else ConsoleColoring.GREEN + msg + ConsoleColoring.RESET 115 | ) 116 | 117 | @staticmethod 118 | def blue(msg, light=False): 119 | """Format a string in blue 120 | 121 | Parameters 122 | ---------- 123 | msg : `str` 124 | string to format 125 | light : `bool` 126 | whether to use light coloring 127 | 128 | Returns 129 | ------- 130 | formatted_msg : `str` 131 | string colored for console output 132 | """ 133 | return ( 134 | ConsoleColoring.LIGHT_BLUE + msg + ConsoleColoring.RESET 135 | if light 136 | else ConsoleColoring.BLUE + msg + ConsoleColoring.RESET 137 | ) 138 | -------------------------------------------------------------------------------- /src/lasso/utils/decorators.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | 3 | 4 | # Source: https://www.fast.ai/2019/08/06/delegation/ 5 | 6 | 7 | def delegates(to_function=None, keep=False): 8 | """Decorator: replace `**kwargs` in signature with params from `to`""" 9 | 10 | def _f(f_att): 11 | if to_function is None: 12 | to_f, from_f = f_att.__base__.__init__, f_att.__init__ 13 | else: 14 | to_f, from_f = to_function, f_att 15 | sig = inspect.signature(from_f) 16 | sig_dict = dict(sig.parameters) 17 | k = sig_dict.pop("kwargs") 18 | s2_dict = { 19 | k: v 20 | for k, v in inspect.signature(to_f).parameters.items() 21 | if v.default != inspect.Parameter.empty and k not in sig_dict 22 | } 23 | sig_dict.update(s2_dict) 24 | if keep: 25 | sig_dict["kwargs"] = k 26 | # noinspection PyTypeChecker 27 | from_f.__signature__ = sig.replace(parameters=sig_dict.values()) 28 | return f_att 29 | 30 | return _f 31 | 32 | 33 | def custom_dir(custom_c, add): 34 | """Return the attributes of an object's type plus its instance attributes plus extra names""" 35 | return dir(type(custom_c)) + list(custom_c.__dict__.keys()) + add 36 | 37 | 38 | class GetAttr: 39 | """ 40 | Base class for attr accesses in `self._xtra` passed down to `self.default` 41 | """ 42 | 43 | @property 44 | def _xtra(self): 45 | return [o for o in dir(self.default) if not o.startswith("_")] 46 | 47 | def __getattr__(self, k): 48 | if k in self._xtra: 49 | return getattr(self.default, k) 50 | raise AttributeError(k) 51 | 52 | def __dir__(self): 53 | return custom_dir(self, self._xtra) 54 | -------------------------------------------------------------------------------- /src/lasso/utils/language.py: -------------------------------------------------------------------------------- 1 | def get_var(name, context, default=None): 2 | """Get a variable from a dict context 3 | 4 | Parameters 5 | ---------- 6 | name : str or list(str) 7 | variable name 8 | context : dict 9 | variable dict 10 | default: 11 | default value, or a function creating the default value, 12 | used if the var is not found in the context 13 | """ 14 | if not isinstance(name, (list, tuple)): 15 | name = (name,) 16 | 17 | current_context = context 18 | for current_name in name: 19 | if current_name in current_context: 20 | current_context = current_context[current_name] 21 | else: 22 | return default() if callable(default) else default 23 | 24 | return current_context 25 | 26 | 27 | def set_var(name, value, context): 28 | """ 29 | Set a variable in a dict context 30 | 31 | Parameters 32 | ---------- 33 | name : str or list(str) 34 | variable name or path as list 35 | value : object 36 | anything to set 37 |
context : dict 38 | variable dict 39 | """ 40 | 41 | if not isinstance(name, (list, tuple)): 42 | name = (name,) 43 | 44 | current_context = context 45 | for i_name, current_name in enumerate(name): 46 | # at last level set var 47 | if i_name == len(name) - 1: 48 | current_context[current_name] = value 49 | # otherwise iterate into next level 50 | elif current_name in current_context: 51 | current_context = current_context[current_name] 52 | else: 53 | new_level = {} 54 | current_context[current_name] = new_level 55 | current_context = new_level 56 | -------------------------------------------------------------------------------- /src/lasso/utils/rich_progress_bars.py: -------------------------------------------------------------------------------- 1 | import math 2 | import time 3 | from typing import Any 4 | 5 | from rich.progress import ProgressColumn 6 | 7 | 8 | class PlaceHolderBar: 9 | """This bar is simply a placeholder bar""" 10 | 11 | finished: bool 12 | tasks: list 13 | 14 | # noinspection PyUnusedLocal 15 | # pylint: disable = unused-argument 16 | def __init__(self, **kwargs): 17 | """This is a placeholder to not clutter console during testing""" 18 | self.finished = False 19 | self.tasks = []  # per-instance list so bars do not share task state 20 | 21 | # noinspection PyUnusedLocal 22 | # pylint: disable = unused-argument 23 | def render(self, task: Any) -> str: 24 | """returns the planned output: empty string""" 25 | return "" 26 | 27 | def add_task(self, description: str, total: int) -> int: 28 | """Adds a new task""" 29 | self.tasks.append([description, total, 0]) 30 | # each entry is a list of [description, total steps, steps done] 31 | return len(self.tasks) - 1 32 | 33 | def advance(self, task_id): 34 | """advances the given task""" 35 | prog = self.tasks[task_id][2] 36 | prog += 1 37 | self.tasks[task_id][2] = prog 38 | if prog == self.tasks[task_id][1]: 39 | self.finished = True 40 | 41 | def __enter__(self): 42 | self.finished = False 43 | 44 | def __exit__(self, exception_type, exception_value, traceback): 45 | self.finished = True 46 | 47 | 48 | class WorkingDots(ProgressColumn): 49 | """Animated dots column indicating that work is in progress""" 50 | 51 | max_refresh = 0.5 52 | is_silenced: bool = False 53 | 54 | def __init__(self, output=True): 55 | self.counter = 0 56 | if not output: 57 | self.is_silenced = True 58 | super().__init__() 59 | 60 | def render(self, task: Any) -> str: 61 | self.counter += 1 62 | if self.is_silenced: 63 | return "" 64 | if task.completed == task.total: 65 | msg = "..." 66 | elif self.counter % 3 == 0: 67 | msg = ". " 68 | elif self.counter % 3 == 1: 69 | msg = ".. " 70 | else: 71 | msg = "..."
72 | self.counter = 2 73 | return msg 74 | 75 | 76 | class SubsamplingWaitTime(ProgressColumn): 77 | """Progress column estimating the remaining wait time of parallel subsampling runs""" 78 | 79 | max_refresh = 0.5 80 | 81 | def __init__(self, n_proc: int): 82 | super().__init__() 83 | 84 | # Last time we updated 85 | self.last_time = time.time() 86 | # Cumulative time of all completed sub-sampling processes 87 | self.cum_time = 0 88 | # Number of parallel running processes 89 | self.n_proc = n_proc 90 | self.t_rem = -1 91 | 92 | def render(self, task: Any) -> str: 93 | """Renders the estimated remaining time as 'Time remaining: MM:SS'""" 94 | 95 | if task.completed == task.total: 96 | return "Time remaining: 00:00" 97 | 98 | if self.cum_time > 0: 99 | avrg_time = self.cum_time / max(1, task.completed) 100 | rem_tasks = task.total - task.completed 101 | gr_tasks = math.floor(rem_tasks / self.n_proc) 102 | if (rem_tasks % self.n_proc) != 0: 103 | gr_tasks += 1 104 | 105 | total_time = gr_tasks * avrg_time 106 | if self.t_rem < 0 or self.t_rem > total_time: 107 | self.t_rem = total_time 108 | 109 | t_out = self.t_rem - (time.time() - self.last_time) 110 | mins = str(math.floor(t_out / 60)) 111 | secs = str(math.trunc(t_out % 60)) 112 | 113 | if len(mins) == 1: 114 | mins = "0" + mins 115 | if len(secs) == 1: 116 | secs = "0" + secs 117 | out_str = "Time remaining: " + mins + ":" + secs 118 | return out_str 119 | 120 | return "Time remaining: --:--" 121 | 122 | def update_avrg(self, new_time: float): 123 | """Adds the runtime of a finished subsampling process to the cumulative time""" 124 | 125 | self.cum_time += new_time 126 | self.last_time = time.time() 127 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/__init__.py -------------------------------------------------------------------------------- /test/plot_creator_helper.py: -------------------------------------------------------------------------------- 1 | import math 2 | import os 3 | import random 4 | 5 | import numpy as np 6 | import plotly.graph_objects as go 7 | 8 | from lasso.dyna.d3plot import ArrayType, D3plot 9 | 10 | 11 | # FIXME: there are no tests in this file. probably dead code.
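# Illustrative usage sketch (editorial addition with assumed values, not part
# of the original source): the helpers below build a rectangular
# n_nodes_x x n_nodes_y shell grid and bend it upwards or downwards over the
# timesteps via a sine profile scaled by bend_multiplicator, e.g.:
#
#   shells = create_element_shell_node_indexes(n_nodes_x=5, n_nodes_y=3)
#   create_fake_d3plots(
#       path="out/plot00",  # hypothetical output folder
#       element_shell_node_indexes=shells,
#       bend_multiplicator=5.0,
#       n_nodes_x=5,
#       n_nodes_y=3,
#   )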
12 | def create_fake_d3plots( 13 | path: str, 14 | element_shell_node_indexes: np.ndarray, 15 | bend_multiplicator: float, 16 | n_nodes_x: int = 500, 17 | n_nodes_y: int = 10, 18 | n_timesteps: int = 5, 19 | ): 20 | """ 21 | Creates a number of artificial D3plots to be used in testing 22 | """ 23 | 24 | # if bend_multiplicator > 0: 25 | # bend_loc_x = int(n_nodes_x/10) 26 | # bend_start = bend_loc_x - int(bend_loc_x/2) 27 | # bend_end = bend_loc_x + int(bend_loc_x/2) 28 | # else: 29 | # bend_loc_x = n_nodes_x - int(n_nodes_x/10) 30 | # bend_start = bend_loc_x - int(n_nodes_x/20) 31 | # bend_end = bend_loc_x + int(n_nodes_x/20) 32 | 33 | x_coords = np.arange(n_nodes_x) 34 | y_coords = np.arange(n_nodes_y) 35 | # z_bend_mat = np.stack( 36 | # [np.array([1+math.sin(x*2*math.pi/(bend_end - bend_start)) 37 | # for x in range(bend_end - bend_start)] 38 | # ) 39 | # for _ in range(n_nodes_y)]).reshape(((bend_end - bend_start)*n_nodes_y)) 40 | 41 | z_bend_mat = np.stack([ 42 | np.array([1 + math.sin(x * math.pi / n_nodes_x) for x in range(n_nodes_x)]) 43 | for _ in range(n_nodes_y) 44 | ]).reshape((n_nodes_x * n_nodes_y,)) 45 | node_coordinates = np.zeros((n_nodes_x * n_nodes_y, 3)) 46 | 47 | # fill in y coords 48 | for n in range(n_nodes_y): 49 | node_coordinates[n * n_nodes_x : n_nodes_x + n * n_nodes_x, 1] = y_coords[n] 50 | node_coordinates[n * n_nodes_x : n_nodes_x + n * n_nodes_x, 0] = x_coords 51 | # fill in x coords 52 | # for n in range(n_nodes_x): 53 | # node_coordinates[n*n_nodes_y:n_nodes_y+n*n_nodes_y, 0] = x_coords[n] 54 | 55 | node_displacement = np.zeros((n_timesteps, n_nodes_x * n_nodes_y, 3)) 56 | 57 | for t in range(n_timesteps): 58 | node_displacement[t] = node_coordinates 59 | # node_displacement[t, bend_start*n_nodes_y:bend_end*n_nodes_y, 2] = \ 60 | # z_bend_mat * bend_multiplicator * t 61 | node_displacement[t, :, 2] = z_bend_mat * bend_multiplicator * t 62 | 63 | # print(node_displacement.shape) 64 | 65 | plot = D3plot() 66 | plot.arrays[ArrayType.node_displacement] = node_displacement 67 | plot.arrays[ArrayType.node_coordinates] = node_coordinates 68 | plot.arrays[ArrayType.element_shell_node_indexes] = element_shell_node_indexes 69 | plot.arrays[ArrayType.element_shell_part_indexes] = np.full( 70 | (element_shell_node_indexes.shape[0]), 0 71 | ) 72 | 73 | # we could create an artificial array element_shell_is_alive to test the 74 | # correct part extraction process; not necessary currently 75 | 76 | os.makedirs(path, exist_ok=True) 77 | plot.write_d3plot(os.path.join(path, "plot")) 78 | # plotUtilFunc(node_displacement) 79 | 80 | 81 | def plot_util_func(xyz_array: np.ndarray): 82 | trace = go.Scatter3d( 83 | x=xyz_array[-1, :, 0], 84 | y=xyz_array[-1, :, 1], 85 | z=xyz_array[-1, :, 2], 86 | mode="markers", 87 | text=np.arange(xyz_array.shape[1]), 88 | ) 89 | fig = go.Figure([trace]) 90 | fig.show() 91 | 92 | 93 | def create_element_shell_node_indexes(n_nodes_x: int = 500, n_nodes_y: int = 10) -> np.ndarray: 94 | """ 95 | returns an element_shell_node_indexes array 96 | """ 97 | 98 | new_shell_node_indexes = np.full( 99 | ((n_nodes_x - 1) * (n_nodes_y - 1), 4), np.array([0, 1, n_nodes_x + 1, n_nodes_x]) 100 | ) 101 | mod = np.full((4, n_nodes_x - 1), np.arange(n_nodes_x - 1)) 102 | for i in range(n_nodes_y - 1): 103 | new_shell_node_indexes[(n_nodes_x - 1) * i : (n_nodes_x - 1) + ((n_nodes_x - 1) * i)] += ( 104 | mod + i * n_nodes_x 105 | ).T 106 | 107 | return new_shell_node_indexes 108 | 109 | 110 | def create_n_fake_plots(folder: str, n_nodes_x: int, n_nodes_y: int,
n_timesteps=5, n=50): 111 | """ 112 | creates `n` fake plots, `n/2` bending up, `n/2` bending down 113 | 114 | Parameters 115 | ---------- 116 | folder: str 117 | folder path 118 | n_nodes_x: int 119 | how many nodes in x 120 | n_nodes_y: int 121 | how many nodes in y 122 | n_timesteps: int, default: 5 123 | how many timesteps 124 | n: int, default: 50 125 | how many plots 126 | """ 127 | 128 | # init random 129 | randy_random = random.Random("The_Seed") 130 | 131 | plot_name = "SVDTestPlot{i}" 132 | 133 | # doesn't change for each plot with same dimensions, so only created once 134 | element_shell_node_indexes = create_element_shell_node_indexes( 135 | n_nodes_x=n_nodes_x, n_nodes_y=n_nodes_y 136 | ) 137 | 138 | # n/2 plots bending up 139 | for i in range(int(n / 2)): 140 | create_fake_d3plots( 141 | path=os.path.join(folder, plot_name.format(i=f"{i:02d}")), 142 | element_shell_node_indexes=element_shell_node_indexes, 143 | bend_multiplicator=5 * (1 + randy_random.random()), 144 | n_nodes_x=n_nodes_x, 145 | n_nodes_y=n_nodes_y, 146 | n_timesteps=n_timesteps, 147 | ) 148 | 149 | # n/2 plots bending down 150 | for i in range(int(n / 2)): 151 | create_fake_d3plots( 152 | path=os.path.join(folder, plot_name.format(i=f"{i + int(n / 2):02d}")), 153 | element_shell_node_indexes=element_shell_node_indexes, 154 | bend_multiplicator=-5 * (1 + randy_random.random()), 155 | n_nodes_x=n_nodes_x, 156 | n_nodes_y=n_nodes_y, 157 | n_timesteps=n_timesteps, 158 | ) 159 | 160 | 161 | # TODO: Remove after fixing D3plot writing two files issue 162 | # if __name__ == "__main__": 163 | # create_2_fake_plots("../delteThisPlease/", 200, 10) 164 | -------------------------------------------------------------------------------- /test/test_data/DimredRunTest/verificationFile.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/DimredRunTest/verificationFile.hdf5 -------------------------------------------------------------------------------- /test/test_data/d3plot_beamip/d3plot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_beamip/d3plot -------------------------------------------------------------------------------- /test/test_data/d3plot_beamip/d3plot01: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_beamip/d3plot01 -------------------------------------------------------------------------------- /test/test_data/d3plot_node_temperature/d3plot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_node_temperature/d3plot -------------------------------------------------------------------------------- /test/test_data/d3plot_node_temperature/d3plot01: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_node_temperature/d3plot01 --------------------------------------------------------------------------------
/test/test_data/d3plot_solid_int/d3plot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot01: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot01 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot02: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot02 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot03: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot03 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot04: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot04 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot05: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot05 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot06: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot06 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot07: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot07 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot08: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot08 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot09: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot09 -------------------------------------------------------------------------------- 
/test/test_data/d3plot_solid_int/d3plot10: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot10 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot11: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot11 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot12 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot13: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot13 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot14: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot14 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot15: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot15 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot16: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot16 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot17: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot17 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot18: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot18 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot19: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot19 -------------------------------------------------------------------------------- 
/test/test_data/d3plot_solid_int/d3plot20: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot20 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot21: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot21 -------------------------------------------------------------------------------- /test/test_data/d3plot_solid_int/d3plot22: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/d3plot_solid_int/d3plot22 -------------------------------------------------------------------------------- /test/test_data/femzip/d3plot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/femzip/d3plot -------------------------------------------------------------------------------- /test/test_data/femzip/d3plot.fz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/femzip/d3plot.fz -------------------------------------------------------------------------------- /test/test_data/femzip/d3plot01: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/femzip/d3plot01 -------------------------------------------------------------------------------- /test/test_data/io_test/file1.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test/test_data/io_test/subfolder/file2.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test/test_data/io_test/subfolder/file3.yay: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot01: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot01 -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot02: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot02 -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot10: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot10 -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot100: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot100 -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot11: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot11 -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot12 -------------------------------------------------------------------------------- /test/test_data/order_d3plot/d3plot22: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/order_d3plot/d3plot22 -------------------------------------------------------------------------------- /test/test_data/simple_d3plot/d3plot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/simple_d3plot/d3plot -------------------------------------------------------------------------------- /test/test_data/simple_d3plot/d3plot01: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/test_data/simple_d3plot/d3plot01 -------------------------------------------------------------------------------- /test/unit_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/unit_tests/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/dimred/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/unit_tests/dimred/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/dimred/svd/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/unit_tests/dimred/svd/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/dimred/svd/test_clustering_betas.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | import numpy as np 4 | 5 | from lasso.dimred.svd.clustering_betas import group_betas 6 | from lasso.dimred.svd.keyword_types import ClusterType, DetectorType 7 | 8 | 9 | class TestClustering(TestCase): 10 | def test_group_betas(self): 11 | """Tests correct functioning of the group_betas function 12 | in clustering_betas.py""" 13 | 14 | fake_names = np.array([f"betas_{i}" for i in range(25)]) 15 | fake_cluster_0 = np.random.rand(12, 3) + 5 16 | fake_cluster_1 = np.random.rand(12, 3) - 5 17 | fake_betas = np.stack([*fake_cluster_0, *fake_cluster_1, np.array([0, 0, 0])]) 18 | expected_clusters = 2 19 | expected_outliers = 1 20 | 21 | # test with recommended settings 22 | beta_clusters, name_clusters = group_betas( 23 | fake_names, 24 | fake_betas, 25 | cluster=ClusterType.KMeans, 26 | detector=DetectorType.LocalOutlierFactor, 27 | cluster_params={"n_clusters": expected_clusters}, 28 | ) 29 | 30 | # verify correct type of output 31 | self.assertIsInstance(beta_clusters, list) 32 | self.assertIsInstance(name_clusters, list) 33 | 34 | # verify that beta_clusters and name_clusters correspond to each other 35 | self.assertEqual(len(beta_clusters), len(name_clusters)) 36 | # verify that beta_clusters contains as many clusters as searched for, 37 | # including one outlier cluster 38 | self.assertEqual(len(beta_clusters), expected_clusters + expected_outliers) 39 | 40 | # verify that entries correspond to each other 41 | for c, cluster in enumerate(name_clusters): 42 | for e, entry in enumerate(cluster): 43 | index = np.where(fake_names == entry)[0] 44 | self.assertTrue((fake_betas[index] - beta_clusters[c][e]).max() == 0) 45 | 46 | # verify different keyword combinations 47 | 48 | for cluster_type in ClusterType.get_cluster_type_name(): 49 | for detector_type in DetectorType.get_detector_type_name(): 50 | beta_clusters, name_clusters = group_betas( 51 | fake_names, fake_betas, cluster=cluster_type, detector=detector_type 52 | ) 53 | 54 | # verify correct output 55 | self.assertIsInstance(beta_clusters, list) 56 | self.assertIsInstance(name_clusters, list) 57 | self.assertEqual(len(beta_clusters), len(name_clusters)) 58 | -------------------------------------------------------------------------------- /test/unit_tests/dimred/svd/test_plot_betas_clusters.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | from unittest import TestCase 3 | 4 | import numpy as np 5 | 6 | from lasso.dimred.svd.plot_beta_clusters import plot_clusters_js 7 | 8 | 9 | def hash_str(data: str) -> str: 10 | """Hashes a string with SHA-256""" 11 | 12 | hasher1 = hashlib.sha256() 13 | hasher1.update(data.encode("utf-8")) 14 | return hasher1.hexdigest() 15 | 16 | 17 | class TestBetaViz(TestCase): 18 | def test_plot_clusters_js(self): 19 | """Verifies correct output of the .html file""" 20 | 21 | betas = [np.array([[1, 1, 1], [1, 2, 3]])] 22 | ids = np.array([["sample0", "sample1"]]) 23 | 24 | html_page_str = plot_clusters_js( 25 | betas, ids, "", mark_timestamp=False, filename="testpage", write=False, show_res=False 26 | ) 27 | 28 | self.assertIsInstance(html_page_str, str) 29 | if isinstance(html_page_str,
str): 30 | html_page_hash = hash_str(html_page_str) 31 | 32 | desired_hash = "53f32e658079dfe8b9f24d7b8ff05a1d253abab77185203e408bfd942c837eeb" 33 | self.assertEqual(html_page_hash, desired_hash) 34 | -------------------------------------------------------------------------------- /test/unit_tests/dimred/svd/test_pod_functions.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | import numpy as np 4 | 5 | from lasso.dimred.svd.pod_functions import calculate_v_and_betas 6 | 7 | 8 | class PodFunctionsTest(TestCase): 9 | def test_calculate_v_and_betas(self): 10 | """Verify svd works 11 | Test for: 12 | - returns V and B of correct shape 13 | - fails if the dataset is too small (1 sample)""" 14 | 15 | # random input for 1 sample, 5 timesteps, 100 nodes, 3 dimensions 16 | rand_samples = np.random.rand(1, 5, 100, 3) 17 | 18 | # should return error message string 19 | err_msg = calculate_v_and_betas(rand_samples) 20 | self.assertTrue(isinstance(err_msg, str)) 21 | 22 | # random input for 5 samples, 5 timesteps, 100 nodes, 3 dimensions 23 | test_shape = (5, 5, 100, 3) 24 | samples, timesteps, nodes, dimensions = test_shape 25 | rand_samples = np.random.rand(samples, timesteps, nodes, dimensions) 26 | result = calculate_v_and_betas(rand_samples) 27 | 28 | # returns a tuple containing v_rob and betas 29 | self.assertTrue(isinstance(result, tuple)) 30 | 31 | v_rob, betas = result 32 | 33 | # v_rob and betas should both be numpy arrays 34 | self.assertTrue(isinstance(v_rob, np.ndarray)) 35 | self.assertTrue(isinstance(betas, np.ndarray)) 36 | 37 | # v_rob should be of shape (k_eigen, timesteps, nodes*dimensions) 38 | # k_eigen should be min(10, samples-1), so in this case k_eigen = samples-1 = 4 39 | k_eigen = min(10, samples - 1) 40 | self.assertEqual(v_rob.shape, (k_eigen, timesteps, nodes * dimensions)) 41 | 42 | # betas should be of shape (samples, timesteps, k_eigen) 43 | self.assertEqual(betas.shape, (samples, timesteps, k_eigen)) 44 | 45 | # v_rob and betas should reconstruct the displacement differences of the original samples 46 | reshaped_samples = rand_samples.reshape(samples, timesteps, nodes * dimensions) 47 | 48 | delta_displ = reshaped_samples[:, :] - np.stack([ 49 | reshaped_samples[0, :] for _ in range(timesteps) 50 | ]) 51 | 52 | recalc_displ = np.einsum("ktn, stk -> stn", v_rob, betas) 53 | 54 | # check if both the original and the recalculation have the same shape 55 | self.assertEqual(delta_displ.shape, recalc_displ.shape) 56 | -------------------------------------------------------------------------------- /test/unit_tests/dimred/svd/test_subsampling_methods.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | from unittest import TestCase 4 | 5 | import numpy as np 6 | 7 | from lasso.dimred.svd.subsampling_methods import create_reference_subsample, remap_random_subsample 8 | from test.plot_creator_helper import create_n_fake_plots 9 | 10 | 11 | class TestSubsampling(TestCase): 12 | def test_create_reference_sample(self): 13 | """Tests the creation of a reference sample""" 14 | 15 | with tempfile.TemporaryDirectory() as tmp_dir: 16 | create_n_fake_plots(tmp_dir, 500, 10, n=2) 17 | load_path = os.path.join(tmp_dir, "SVDTestPlot00/plot") 18 | n_nodes = 200 19 | 20 | result = create_reference_subsample(load_path, parts=[], nr_samples=n_nodes) 21 | 22 | # result should be a tuple containing the subsample, total process time and load time 23 | self.assertTrue(isinstance(result,
tuple)) 24 | 25 | ref_sample, t_total, t_load = result 26 | 27 | # check for correct types 28 | self.assertTrue(isinstance(ref_sample, np.ndarray)) 29 | self.assertTrue(isinstance(t_total, float)) 30 | self.assertTrue(isinstance(t_load, float)) 31 | 32 | # t_total should be at least as large as t_load 33 | self.assertTrue(t_total - t_load >= 0) 34 | 35 | # check for correct dimensions of ref_sample 36 | self.assertEqual(ref_sample.shape, (n_nodes, 3)) 37 | 38 | # should return a string error message if the desired sample size is greater 39 | # than the number of available nodes 40 | n_nodes = 5500 41 | result = create_reference_subsample(load_path, parts=[], nr_samples=n_nodes) 42 | 43 | self.assertTrue(isinstance(result, str)) 44 | 45 | # should return a string error message for nonexistent parts: 46 | n_nodes = 200 47 | result = create_reference_subsample(load_path, parts=[1], nr_samples=n_nodes) 48 | 49 | self.assertTrue(isinstance(result, str)) 50 | 51 | def test_remap_random_subsample(self): 52 | """Verifies correct subsampling""" 53 | 54 | with tempfile.TemporaryDirectory() as tmp_dir: 55 | create_n_fake_plots(tmp_dir, 500, 10, n=2) 56 | ref_path = os.path.join(tmp_dir, "SVDTestPlot00/plot") 57 | sample_path = os.path.join(tmp_dir, "SVDTestPlot01/plot") 58 | n_nodes = 200 59 | 60 | ref_result = create_reference_subsample(ref_path, parts=[], nr_samples=n_nodes) 61 | 62 | ref_sample = ref_result[0] 63 | 64 | sub_result = remap_random_subsample( 65 | sample_path, parts=[], reference_subsample=ref_sample 66 | ) 67 | 68 | # sub_result should be a tuple containing the subsample, total process time, 69 | # and plot load time 70 | self.assertTrue(isinstance(sub_result, tuple)) 71 | 72 | subsample, t_total, t_load = sub_result 73 | 74 | # confirm correct types 75 | self.assertTrue(isinstance(subsample, np.ndarray)) 76 | self.assertTrue(isinstance(t_total, float)) 77 | self.assertTrue(isinstance(t_load, float)) 78 | 79 | # t_total should be at least as large as t_load 80 | self.assertTrue(t_total - t_load >= 0) 81 | 82 | # correct shape of subsample 83 | self.assertEqual(subsample.shape, (5, n_nodes, 3)) 84 | 85 | # entries of the subsample at timestep 0 should be the same as the reference sample; 86 | # this is only true for the dimredTestPlots and might not be the case 87 | # with real plots, so we check if the difference is 0 88 | self.assertTrue((ref_sample - subsample[0]).max() == 0) 89 | 90 | # should return a string error message for nonexistent parts: 91 | err_msg = remap_random_subsample(sample_path, parts=[1], reference_subsample=ref_sample) 92 | 93 | self.assertTrue(isinstance(err_msg, str)) 94 | -------------------------------------------------------------------------------- /test/unit_tests/dimred/test_dimred_run.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | from unittest import TestCase 4 | 5 | import h5py 6 | import numpy as np 7 | 8 | from lasso.dimred.dimred_run import DIMRED_STAGES, DimredRun, DimredRunError, HDF5FileNames 9 | from test.plot_creator_helper import create_n_fake_plots 10 | 11 | 12 | class TestDimredRun(TestCase): 13 | def test_run(self): 14 | """Verifies correct functioning of DimredRun.py""" 15 | verification_hdf5_file = h5py.File( 16 | "test/test_data/DimredRunTest/verificationFile.hdf5", "r" 17 | ) 18 | 19 | with tempfile.TemporaryDirectory() as tmpdir: 20 | # create simulation runs 21 | create_n_fake_plots(folder=tmpdir, n_nodes_x=500, n_nodes_y=10) 22 | 23 | # collect all simulation runs 24 | # sim_dir = "test/dimredTestPlots" 25 |
sim_files = os.listdir(tmpdir) 26 | # sim_files.pop(sim_files.index("htmlTestPage.html")) 27 | sim_runs = [] 28 | for sim in sim_files: 29 | sim_runs.append(os.path.join(tmpdir, sim, "plot")) 30 | 31 | test_run = DimredRun( 32 | reference_run=os.path.join(tmpdir, "SVDTestPlot00/plot"), 33 | simulation_runs=sim_runs, 34 | start_stage=DIMRED_STAGES[0], 35 | end_stage="CLUSTERING", 36 | console=None, 37 | project_dir="test/test_data/DimredRunTest", 38 | n_processes=5, 39 | cluster_args=["kmeans"], 40 | ) 41 | 42 | with test_run: 43 | # verify creation of reference_subsample 44 | # to be able to reproduce results, each DimredRun must return the same results 45 | test_run.process_reference_run() 46 | 47 | # check if reference subsamples match 48 | test_refsample = test_run.h5file[HDF5FileNames.SUBSAMPLE_SAVE_NAME.value] 49 | verification_refsample = verification_hdf5_file[ 50 | HDF5FileNames.SUBSAMPLE_SAVE_NAME.value 51 | ] 52 | self.assertEqual(test_refsample.shape, verification_refsample.shape) 53 | self.assertTrue((test_refsample[:] - verification_refsample[:]).max() == 0) 54 | 55 | # check if the expected reference run is chosen 56 | self.assertEqual( 57 | os.path.abspath(os.path.join(tmpdir, "SVDTestPlot00/plot")), 58 | test_run.reference_run, 59 | ) 60 | 61 | # check if subsampled samples match 62 | test_run.subsample_to_reference_run() 63 | 64 | # get subsampled samples 65 | test_sub_group = test_run.h5file[HDF5FileNames.SUBSAMPLED_GROUP_NAME.value] 66 | test_subs = np.stack([test_sub_group[key][:] for key in test_sub_group.keys()]) 67 | 68 | # check if shape is equal to (n_samples, timesteps, subsampled nodes, dims) 69 | # we have 50 samples; minus the reference run that leaves 49 70 | # we have 5 timesteps 71 | # we subsample to 2000 nodes 72 | # we always have 3 spatial dimensions 73 | self.assertEqual(test_subs.shape, (49, 5, 2000, 3)) 74 | 75 | # check if svd yields consistent results 76 | test_run.dimension_reduction_svd() 77 | 78 | # get test betas 79 | test_betas_group = test_run.h5file[HDF5FileNames.BETAS_GROUP_NAME.value] 80 | test_ids = np.stack(list(test_betas_group.keys())) 81 | test_betas = np.stack([test_betas_group[key][:] for key in test_betas_group.keys()]) 82 | 83 | # we check if test_ids and test_betas are of correct shape 84 | # we have 49 samples, 5 timesteps and save the first 10 betas 85 | self.assertEqual(test_ids.shape, (49,)) 86 | self.assertEqual(test_betas.shape, (49, 5, 10)) 87 | 88 | test_v_rob = test_run.h5file[HDF5FileNames.V_ROB_SAVE_NAME.value][:] 89 | # shape of v_rob must be (k_eigen, timesteps, nodes * dims) 90 | self.assertEqual(test_v_rob.shape, (10, 5, 2000 * 3)) 91 | 92 | # verify that calculated betas are reproducible as expected 93 | # first, create a displacement matrix containing the difference in displacement over time 94 | verify_displ_stacked = test_subs.reshape(49, 5, 2000 * 3) 95 | verify_diff_mat = np.stack([ 96 | verify_displ_stacked[:, 0, :] for _ in range(5) 97 | ], axis=1) 98 | verify_displ_stacked = verify_displ_stacked - verify_diff_mat 99 | 100 | # calculate betas and check if they are similar 101 | verify_betas = np.einsum("stn, ktn -> stk", verify_displ_stacked, test_v_rob) 102 | self.assertTrue(np.allclose(verify_betas, test_betas)) 103 | 104 | # recalculate displ 105 | recalc_displ_stacked = np.einsum("stk, ktn -> stn", test_betas, test_v_rob) 106 | 107 | # Due to projection into eigenspace and back not using all available eigenvectors, 108 | # a small error margin is inevitable 109 | self.assertTrue((verify_displ_stacked - recalc_displ_stacked).max() <= 1e-5) 110
| 111 | # checking clustering and html output makes little sense here, 112 | # but we know how the created plots are laid out: 25 bending up, 25 bending down 113 | # this should be reflected in the betas 114 | # We will only look at the last timestep 115 | # We only check the first beta 116 | 117 | # the first 24 betas point in one direction (the reference run is run 0 and points up) 118 | betas_up = test_betas[:24, -1] 119 | # the other 25 betas point down 120 | betas_down = test_betas[24:, -1] 121 | 122 | # check that the first beta has the same sign as the others bending up 123 | is_pos_up = betas_up[0, 0] > 0 124 | for b in betas_up: 125 | self.assertEqual(is_pos_up, b[0] > 0) 126 | 127 | # check that the 25th beta has the same sign as the others bending down 128 | is_pos_down = betas_down[0, 0] > 0 129 | for b in betas_down: 130 | self.assertEqual(is_pos_down, b[0] > 0) 131 | 132 | # verify that one group has a negative and the other group a positive direction 133 | self.assertFalse(is_pos_down and is_pos_up) 134 | 135 | test_run.clustering_results() 136 | 137 | # check if the glob pattern works correctly 138 | DimredRun( 139 | simulation_runs=os.path.join(tmpdir, "SVDTestPlot*/plot"), 140 | start_stage=DIMRED_STAGES[0], 141 | end_stage=DIMRED_STAGES[0], 142 | project_dir="test/test_data/DimredRunTest", 143 | console=None, 144 | ) 145 | 146 | def test_for_errors(self): 147 | """Verifies correct error behaviour when facing incorrect parser arguments""" 148 | 149 | with tempfile.TemporaryDirectory() as tmpdir: 150 | # collect all simulation runs 151 | sim_files = os.listdir(tmpdir) 152 | sim_runs = [] 153 | for sim in sim_files: 154 | sim_runs.append(os.path.join(tmpdir, sim, "plot")) 155 | 156 | # check invalid start_stage 157 | self.assertRaises( 158 | DimredRunError, 159 | DimredRun, 160 | reference_run="test/dimredTestPlots/SVDTestPlot0/plot", 161 | simulation_runs=sim_runs, 162 | start_stage="INVALID_START", 163 | end_stage=DIMRED_STAGES[-1], 164 | console=None, 165 | project_dir="test/test_data/DimredRunTest", 166 | n_processes=5, 167 | ) 168 | 169 | # check invalid end_stage 170 | self.assertRaises( 171 | DimredRunError, 172 | DimredRun, 173 | reference_run="test/dimredTestPlots/SVDTestPlot0/plot", 174 | simulation_runs=sim_runs, 175 | start_stage=DIMRED_STAGES[0], 176 | end_stage="INVALID_END", 177 | console=None, 178 | project_dir="test/test_data/DimredRunTest", 179 | n_processes=5, 180 | ) 181 | 182 | # check invalid start_stage after end_stage 183 | self.assertRaises( 184 | DimredRunError, 185 | DimredRun, 186 | reference_run="test/dimredTestPlots/SVDTestPlot0/plot", 187 | simulation_runs=sim_runs, 188 | start_stage=DIMRED_STAGES[-1], 189 | end_stage=DIMRED_STAGES[0], 190 | console=None, 191 | project_dir="test/test_data/DimredRunTest", 192 | n_processes=5, 193 | ) 194 | 195 | # check invalid simulation runs 196 | self.assertRaises( 197 | DimredRunError, 198 | DimredRun, 199 | simulation_runs="test/dimredTestPlots200/plot", 200 | start_stage=DIMRED_STAGES[0], 201 | end_stage=DIMRED_STAGES[-1], 202 | console=None, 203 | project_dir="test/test_data/DimredRunTest", 204 | n_processes=5, 205 | ) 206 | 207 | # check invalid cluster_args 208 | self.assertRaises( 209 | DimredRunError, 210 | DimredRun, 211 | simulation_runs=sim_runs, 212 | start_stage=DIMRED_STAGES[0], 213 | end_stage=DIMRED_STAGES[-1], 214 | console=None, 215 | project_dir="test/test_data/DimredRunTest", 216 | n_processes=5, 217 | cluster_args=["noMeans"], 218 | ) 219 | 220 | # check invalid outlier-args 221 | self.assertRaises( 222 | DimredRunError, 223 |
DimredRun, 224 | simulation_runs=sim_runs, 225 | start_stage=DIMRED_STAGES[0], 226 | end_stage=DIMRED_STAGES[-1], 227 | console=None, 228 | project_dir="test/test_data/DimredRunTest", 229 | n_processes=5, 230 | cluster_args=["kmeans"], 231 | outlier_args=["DoesNotExist"], 232 | ) 233 | 234 | # check nonexistent reference run 235 | self.assertRaises( 236 | DimredRunError, 237 | DimredRun, 238 | reference_run=os.path.join(tmpdir, "IDontExist"), 239 | simulation_runs=sim_runs, 240 | start_stage=DIMRED_STAGES[0], 241 | end_stage=DIMRED_STAGES[-1], 242 | console=None, 243 | project_dir="test/test_data/DimredRunTest", 244 | n_processes=5, 245 | ) 246 | # check for empty simulation runs 247 | self.assertRaises( 248 | DimredRunError, 249 | DimredRun, 250 | simulation_runs="", 251 | start_stage=DIMRED_STAGES[0], 252 | end_stage=DIMRED_STAGES[-1], 253 | console=None, 254 | project_dir="test/test_data/DimredRunTest", 255 | n_processes=5, 256 | ) 257 | 258 | def tearDown(self): 259 | # cleanup of created files 260 | test_files = os.listdir("test/test_data/DimredRunTest") 261 | test_files.pop(test_files.index("verificationFile.hdf5")) 262 | for entry in test_files: 263 | os.remove(os.path.join("test/test_data/DimredRunTest", entry)) 264 | -------------------------------------------------------------------------------- /test/unit_tests/dyna/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/unit_tests/dyna/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/dyna/test_d3plot_header.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from unittest import TestCase 3 | 4 | import numpy as np 5 | 6 | from lasso.dyna.d3plot_header import ( 7 | D3plotFiletype, 8 | D3plotHeader, 9 | d3plot_filetype_from_integer, 10 | get_digit, 11 | ) 12 | from lasso.io.binary_buffer import BinaryBuffer 13 | 14 | 15 | class D3plotHeaderTest(TestCase): 16 | def test_loading(self): 17 | filepaths = [ 18 | "test/test_data/simple_d3plot/d3plot", 19 | "test/test_data/d3plot_node_temperature/d3plot", 20 | "test/test_data/d3plot_beamip/d3plot", 21 | "test/test_data/d3plot_solid_int/d3plot", 22 | ] 23 | 24 | for filepath in filepaths: 25 | D3plotHeader().load_file(filepath) 26 | 27 | # TODO more 28 | warnings.warn(message="No assertions of behavior, test is incomplete", stacklevel=2) 29 | 30 | def test_get_digit(self) -> None: 31 | number = 1234567890 32 | 33 | # the digits are indexed from the least significant 34 | # upwards 35 | # 0 -> 0 36 | # 1 -> 9 37 | # ...
38 | number_str = str(number)[::-1] 39 | 40 | for index in range(len(number_str)): 41 | digit = get_digit(number, index) 42 | self.assertEqual( 43 | digit, 44 | int(number_str[index]), 45 | f"index {index} digit {digit} digit_str {number_str[index]}", 46 | ) 47 | 48 | self.assertEqual(get_digit(number, 10), 0) 49 | 50 | def test_d3plot_filetype_from_integer(self) -> None: 51 | self.assertEqual(d3plot_filetype_from_integer(1), D3plotFiletype.D3PLOT) 52 | self.assertEqual(d3plot_filetype_from_integer(5), D3plotFiletype.D3PART) 53 | self.assertEqual(d3plot_filetype_from_integer(11), D3plotFiletype.D3EIGV) 54 | 55 | # INFOR is forbidden 56 | with self.assertRaises(ValueError): 57 | d3plot_filetype_from_integer(4) 58 | 59 | with self.assertRaises(ValueError): 60 | d3plot_filetype_from_integer(0) 61 | 62 | def test_determine_file_settings(self) -> None: 63 | # the routine checks whether the "filetype" flag 64 | # makes sense under any circumstances; if so, 65 | # we assume the corresponding file settings 66 | 67 | # 44 -> int32 68 | # 88 -> int64 69 | for position in (44, 88): 70 | for filetype in (D3plotFiletype.D3PLOT, D3plotFiletype.D3PART, D3plotFiletype.D3EIGV): 71 | bb = BinaryBuffer() 72 | bb.memoryview = memoryview(bytearray(256)) 73 | bb.write_number(position, filetype.value, np.int32) 74 | 75 | word_size, itype, ftype = D3plotHeader._determine_file_settings(bb) 76 | 77 | if position == 44: 78 | self.assertEqual(word_size, 4) 79 | self.assertEqual(itype, np.int32) 80 | self.assertEqual(ftype, np.float32) 81 | else: 82 | self.assertEqual(word_size, 8) 83 | self.assertEqual(itype, np.int64) 84 | self.assertEqual(ftype, np.float64) 85 | 86 | # error 87 | bb = BinaryBuffer() 88 | bb.memoryview = memoryview(bytearray(256)) 89 | 90 | with self.assertRaises(RuntimeError): 91 | D3plotHeader._determine_file_settings(bb) 92 | -------------------------------------------------------------------------------- /test/unit_tests/io/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/unit_tests/io/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/io/test_binary_buffer.py: -------------------------------------------------------------------------------- 1 | import filecmp 2 | import os 3 | from unittest import TestCase 4 | 5 | import numpy as np 6 | 7 | from lasso.io.binary_buffer import BinaryBuffer 8 | 9 | 10 | class BinaryBufferTest(TestCase): 11 | def setUp(self): 12 | # read file 13 | self.bb = BinaryBuffer("test/test_data/simple_d3plot/d3plot") 14 | 15 | def test_init(self): 16 | # spot-check a few raw bytes and the buffer length 17 | self.assertEqual(self.bb.mv_[40:42].tobytes(), b"\xaf\\") 18 | self.assertEqual(len(self.bb), len(self.bb.mv_)) 19 | self.assertEqual(len(self.bb), 192512) 20 | 21 | def test_memoryview(self): 22 | self.assertEqual(self.bb.mv_, self.bb.memoryview) 23 | with self.assertRaises(TypeError): 24 | self.bb.memoryview = None 25 | self.bb.memoryview = memoryview(bytearray(b"")) 26 | 27 | def test_reading(self): 28 | # numbers 29 | self.assertEqual(self.bb.read_number(44, np.int32), 1) 30 | self.assertEqual(self.bb.read_number(56, np.float32), 960.0) 31 | 32 | self.assertEqual(self.bb.read_text(0, 40), " " * 40) 33 | self.assertEqual(self.bb.read_text(52, 4), "R712") 34 | 35 | self.assertListEqual(self.bb.read_ndarray(60, 12, 1, np.int32).tolist(), [4, 4915, 6]) 36 | 37 | def test_save(self): 38 |
self.bb.save("test/test_data/tmp") 39 | eq = filecmp.cmp("test/test_data/simple_d3plot/d3plot", "test/test_data/tmp") 40 | os.remove("test/test_data/tmp") 41 | self.assertEqual(eq, True) 42 | 43 | def test_writing(self): 44 | bb = BinaryBuffer("test/test_data/simple_d3plot/d3plot") 45 | bb.write_number(44, 13, np.int32) 46 | self.assertEqual(bb.read_number(44, np.int32), 13) 47 | 48 | array = np.array([1, 2, 3, 4], np.int32) 49 | bb.write_ndarray(array, 44, 1) 50 | self.assertListEqual(bb.read_ndarray(44, 16, 1, array.dtype).tolist(), array.tolist()) 51 | 52 | def test_size(self): 53 | bb = BinaryBuffer("test/test_data/simple_d3plot/d3plot") 54 | self.assertEqual(bb.size, 192512) 55 | self.assertEqual(bb.size, len(bb)) 56 | 57 | bb.size = 192511 58 | self.assertEqual(bb.size, 192511) 59 | 60 | bb.size = 192512 61 | self.assertEqual(bb.size, 192512) 62 | self.assertEqual(bb.mv_[-1 : len(bb)].tobytes(), b"0") 63 | -------------------------------------------------------------------------------- /test/unit_tests/io/test_files.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from lasso.io.files import collect_files 4 | 5 | 6 | class Test(unittest.TestCase): 7 | def test_collect_files(self): 8 | files = collect_files("test/test_data/io_test", "*.txt") 9 | self.assertEqual(len(files), 1) 10 | 11 | files = collect_files("test/test_data/io_test/", "*.txt", recursive=True) 12 | self.assertEqual(len(files), 2) 13 | 14 | files1, files2 = collect_files( 15 | "test/test_data/io_test/", ["*.txt", "*.yay"], recursive=True 16 | ) 17 | self.assertEqual(len(files1), 2) 18 | self.assertEqual(len(files2), 1) 19 | 20 | files1, files2 = collect_files( 21 | ["test/test_data/io_test/", "test/test_data/io_test/subfolder"], ["*.txt", "*.yay"] 22 | ) 23 | self.assertEqual(len(files1), 2) 24 | self.assertEqual(len(files2), 1) 25 | -------------------------------------------------------------------------------- /test/unit_tests/math/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-lasso-python/lasso-python/25effb6dae80fa3d998a9a1514c6aad5d72547af/test/unit_tests/math/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/math/test_sampling.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from lasso.math.sampling import unique_subsamples 4 | 5 | 6 | class Test(unittest.TestCase): 7 | def test_unique_subsamples(self): 8 | self.assertEqual(len(set(unique_subsamples(0, 20, 100))), 20) 9 | self.assertEqual(len(set(unique_subsamples(0, 200, 100))), 100) 10 | -------------------------------------------------------------------------------- /test/unit_tests/math/test_stochastic.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from lasso.math.stochastic import jensen_shannon_entropy 4 | 5 | 6 | class Test(unittest.TestCase): 7 | def test_jensen_shannon_entropy(self): 8 | p1 = [0.5, 0.5, 0.0] 9 | p2 = [0, 0.1, 0.9] 10 | 11 | self.assertEqual(jensen_shannon_entropy(p1, p1), 0) 12 | self.assertAlmostEqual(jensen_shannon_entropy(p1, p2), 0.55797881790005399) 13 | self.assertAlmostEqual(jensen_shannon_entropy(p2, p1), 0.55797881790005399) 14 | --------------------------------------------------------------------------------
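Editor's note: the unit tests above are currently the closest thing to usage documentation for the dimred SVD pipeline. Below is a minimal end-to-end sketch assembled from the call signatures and return conventions asserted in those tests (create_reference_subsample, remap_random_subsample, calculate_v_and_betas, group_betas). It is an illustration, not project-provided code: the run_paths folders and the cluster count of 2 are hypothetical placeholders, and unlike DimredRun the reference run itself is kept in the stacked samples for brevity.

import numpy as np

from lasso.dimred.svd.clustering_betas import group_betas
from lasso.dimred.svd.keyword_types import ClusterType, DetectorType
from lasso.dimred.svd.pod_functions import calculate_v_and_betas
from lasso.dimred.svd.subsampling_methods import (
    create_reference_subsample,
    remap_random_subsample,
)

# hypothetical simulation results; each folder holds one plot file family
run_paths = [f"runs/run_{i:02d}/plot" for i in range(10)]

# subsample the reference run down to 200 nodes
# (each stage returns an error message string instead of raising)
ref_result = create_reference_subsample(run_paths[0], parts=[], nr_samples=200)
if isinstance(ref_result, str):
    raise RuntimeError(ref_result)
ref_sample, _, _ = ref_result

# remap every run onto the reference subsample
subsamples = []
for path in run_paths:
    sub_result = remap_random_subsample(path, parts=[], reference_subsample=ref_sample)
    if isinstance(sub_result, str):
        raise RuntimeError(sub_result)
    subsamples.append(sub_result[0])

# stack to (samples, timesteps, nodes, 3) and reduce via POD/SVD
displ = np.stack(subsamples)
pod_result = calculate_v_and_betas(displ)
if isinstance(pod_result, str):
    raise RuntimeError(pod_result)
v_rob, betas = pod_result  # shapes (k, t, n*3) and (s, t, k)

# cluster the betas of the last timestep, as the DimredRun test does
names = np.array([f"run_{i:02d}" for i in range(len(run_paths))])
beta_clusters, name_clusters = group_betas(
    names,
    betas[:, -1],
    cluster=ClusterType.KMeans,
    detector=DetectorType.LocalOutlierFactor,
    cluster_params={"n_clusters": 2},
)

Note the error-handling convention the tests assert: each stage returns a plain error string on failure rather than raising an exception, so callers must isinstance-check every result before unpacking it.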