├── .flake8 ├── .github └── workflows │ └── main.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── docs ├── api.md └── index.md ├── mkdocs.yml ├── pyproject.toml ├── requirements ├── Makefile ├── dev-requirements.in └── dev-requirements.txt ├── src └── wannier90io │ ├── __about__.py │ ├── __init__.py │ ├── __main__.py │ ├── _amn.py │ ├── _chk.py │ ├── _core.py │ ├── _eig.py │ ├── _mmn.py │ ├── _nnkp.py │ ├── _schema.py │ ├── _u.py │ ├── _unk.py │ ├── _win.py │ └── _wout.py └── tests ├── conftest.py ├── fixtures ├── .gitignore ├── Makefile ├── fixtures.mk └── setup.sh ├── test_amn.py ├── test_chk.py ├── test_cli.py ├── test_eig.py ├── test_mmn.py ├── test_nnkp.py ├── test_u.py ├── test_unk_formatted.py ├── test_win.py └── test_wout.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E20,E22,E241,E501,E731 3 | per-file-ignores = __init__.py:F401,F403 4 | -------------------------------------------------------------------------------- /.github/workflows/main.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | branches: [ main ] 6 | push: 7 | branches: [ main ] 8 | workflow_dispatch: 9 | 10 | jobs: 11 | test: 12 | runs-on: ubuntu-20.04 13 | strategy: 14 | matrix: 15 | python-version: [ '3.8', '3.9', '3.10' ] 16 | steps: 17 | - uses: actions/checkout@v3 18 | - name: Cache test fixtures 19 | id: cache-test-fixtures 20 | uses: actions/cache@v3 21 | with: 22 | path: ${{ github.workspace }}/tests/fixtures/wannier90-* 23 | key: ${{ runner.os }}-${{ hashFiles('tests/fixtures/*') }}_0 24 | - uses: actions/setup-python@v3 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - name: Install dependencies 28 | run: | 29 | pip install -r requirements/dev-requirements.txt 30 | pip install . 31 | - name: Make fixtures 32 | if: steps.cache-test-fixtures.outputs.cache-hit != 'true' 33 | run: | 34 | make -C ./tests/fixtures 35 | - name: Run tests 36 | run: | 37 | pytest -v 38 | 39 | docs: 40 | runs-on: ubuntu-20.04 41 | steps: 42 | - uses: actions/checkout@v3 43 | - uses: actions/setup-python@v3 44 | with: 45 | python-version: '3.8' 46 | - name: Install dependencies 47 | run: | 48 | pip install -r requirements/dev-requirements.txt 49 | pip install . 50 | - name: Build docs 51 | run: | 52 | mkdocs build 53 | - name: Publish docs 54 | if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} 55 | uses: peaceiris/actions-gh-pages@v3 56 | with: 57 | github_token: ${{ secrets.GITHUB_TOKEN }} 58 | publish_dir: ./site/ 59 | force_orphan: true 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 105 | __pypackages__/ 106 | 107 | # Celery stuff 108 | celerybeat-schedule 109 | celerybeat.pid 110 | 111 | # SageMath parsed files 112 | *.sage.py 113 | 114 | # Environments 115 | .env 116 | .venv 117 | env/ 118 | venv/ 119 | ENV/ 120 | env.bak/ 121 | venv.bak/ 122 | 123 | # Spyder project settings 124 | .spyderproject 125 | .spyproject 126 | 127 | # Rope project settings 128 | .ropeproject 129 | 130 | # mkdocs documentation 131 | /site 132 | 133 | # mypy 134 | .mypy_cache/ 135 | .dmypy.json 136 | dmypy.json 137 | 138 | # Pyre type checker 139 | .pyre/ 140 | 141 | # pytype static type analyzer 142 | .pytype/ 143 | 144 | # Cython debug symbols 145 | cython_debug/ 146 | 147 | # PyCharm 148 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 149 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 150 | # and can be added to the global gitignore or merged into this file. For a more nuclear 151 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
152 | #.idea/ 153 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.0.1 4 | hooks: 5 | - id: check-yaml 6 | - id: end-of-file-fixer 7 | - id: trailing-whitespace 8 | - repo: https://github.com/pycqa/flake8 9 | rev: 4.0.1 10 | hooks: 11 | - id: flake8 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Jamal I. Mustafa 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Wannier90 I/O with Python 2 | 3 | A Python library for reading and writing [Wannier90][w90] files. 
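Beyond the input-file parsers shown in the quickstart below, the library also reads the matrix files written by Wannier90 and its interface codes (`.amn`, `.mmn`, `.eig`, `.chk`, `_u.mat`, and formatted `UNK` files). A minimal sketch of reading the matrices — the file names are illustrative and assume a calculation with seedname `wannier90` has already been run in the working directory:

```python
import wannier90io as w90io

# Projections matrix A_mn(k): shape (Nk, Nb, Np)
with open('wannier90.amn') as fh:
    amn = w90io.read_amn(fh)

# Eigenvalues: shape (Nk, Nb)
with open('wannier90.eig') as fh:
    eig = w90io.read_eig(fh)

# Overlaps M_mn(k, b) plus the k-point neighbour list used to build them
with open('wannier90.mmn') as fh:
    mmn, nnkpts = w90io.read_mmn(fh)

print(amn.shape, eig.shape, mmn.shape, nnkpts.shape)
```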
4 | 5 | ## Quickstart 6 | 7 | 13 | 14 | 15 | ```python 16 | import pprint 17 | 18 | import wannier90io as w90io 19 | 20 | 21 | pp = pprint.PrettyPrinter() 22 | 23 | with open(win_fpath, 'r') as fh: 24 | parsed_win = w90io.parse_win_raw(fh.read()) 25 | 26 | with open(nnkp_fpath, 'r') as fh: 27 | parsed_nnkp = w90io.parse_nnkp_raw(fh.read()) 28 | 29 | pp.pprint(parsed_win) 30 | pp.pprint(parsed_nnkp) 31 | ``` 32 | 33 | [w90]: http://wannier.org 34 | -------------------------------------------------------------------------------- /docs/api.md: -------------------------------------------------------------------------------- 1 | # API Reference 2 | 3 | ::: wannier90io.parse_win_raw 4 | ::: wannier90io.parse_nnkp_raw 5 | ::: wannier90io.parse_wout_iteration_info 6 | ::: wannier90io.read_amn 7 | ::: wannier90io.write_amn 8 | ::: wannier90io.read_chk 9 | ::: wannier90io.read_u 10 | ::: wannier90io.read_unk_formatted 11 | ::: wannier90io.read_eig 12 | ::: wannier90io.write_eig 13 | ::: wannier90io.read_mmn 14 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --8<-- "README.md" 2 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Wannier90 I/O with Python 2 | site_url: https://wannier-developers.github.io/wannier90io-python 3 | site_author: Jamal Mustafa 4 | site_description: >- 5 | Wannier90 I/O with Python 6 | 7 | repo_name: wannier-developers/wannier90io-python 8 | repo_url: https://github.com/wannier-developers/wannier90io-python 9 | edit_uri: '' 10 | 11 | theme: 12 | name: material 13 | palette: 14 | - scheme: default 15 | toggle: 16 | icon: material/toggle-switch-off-outline 17 | name: Switch to dark mode 18 | - scheme: slate 19 | toggle: 20 | icon: material/toggle-switch 21 | name: Switch to light mode 22 | 23 | plugins: 24 | - search 25 | - mkdocstrings: 26 | default_handler: python 27 | handlers: 28 | python: 29 | selection: 30 | docstring_style: google 31 | rendering: 32 | show_root_heading: true 33 | show_root_toc_entry: false 34 | show_signature_annotations: true 35 | show_source: true 36 | members_order: source 37 | watch: 38 | - src/wannier90io 39 | 40 | extra: 41 | social: 42 | - icon: fontawesome/brands/github 43 | link: https://github.com/wannier-developers 44 | 45 | markdown_extensions: 46 | - pymdownx.snippets: 47 | check_paths: true 48 | 49 | nav: 50 | - Home: index.md 51 | - API Reference: api.md 52 | 53 | watch: 54 | - README.md 55 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "wannier90io" 7 | description = "A Python library for Wannier90 I/O" 8 | readme = "README.md" 9 | requires-python = ">=3.8" 10 | license = { file = "LICENSE" } 11 | authors = [ 12 | { "name" = "Jamal I. 
Mustafa" }, 13 | ] 14 | classifiers = [ 15 | "License :: OSI Approved :: MIT License", 16 | "Programming Language :: Python :: 3", 17 | ] 18 | dependencies = [ 19 | "numpy", 20 | "pydantic", 21 | ] 22 | dynamic=['version'] 23 | 24 | [project.urls] 25 | Documentation = "https://github.com/wannier-developers/wannier90io-python#readme" 26 | Issues = "https://github.com/wannier-developers/wannier90io-python/issues" 27 | Source = "https://github.com/wannier-developers/wannier90io-python" 28 | 29 | [tool.hatch.build] 30 | only-packages = true 31 | 32 | [tool.hatch.version] 33 | path = "src/wannier90io/__about__.py" 34 | 35 | [project.scripts] 36 | w90io = "wannier90io.__main__:main" 37 | 38 | [tool.autopep8] 39 | select = ["E231"] 40 | 41 | [tool.pytest.ini_options] 42 | addopts = "--cov=wannier90io --codeblocks" 43 | -------------------------------------------------------------------------------- /requirements/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all 2 | all: setup.txt dev-requirements.txt 3 | 4 | .PHONY: install 5 | install: setup.txt dev-requirements.txt 6 | pip-sync $^ 7 | pip install -e ../ 8 | 9 | setup.txt: ../pyproject.toml 10 | pip-compile $< --output-file $@ 11 | 12 | dev-requirements.txt: dev-requirements.in setup.txt 13 | pip-compile $< 14 | 15 | %.txt: %.in 16 | pip-compile $< 17 | -------------------------------------------------------------------------------- /requirements/dev-requirements.in: -------------------------------------------------------------------------------- 1 | hatch 2 | mkdocs-material 3 | mkdocstrings[python] 4 | pip-tools 5 | pre-commit 6 | pydantic 7 | pytest 8 | pytest-codeblocks 9 | pytest-cov 10 | -------------------------------------------------------------------------------- /requirements/dev-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.8 3 | # by the following command: 4 | # 5 | # pip-compile dev-requirements.in 6 | # 7 | anyio==3.6.1 8 | # via httpcore 9 | astunparse==1.6.3 10 | # via pytkdocs 11 | attrs==21.4.0 12 | # via pytest 13 | build==1.0.3 14 | # via pip-tools 15 | certifi==2022.5.18.1 16 | # via 17 | # httpcore 18 | # httpx 19 | cffi==1.15.1 20 | # via cryptography 21 | cfgv==3.3.1 22 | # via pre-commit 23 | click==8.0.3 24 | # via 25 | # hatch 26 | # mkdocs 27 | # pip-tools 28 | # userpath 29 | commonmark==0.9.1 30 | # via rich 31 | coverage[toml]==6.4.1 32 | # via pytest-cov 33 | cryptography==37.0.4 34 | # via secretstorage 35 | distlib==0.3.5 36 | # via virtualenv 37 | editables==0.3 38 | # via hatchling 39 | filelock==3.5.1 40 | # via virtualenv 41 | ghp-import==2.1.0 42 | # via mkdocs 43 | griffe==0.19.2 44 | # via mkdocstrings-python 45 | h11==0.12.0 46 | # via httpcore 47 | hatch==1.4.2 48 | # via -r dev-requirements.in 49 | hatchling==1.8.0 50 | # via hatch 51 | httpcore==0.15.0 52 | # via httpx 53 | httpx==0.23.0 54 | # via hatch 55 | hyperlink==21.0.0 56 | # via hatch 57 | identify==2.4.10 58 | # via pre-commit 59 | idna==3.3 60 | # via 61 | # anyio 62 | # hyperlink 63 | # rfc3986 64 | importlib-metadata==4.11.1 65 | # via 66 | # build 67 | # keyring 68 | # markdown 69 | # mkdocs 70 | iniconfig==1.1.1 71 | # via pytest 72 | jeepney==0.8.0 73 | # via 74 | # keyring 75 | # secretstorage 76 | jinja2==3.1.2 77 | # via 78 | # mkdocs 79 | # mkdocs-material 80 | # mkdocstrings 81 | keyring==23.8.2 82 | # via hatch 83 | markdown==3.3.7 84 | # via 85 | # 
mkdocs 86 | # mkdocs-autorefs 87 | # mkdocs-material 88 | # mkdocstrings 89 | # pymdown-extensions 90 | markupsafe==2.1.1 91 | # via 92 | # jinja2 93 | # mkdocstrings 94 | mergedeep==1.3.4 95 | # via mkdocs 96 | mkdocs==1.3.0 97 | # via 98 | # mkdocs-autorefs 99 | # mkdocs-material 100 | # mkdocstrings 101 | mkdocs-autorefs==0.4.1 102 | # via mkdocstrings 103 | mkdocs-material==8.2.15 104 | # via -r dev-requirements.in 105 | mkdocs-material-extensions==1.0.3 106 | # via mkdocs-material 107 | mkdocstrings[python]==0.18.1 108 | # via 109 | # -r dev-requirements.in 110 | # mkdocstrings-python 111 | # mkdocstrings-python-legacy 112 | mkdocstrings-python==0.6.6 113 | # via mkdocstrings 114 | mkdocstrings-python-legacy==0.2.2 115 | # via mkdocstrings 116 | nodeenv==1.6.0 117 | # via pre-commit 118 | packaging==21.3 119 | # via 120 | # build 121 | # hatchling 122 | # mkdocs 123 | # pytest 124 | pathspec==0.9.0 125 | # via hatchling 126 | pexpect==4.8.0 127 | # via hatch 128 | pip-tools==7.3.0 129 | # via -r dev-requirements.in 130 | platformdirs==2.5.0 131 | # via 132 | # hatch 133 | # virtualenv 134 | pluggy==1.0.0 135 | # via 136 | # hatchling 137 | # pytest 138 | pre-commit==2.17.0 139 | # via -r dev-requirements.in 140 | ptyprocess==0.7.0 141 | # via pexpect 142 | py==1.11.0 143 | # via pytest 144 | pycparser==2.21 145 | # via cffi 146 | pydantic==1.9.1 147 | # via -r dev-requirements.in 148 | pygments==2.12.0 149 | # via 150 | # mkdocs-material 151 | # rich 152 | pymdown-extensions==9.4 153 | # via 154 | # mkdocs-material 155 | # mkdocstrings 156 | pyparsing==3.0.7 157 | # via packaging 158 | pyperclip==1.8.2 159 | # via hatch 160 | pyproject-hooks==1.0.0 161 | # via build 162 | pytest==7.0.1 163 | # via 164 | # -r dev-requirements.in 165 | # pytest-codeblocks 166 | # pytest-cov 167 | pytest-codeblocks==0.16.1 168 | # via -r dev-requirements.in 169 | pytest-cov==3.0.0 170 | # via -r dev-requirements.in 171 | python-dateutil==2.8.2 172 | # via ghp-import 173 | pytkdocs==0.16.1 174 | # via mkdocstrings-python-legacy 175 | pyyaml==6.0 176 | # via 177 | # mkdocs 178 | # pre-commit 179 | # pyyaml-env-tag 180 | pyyaml-env-tag==0.1 181 | # via mkdocs 182 | rfc3986[idna2008]==1.5.0 183 | # via httpx 184 | rich==12.5.1 185 | # via hatch 186 | secretstorage==3.3.3 187 | # via keyring 188 | shellingham==1.5.0 189 | # via hatch 190 | six==1.16.0 191 | # via 192 | # astunparse 193 | # python-dateutil 194 | sniffio==1.2.0 195 | # via 196 | # anyio 197 | # httpcore 198 | # httpx 199 | toml==0.10.2 200 | # via pre-commit 201 | tomli==2.0.1 202 | # via 203 | # build 204 | # coverage 205 | # hatchling 206 | # pip-tools 207 | # pyproject-hooks 208 | # pytest 209 | tomli-w==1.0.0 210 | # via hatch 211 | tomlkit==0.11.4 212 | # via hatch 213 | typing-extensions==4.1.1 214 | # via 215 | # pydantic 216 | # rich 217 | userpath==1.8.0 218 | # via hatch 219 | virtualenv==20.16.3 220 | # via 221 | # hatch 222 | # pre-commit 223 | watchdog==2.1.8 224 | # via mkdocs 225 | wheel==0.37.1 226 | # via 227 | # astunparse 228 | # pip-tools 229 | zipp==3.7.0 230 | # via importlib-metadata 231 | 232 | # The following packages are considered to be unsafe in a requirements file: 233 | # pip 234 | # setuptools 235 | -------------------------------------------------------------------------------- /src/wannier90io/__about__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.1.0b1' 2 | -------------------------------------------------------------------------------- 
/src/wannier90io/__init__.py: -------------------------------------------------------------------------------- 1 | from . import _schema 2 | from ._amn import * 3 | from ._chk import * 4 | from ._eig import * 5 | from ._mmn import * 6 | from ._nnkp import * 7 | from ._u import * 8 | from ._unk import * 9 | from ._win import * 10 | from ._wout import * 11 | -------------------------------------------------------------------------------- /src/wannier90io/__main__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import re 3 | import pprint 4 | 5 | import wannier90io as w90io 6 | 7 | 8 | pp = pprint.PrettyPrinter(indent=2, width=120) 9 | pprint.sorted = lambda x, key=None: x # print dictionaries with keys in insertion order 10 | 11 | 12 | def parse_win(args): 13 | contents = args.file.read() 14 | 15 | comments = w90io._core.extract_comments(contents) 16 | parameters = w90io._core.extract_parameters(contents) 17 | blocks = w90io._core.extract_blocks(contents) 18 | 19 | if args.extract_only: 20 | pp.pprint({ 21 | 'comments': comments, 22 | 'parameters': parameters, 23 | 'blocks': blocks, 24 | }) 25 | else: 26 | parsed_win = w90io.parse_win_raw(contents) 27 | if args.parameters: 28 | pp.pprint({ 29 | parameter: parsed_win['parameters'][parameter] 30 | for parameter in args.parameters if parameter in parsed_win['parameters'] 31 | }) 32 | if args.blocks: 33 | pp.pprint({ 34 | block: parsed_win[block] 35 | for block in args.blocks if block in parsed_win['blocks'] 36 | }) 37 | if not args.parameters and not args.blocks: 38 | pp.pprint(parsed_win) 39 | 40 | 41 | def parse_wout_iteration_info(args): 42 | parsed_iteration_info = w90io.parse_wout_iteration_info(args.file) 43 | 44 | if args.convergence: 45 | pp.pprint(parsed_iteration_info['convergence']) 46 | 47 | if args.spread: 48 | pp.pprint(parsed_iteration_info['spread']) 49 | 50 | if args.delta: 51 | pp.pprint(parsed_iteration_info['delta']) 52 | 53 | if args.disentanglement: 54 | pp.pprint(parsed_iteration_info['disentanglement']) 55 | 56 | if not any([args.convergence, args.spread, args.delta, args.disentanglement]): 57 | pp.pprint(parsed_iteration_info) 58 | 59 | 60 | def parse_nnkp(args): 61 | contents = args.file.read() 62 | 63 | comments = w90io._core.extract_comments(contents) 64 | parameters = w90io._core.extract_parameters(contents) 65 | blocks = w90io._core.extract_blocks(contents) 66 | 67 | if args.extract_only: 68 | pp.pprint({ 69 | 'comments': comments, 70 | 'parameters': parameters, 71 | 'blocks': blocks, 72 | }) 73 | else: 74 | parsed_nnkp = w90io.parse_nnkp_raw(contents) 75 | if args.parameters: 76 | pp.pprint({ 77 | parameter: parsed_nnkp['parameters'][parameter] 78 | for parameter in args.parameters if parameter in parsed_nnkp['parameters'] 79 | }) 80 | if args.blocks: 81 | pp.pprint({ 82 | block: parsed_nnkp[block] 83 | for block in args.blocks if block in parsed_nnkp['blocks'] 84 | }) 85 | if not args.parameters and not args.blocks: 86 | pp.pprint(parsed_nnkp) 87 | 88 | 89 | def info_amn(args): 90 | with args.file: 91 | amn = w90io.read_amn(args.file) 92 | 93 | print(f'Nk = {amn.shape[0]}') 94 | print(f'Nb = {amn.shape[1]}') 95 | print(f'Np = {amn.shape[2]}') 96 | 97 | 98 | def info_eig(args): 99 | with args.file: 100 | eig = w90io.read_eig(args.file) 101 | 102 | print(f'Nk = {eig.shape[0]}') 103 | print(f'Nb = {eig.shape[1]}') 104 | 105 | 106 | def info_mmn(args): 107 | with args.file: 108 | (mmn, nnkpts) = w90io.read_mmn(args.file) 109 | 110 | print(mmn.shape, 
nnkpts.shape) 111 | 112 | 113 | def info_chk(args): 114 | with args.file: 115 | chk = w90io.read_chk(args.file) 116 | 117 | print(chk['num_bands']) 118 | print(chk['num_wann']) 119 | print(chk['have_disentangled']) 120 | 121 | 122 | def info_u(args): 123 | with args.file: 124 | kpoints, u_matrices = w90io.read_u(args.file) 125 | 126 | print(f'Nk = {u_matrices.shape[0]}') 127 | print(f'Nb = {u_matrices.shape[1]}') 128 | print(f'Nw = {u_matrices.shape[2]}') 129 | 130 | 131 | def info_unk_formatted(args): 132 | with args.file: 133 | ik, wvfn = w90io.read_unk_formatted(args.file) 134 | 135 | print(f'ik = {ik}') 136 | print(f'ngx = {wvfn.shape[0]}') 137 | print(f'ngy = {wvfn.shape[1]}') 138 | print(f'ngz = {wvfn.shape[2]}') 139 | print(f'Nb = {wvfn.shape[3]}') 140 | 141 | 142 | def main(): 143 | parser = argparse.ArgumentParser() 144 | subparsers = parser.add_subparsers(dest='subparser', required=True) 145 | # 146 | parser_common = argparse.ArgumentParser(add_help=False) 147 | parser_common.add_argument('file', type=open) 148 | parser_common_parse = argparse.ArgumentParser(add_help=False) 149 | group = parser_common_parse.add_mutually_exclusive_group() 150 | group.add_argument('--parameters', type=lambda string: re.split('[ ,]', string)) 151 | group.add_argument('--blocks', type=lambda string: re.split('[ ,]', string)) 152 | # 153 | parser_win = subparsers.add_parser('parse-win', parents=[parser_common, parser_common_parse]) 154 | parser_win.add_argument('--extract-only', action='store_true') 155 | parser_win.set_defaults(func=parse_win) 156 | # 157 | parser_wout_iteration_info = subparsers.add_parser('parse-wout-iteration-info', parents=[parser_common]) 158 | group = parser_wout_iteration_info.add_mutually_exclusive_group() 159 | group.add_argument('--convergence', action='store_true') 160 | group.add_argument('--spread', action='store_true') 161 | group.add_argument('--delta', action='store_true') 162 | group.add_argument('--disentanglement', action='store_true') 163 | parser_wout_iteration_info.set_defaults(func=parse_wout_iteration_info) 164 | # 165 | parser_nnkp = subparsers.add_parser('parse-nnkp', parents=[parser_common, parser_common_parse]) 166 | parser_nnkp.add_argument('--extract-only', action='store_true') 167 | parser_nnkp.set_defaults(func=parse_nnkp) 168 | # 169 | parser_info_amn = subparsers.add_parser('info-amn', parents=[parser_common]) 170 | parser_info_amn.set_defaults(func=info_amn) 171 | # 172 | parser_info_eig = subparsers.add_parser('info-eig', parents=[parser_common]) 173 | parser_info_eig.set_defaults(func=info_eig) 174 | # 175 | parser_info_mmn = subparsers.add_parser('info-mmn', parents=[parser_common]) 176 | parser_info_mmn.set_defaults(func=info_mmn) 177 | # 178 | parser_info_chk = subparsers.add_parser('info-chk', parents=[parser_common]) 179 | parser_info_chk.set_defaults(func=info_chk) 180 | # 181 | parser_info_u = subparsers.add_parser('info-u', parents=[parser_common]) 182 | parser_info_u.set_defaults(func=info_u) 183 | # 184 | parser_info_unk_formatted = subparsers.add_parser('info-unk-formatted', parents=[parser_common]) 185 | parser_info_unk_formatted.set_defaults(func=info_unk_formatted) 186 | 187 | args = parser.parse_args() 188 | args.func(args) 189 | -------------------------------------------------------------------------------- /src/wannier90io/_amn.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | 4 | import numpy as np 5 | 6 | 7 | __all__ = ['read_amn', 
'write_amn'] 8 | 9 | 10 | def read_amn(stream: typing.TextIO) -> np.ndarray: 11 | """ 12 | Read projections matrix 13 | 14 | Arguments: 15 | stream: a file-like stream 16 | 17 | Returns: 18 | projections matrix (Nk, Nb, Np) 19 | 20 | """ 21 | stream.readline() 22 | 23 | [Nb, Nk, Np] = np.fromstring(stream.readline(), sep=' ', dtype=int) 24 | 25 | raw_data = np.loadtxt(stream).reshape((Nk, Np, Nb, 5)) 26 | 27 | amn = np.transpose(raw_data[:, :, :, 3] + 1j*raw_data[:, :, :, 4], axes=(0, 2, 1)) 28 | 29 | return amn 30 | 31 | 32 | def write_amn(stream: typing.TextIO, amn: np.ndarray, header: typing.Optional[str] = 'HEADER'): 33 | r""" 34 | Write projections matrix 35 | 36 | Arguments: 37 | stream: a file-like stream 38 | amn: projections matrix (Nk, Nb, Np) 39 | header: header 40 | 41 | """ 42 | (Nk, Nb, Np) = amn.shape 43 | indices = np.mgrid[:Nb, :Np, :Nk].reshape((3, -1), order='F') + 1 44 | 45 | amn = np.transpose(amn, axes=(1, 2, 0)).flatten(order='F').view(float).reshape((-1, 2)) 46 | data = np.column_stack((indices.T, amn)) 47 | 48 | print(header, file=stream) 49 | print(f'{Nb:12d}{Nk:12d}{Np:12d}', file=stream) 50 | np.savetxt(stream, data, fmt='%5d%5d%5d%18.12f%18.12f') 51 | -------------------------------------------------------------------------------- /src/wannier90io/_chk.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | 4 | import numpy as np 5 | 6 | 7 | __all__ = ['read_chk'] 8 | 9 | 10 | def read_chk(stream: typing.TextIO) -> dict: 11 | """ 12 | Read checkpoint 13 | 14 | Arguments: 15 | stream: a file-like stream 16 | 17 | Returns: 18 | dict 19 | 20 | """ 21 | chk = {} 22 | 23 | chk['header'] = stream.readline() 24 | chk['num_bands'] = Nb = int(stream.readline()) 25 | chk['num_exclude_bands'] = int(stream.readline()) 26 | if chk['num_exclude_bands'] > 0: 27 | chk['num_exclude_bands'] = np.fromstring(stream.readline(), dtype=int) 28 | chk['real_lattice'] = np.fromstring(stream.readline(), sep=' ', dtype=float).reshape((3, 3), order='F') 29 | chk['recip_lattice'] = np.fromstring(stream.readline(), sep=' ', dtype=float).reshape((3, 3), order='F') 30 | chk['num_kpts'] = Nk = int(stream.readline()) 31 | chk['mp_grid'] = np.fromstring(stream.readline(), sep=' ', dtype=int) 32 | chk['kpt_latt'] = np.zeros((chk['num_kpts'], 3)) 33 | for idx in range(chk['num_kpts']): 34 | chk['kpt_latt'][idx] = np.fromstring(stream.readline(), sep=' ', dtype=float) 35 | chk['nntot'] = Nn = int(stream.readline()) 36 | chk['num_wann'] = Nw = int(stream.readline()) 37 | chk['checkpoint'] = stream.readline() 38 | chk['have_disentangled'] = bool(int(stream.readline())) 39 | if chk['have_disentangled']: 40 | chk['omega_invariant'] = float(stream.readline()) 41 | chk['lwindow'] = np.loadtxt(stream, max_rows=(Nk*Nb), dtype=bool).reshape((Nk, Nb)) 42 | chk['nwindim'] = np.loadtxt(stream, max_rows=Nk, dtype=int) 43 | chk['u_matrix_opt'] = np.loadtxt(stream, max_rows=(Nk*Nw*Nb), dtype=float).view(complex).reshape((Nk, Nw, Nb)) 44 | chk['u_matrix'] = np.loadtxt(stream, max_rows=(Nk*Nw*Nw), dtype=float).view(complex).reshape((Nw, Nw, Nk), order='F').transpose((2, 0, 1)) 45 | chk['m_matrix'] = np.loadtxt(stream, max_rows=(Nk*Nn*Nw*Nw), dtype=float).view(complex).reshape((Nw, Nw, Nn, Nk), order='F').transpose((3, 2, 0, 1)) 46 | 47 | return chk 48 | -------------------------------------------------------------------------------- /src/wannier90io/_core.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import re 3 | 4 | 5 | expressions = { 6 | 'float': r'[-+]?(\d+(\.\d*)?|\.\d+)([eEdD][-+]?\d+)?', 7 | '3-vector': ( 8 | r'[-+]?(\d+(\.\d*)?|\.\d+)([eEdD][-+]?\d+)?[ \t,;]+' 9 | r'[-+]?(\d+(\.\d*)?|\.\d+)([eEdD][-+]?\d+)?[ \t,;]+' 10 | r'[-+]?(\d+(\.\d*)?|\.\d+)([eEdD][-+]?\d+)?' 11 | ), 12 | } 13 | 14 | patterns = { 15 | 'comment': re.compile( 16 | r'(!|#)[ \t]*(?P.+)\n', 17 | ), 18 | 'parameter': re.compile( 19 | r'^[ \t]*(?!begin|end)(?P\w+)[ \t]*[ =:][ \t]*(?P[\S ]+)[ \t]*$', 20 | re.IGNORECASE | re.MULTILINE 21 | ), 22 | 'block': re.compile( 23 | r'[ \t]*begin[ \t]+(?P\w+)\s+(?P.+\n)[ \t]*end[ \t]+(?P=block)', 24 | re.IGNORECASE | re.MULTILINE | re.DOTALL 25 | ), 26 | } 27 | 28 | 29 | def convert(string: str) -> int | float | bool | str | list[int] | list[float]: 30 | string = string.strip() 31 | 32 | # regular expressions adapted (in part) from: 33 | # https://docs.python.org/3/library/re.html#simulating-scanf 34 | if re.compile(r'^[-+]?\d+$').match(string): 35 | return int(string) 36 | elif re.compile(r'^[-+]?(\d+(\.\d*)?|\.\d+)([eEdD][-+]?\d+)?$').match(string): 37 | return float(string.replace('d', 'e').replace('D', 'e')) 38 | elif re.compile(r'^t|true|[.]true[.]$', re.IGNORECASE).match(string): 39 | return True 40 | elif re.compile(r'^f|false|[.]false[.]$', re.IGNORECASE).match(string): 41 | return False 42 | elif re.compile(rf'^{expressions["3-vector"]}$').match(string): 43 | try: 44 | return [int(x) for x in re.split(r'[ \t,;]+', string)] 45 | except ValueError: 46 | return [float(x) for x in re.split(r'[ \t,;]+', string)] 47 | elif re.compile(r'^\d+(-\d+)?([ \t,;]+\d+(-\d+)?)+$').match(string): 48 | values = [] 49 | for component in re.split('[ \t,;]', string): 50 | if '-' in component: 51 | [i1, i2] = list(map(int, component.split('-'))) 52 | values += list(range(i1, i2+1)) 53 | else: 54 | values.append(int(component)) 55 | return values 56 | else: 57 | return string 58 | 59 | 60 | def extract_comments(string: str) -> list[str]: 61 | return [ 62 | match.group() 63 | for match in re.finditer(patterns['comment'], string) 64 | ] 65 | 66 | 67 | def extract_parameters(string: str) -> list[str] : 68 | string = re.sub(patterns['comment'], '', string) 69 | string = re.sub(patterns['block'], '', string) 70 | 71 | return [ 72 | match.group() 73 | for match in re.finditer(patterns['parameter'], string) 74 | ] 75 | 76 | 77 | def extract_blocks(string: str) -> list[str]: 78 | string = re.sub(patterns['comment'], '', string) 79 | 80 | return [ 81 | match.group() 82 | for match in re.finditer(patterns['block'], string) 83 | ] 84 | 85 | 86 | def parse_parameters(parameters: list[str]) -> dict: 87 | return { 88 | match.group('parameter'): convert(match.group('value')) 89 | for match in map(patterns['parameter'].match, parameters) 90 | } 91 | 92 | 93 | def parse_blocks(blocks: list[str]) -> list[dict]: 94 | return { 95 | match.group('block').lower(): match.group('contents') 96 | for match in map(patterns['block'].match, blocks) 97 | } 98 | -------------------------------------------------------------------------------- /src/wannier90io/_eig.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | 4 | import numpy as np 5 | 6 | 7 | __all__ = ['read_eig', 'write_eig'] 8 | 9 | 10 | def read_eig(stream: typing.TextIO) -> np.ndarray: 11 | """ 12 | Read eigenvalues matrix 13 | 14 | Arguments: 
15 | stream: a file-like stream 16 | 17 | Returns: 18 | eigenvalues matrix (Nk, Nb) 19 | 20 | """ 21 | raw_data = np.loadtxt(stream) 22 | 23 | Nb = int(raw_data[-1, 0]) 24 | Nk = int(raw_data[-1, 1]) 25 | 26 | eig = raw_data[:, 2].reshape((Nk, Nb)) 27 | 28 | return eig 29 | 30 | 31 | def write_eig(stream: typing.TextIO, eig: np.ndarray): 32 | r""" 33 | Write eigenvalues matrix 34 | 35 | Arguments: 36 | stream: a file-like stream 37 | eig: eigenvalues matrix (Nk, Nb) 38 | 39 | """ 40 | (Nk, Nb) = eig.shape 41 | indices = np.mgrid[:Nb, :Nk].reshape((2, Nk*Nb), order='F') + 1 42 | 43 | data = np.column_stack((indices.T, eig.flatten())) 44 | 45 | np.savetxt(stream, data, fmt='%5d%5d%18.12f') 46 | -------------------------------------------------------------------------------- /src/wannier90io/_mmn.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import itertools 3 | import typing 4 | 5 | import numpy as np 6 | 7 | 8 | __all__ = ['read_mmn'] 9 | 10 | 11 | def read_mmn(stream: typing.TextIO) -> tuple[np.ndarray, np.ndarray]: 12 | """ 13 | Read overlaps matrix 14 | 15 | Arguments: 16 | stream: a file-like stream 17 | 18 | Returns: 19 | overlaps matrix (Nk, Nn, Nb, Nb) 20 | nnkps (Nk, Nn, 5) 21 | 22 | """ 23 | stream.readline() # header 24 | 25 | [Nb, Nk, Nn] = np.fromstring(stream.readline(), sep=' ', dtype=int) 26 | 27 | mmn = np.zeros((Nk, Nn, Nb, Nb), dtype=complex) 28 | nnkpts = np.zeros((Nk, Nn, 5), dtype=int) 29 | 30 | for (ik, ikb) in itertools.product(range(Nk), range(Nn)): 31 | nnkpts[ik, ikb] = np.fromstring(stream.readline(), sep=' ', dtype=int) 32 | mmn[ik, ikb] = np.loadtxt(stream, max_rows=(Nb*Nb)).view(complex).reshape((Nb, Nb), order='F') 33 | 34 | nnkpts[:, :, 0] -= 1 35 | nnkpts[:, :, 1] -= 1 36 | 37 | return (mmn, nnkpts) 38 | -------------------------------------------------------------------------------- /src/wannier90io/_nnkp.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import re 3 | 4 | import numpy as np 5 | 6 | from . 
import _core 7 | 8 | 9 | __all__ = ['parse_nnkp_raw'] 10 | 11 | 12 | patterns = { 13 | 'lattice_vectors': re.compile( 14 | rf'(?P{_core.expressions["3-vector"]})\s+' 15 | rf'(?P{_core.expressions["3-vector"]})\s+' 16 | rf'(?P{_core.expressions["3-vector"]})', 17 | re.IGNORECASE | re.DOTALL 18 | ), 19 | 'kpoints': re.compile( 20 | r'\d+\s+' 21 | r'(?P.+)', 22 | re.IGNORECASE | re.DOTALL 23 | ), 24 | 'projections': re.compile( 25 | r'\d+\s+' 26 | r'(?P.+)', 27 | re.IGNORECASE | re.DOTALL 28 | ), 29 | 'nnkpts': re.compile( 30 | r'\d+\s+' 31 | r'(?P.+)', 32 | re.IGNORECASE | re.DOTALL 33 | ), 34 | 'exclude_bands': re.compile( 35 | r'\d+\s*' 36 | r'(?P.*)', 37 | re.IGNORECASE | re.DOTALL 38 | ), 39 | } 40 | 41 | 42 | def parse_lattice(string: str) -> dict: 43 | match = patterns['lattice_vectors'].search(string) 44 | 45 | if match is not None: 46 | v1 = [float(x) for x in match.group('v1').split()] 47 | v2 = [float(x) for x in match.group('v2').split()] 48 | v3 = [float(x) for x in match.group('v3').split()] 49 | 50 | return { 51 | 'v1': v1, 'v2': v2, 'v3': v3, 52 | } 53 | else: 54 | return None 55 | 56 | 57 | def parse_direct_lattice(string: str) -> dict: 58 | lattice = parse_lattice(string) 59 | 60 | return { 61 | 'a1': lattice['v1'], 'a2': lattice['v2'], 'a3': lattice['v3'], 62 | } 63 | 64 | 65 | def parse_reciprocal_lattice(string: str) -> dict: 66 | lattice = parse_lattice(string) 67 | 68 | return { 69 | 'b1': lattice['v1'], 'b2': lattice['v2'], 'b3': lattice['v3'], 70 | } 71 | 72 | 73 | def parse_kpoints(string: str) -> dict: 74 | match = patterns['kpoints'].search(string) 75 | 76 | return { 77 | 'kpoints': [ 78 | [float(x) for x in line.split()] for line in match.group('kpoints').splitlines() 79 | ] 80 | } 81 | 82 | 83 | def parse_projections(string: str) -> dict: 84 | match = patterns['projections'].search(string) 85 | 86 | projections = np.fromstring(match.group('projections'), sep='\n').reshape((-1, 13)) 87 | 88 | return [ 89 | { 90 | 'center': projection[:3], 91 | 'l': int(projection[3]), 92 | 'mr': int(projection[4]), 93 | 'r': int(projection[5]), 94 | 'z-axis': projection[6:9], 95 | 'x-axis': projection[9:12], 96 | 'zona': projection[12], 97 | } 98 | for projection in projections 99 | ] 100 | 101 | 102 | def parse_spinor_projections(string: str) -> dict: 103 | match = patterns['projections'].search(string) 104 | 105 | projections = np.fromstring(match.group('projections'), sep='\n').reshape((-1, 17)) 106 | 107 | return [ 108 | { 109 | 'center': projection[:3], 110 | 'l': int(projection[3]), 111 | 'mr': int(projection[4]), 112 | 'r': int(projection[5]), 113 | 'z-axis': projection[6:9], 114 | 'x-axis': projection[9:12], 115 | 'zona': projection[12], 116 | 'spin': int(projection[13]), 117 | 'spin-axis': projection[14:], 118 | } 119 | for projection in projections 120 | ] 121 | 122 | 123 | def parse_nnkpts(string: str) -> list: 124 | match = patterns['nnkpts'].search(string) 125 | 126 | return [[int(x) for x in line.split()] for line in match.group('nnkpts').splitlines()] 127 | 128 | 129 | def parse_exclude_bands(string: str) -> dict: 130 | match = patterns['exclude_bands'].search(string) 131 | 132 | if match is not None: 133 | return { 134 | 'exclude_bands': [int(line) for line in match.group('exclude_bands').splitlines()] 135 | } 136 | else: 137 | return { 138 | 'exclude_bands': None 139 | } 140 | 141 | 142 | def parse_nnkp_raw(string: str) -> dict: 143 | """ 144 | Parse NNKP 145 | 146 | Arguments: 147 | string: the NNKP text 148 | 149 | Returns: 150 | the parsed NNKP 151 | """ 152 | 
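    # Extraction pass first (comments, bare parameters, begin/end blocks),
    # then block-specific parsing of the lattices, k-points, neighbour list, and excluded bands.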
comments = _core.extract_comments(string) 153 | parameters = _core.parse_parameters(_core.extract_parameters(string)) 154 | blocks = _core.parse_blocks(_core.extract_blocks(string)) 155 | 156 | parsed_nnkp = { 157 | 'comments': comments, 158 | 'parameters': parameters, 159 | 'blocks': blocks, 160 | 'direct_lattice': parse_direct_lattice(blocks['real_lattice']), 161 | 'reciprocal_lattice': parse_reciprocal_lattice(blocks['recip_lattice']), 162 | 'kpoints': parse_kpoints(blocks['kpoints']), 163 | 'nnkpts': parse_nnkpts(blocks['nnkpts']), 164 | 'exclude_bands': parse_exclude_bands(blocks['exclude_bands']), 165 | } 166 | if 'projections' in blocks: 167 | parsed_nnkp['projections'] = parse_projections(blocks['projections']) 168 | if 'spinor_projections' in blocks: 169 | parsed_nnkp['spinor_projections'] = parse_spinor_projections(blocks['spinor_projections']) 170 | 171 | return parsed_nnkp 172 | -------------------------------------------------------------------------------- /src/wannier90io/_schema.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | 4 | import pydantic 5 | 6 | 7 | class UnitCell(pydantic.BaseModel): 8 | units: typing.Optional[str] 9 | a1: list 10 | a2: list 11 | a3: list 12 | 13 | 14 | class DirectLattice(pydantic.BaseModel): 15 | a1: list 16 | a2: list 17 | a3: list 18 | 19 | 20 | class ReciprocalLattice(pydantic.BaseModel): 21 | b1: list 22 | b2: list 23 | b3: list 24 | 25 | 26 | class Atom(pydantic.BaseModel): 27 | species: str 28 | basis_vector: list 29 | 30 | 31 | class Atoms(pydantic.BaseModel): 32 | units: typing.Optional[str] 33 | atoms: typing.List[Atom] 34 | 35 | 36 | class Projections(pydantic.BaseModel): 37 | units: typing.Optional[str] 38 | projections: typing.List[str] 39 | 40 | 41 | class Kpoints(pydantic.BaseModel): 42 | kpoints: typing.List[list] 43 | 44 | 45 | class ExcludeBands(pydantic.BaseModel): 46 | exclude_bands: typing.Optional[typing.List[int]] 47 | 48 | 49 | class WIN(pydantic.BaseModel): 50 | comments: typing.List[str] 51 | parameters: dict 52 | blocks: dict 53 | unit_cell_cart: typing.Optional[UnitCell] 54 | atoms_frac: typing.Optional[Atoms] 55 | atoms_cart: typing.Optional[Atoms] 56 | projections: Projections 57 | kpoints: Kpoints 58 | 59 | 60 | class NNKP(pydantic.BaseModel): 61 | comments: typing.List[str] 62 | parameters: dict 63 | blocks: dict 64 | nnkpts: typing.List[typing.List[int]] 65 | direct_lattice: DirectLattice 66 | reciprocal_lattice: ReciprocalLattice 67 | kpoints: Kpoints 68 | exclude_bands: ExcludeBands 69 | -------------------------------------------------------------------------------- /src/wannier90io/_u.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | 4 | import numpy as np 5 | 6 | # TODO: implement also read_u_dis 7 | __all__ = ['read_u'] 8 | 9 | 10 | def read_u(stream: typing.TextIO) -> tuple[np.ndarray, np.ndarray]: 11 | """ 12 | Read unitary matrix file (seedname_u.mat) or the rectangular U_dis matrix 13 | file (seedname_u_dis.mat). 14 | 15 | Note: 16 | for the _u.mat file, num_bands == num_wann. 
17 | 18 | Arguments: 19 | stream: a file-like stream 20 | 21 | Returns: 22 | kpoint coordinates in fractional coordinates (num_kpts, 3) 23 | U matrix U(k) or U_dis(k) (num_kpts, num_bands, num_wann) 24 | 25 | """ 26 | stream.readline() # header 27 | 28 | [nkpt, num_wann, num_bands] = np.fromstring(stream.readline(), sep=' ', dtype=int) 29 | u_matrices = np.zeros((nkpt, num_bands, num_wann), dtype=complex) 30 | kpoints = [] 31 | 32 | for ikpt in range(nkpt): 33 | empty = stream.readline() # header 34 | assert not empty.strip(), f"Expected empty line but found instead: '{empty}'" 35 | 36 | kpoint = np.fromstring(stream.readline(), sep=' ', dtype=float) 37 | assert len(kpoint) == 3 38 | kpoints.append(kpoint) 39 | u_matrices[ikpt, :, :] = np.loadtxt(stream, max_rows=(num_wann * num_bands)).view(complex).reshape((num_bands, num_wann), order='F') 40 | 41 | return np.array(kpoints), u_matrices 42 | -------------------------------------------------------------------------------- /src/wannier90io/_unk.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | 4 | import numpy as np 5 | 6 | # TODO: implement also read_unk_unformatted (that is the default) 7 | __all__ = ['read_unk_formatted'] 8 | 9 | 10 | def read_unk_formatted(stream: typing.TextIO) -> tuple[int, np.ndarray]: 11 | """ 12 | Read wavefunction files (UNKnnnnn.n files) in formatted format. 13 | 14 | Note that the UNK files must have been created using the `wvfn_formatted` 15 | option set to True in the interface code (e.g. pw2wannier90.x for the 16 | Quantum ESPRESSO interface). Note that this is *not* the default, however 17 | for reading into an external code, this is recommended for portability. 18 | 19 | Note: 20 | for now only works in the non-spinor case. 21 | Spinor case still to be implemented. 22 | 23 | Arguments: 24 | stream: a file-like stream 25 | 26 | Returns: 27 | k-point index ik (integer) 28 | complex wavefunction (ngx, ngy, ngz, Nb) 29 | 30 | """ 31 | [ngx, ngy, ngz, ik, nbnd] = np.fromstring(stream.readline(), sep=' ', dtype=int) 32 | 33 | wvfn = np.zeros((ngx, ngy, ngz, nbnd), dtype=complex) 34 | 35 | for ibnd in range(nbnd): 36 | wvfn[:, :, :, ibnd] = np.loadtxt(stream, max_rows=(ngx * ngy * ngz)).view(complex).reshape((ngx, ngy, ngz), order='F') 37 | 38 | return (ik, wvfn) 39 | -------------------------------------------------------------------------------- /src/wannier90io/_win.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import re 3 | 4 | from . import _core 5 | 6 | 7 | __all__ = ['parse_win_raw'] 8 | 9 | 10 | patterns = { 11 | 'unit_cell': re.compile( 12 | r'((?Pbohr|ang)\s+)?' 13 | rf'(?P{_core.expressions["3-vector"]})\s+' 14 | rf'(?P{_core.expressions["3-vector"]})\s+' 15 | rf'(?P{_core.expressions["3-vector"]})', 16 | re.IGNORECASE | re.DOTALL 17 | ), 18 | 'atoms': re.compile( 19 | r'((?Pbohr|ang)\s+)?' 20 | rf'(?P([ \t]*\w+[ \t]+{_core.expressions["3-vector"]}\s*)+)', 21 | re.IGNORECASE | re.DOTALL 22 | ), 23 | 'projections': re.compile( 24 | r'((?Pbohr|ang)\s+)?' 
25 | r'(?P.+)', 26 | re.IGNORECASE | re.DOTALL 27 | ), 28 | } 29 | 30 | 31 | def parse_unit_cell(string: str) -> dict: 32 | match = patterns['unit_cell'].search(string) 33 | 34 | if match is not None: 35 | a1 = [float(x) for x in re.split(r'[ \t,;]+', match.group('a1'))] 36 | a2 = [float(x) for x in re.split(r'[ \t,;]+', match.group('a2'))] 37 | a3 = [float(x) for x in re.split(r'[ \t,;]+', match.group('a3'))] 38 | 39 | return { 40 | 'units': match.group('units'), 41 | 'a1': a1, 'a2': a2, 'a3': a3, 42 | } 43 | else: 44 | return None 45 | 46 | 47 | def parse_atoms(string: str) -> dict: 48 | match = patterns['atoms'].search(string) 49 | 50 | if match is not None: 51 | return { 52 | 'units': match.group('units'), 53 | 'atoms': [ 54 | { 55 | 'species': line.split()[0], 56 | 'basis_vector': [ 57 | float(x) for x in re.split(r'[ \t,;]+', line.strip())[1:] 58 | ], 59 | } 60 | for line in match.group('atoms').splitlines() 61 | ] 62 | } 63 | else: 64 | return None 65 | 66 | 67 | def parse_projections(string: str) -> dict: 68 | match = patterns['projections'].search(string) 69 | 70 | if match is not None: 71 | return { 72 | 'units': match.group('units'), 73 | 'projections': match.group('projections').splitlines(), 74 | } 75 | else: 76 | return None 77 | 78 | 79 | def parse_kpoints(string: str) -> dict: 80 | return { 81 | 'kpoints': [ 82 | [float(x) for x in line.split()] for line in string.splitlines() 83 | ] 84 | } 85 | 86 | 87 | def parse_win_raw(string: str) -> dict: 88 | """ 89 | Parse WIN 90 | 91 | Arguments: 92 | string: the WIN text 93 | 94 | Returns: 95 | the parsed WIN 96 | """ 97 | comments = _core.extract_comments(string) 98 | parameters = _core.parse_parameters(_core.extract_parameters(string)) 99 | blocks = _core.parse_blocks(_core.extract_blocks(string)) 100 | 101 | parsed_win = { 102 | 'comments': comments, 103 | 'parameters': parameters, 104 | 'blocks': blocks, 105 | } 106 | if 'unit_cell_cart' in blocks: 107 | parsed_win['unit_cell_cart'] = parse_unit_cell(blocks['unit_cell_cart']) 108 | if 'atoms_cart' in blocks: 109 | parsed_win['atoms_cart'] = parse_atoms(blocks['atoms_cart']) 110 | if 'atoms_frac' in blocks: 111 | parsed_win['atoms_frac'] = parse_atoms(blocks['atoms_frac']) 112 | if 'projections' in blocks: 113 | parsed_win['projections'] = parse_projections(blocks['projections']) 114 | if 'kpoints' in blocks: 115 | parsed_win['kpoints'] = parse_kpoints(blocks['kpoints']) 116 | 117 | return parsed_win 118 | -------------------------------------------------------------------------------- /src/wannier90io/_wout.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import typing 3 | import re 4 | 5 | from . import _core 6 | 7 | 8 | __all__ = ['parse_wout', 'parse_wout_iteration_info'] 9 | 10 | 11 | patterns = { 12 | 'convergence_line': re.compile( 13 | r'^\s*' 14 | r'(?P\d+)\s+' 15 | rf'(?P{_core.expressions["float"]})\s+' 16 | rf'(?P{_core.expressions["float"]})\s+' 17 | rf'(?P{_core.expressions["float"]})\s+' 18 | rf'(?P