├── .gitattributes
├── .github
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── ISSUE_TEMPLATE
│ ├── 00_bug_report.md
│ ├── 10_feature_request.md
│ └── 20_help_request.md
├── PUll_REQUEST_TEMPLATE
│ └── pull_request_template.md
├── dependabot.yml
└── workflows
│ ├── pypi.yaml
│ └── pythonapp.yml
├── .gitignore
├── .readthedocs.yml
├── CHANGELOG.md
├── CITATION.cff
├── LICENSE.txt
├── MANIFEST.in
├── README.rst
├── codecov.yaml
├── conftest.py
├── data
└── carboxylic_acids
│ └── dataset.csv
├── doc_environment.yml
├── doc_requirements.txt
├── docs
├── Makefile
├── _images
│ ├── default_tree.jpg
│ ├── merged_tree.jpg
│ ├── nac_worflow.png
│ └── tree.png
├── _static
│ └── default.css
├── _templates
│ └── layout.html
├── absorption_spectrum.rst
├── conf.py
├── coop.rst
├── derivative_couplings.rst
├── distribute_absorption_spectrum.rst
├── docs_command_line.rst
├── docs_cp2k_interface.rst
├── docs_derivative_coupling.rst
├── docs_integrals.rst
├── docs_molecular_orbitals.rst
├── docs_workflows.rst
├── documentation.rst
├── includereadme.rst
├── index.rst
├── intro.rst
├── ipr.rst
├── make.bat
├── single_points.rst
└── theory.rst
├── install_requirements.txt
├── libint
├── compute_integrals.cc
├── include
│ ├── compute_integrals.hpp
│ └── namd.hpp
└── py_compute_integrals.cc
├── licenses
├── LICENSE_LIBHDF5.txt
├── LICENSE_LIBINT2.txt
└── README.rst
├── linting_requirements.txt
├── nanoqm
├── __init__.py
├── _data.py
├── _logger.py
├── _monkey_patch.py
├── analysis
│ ├── __init__.py
│ └── tools.py
├── basis
│ ├── BASIS_ADMM
│ ├── BASIS_ADMM_MOLOPT
│ ├── BASIS_MOLOPT
│ ├── GTH_POTENTIALS
│ ├── aux_fit.json
│ └── valence_electrons.json
├── common.py
├── compute_integrals.pyi
├── integrals
│ ├── __init__.py
│ ├── multipole_matrices.py
│ └── nonAdiabaticCoupling.py
├── py.typed
├── schedule
│ ├── __init__.py
│ ├── components.py
│ ├── scheduleCP2K.py
│ └── scheduleCoupling.py
└── workflows
│ ├── __init__.py
│ ├── distribute_jobs.py
│ ├── initialization.py
│ ├── input_validation.py
│ ├── orbitals_type.py
│ ├── run_workflow.py
│ ├── schemas.py
│ ├── templates.py
│ ├── tools.py
│ ├── workflow_coop.py
│ ├── workflow_coupling.py
│ ├── workflow_ipr.py
│ ├── workflow_single_points.py
│ └── workflow_stddft_spectrum.py
├── pyproject.toml
├── scripts
├── convert_legacy_hdf5.py
├── download_cp2k.sh
├── get_whl_name.py
├── hamiltonians
│ ├── plot_couplings.py
│ ├── plot_mos_energies.py
│ └── plot_spectra.py
├── prepare_test_dir.sh
├── pyxaid
│ ├── iconds_excess_energy.py
│ ├── plot_average_energy.py
│ ├── plot_cooling.py
│ ├── plot_spectra_pyxaid.py
│ └── plot_states_pops.py
├── qmflows
│ ├── README.rst
│ ├── convolution.py
│ ├── coordination_ldos.py
│ ├── dos_cp2k.py
│ ├── mergeHDF5.py
│ ├── opt_anion_cp2k.py
│ ├── opt_cation_cp2k.py
│ ├── opt_cp2k.py
│ ├── plot_dos.py
│ ├── removeHDF5folders.py
│ └── remove_mos_hdf5.py
└── reenumerate.py
├── setup.py
├── test
├── __init__.py
├── test_absorption_spectrum.py
├── test_analysis_tools.py
├── test_citation_cff.py
├── test_cli.py
├── test_coupling.py
├── test_cpk2_error_call.py
├── test_distribute.py
├── test_files
│ ├── ALL_BASIS_SETS
│ ├── BASIS_ADMM
│ ├── BASIS_ADMM_MOLOPT
│ ├── BASIS_MOLOPT
│ ├── BASIS_MOLOPT_UZH
│ ├── C.xyz
│ ├── Cd.hdf5
│ ├── Cd.xyz
│ ├── Cd33Se33.hdf5
│ ├── Cd33Se33.xyz
│ ├── Cd33Se33_fivePoints.xyz
│ ├── F2.xyz
│ ├── GTH_POTENTIALS
│ ├── HF.xyz
│ ├── He.hdf5
│ ├── He.xyz
│ ├── O2_coupling.xyz
│ ├── ethylene.hdf5
│ ├── ethylene.xyz
│ ├── ethylene_couplings.xyz
│ ├── file_cell_parameters.txt
│ ├── file_distribute_cell_parameters.txt
│ ├── guanine_distribution.yml
│ ├── input_couplings_alphas.yml
│ ├── input_couplings_both.yml
│ ├── input_fast_test_derivative_couplings.yml
│ ├── input_test_IPR.yml
│ ├── input_test_absorption_spectrum.yml
│ ├── input_test_absorption_spectrum_all.yml
│ ├── input_test_absorption_spectrum_unrestricted.yml
│ ├── input_test_b3lyp.yml
│ ├── input_test_coop.yml
│ ├── input_test_distribute_absorption_spectrum.yml
│ ├── input_test_distribute_derivative_couplings.yml
│ ├── input_test_pbe.yml
│ ├── input_test_pbe0.yml
│ ├── input_test_single_points.yml
│ ├── input_test_single_points_hybrid_functional.yml
│ ├── legacy.hdf5
│ ├── mypy.ini
│ ├── oxygen.hdf5
│ ├── test_files.hdf5
│ └── test_initialization.yaml
├── test_initialization.py
├── test_input_validation.py
├── test_multipole.py
├── test_read_cp2k_basis.py
├── test_schemas.py
├── test_tools.py
├── test_version.py
├── test_workflow_IPR.py
├── test_workflow_coop.py
├── test_workflow_single_points.py
└── utilsTest.py
├── test_requirements.txt
└── typings
├── README.rst
├── h5py
└── __init__.pyi
├── mendeleev
├── __init__.pyi
├── mendeleev.pyi
└── models.pyi
├── noodles
├── __init__.pyi
├── interface
│ ├── __init__.pyi
│ ├── decorator.pyi
│ └── functions.pyi
└── serial
│ ├── __init__.pyi
│ ├── dataclass.pyi
│ └── registry.pyi
├── scipy
├── __init__.pyi
├── constants.pyi
├── linalg.pyi
├── optimize.pyi
└── spatial
│ ├── __init__.pyi
│ └── distance.pyi
├── scm
├── __init__.pyi
└── plams
│ ├── __init__.pyi
│ ├── core
│ ├── __init__.pyi
│ ├── functions.pyi
│ └── settings.pyi
│ └── mol
│ ├── __init__.pyi
│ ├── atom.pyi
│ ├── bond.pyi
│ ├── molecule.pyi
│ └── pdbtools.pyi
└── wheel
├── __init__.pyi
└── bdist_wheel.pyi
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
2 | test/test_files/** linguist-generated
3 |
--------------------------------------------------------------------------------
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, gender identity and expression, level of experience,
9 | education, socio-economic status, nationality, personal appearance, race,
10 | religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at f.zapata@esciencecenter.nl. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing guidelines
2 |
3 | We welcome any kind of contribution to our software, from a simple comment or question to a full-fledged [pull request](https://help.github.com/articles/about-pull-requests/). Please read and follow our [Code of Conduct](CODE_OF_CONDUCT.md).
4 |
5 | A contribution can be one of the following cases:
6 |
7 | 1. you have a question;
8 | 1. you think you may have found a bug (including unexpected behavior);
9 | 1. you want to make some kind of change to the code base (e.g. to fix a bug, to add a new feature, to update documentation).
10 |
11 | The sections below outline the steps in each case.
12 |
13 | ## You have a question
14 |
15 | 1. use the search functionality [here](https://github.com/SCM-NV/nano-qmflows/issues) to see if someone already filed the same issue;
16 | 1. if your issue search did not yield any relevant results, make a new issue;
17 | 1. apply the "Question" label; apply other labels when relevant.
18 |
19 | ## You think you may have found a bug
20 |
21 | 1. use the search functionality [here](https://github.com/SCM-NV/nano-qmflows/issues) to see if someone already filed the same issue;
22 | 1. if your issue search did not yield any relevant results, make a new issue, making sure to provide enough information to the rest of the community to understand the cause and context of the problem. Depending on the issue, you may want to include:
23 | - the [SHA hashcode](https://help.github.com/articles/autolinked-references-and-urls/#commit-shas) of the commit that is causing your problem;
24 | - some identifying information (name and version number) for dependencies you're using;
25 | - information about the operating system;
26 | 1. apply relevant labels to the newly created issue.
27 |
28 | ## You want to make some kind of change to the code base
29 |
30 | 1. (**important**) announce your plan to the rest of the community _before you start working_. This announcement should be in the form of a (new) issue;
31 | 1. (**important**) wait until some kind of consensus is reached about your idea being a good idea;
32 | 1. if needed, fork the repository to your own Github profile and create your own feature branch off of the latest master commit. While working on your feature branch, make sure to stay up to date with the master branch by pulling in changes, possibly from the 'upstream' repository (follow the instructions [here](https://help.github.com/articles/configuring-a-remote-for-a-fork/) and [here](https://help.github.com/articles/syncing-a-fork/));
33 | 1. make sure the existing tests still work by running ``pytest test``;
34 | 1. add your own tests (if necessary);
35 | 1. update or expand the documentation;
36 | 1. [push](http://rogerdudler.github.io/git-guide/) your feature branch to (your fork of) the [nano-qmflows](https://github.com/SCM-NV/nano-qmflows) repository on GitHub;
37 | 1. create the pull request, e.g. following the instructions [here](https://help.github.com/articles/creating-a-pull-request/).
38 |
39 | In case you feel like you've made a valuable contribution, but you don't know how to write or run tests for it, or how to generate the documentation: don't let this discourage you from making the pull request; we can help you! Just go ahead and submit the pull request, but keep in mind that you might be asked to append additional commits to your pull request.
40 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/00_bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Something doesn't work like I expected.
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | Use your best judgment to provide a useful level of information. Depending on the nature of the issue, consider including, e.g.
11 |
12 | - Which version of the software you're running
13 | - Any logging or error output you see
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/10_feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: I would like a new feature to be included in the library
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | Tell us what would be a nice feature to make the **nano-qmflows** library even better!
11 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/20_help_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Help wanted
3 | about: I need some help with the code
4 | title: ''
5 | labels: help wanted
6 | assignees: ''
7 |
8 | ---
9 |
--------------------------------------------------------------------------------
/.github/PUll_REQUEST_TEMPLATE/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ### All Submissions:
2 |
3 | * [ ] Have you followed the guidelines in our Contributing document?
4 | * [ ] Have you checked to ensure there aren't other open [Pull Requests](https://github.com/SCM-NV/nano-qmflows/pulls) for the same update/change?
5 |
6 |
7 | ### New Feature Submissions:
8 |
9 | 1. [ ] Does your submission pass tests?
10 | 2. [ ] Have you checked your code quality (e.g. using [flake8](http://flake8.pycqa.org/en/latest/)) prior to submission?
11 |
12 | ### Changes to Core Features:
13 |
14 | * [ ] Have you added an explanation of what your changes do and why you'd like us to include them?
15 | * [ ] Have you written new tests for your core changes, as applicable?
16 | * [ ] Have you successfully run tests with your changes locally?
17 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions" # See documentation for possible values
9 | directory: "/" # Location of package manifests
10 | schedule:
11 | interval: "monthly"
12 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 | /tmp
26 | nanoqm/_version.py
27 | wheelhouse/
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .coverage
43 | .coverage.*
44 | .cache
45 | nosetests.xml
46 | coverage.xml
47 | *.cover
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 |
56 | # Sphinx documentation
57 | docs/_build/
58 |
59 | # PyBuilder
60 | target/
61 |
62 | # emacs
63 | *~
64 |
65 | #vi
66 | *.swp
67 |
68 | #Profiling
69 | *.lprof
70 | *.prof
71 | *.out
72 |
73 | # pytest
74 | .pytest*
75 |
76 | # VScode
77 | .vscode
78 |
79 | # Mypy
80 | .mypy_cache/
81 |
82 | # MacOS
83 | .DS_Store
84 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Optionally build your docs in additional formats such as PDF
9 | formats: all
10 |
11 | # Optionally set the version of Python and requirements required to build your docs
12 | conda:
13 | environment: doc_environment.yml
14 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | # YAML 1.2
2 | # Metadata for citation of this software according to the CFF format (https://citation-file-format.github.io/)
3 | cff-version: 1.2.0
4 | message: If you use this software, please cite it as below.
5 | title: nano-qmflows
6 | abstract: Nano-QMFlows is a generic python library for computing (numerically) electronic properties for nanomaterials like the non-adiabatic coupling vectors (NACV) using several quantum chemical (QM) packages.
7 | authors:
8 | - given-names: Felipe
9 | family-names: Zapata
10 | orcid: "https://orcid.org/0000-0001-8286-677X"
11 | - given-names: Bas
12 | family-names: van Beek
13 | orcid: "https://orcid.org/0000-0003-2463-6559"
14 |
15 | keywords:
16 | - computational-chemistry
17 | - materials-science
18 | - python
19 | - Workflows
20 | version: '0.14.2'
21 | date-released: "2023-10-11" # yyyy-mm-dd
22 | repository-code: https://github.com/SCM-NV/nano-qmflows
23 | license: "Apache-2.0"
24 | doi: 10.5281/zenodo.2576893
25 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | exclude test/*
2 | include *_requirements.txt
3 |
4 | include nanoqm/py.typed
5 | recursive-include nanoqm *.pyi
6 | include nanoqm/basis/GTH_POTENTIALS
7 | include nanoqm/basis/*.json
8 | include nanoqm/basis/BASIS_*
9 |
10 | include libint/include/*.hpp
11 |
--------------------------------------------------------------------------------
/codecov.yaml:
--------------------------------------------------------------------------------
1 | coverage:
2 | status:
3 | project:
4 | default:
5 | target: 80
6 | patch:
7 | default:
8 | target: 0
9 |
--------------------------------------------------------------------------------
/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import logging
4 |
5 | import os
6 | import shutil
7 | from collections.abc import Generator
8 |
9 | import pytest
10 | from nanoqm._logger import logger as nanoqm_logger
11 | from scm.plams import add_to_class, Cp2kJob
12 |
13 |
@add_to_class(Cp2kJob)
def get_runscript(self) -> str:
    """Build the CP2K run command without an MPI launcher (mpirun/srun), \
    since launching through mpirun/srun can cause issues with some executables.

    Monkey-patched into the PLAMS ``Cp2kJob`` class.

    """
    # Default to the serial/OpenMP binary when no executable is configured.
    executable = self.settings.get("executable", "cp2k.ssmp")
    inp_file = self._filename('inp')
    out_file = self._filename('out')
    return f"{executable} -i {inp_file} -o {out_file}"
24 |
25 |
@pytest.fixture(autouse=True, scope="session")
def is_release() -> Generator[bool, None, None]:
    """Yield whether the test suite runs for a nano-qmflows release.

    Controlled by the ``IS_RELEASE`` environment variable, which must hold
    an integer (absent is treated as ``0``, i.e. not a release).
    """
    raw = os.environ.get("IS_RELEASE", 0)
    try:
        flag = bool(int(raw))
    except ValueError as ex:
        raise ValueError("The `IS_RELEASE` environment variable expected an integer") from ex
    yield flag
34 |
35 |
@pytest.fixture(autouse=True, scope="function")
def cleunup_files() -> Generator[None, None, None]:
    """Delete scratch artifacts left in the CWD after each test."""
    # NOTE(review): the fixture name contains a typo ("cleunup" -> "cleanup");
    # it is kept as-is because renaming would change the public fixture name.
    # TODO: Investigate if these files can be removed by their respective test(s)
    yield None
    for leftover in ("quantum.hdf5", "input_parameters.yml"):
        if os.path.isfile(leftover):
            os.remove(leftover)
    if os.path.isdir("overlaps"):
        shutil.rmtree("overlaps")
46 |
47 |
@pytest.fixture(autouse=True, scope="session")
def prepare_logger() -> Generator[None, None, None]:
    """Silence logging output to stdout for the duration of the test session."""
    # Imported only for their side effects — presumably so the "noodles" and
    # "qmflows" loggers are configured before their handlers are detached.
    import noodles
    import qmflows

    targets = (
        nanoqm_logger,
        logging.getLogger("noodles"),
        logging.getLogger("qmflows"),
    )

    # Detach every handler, remembering it so it can be restored afterwards.
    saved = []
    for log in targets:
        handlers = log.handlers.copy()
        saved.append((log, handlers))
        for handler in handlers:
            log.removeHandler(handler)

    yield None

    # Re-attach the original handlers in their original order.
    for log, handlers in saved:
        for handler in handlers:
            log.addHandler(handler)
75 |
--------------------------------------------------------------------------------
/doc_environment.yml:
--------------------------------------------------------------------------------
1 | name: nanoqm-doc
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python
6 | - boost
7 | - eigen
8 | - "libint>=2.6.0"
9 | - highfive
10 | - pip
11 | - pip:
12 | - -r doc_requirements.txt
13 | - .
14 |
--------------------------------------------------------------------------------
/doc_requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx>=2.1
2 | sphinx_rtd_theme
3 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = .
8 | BUILDDIR = _build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/_images/default_tree.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/default_tree.jpg
--------------------------------------------------------------------------------
/docs/_images/merged_tree.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/merged_tree.jpg
--------------------------------------------------------------------------------
/docs/_images/nac_worflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/nac_worflow.png
--------------------------------------------------------------------------------
/docs/_images/tree.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/tree.png
--------------------------------------------------------------------------------
/docs/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {% extends "!layout.html" %}
2 |
3 | {%- block sidebarlogo %}
4 | {%- if logo %}
5 |
6 |
7 |
8 | {%- endif %}
9 | {%- endblock %}
--------------------------------------------------------------------------------
/docs/coop.rst:
--------------------------------------------------------------------------------
1 | Crystal Orbital Overlap Population (COOP) calculation
2 | =====================================================
3 |
4 | The workflow coop_calculation allows to compute the crystal orbital overlap population between two selected elements.
5 |
6 | Preparing the input
7 | -------------------
8 |
9 | The following is an example of input file to perform the COOP calculation between Cd and Se for the Cd33Se33 system.
10 |
11 | .. code-block:: yaml
12 |
13 | workflow:
14 | coop_calculation
15 |
16 | project_name: Cd33Se33
17 | active_space: [50, 50]
18 | path_hdf5: "Cd33Se33.hdf5"
19 | path_traj_xyz: "Cd33Se33.xyz"
20 | scratch_path: "/tmp/COOP"
21 |
22 | coop_elements: ["Cd", "Se"]
23 |
24 | cp2k_general_settings:
25 | basis: "DZVP-MOLOPT-SR-GTH"
26 | potential: "GTH-PBE"
27 | cell_parameters: 28.0
28 | periodic: none
29 | executable: cp2k.popt
30 |
31 | cp2k_settings_main:
32 | specific:
33 | template: pbe_main
34 | cp2k:
35 | force_eval:
36 | dft:
37 | scf:
38 | eps_scf: 1e-6
39 |
40 | cp2k_settings_guess:
41 | specific:
42 | template:
43 | pbe_guess
44 |
45 |
46 | In your working directory, copy the previous input into an *input_test_coop.yml* file.
47 | Also copy locally the file containing the coordinates of the relaxed Cd33Se33 system, Cd33Se33.xyz_.
48 |
49 | Your *input_test_coop.yml* input file now contains all settings to perform the coop calculations and needs to be edited according to your system and preferences.
50 | Please note that this input is very similar to the basic example of single point calculation provided in a previous tutorial_ (please refer to it for a more extensive description of the above options)
51 | except for the following options: **workflow**, **coop_elements**.
52 |
53 | - **workflow**: The workflow you need for your calculations, in this case set to coop_calculation.
54 | - **coop_elements**: List of the two elements to calculate the COOP for, here Cd and Se.
55 |
56 | In the cp2k_general_settings, you can customize the settings used to generate the cp2k input. To help you create your custom input requirements, please consult the cp2k manual_ and the templates_ available in nano-qmflows.
57 |
58 | .. _Cd33Se33.xyz: https://github.com/SCM-NV/nano-qmflows/blob/master/test/test_files/Cd33Se33.xyz
59 | .. _tutorial: https://qmflows-namd.readthedocs.io/en/latest/single_points.html
60 | .. _manual: https://manual.cp2k.org/
61 | .. _templates: https://github.com/SCM-NV/nano-qmflows/blob/master/nanoqm/workflows/templates.py
62 |
63 | Setting up the calculation
64 | ---------------------------
65 |
66 | Once all settings of your yml input have been customized, you can launch your coop calculation.
67 |
68 | - First, activate the conda environment with QMFlows:
69 |
70 | ``conda activate qmflows``
71 |
72 | - Then, load the module with your version of cp2k, for example:
73 |
74 | ``module load CP2K/7.1.0``
75 |
76 | - Finally, use the command run_workflow.py to submit your calculation.
77 |
78 | ``run_workflow.py -i input_test_coop.yml``
79 |
80 | Results
81 | -------
82 |
83 | Once your calculation has finished successfully, you will find a *COOP.txt* file in your working directory.
84 | The two columns of this file contain, respectively, the orbitals’ energies and the corresponding COOP values for the selected atoms pair.
85 |
--------------------------------------------------------------------------------
/docs/docs_command_line.rst:
--------------------------------------------------------------------------------
1 | Command line interface
2 | ----------------------
3 | Running a workflow
4 | ##################
5 | .. automodule:: nanoqm.workflows.run_workflow
6 |
7 | Workflows distribution
8 | ######################
9 | .. automodule:: nanoqm.workflows.distribute_jobs
--------------------------------------------------------------------------------
/docs/docs_cp2k_interface.rst:
--------------------------------------------------------------------------------
1 | CP2K Interface
2 | --------------
3 | .. automodule:: nanoqm.schedule.scheduleCP2K
--------------------------------------------------------------------------------
/docs/docs_derivative_coupling.rst:
--------------------------------------------------------------------------------
1 | Derivative Couplings
2 | --------------------
3 | .. automodule:: nanoqm.integrals.nonAdiabaticCoupling
--------------------------------------------------------------------------------
/docs/docs_integrals.rst:
--------------------------------------------------------------------------------
1 | Integrals
2 | ---------
3 | .. automodule:: nanoqm.integrals.multipole_matrices
--------------------------------------------------------------------------------
/docs/docs_molecular_orbitals.rst:
--------------------------------------------------------------------------------
1 | Molecular Orbitals
2 | ------------------
3 | .. automodule:: nanoqm.schedule.components
--------------------------------------------------------------------------------
/docs/docs_workflows.rst:
--------------------------------------------------------------------------------
1 | Workflows
2 | ---------
3 |
4 | The following workflows are available:
5 |
6 | .. autofunction:: nanoqm.workflows.workflow_coop.workflow_crystal_orbital_overlap_population
7 | .. autofunction:: nanoqm.workflows.workflow_coupling.workflow_derivative_couplings
8 | .. autofunction:: nanoqm.workflows.workflow_ipr.workflow_ipr
9 | .. autofunction:: nanoqm.workflows.workflow_single_points.workflow_single_points
10 | .. autofunction:: nanoqm.workflows.workflow_stddft_spectrum.workflow_stddft
11 |
--------------------------------------------------------------------------------
/docs/documentation.rst:
--------------------------------------------------------------------------------
1 |
2 | For a more detailed description of **nano-qmflows** read the documentation
3 |
4 | .. toctree::
5 | docs_command_line
6 | docs_cp2k_interface
7 | docs_derivative_coupling
8 | docs_molecular_orbitals
9 | docs_integrals
10 | docs_workflows
--------------------------------------------------------------------------------
/docs/includereadme.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 |
2 | Welcome to nano-qmflows's documentation!
3 | ========================================
4 |
5 | Contents:
6 |
7 | .. toctree::
8 | :maxdepth: 2
9 | :caption: Introduction
10 |
11 | includereadme
12 | theory
13 |
14 | .. toctree::
15 | :maxdepth: 2
16 | :caption: Tutorials
17 |
18 | intro
19 | single_points
20 | coop
21 | ipr
22 | derivative_couplings
23 | absorption_spectrum
24 | distribute_absorption_spectrum
25 |
26 | .. toctree::
27 | :maxdepth: 2
28 | :caption: Library Documentation
29 |
30 | documentation
31 |
32 |
33 |
34 | Indices and tables
35 | ==================
36 |
37 | * :ref:`genindex`
38 | * :ref:`modindex`
39 | * :ref:`search`
40 |
41 |
--------------------------------------------------------------------------------
/docs/intro.rst:
--------------------------------------------------------------------------------
1 | Introduction to the Tutorials
2 | =============================
3 |
4 | The *nano-qmflows* package offers the following set of workflows to compute different properties:
5 | * single_points
6 | * coop_calculation
7 | * ipr_calculation
8 | * derivative_coupling
9 | * absorption_spectrum
10 | * distribute_absorption_spectrum
11 |
12 | Known Issues
13 | ------------
14 |
15 | Distribution of the workflow over multiple nodes
16 | ################################################
17 |
18 | `CP2K` can use multiple nodes to perform the computation of the molecular orbitals using the **MPI** protocol. Unfortunately, the `MPI` implementation for the computation of the *derivative coupling matrix* is experimental and unstable. The practical consequence of the aforementioned issue is that **the calculation of the coupling matrices is carried out in only 1 computational node**. It means that if you ask for more than 1 node to compute the molecular orbitals with `CP2K`, once the workflow starts to compute the *derivative couplings* only 1 node will be used at a time and the rest will remain idle, waiting for computational resources.
19 |
20 |
21 | Reporting a bug or requesting a feature
22 | ---------------------------------------
23 | To report an issue or request a new feature you can use the github issues_ tracker.
24 |
25 | .. _HDF5: http://www.h5py.org/
26 | .. _issues: https://github.com/SCM-NV/nano-qmflows/issues
27 | .. _QMflows: https://github.com/SCM-NV/qmflows
28 | .. _PYXAID: https://www.acsu.buffalo.edu/~alexeyak/pyxaid/overview.html
29 | .. _YAML: https://pyyaml.org/wiki/PyYAML
30 |
31 |
32 |
--------------------------------------------------------------------------------
/docs/ipr.rst:
--------------------------------------------------------------------------------
1 | Inverse Participation Ratio (IPR) calculation
2 | =============================================
3 |
4 | The workflow ipr_calculation returns the inverse participation ratio for the selected orbitals.
5 | For finite systems, the IPR is defined as the inverse of the number of atoms that contribute to a given electronic state i.
6 | It assumes its maximum value, 1, in the case of a state localized to a single atom (1/1) and tends to 0 (1/*N*, where *N* is the total number of atoms in the system) when the wave function is distributed equally over all atoms.
7 |
8 | Preparing the input
9 | -------------------
10 |
11 | The following is an example of input file to perform the IPR calculation for the Cd33Se33 system.
12 |
13 | .. code-block:: yaml
14 |
15 | workflow:
16 | ipr_calculation
17 |
18 | project_name: Cd33Se33
19 | active_space: [50, 50]
20 | path_hdf5: "Cd33Se33.hdf5"
21 | path_traj_xyz: "Cd33Se33.xyz"
22 | scratch_path: "/tmp/IPR"
23 |
24 | cp2k_general_settings:
25 | basis: "DZVP-MOLOPT-SR-GTH"
26 | potential: "GTH-PBE"
27 | cell_parameters: 28.0
28 | periodic: none
29 | executable: cp2k.popt
30 |
31 | cp2k_settings_main:
32 | specific:
33 | template: pbe_main
34 | cp2k:
35 | force_eval:
36 | dft:
37 | scf:
38 | eps_scf: 1e-6
39 |
40 | cp2k_settings_guess:
41 | specific:
42 | template:
43 | pbe_guess
44 |
45 |
46 | In your working directory, copy the previous input into an *input_test_ipr.yml* file.
47 | Also copy locally the file containing the coordinates of the relaxed Cd33Se33 system, Cd33Se33.xyz_.
48 |
49 | Your *input_test_ipr.yml* input file now contains all settings to perform the IPR calculation and needs to be edited according to your system and preferences.
50 | Please note that this input is very similar to the basic example of single point calculation provided in a previous tutorial_ (please refer to it for a more extensive description of the above options)
51 | except for the **workflow** option, set in this case to *ipr_calculation*.
52 |
53 | Here again you can customize the settings used to generate the cp2k input in the cp2k_general_settings. To help you creating your custom input requirements, please consult the cp2k manual_ and the templates_ available in nano-qmflows.
54 |
55 | .. _Cd33Se33.xyz: https://github.com/SCM-NV/nano-qmflows/blob/master/test/test_files/Cd33Se33.xyz
56 | .. _tutorial: https://qmflows-namd.readthedocs.io/en/latest/single_points.html
57 | .. _manual: https://manual.cp2k.org/
58 | .. _templates: https://github.com/SCM-NV/nano-qmflows/blob/master/nanoqm/workflows/templates.py
59 |
60 | Setting up the calculation
61 | ---------------------------
62 |
63 | Once all settings of your yml input have been customized, you can launch your IPR calculation.
64 |
65 | - First, activate the conda environment with QMFlows:
66 |
67 | ``conda activate qmflows``
68 |
69 | - Then, load the module with your version of cp2k, for example:
70 |
71 | ``module load CP2K/7.1.0``
72 |
73 | - Finally, use the command run_workflow.py to submit your calculation.
74 |
75 | ``run_workflow.py -i input_test_ipr.yml``
76 |
77 | Results
78 | -------
79 |
80 | Once your calculation has finished successfully, you will find a *IPR.txt* file in your working directory.
81 | The two columns of this file contain, respectively, the orbitals’ energies and the corresponding IPR values.
82 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/theory.rst:
--------------------------------------------------------------------------------
1 | Theory
2 | ==========
3 |
4 | Nonadiabatic coupling matrix
5 | -----------------------------
6 |
7 | The current implementation of the nonadiabatic coupling is based on:
8 | Plasser, F.; Granucci, G.; Pittner, j.; Barbatti, M.; Persico, M.;
9 | Lischka. *Surface hopping dynamics using a locally diabatic formalism:
10 | Charge transfer in the ethylene dimer cation and excited state dynamics
11 | in the 2-pyridone dimer*. **J. Chem. Phys. 2012, 137, 22A514.**
12 |
13 | The total time-dependent wave function :math:`\Psi(\mathbf{R}, t)` can be
14 | expressed in terms of a linear combination of ``N`` adiabatic electronic
15 | eigenstates :math:`\phi_{i}(\mathbf{R}(t))`,
16 |
17 | .. math::
18 | \Psi(\mathbf{R}, t) = \sum^{N}_{i=1} c_i(t)\phi_{i}(\mathbf{R}(t)) \quad \mathbf(1)
19 |
20 | The time-dependent coefficients are propagated according to
21 |
22 | .. math::
23 |
24 | \frac{dc_j(t)}{dt} = -i\hbar^2 c_j(t) E_j(t) - \sum^{N}_{i=1}c_i(t)\sigma_{ji}(t) \quad \mathbf(2)
25 |
26 | where :math:`E_j(t)` is the energy of the jth adiabatic state and :math:`\sigma_{ji}(t)` the nonadiabatic matrix, whose elements are given by the expression
27 |
28 | .. math::
29 | \sigma_{ji}(t) = \langle \phi_{j}(\mathbf{R}(t)) \mid \frac{\partial}{\partial t} \mid \phi_{i}(\mathbf{R}(t)) \rangle \quad \mathbf(3)
30 |
31 | which can be approximated using three consecutive molecular geometries
32 |
33 | .. math::
34 | \sigma_{ji}(t) \approx \frac{1}{4 \Delta t} (3\mathbf{S}_{ji}(t) - 3\mathbf{S}_{ij}(t) - \mathbf{S}_{ji}(t-\Delta t) + \mathbf{S}_{ij}(t-\Delta t)) \quad \mathbf(4)
35 |
36 | where :math:`\mathbf{S}_{ji}(t)` is the overlap matrix between two consecutive time steps
37 |
38 | .. math::
39 | \mathbf{S}_{ij}(t) = \langle \phi_{j}(\mathbf{R}(t-\Delta t)) \mid \phi_{i}(\mathbf{R}(t)) \rangle \quad \mathbf(5)
40 |
41 | and the overlap matrix is calculated in terms of atomic orbitals
42 |
43 | .. math::
44 | \mathbf{S}_{ji}(t) = \sum_{\mu} C^{*}_{\mu i}(t) \sum_{\nu} C_{\nu j}(t - \Delta t) \mathbf{S}_{\mu \nu}(t) \quad \mathbf(6)
45 |
46 | where :math:`C_{\mu i}` are the molecular orbital coefficients and :math:`\mathbf{S}_{\mu \nu}` the atomic orbital overlaps.
47 |
48 | .. math::
49 | \mathbf{S}_{\mu \nu}(\mathbf{R}(t), \mathbf{R}(t - \Delta t)) = \langle \chi_{\mu}(\mathbf{R}(t)) \mid \chi_{\nu}(\mathbf{R}(t - \Delta t)) \rangle \quad \mathbf(7)
50 |
51 |
52 | Nonadiabatic coupling algorithm implementation
53 | ----------------------------------------------
54 |
55 | The figure below shows schematically the workflow for calculating the nonadiabatic
56 | coupling matrices from a molecular dynamics trajectory. The uppermost node represents
57 | a molecular dynamics
58 | trajectory that is subsequently divided into its components and for each geometry the molecular
59 | orbitals are computed. These molecular orbitals are stored in an HDF5_
60 | binary file and subsequent calculations retrieve sets of three molecular orbitals that are
61 | used to calculate the nonadiabatic coupling matrix using equations **4** to **7**.
62 | These coupling matrices are then fed to the PYXAID_ package to carry out nonadiabatic molecular dynamics.
63 |
64 | The overlaps between primitives are calculated using the Obara-Saika recursive scheme and have been implemented using the C++ libint2_ library for efficiency reasons.
65 | The libint2_ library uses either OpenMP_ or C++ threads to distribute the integrals among the available CPUs.
66 | Also, all the heavy numerical processing is carried out by the highly optimized functions in NumPy_.
67 |
68 | The **nonadiabaticCoupling** package relies on *QMWorks* to run the quantum mechanical simulations using the `CP2K <https://www.cp2k.org/>`_ package. Also, the noodles_ library is used
69 | to schedule expensive numerical computations that are required to calculate the nonadiabatic coupling matrix.
70 |
71 |
72 | .. _OpenMP: https://www.openmp.org/
73 | .. _libint2: https://github.com/evaleev/libint/wiki
74 | .. _HDF5: http://www.h5py.org/
75 | .. _PYXAID: https://www.acsu.buffalo.edu/~alexeyak/pyxaid/overview.html
76 | .. _multiprocessing: https://docs.python.org/3.6/library/multiprocessing.html
77 | .. _NumPy: http://www.numpy.org
78 | .. _noodles: http://nlesc.github.io/noodles/
79 |
--------------------------------------------------------------------------------
/install_requirements.txt:
--------------------------------------------------------------------------------
1 | h5py>=2.9.0
2 | mendeleev>=0.1.0
3 | more-itertools>=2.4.0
4 | noodles>=0.3.4
5 | numpy>=1.17.3,<2
6 | scipy>=1.3.2
7 | schema>=0.6.0,!=0.7.5
8 | pyyaml>=5.1
9 | plams==1.5.1
10 | qmflows>=0.12.1
11 | packaging>=17.1
12 | Nano-Utils>=2.0.0
13 |
--------------------------------------------------------------------------------
/libint/include/compute_integrals.hpp:
--------------------------------------------------------------------------------
1 | /*
2 | * This module contains the implementation of several
3 | * kind of integrals used for non-adiabatic molecular dynamics,
4 | * including the overlaps integrals between different geometries
5 | * And the dipoles and quadrupoles to compute absorption spectra.
6 | * This module is based on libint and Eigen.
7 | * Copyright (C) 2018-2022 the Netherlands eScience Center.
8 | */
9 |
10 | #ifndef INT_H_
11 | #define INT_H_
12 |
13 | #include "namd.hpp"
14 |
15 | namd::Matrix compute_integrals_couplings(
16 | const std::string &path_xyz_1,
17 | const std::string &path_xyz_2,
18 | const std::string &path_hdf5,
19 | const std::string &basis_name
20 | );
21 |
22 | namd::Matrix compute_integrals_multipole(
23 | const std::string &path_xyz,
24 | const std::string &path_hdf5,
25 | const std::string &basis_name,
26 | const std::string &multipole
27 | );
28 |
29 | #endif // INT_H_
30 |
--------------------------------------------------------------------------------
/libint/include/namd.hpp:
--------------------------------------------------------------------------------
1 | /*
2 | * This module contains the implementation of several
3 | * kind of integrals used for non-adiabatic molecular dynamics,
4 | * including the overlaps integrals between different geometries
5 | * And the dipoles and quadrupoles to compute absorption spectra.
6 | * This module is based on libint and Eigen.
7 | * Copyright (C) 2018-2022 the Netherlands eScience Center.
8 | */
9 |
10 | #ifndef NAMD_H_
11 | #define NAMD_H_
12 |
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 | #include
19 | #include
20 | #include
21 | #include
22 |
23 | // integrals library
24 | #include
25 |
26 | // Eigen matrix algebra library
27 | #include
28 |
29 | // HDF5 functionality
30 | #include
31 | #include
32 | #include
33 |
34 | #if defined(_OPENMP)
35 | #include
36 | #endif
37 |
38 | namespace namd {
39 |
40 | using real_t = libint2::scalar_type;
41 | // import dense, dynamically sized Matrix type from Eigen;
42 | // this is a matrix with row-major storage
43 | // (http://en.wikipedia.org/wiki/Row-major_order) to meet the layout of the
44 | // integrals returned by the Libint integral library
45 | using Matrix =
46 | Eigen::Matrix;
47 |
48 | struct CP2K_Contractions {
49 | int l; // Angular momentum quantum number for a given shell-type
50 | int count; // Number of contractions for a given shell-type
51 | };
52 |
53 | struct CP2K_Basis_Atom {
54 | // Contains the basis specification for a given atom
55 | std::string symbol;
56 | libint2::svector> coefficients;
57 | libint2::svector> exponents;
58 | libint2::svector> basis_format;
59 | };
60 |
61 | // Map from atomic_number to symbol
62 | const std::unordered_map map_elements = {
63 | {1, "h"}, {2, "he"}, {3, "li"}, {4, "be"}, {5, "b"}, {6, "c"},
64 | {7, "n"}, {8, "o"}, {9, "f"}, {10, "ne"}, {11, "na"}, {12, "mg"},
65 | {13, "al"}, {14, "si"}, {15, "p"}, {16, "s"}, {17, "cl"}, {18, "ar"},
66 | {19, "k"}, {20, "ca"}, {21, "sc"}, {22, "ti"}, {23, "v"}, {24, "cr"},
67 | {25, "mn"}, {26, "fe"}, {27, "co"}, {28, "ni"}, {29, "cu"}, {30, "zn"},
68 | {31, "ga"}, {32, "ge"}, {33, "as"}, {34, "se"}, {35, "br"}, {36, "kr"},
69 | {37, "rb"}, {38, "sr"}, {39, "y"}, {40, "zr"}, {41, "nb"}, {42, "mo"},
70 | {43, "tc"}, {44, "ru"}, {45, "rh"}, {46, "pd"}, {47, "ag"}, {48, "cd"},
71 | {49, "in"}, {50, "sn"}, {51, "sb"}, {52, "te"}, {53, "i"}, {54, "xe"},
72 | {55, "cs"}, {56, "ba"}, {57, "la"}, {58, "ce"}, {59, "pr"}, {60, "nd"},
73 | {61, "pm"}, {62, "sm"}, {63, "eu"}, {64, "gd"}, {65, "tb"}, {66, "dy"},
74 | {67, "ho"}, {68, "er"}, {69, "tm"}, {70, "yb"}, {71, "lu"}, {72, "hf"},
75 | {73, "ta"}, {74, "w"}, {75, "re"}, {76, "os"}, {77, "ir"}, {78, "pt"},
76 | {79, "au"}, {80, "hg"}, {81, "tl"}, {82, "pb"}, {83, "bi"}, {84, "po"},
77 | {85, "at"}, {86, "rn"}, {87, "fr"}, {88, "ra"}, {89, "ac"}, {90, "th"},
78 | {91, "pa"}, {92, "u"}, {93, "np"}, {94, "pu"}, {95, "am"}, {96, "cm"}};
79 |
80 | // Map from atomic symbols to the number of valence electrons
81 | // See also `nanoqm.workflows.templates.valence_electrons`
82 | const std::unordered_map valence_electrons = {
83 | {"ag", 11},
84 | {"al", 3},
85 | {"ar", 8},
86 | {"as", 5},
87 | {"at", 7},
88 | {"au", 11},
89 | {"b", 3},
90 | {"ba", 10},
91 | {"be", 4},
92 | {"bi", 5},
93 | {"br", 7},
94 | {"c", 4},
95 | {"ca", 10},
96 | {"cd", 12},
97 | {"cl", 7},
98 | {"co", 17},
99 | {"cr", 4},
100 | {"cs", 9},
101 | {"cu", 11},
102 | {"f", 7},
103 | {"fe", 16},
104 | {"ga", 13},
105 | {"ge", 4},
106 | {"h", 1},
107 | {"he", 2},
108 | {"hf", 12},
109 | {"hg", 12},
110 | {"i", 7},
111 | {"in", 13},
112 | {"ir", 17},
113 | {"k", 9},
114 | {"kr", 8},
115 | {"li", 3},
116 | {"mg", 10},
117 | {"mn", 15},
118 | {"mo", 14},
119 | {"n", 5},
120 | {"na", 9},
121 | {"nb", 13},
122 | {"ne", 8},
123 | {"ni", 18},
124 | {"o", 6},
125 | {"os", 16},
126 | {"p", 5},
127 | {"pb", 4},
128 | {"pd", 18},
129 | {"po", 6},
130 | {"pt", 18},
131 | {"rb", 9},
132 | {"re", 15},
133 | {"rh", 17},
134 | {"rn", 8},
135 | {"ru", 16},
136 | {"s", 6},
137 | {"sb", 5},
138 | {"sc", 11},
139 | {"se", 6},
140 | {"si", 4},
141 | {"sn", 4},
142 | {"sr", 10},
143 | {"ta", 13},
144 | {"tc", 15},
145 | {"te", 6},
146 | {"ti", 12},
147 | {"tl", 13},
148 | {"u", 14},
149 | {"v", 13},
150 | {"w", 14},
151 | {"xe", 8},
152 | {"y", 11},
153 | {"zn", 12},
154 | {"zr", 12},
155 | };
156 |
157 | } // namespace namd
158 | #endif // NAMD_H_
159 |
--------------------------------------------------------------------------------
/licenses/LICENSE_LIBHDF5.txt:
--------------------------------------------------------------------------------
1 | Copyright Notice and License Terms for
2 | HDF5 (Hierarchical Data Format 5) Software Library and Utilities
3 | -----------------------------------------------------------------------------
4 |
5 | HDF5 (Hierarchical Data Format 5) Software Library and Utilities
6 | Copyright 2006 by The HDF Group.
7 |
8 | NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities
9 | Copyright 1998-2006 by The Board of Trustees of the University of Illinois.
10 |
11 | All rights reserved.
12 |
13 | Redistribution and use in source and binary forms, with or without modification, are permitted for any
14 | purpose (including commercial purposes) provided that the following conditions are met:
15 |
16 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions, and
17 | the following disclaimer.
18 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions,
19 | and the following disclaimer in the documentation and/or materials provided with the distribution.
20 | 3. Neither the name of The HDF Group, the name of the University, nor the name of any Contributor
21 | may be used to endorse or promote products derived from this software without specific prior
22 | written permission from The HDF Group, the University, or the Contributor, respectively.
23 |
24 | DISCLAIMER:
25 | THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS "AS IS" WITH NO
26 | WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. IN NO EVENT SHALL THE HDF GROUP OR
27 | THE CONTRIBUTORS BE LIABLE FOR ANY DAMAGES SUFFERED BY THE USERS ARISING OUT OF THE USE
28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 |
30 | You are under no obligation whatsoever to provide any bug fixes, patches, or upgrades to the features,
31 | functionality or performance of the source code ("Enhancements") to anyone; however, if you choose
32 | to make your Enhancements available either publicly, or directly to The HDF Group, without imposing a
33 | separate written license agreement for such Enhancements, then you hereby grant the following
34 | license: a non-exclusive, royalty-free perpetual license to install, use, modify, prepare derivative works,
35 | incorporate into other computer software, distribute, and sublicense such enhancements or derivative
36 | works thereof, in binary and source code form.
37 |
38 | Limited portions of HDF5 were developed by Lawrence Berkeley National Laboratory (LBNL). LBNL's
39 | Copyright Notice and Licensing Terms can be found here: COPYING_LBNL_HDF5 file in this directory or
40 | at http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5.
41 |
42 | Contributors: National Center for Supercomputing Applications (NCSA) at the University of Illinois,
43 | Fortner Software, Unidata Program Center (netCDF), The Independent JPEG Group (JPEG), Jean-loup
44 | Gailly and Mark Adler (gzip), and Digital Equipment Corporation (DEC).
45 |
46 | Portions of HDF5 were developed with support from the Lawrence Berkeley National Laboratory (LBNL)
47 | and the United States Department of Energy under Prime Contract No. DE-AC02-05CH11231.
48 |
49 | Portions of HDF5 were developed with support from Lawrence Livermore National Laboratory and the
50 | United States Department of Energy under Prime Contract No. DE-AC52-07NA27344.
51 |
52 | Portions of HDF5 were developed with support from the University of California, Lawrence Livermore
53 | National Laboratory (UC LLNL). The following statement applies to those portions of the product and
54 | must be retained in any redistribution of source code, binaries, documentation, and/or accompanying
55 | materials:
56 |
57 | This work was partially produced at the University of California, Lawrence Livermore National
58 | Laboratory (UC LLNL) under contract no. W-7405-ENG-48 (Contract 48) between the U.S. Department
59 | of Energy (DOE) and The Regents of the University of California (University) for the operation of UC
60 | LLNL.
61 |
62 | DISCLAIMER:
63 | THIS WORK WAS PREPARED AS AN ACCOUNT OF WORK SPONSORED BY AN AGENCY OF THEUNITED
64 | STATES GOVERNMENT. NEITHER THE UNITED STATES GOVERNMENT NOR THE UNIVERSITY OF
65 | CALIFORNIA NOR ANY OF THEIR EMPLOYEES, MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR
66 | ASSUMES ANY LIABILITY OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF
67 | ANY INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS THAT ITS USE
68 | WOULD NOT INFRINGE PRIVATELY- OWNED RIGHTS. REFERENCE HEREIN TO ANY SPECIFIC
69 | COMMERCIAL PRODUCTS, PROCESS, OR SERVICE BY TRADE NAME, TRADEMARK, MANUFACTURER, OR
70 | OTHERWISE, DOES NOT NECESSARILY CONSTITUTE OR IMPLY ITS ENDORSEMENT, RECOMMENDATION,
71 | OR FAVORING BY THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA. THE VIEWS
72 | AND OPINIONS OF AUTHORS EXPRESSED HEREIN DO NOT NECESSARILY STATE OR REFLECT THOSE OF
73 | THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA, AND SHALL NOT BE USED FOR
74 | ADVERTISING OR PRODUCT ENDORSEMENT PURPOSES.
75 |
--------------------------------------------------------------------------------
/licenses/LICENSE_LIBINT2.txt:
--------------------------------------------------------------------------------
1 | Libint - a library for the evaluation of molecular integrals of many-body operators over Gaussian functions
2 |
3 | Copyright (C) 2004-2020 Edward F. Valeev
4 |
5 | Libint is free software. Most of the source code of the Libint compiler (code generator)
6 | can be redistributed and/or modified under the terms of the GNU General Public License, version 3,
7 | as published by the Free Software Foundation (see the accompanying file COPYING;
8 | if you did not receive this file refer to ).
9 | The rest of Libint, including the generated source, can be distributed under
10 | the GNU Lesser General Public License, version 3, (see the file COPYING.LESSER,
11 | or ). The applicable license is specified in each
12 | source file. If not indicated, the applicable license is GNU LGPL, version 3 (not GPL).
13 |
14 | Libint is distributed in the hope that it will be useful,
15 | but WITHOUT ANY WARRANTY; without even the implied warranty of
16 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 | GNU General Public License for more details.
18 |
19 | See https://github.com/evaleev/libint for the Libint source code.
20 |
--------------------------------------------------------------------------------
/licenses/README.rst:
--------------------------------------------------------------------------------
1 | Licenses for various third-party binaries distributed in the Nano-QMFlows wheels.
2 |
3 | Note that aforementioned binaries are absent from source-only Nano-QMFlows distributions.
4 |
--------------------------------------------------------------------------------
/linting_requirements.txt:
--------------------------------------------------------------------------------
1 | pydocstyle[toml]>=6.1
2 | flake8>=5
3 | Flake8-pyproject>=1.0.1
4 |
5 | # Mypy-related stuff
6 | mypy
7 | qmflows
8 | numpy>=1.21
9 | types-pyyaml
10 | types-setuptools
11 | pyparsing>=3.0.8
12 | Nano-Utils>=2.0.0
13 | Nano-CAT
14 |
--------------------------------------------------------------------------------
/nanoqm/__init__.py:
--------------------------------------------------------------------------------
1 | """Nano-QMFlows is a generic python library for computing (numerically) electronic properties \
2 | for nanomaterials like the non-adiabatic coupling vectors (NACV) using several quantum \
3 | chemical (QM) packages."""
4 |
5 | # Monkey patch noodles with support for slots-containing dataclasses
6 | from . import _monkey_patch
7 |
8 | from ._version import __version__ as __version__, __version_tuple__ as __version_tuple__
9 | from ._logger import logger as logger
10 |
11 | from .analysis import (
12 | autocorrelate, dephasing, convolute, func_conv, gauss_function,
13 | parse_list_of_lists, read_couplings, read_energies,
14 | read_energies_pyxaid, read_pops_pyxaid, spectral_density
15 | )
16 |
17 | from .integrals import (calculate_couplings_levine, compute_overlaps_for_coupling)
18 |
19 | from .schedule import (calculate_mos, lazy_couplings)
20 |
21 | from .workflows import (workflow_derivative_couplings, workflow_stddft)
22 |
23 | del _monkey_patch
24 |
25 | __all__ = [
26 | '__version__', '__version_tuple__', 'logger',
27 | 'autocorrelate', 'calculate_couplings_levine', 'calculate_mos',
28 | 'compute_overlaps_for_coupling', 'convolute', 'dephasing',
29 | 'func_conv', 'gauss_function', 'lazy_couplings',
30 | 'parse_list_of_lists', 'read_couplings', 'read_energies',
31 | 'read_energies_pyxaid', 'read_pops_pyxaid', 'spectral_density',
32 | 'workflow_derivative_couplings', 'workflow_stddft',
33 | ]
34 |
--------------------------------------------------------------------------------
/nanoqm/_logger.py:
--------------------------------------------------------------------------------
1 | """The Nano-QMFlows logger."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | import sys
7 | import types
8 | import logging
9 | import contextlib
10 | from typing import ClassVar
11 |
12 | from qmflows.type_hints import PathLike
13 |
14 | __all__ = ["logger", "stdout_handler", "EnableFileHandler"]
15 |
16 | #: The Nano-QMFlows logger.
17 | logger = logging.getLogger("nanoqm")
18 | logger.setLevel(logging.DEBUG)
19 |
20 | qmflows_logger = logging.getLogger("qmflows")
21 | noodles_logger = logging.getLogger("noodles")
22 | noodles_logger.setLevel(logging.WARNING)
23 |
24 | #: The Nano-QMFlows stdout handler.
25 | stdout_handler = logging.StreamHandler(stream=sys.stdout)
26 | stdout_handler.setLevel(logging.DEBUG)
27 | stdout_handler.setFormatter(logging.Formatter(
28 | fmt='[%(asctime)s] %(levelname)s: %(message)s',
29 | datefmt='%H:%M:%S',
30 | ))
31 | logger.addHandler(stdout_handler)
32 |
33 |
34 | class EnableFileHandler(contextlib.ContextDecorator):
35 | """Add a file handler to the noodles, qmflows and nanoqm loggers.
36 |
37 | Attributes
38 | ----------
39 | handler : logging.FileHandler
40 | The relevant titular handler.
41 |
42 | """
43 |
44 | __slots__ = ("handler",)
45 |
46 | LOGGERS: ClassVar = (logger, qmflows_logger, noodles_logger)
47 |
48 | def __init__(self, path: PathLike) -> None:
49 | """Initialize the context manager.
50 |
51 | Parameters
52 | ----------
53 | path : path-like object
54 | Path to the log file.
55 |
56 | """
57 | self.handler = logging.FileHandler(os.fsdecode(path))
58 | self.handler.setLevel(logging.DEBUG)
59 | self.handler.setFormatter(logging.Formatter(
60 | fmt='%(asctime)s---%(levelname)s\n%(message)s\n',
61 | datefmt='%H:%M:%S',
62 | ))
63 |
64 | def __enter__(self) -> None:
65 | """Add the file handler."""
66 | for logger in self.LOGGERS:
67 | if self.handler not in logger.handlers:
68 | logger.addHandler(self.handler)
69 |
70 | def __exit__(
71 | self,
72 | exc_type: type[BaseException] | None,
73 | exc_value: BaseException | None,
74 | exc_traceback: types.TracebackType | None,
75 | ) -> None:
76 | """Remove the file handler."""
77 | for logger in self.LOGGERS:
78 | if self.handler in logger.handlers:
79 | logger.removeHandler(self.handler)
80 |
--------------------------------------------------------------------------------
/nanoqm/_monkey_patch.py:
--------------------------------------------------------------------------------
1 | """Monkey patch noodles with support for slots-containing dataclasses."""
2 |
3 | import dataclasses
4 |
5 | from noodles.serial.dataclass import SerDataClass
6 | from scm.plams import add_to_class
7 |
8 |
9 | @add_to_class(SerDataClass)
10 | def encode(self, obj, make_rec):
11 | """Encode the passed dataclass."""
12 | if hasattr(obj, "__dict__"):
13 | return make_rec(obj.__dict__)
14 | else:
15 | return make_rec(dataclasses.asdict(obj))
16 |
--------------------------------------------------------------------------------
/nanoqm/analysis/__init__.py:
--------------------------------------------------------------------------------
1 | """Tools for postprocessing."""
2 | from .tools import (autocorrelate, convolute, dephasing, func_conv,
3 | gauss_function, parse_list_of_lists, read_couplings,
4 | read_energies, read_energies_pyxaid, read_pops_pyxaid,
5 | spectral_density)
6 |
7 | __all__ = [
8 | 'autocorrelate', 'dephasing', 'convolute', 'func_conv', 'gauss_function',
9 | 'parse_list_of_lists', 'read_couplings', 'read_energies',
10 | 'read_energies_pyxaid', 'read_pops_pyxaid', 'spectral_density']
11 |
--------------------------------------------------------------------------------
/nanoqm/basis/aux_fit.json:
--------------------------------------------------------------------------------
1 | {
2 | "Ag": [9, 9, 10, 11, 12],
3 | "Al": [6, 6, 7, 8, 9],
4 | "Ar": [],
5 | "As": [6, 6, 7, 9, 9],
6 | "At": [6, 6, 7, 9, 9],
7 | "Au": [9, 9, 10, 11, 12],
8 | "B": [3, 3, 3, 3, 3],
9 | "Ba": [8, 8, 9, 10, 11],
10 | "Be": [3, 3, 3, 3, 3],
11 | "Bi": [6, 6, 7, 9, 9],
12 | "Br": [6, 6, 7, 9, 9],
13 | "C": [3, 3, 3, 3, 3],
14 | "Ca": [7, 8, 9, 10, 11],
15 | "Cd": [9, 9, 10, 11, 12],
16 | "Cl": [3, 3, 3, 3, 3, 3],
17 | "Co": [10, 10, 10, 11, 12],
18 | "Cr": [10, 10, 11, 12, 13],
19 | "Cs": [7, 8, 9, 10, 11],
20 | "Cu": [9, 9, 10, 11, 12],
21 | "F": [3, 3, 3, 3, 3],
22 | "Fe": [10, 10, 11, 12, 13],
23 | "Ga": [9, 10, 11, 12, 13],
24 | "Ge": [6, 6, 6, 7, 9],
25 | "H": [3, 3, 3, 3, 3],
26 | "He": [3, 3, 3, 3, 3],
27 | "Hf": [10, 10, 11, 12, 13],
28 | "Hg": [9, 9, 10, 11, 12],
29 | "I": [6, 6, 6, 7, 9],
30 | "In": [9, 10, 11, 12, 13],
31 | "Ir": [10, 10, 11, 12, 13],
32 | "K": [7, 8, 9, 10, 11],
33 | "Kr": [],
34 | "Li": [4, 4, 4, 4, 4],
35 | "Mg": [3, 3, 3, 3, 3],
36 | "Mn": [10, 10, 11, 12, 13],
37 | "Mo": [10, 10, 11, 12, 13],
38 | "N": [3, 3, 3, 3, 3],
39 | "Na": [3, 3, 3, 3, 3],
40 | "Nb": [10, 10, 11, 12, 13],
41 | "Ne": [],
42 | "Ni": [10, 10, 11, 12, 13],
43 | "O": [3, 3, 3, 3, 3],
44 | "Os": [10, 10, 11, 12, 13],
45 | "P": [3, 3, 3, 3, 3],
46 | "Pb": [6, 6, 6, 7, 9],
47 | "Pd": [10, 10, 11, 12, 13],
48 | "Po": [6, 6, 6, 7, 9],
49 | "Pt": [10, 10, 11, 12, 13],
50 | "Rb": [7, 8, 9, 10, 11],
51 | "Re": [10, 10, 11, 12, 13],
52 | "Rh": [10, 10, 11, 12, 13],
53 | "Rn": [],
54 | "Ru": [10, 10, 11, 12, 13],
55 | "S": [3, 3, 3, 3, 3],
56 | "Sb": [6, 6, 6, 7, 9],
57 | "Sc": [10, 10, 11, 12, 13],
58 | "Se": [6, 6, 6, 7, 9],
59 | "Si": [3, 3, 3, 3, 3],
60 | "Sn": [6, 6, 6, 7, 9],
61 | "Sr": [8, 8, 9, 10, 11],
62 | "Ta": [10, 10, 11, 12, 13],
63 | "Tc": [10, 10, 11, 12, 13],
64 | "Te": [6, 6, 6, 7, 9],
65 | "Ti": [10, 10, 11, 12, 13],
66 | "Tl": [9, 10, 11, 12, 13],
67 | "V": [10, 10, 11, 12, 13],
68 | "W": [10, 10, 11, 12, 13],
69 | "Xe": [],
70 | "Y": [10, 10, 11, 12, 13],
71 | "Zn": [9, 9, 10, 11, 12],
72 | "Zr": [10, 10, 11, 12, 13]
73 | }
74 |
--------------------------------------------------------------------------------
/nanoqm/basis/valence_electrons.json:
--------------------------------------------------------------------------------
1 | {
2 | "Ag": 11,
3 | "Al": 3,
4 | "Ar": 8,
5 | "As": 5,
6 | "At": 7,
7 | "Au": 11,
8 | "B": 3,
9 | "Ba": 10,
10 | "Be": 4,
11 | "Bi": 5,
12 | "Br": 7,
13 | "C": 4,
14 | "Ca": 10,
15 | "Cd": 12,
16 | "Cl": 7,
17 | "Co": 17,
18 | "Cr": 4,
19 | "Cs": 9,
20 | "Cu": 11,
21 | "F": 7,
22 | "Fe": 16,
23 | "Ga": 13,
24 | "Ge": 4,
25 | "H": 1,
26 | "He": 2,
27 | "Hf": 12,
28 | "Hg": 12,
29 | "I": 7,
30 | "In": 13,
31 | "Ir": 17,
32 | "K": 9,
33 | "Kr": 8,
34 | "Li": 3,
35 | "Mg": 10,
36 | "Mn": 15,
37 | "Mo": 14,
38 | "N": 5,
39 | "Na": 9,
40 | "Nb": 13,
41 | "Ne": 8,
42 | "Ni": 18,
43 | "O": 6,
44 | "Os": 16,
45 | "P": 5,
46 | "Pb": 4,
47 | "Pd": 18,
48 | "Po": 6,
49 | "Pt": 18,
50 | "Rb": 9,
51 | "Re": 15,
52 | "Rh": 17,
53 | "Rn": 8,
54 | "Ru": 16,
55 | "S": 6,
56 | "Sb": 5,
57 | "Sc": 11,
58 | "Se": 6,
59 | "Si": 4,
60 | "Sn": 4,
61 | "Sr": 10,
62 | "Ta": 13,
63 | "Tc": 15,
64 | "Te": 6,
65 | "Ti": 12,
66 | "Tl": 13,
67 | "U": 14,
68 | "V": 13,
69 | "W": 14,
70 | "Xe": 8,
71 | "Y": 11,
72 | "Zn": 12,
73 | "Zr": 12
74 | }
75 |
--------------------------------------------------------------------------------
/nanoqm/compute_integrals.pyi:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import numpy.typing as npt
3 |
# Stub for the compiled ``compute_integrals`` extension (C++ sources in the
# top-level ``libint`` folder).  The double-underscore parameter names mark
# all arguments as positional-only.
# Takes two XYZ geometry files, an HDF5 file and a basis-set name; returns
# a float64 array — presumably the coupling/overlap integrals between the
# two geometries (see the C++ implementation to confirm).
def compute_integrals_couplings(
    __path_xyz_1: str,
    __path_xyz_2: str,
    __path_hdf5: str,
    __basis_name: str,
) -> npt.NDArray[np.float64]: ...
10 |
# Compute a multipole matrix ("overlap", "dipole" or "quadrupole" — see
# ``nanoqm.integrals.multipole_matrices``) for the geometry in ``__path_xyz``;
# dipole/quadrupole results come back flattened and are reshaped by the caller.
def compute_integrals_multipole(
    __path_xyz: str,
    __path_hdf5: str,
    __basis_name: str,
    __multipole: str,
) -> npt.NDArray[np.float64]: ...
17 |
# Number of threads the extension scales over (logged by the callers).
def get_thread_count() -> int: ...
19 |
# Human-readable name of the threading backend used by the extension.
def get_thread_type() -> str: ...
21 |
--------------------------------------------------------------------------------
/nanoqm/integrals/__init__.py:
--------------------------------------------------------------------------------
1 | """Nonadiabatic coupling implementation."""
2 | from .nonAdiabaticCoupling import (calculate_couplings_3points,
3 | calculate_couplings_levine,
4 | compute_overlaps_for_coupling,
5 | correct_phases)
6 |
# NOTE: ``calculate_couplings_levine`` was previously listed twice; each
# public name now appears exactly once.
__all__ = ['calculate_couplings_3points', 'calculate_couplings_levine',
           'compute_overlaps_for_coupling', 'correct_phases']
10 |
--------------------------------------------------------------------------------
/nanoqm/integrals/multipole_matrices.py:
--------------------------------------------------------------------------------
1 | """Compute multipole integrals using `Libint2 `.
2 |
3 | The interface to the C++ Libint2 library is located at the parent folder,
4 | in the `libint` folder.
5 |
6 | Index
7 | -----
8 | .. currentmodule:: nanoqm.integrals.multipole_matrices
9 | .. autosummary::
10 | get_multipole_matrix
11 | compute_matrix_multipole
12 |
13 | API
14 | ---
15 | .. autofunction:: get_multipole_matrix
16 | .. autofunction:: compute_matrix_multipole
17 | """
18 |
19 | from __future__ import annotations
20 |
21 | import os
22 | import uuid
23 | from os.path import join
24 | from pathlib import Path
25 | from typing import TYPE_CHECKING, Literal
26 |
27 | from qmflows.common import AtomXYZ
28 |
29 | from .. import logger
30 | from ..common import is_data_in_hdf5, retrieve_hdf5_data, store_arrays_in_hdf5, tuplesXYZ_to_plams
31 | from ..compute_integrals import compute_integrals_multipole, get_thread_count, get_thread_type
32 |
33 | if TYPE_CHECKING:
34 | from numpy.typing import NDArray
35 | from numpy import float64 as f8
36 | from .. import _data
37 |
38 |
def get_multipole_matrix(
    config: _data.AbsorptionSpectrum,
    inp: _data.AbsorptionData,
    multipole: Literal["overlap", "dipole", "quadrupole"],
) -> NDArray[f8]:
    """Return the `multipole` tensor for point `inp.i`, computing it if absent.

    Parameters
    ----------
    config
        Global configuration to run a workflow
    inp
        Information about the current point, e.g. molecular geometry.
    multipole
        Either overlap, dipole or quadrupole.

    Returns
    -------
    np.ndarray
        Tensor containing the multipole.

    """
    # HDF5 node where this point's multipole lives.
    node = join(
        config.orbitals_type,
        multipole,
        f'point_{inp.i + config.enumerate_from}',
    )
    cached = search_multipole_in_hdf5(config.path_hdf5, node, multipole)
    if cached is not None:
        return cached

    # Not stored yet: compute it and cache it for subsequent runs.
    tensor = compute_matrix_multipole(inp.mol, config, multipole)
    store_arrays_in_hdf5(config.path_hdf5, node, tensor)
    return tensor
72 |
73 |
def search_multipole_in_hdf5(
    path_hdf5: str | Path,
    path_multipole_hdf5: str,
    multipole: str,
) -> None | NDArray[f8]:
    """Return the stored multipole from the HDF5, or ``None`` when absent."""
    if not is_data_in_hdf5(path_hdf5, path_multipole_hdf5):
        logger.info(f"computing multipole: {multipole}")
        return None
    logger.info(f"retrieving multipole: {multipole} from the hdf5")
    return retrieve_hdf5_data(path_hdf5, path_multipole_hdf5)
86 |
87 |
def compute_matrix_multipole(
    mol: list[AtomXYZ],
    config: _data.GeneralOptions,
    multipole: Literal["overlap", "dipole", "quadrupole"],
) -> NDArray[f8]:
    """Compute a `multipole` matrix: overlap, dipole, etc. for a given geometry `mol`.

    The multipole is computed in spherical coordinates.

    Note: for the dipole and quadrupole the super_matrix contains all the
    matrices stacked along the 0-axis.

    Parameters
    ----------
    mol
        Molecule to compute the dipole
    config
        Dictionary with the current configuration
    multipole
        kind of multipole to compute

    Returns
    -------
    np.ndarray
        Matrix with entries <ψi | x^i y^j z^k | ψj>

    Raises
    ------
    ValueError
        If ``multipole`` is not "overlap", "dipole" or "quadrupole".

    """
    path_hdf5 = config.path_hdf5

    # Write molecule in a temporary file; the uuid avoids collisions between
    # concurrent jobs sharing the same scratch directory.
    path = join(config.scratch_path, f"molecule_{uuid.uuid4()}.xyz")
    mol_plams = tuplesXYZ_to_plams(mol)
    mol_plams.write(path)

    # name of the basis set
    basis_name = config.cp2k_general_settings.basis
    thread_count = get_thread_count()
    thread_type = get_thread_type()
    logger.info(f"Will scale over {thread_count} {thread_type} threads")

    try:
        if multipole == 'overlap':
            matrix_multipole = compute_integrals_multipole(
                path, path_hdf5, basis_name, multipole)
        elif multipole == 'dipole':
            # The tensor contains the overlap + {x, y, z} dipole matrices
            super_matrix = compute_integrals_multipole(
                path, path_hdf5, basis_name, multipole)
            dim = super_matrix.shape[1]

            # Reshape the super_matrix as a tensor containing overlap + {x, y, z} dipole matrices
            matrix_multipole = super_matrix.reshape(4, dim, dim)
        elif multipole == 'quadrupole':
            # The tensor contains the overlap + {xx, xy, xz, yy, yz, zz} quadrupole matrices
            super_matrix = compute_integrals_multipole(
                path, path_hdf5, basis_name, multipole)
            dim = super_matrix.shape[1]

            # Reshape to 3d tensor containing overlap + {x, y, z} + {xx, xy, xz, yy, yz, zz}
            # quadrupole matrices
            matrix_multipole = super_matrix.reshape(10, dim, dim)
        else:
            # Previously an unknown value fell through to an unbound-variable
            # NameError on return; fail with an explicit message instead.
            raise ValueError(f"Unknown multipole: {multipole!r}")
    finally:
        # Delete the tmp molecule file even if the integral computation fails;
        # previously an exception above leaked the file in the scratch dir.
        os.remove(path)

    return matrix_multipole
154 |
--------------------------------------------------------------------------------
/nanoqm/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/nanoqm/py.typed
--------------------------------------------------------------------------------
/nanoqm/schedule/__init__.py:
--------------------------------------------------------------------------------
1 | """Schedule API."""
2 | from .scheduleCoupling import (compute_phases, lazy_couplings,
3 | write_hamiltonians)
4 | from .scheduleCP2K import (prepare_cp2k_settings, prepare_job_cp2k)
5 | from .components import (calculate_mos, create_point_folder, split_file_geometries)
6 |
# Public API of the scheduling subpackage.
__all__ = [
    'calculate_mos',
    'compute_phases',
    'create_point_folder',
    'lazy_couplings',
    'prepare_cp2k_settings',
    'prepare_job_cp2k',
    'split_file_geometries',
    'write_hamiltonians',
]
11 |
--------------------------------------------------------------------------------
/nanoqm/schedule/scheduleCP2K.py:
--------------------------------------------------------------------------------
1 | """Module to configure and run CP2K jobs.
2 |
3 | Index
4 | -----
5 | .. currentmodule:: nanoqm.schedule.scheduleCP2K
6 | .. autosummary::
7 | prepare_job_cp2k
8 |
9 | API
10 | ---
11 | .. autofunction:: prepare_job_cp2k
12 |
13 | """
14 |
15 | from __future__ import annotations
16 |
17 | import fnmatch
18 | import os
19 | from os.path import join
20 | from pathlib import Path
21 | from typing import TYPE_CHECKING
22 |
23 | from noodles import schedule # Workflow Engine
24 | from qmflows import Settings, cp2k, templates
25 | from qmflows.packages import CP2K, CP2K_Result
26 | from qmflows.parsers import string_to_plams_Molecule
27 | from qmflows.type_hints import PromisedObject
28 |
29 | from .. import logger
30 | if TYPE_CHECKING:
31 | from .. import _data
32 |
33 |
def try_to_read_wf(path_dir: str | os.PathLike[str]) -> str:
    """Try to get a wave function file from ``path_dir``.

    Returns
    -------
    str
        Path to the wave function file.

    Raises
    ------
    RuntimeError
        If there is not a wave function file.

    """
    matches = fnmatch.filter(os.listdir(path_dir), '*wfn')
    if not matches:
        # No restart file: augment the error with any CP2K output we can find.
        error = f"There are no wave function file in path: {os.fspath(path_dir)!r}\n"
        error += print_cp2k_error(path_dir, "err")
        error += print_cp2k_error(path_dir, "out")
        raise RuntimeError(error)
    return join(path_dir, matches[0])
57 |
58 |
def prepare_cp2k_settings(
    settings: Settings,
    dict_input: _data.ComponentsData,
    guess_job: None | CP2K_Result,
) -> Settings:
    """Fill in the parameters for running a single job in CP2K.

    Parameters
    ----------
    settings
        Input for CP2K
    dict_input
        Input for the current molecular geometry
    guess_job
        Previous job to read the guess wave function

    Returns
    -------
    Settings
        CP2K input settings with the qmflows single-point template overlaid.

    Raises
    ------
    RuntimeError
        If ``guess_job`` has no archived ``plams_dir``, or (via
        ``try_to_read_wf``) no wave-function file is found in it.

    """
    dft = settings.specific.cp2k.force_eval.dft
    # Write the molecular orbitals to the file requested for this point
    dft['print']['mo']['filename'] = dict_input.job_files.get_MO

    # Global parameters for CP2K
    settings.specific.cp2k['global']['project'] = f'point_{dict_input.k}'

    if guess_job is not None:
        # Restart from the wave function produced by the previous job
        plams_dir = guess_job.archive['plams_dir']
        if plams_dir is None:
            raise RuntimeError("There are no wave function file in path: None\n")
        dft.wfn_restart_file_name = try_to_read_wf(plams_dir)

    input_args = templates.singlepoint.overlay(settings)

    return input_args
96 |
97 |
@schedule
def prepare_job_cp2k(
    settings: Settings,
    dict_input: _data.ComponentsData,
    guess_job: None | PromisedObject,
) -> CP2K:
    """Generate a :class:`qmflows.packages.CP2K` job.

    Parameters
    ----------
    settings
        Input for CP2K
    dict_input
        Input for the current molecular geometry
    guess_job
        Previous job to read the guess wave function

    Returns
    -------
    :class:`qmflows.packages.CP2K`
        job to run

    """
    job_settings = prepare_cp2k_settings(settings, dict_input, guess_job)

    # Drop keywords that have no meaning in the next translation phase
    for keyword in ('basis', 'potential'):
        if keyword in job_settings:
            del job_settings[keyword]

    molecule = string_to_plams_Molecule(dict_input.geometry)
    return cp2k(job_settings, molecule, work_dir=dict_input.point_dir)
132 |
133 |
def print_cp2k_error(path_dir: str | os.PathLike[str], prefix: str) -> str:
    """Search for error in the CP2K output files."""
    msg = ""
    # First file in the directory whose name ends with the given prefix.
    candidate = next(Path(path_dir).glob(f"*{prefix}"), None)
    if candidate is not None:
        with open(candidate, 'r', encoding="utf8") as handler:
            content = handler.read()
        msg = f"CP2K {prefix} file:\n{content}\n"
        logger.error(msg)

    return msg
145 |
--------------------------------------------------------------------------------
/nanoqm/workflows/__init__.py:
--------------------------------------------------------------------------------
1 | """Simulation workflows."""
2 | from .initialization import initialize
3 | from .workflow_coop import workflow_crystal_orbital_overlap_population
4 | from .workflow_coupling import workflow_derivative_couplings
5 | from .workflow_single_points import workflow_single_points
6 | from .workflow_stddft_spectrum import workflow_stddft
7 |
# Public API of the workflows subpackage.
__all__ = [
    'initialize',
    'workflow_crystal_orbital_overlap_population',
    'workflow_derivative_couplings',
    'workflow_single_points',
    'workflow_stddft',
]
11 |
--------------------------------------------------------------------------------
/nanoqm/workflows/orbitals_type.py:
--------------------------------------------------------------------------------
1 | """Module to run restricted and unrestricted calculations."""
2 |
3 | from __future__ import annotations
4 |
5 | import copy
6 | from typing import Any, Callable, TypeVar, TYPE_CHECKING
7 |
8 | from noodles import gather
9 | from qmflows import run
10 |
11 | from .. import logger
12 | from .initialization import initialize
13 |
14 | if TYPE_CHECKING:
15 | from qmflows.type_hints import PromisedObject
16 | from .. import _data
17 |
# NOTE: ``_data`` is imported only under ``TYPE_CHECKING``, so the bound must
# be a string (forward reference); evaluating ``_data.GeneralOptions`` here at
# runtime would raise a ``NameError`` as soon as this module is imported.
_T = TypeVar("_T", bound="_data.GeneralOptions")

__all__ = ["select_orbitals_type"]
21 |
22 |
def select_orbitals_type(config: _T, workflow: Callable[[_T], PromisedObject]) -> Any:
    """Call a workflow using restricted or unrestricted orbitals."""
    # Set up the general configuration (folders, HDF5, defaults)
    initialize(config)

    if config.orbitals_type == "both":
        # Run the workflow once per spin channel and return both results.
        alphas_config = copy.copy(config)
        alphas_config.orbitals_type = "alphas"
        promises_alphas = workflow(alphas_config)
        betas_config = copy.copy(config)
        betas_config.orbitals_type = "betas"
        promises_betas = workflow(betas_config)
        combined = gather(promises_alphas, promises_betas)
        alphas, betas = run(combined, folder=config.workdir, always_cache=False)
        return alphas, betas

    logger.info("starting workflow calculation!")
    return run(workflow(config), folder=config.workdir, always_cache=False)
42 |
--------------------------------------------------------------------------------
/nanoqm/workflows/run_workflow.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Comman line interface to run the workflows.
3 |
4 | Usage:
5 | run_workflow.py -i input.yml
6 |
7 | Available workflow:
8 | * absorption_spectrum
9 | * derivative_couplings
10 | * single_points
11 | * ipr_calculation
12 | * coop_calculation
13 |
14 | """
15 |
16 | from __future__ import annotations
17 |
18 | import argparse
19 | import os
20 | from collections.abc import Callable
21 | from typing import Any
22 |
23 | import yaml
24 |
25 | from .. import logger
26 | from ..common import UniqueSafeLoader
27 | from .input_validation import process_input
28 | from .workflow_coop import workflow_crystal_orbital_overlap_population
29 | from .workflow_coupling import workflow_derivative_couplings
30 | from .workflow_ipr import workflow_ipr
31 | from .workflow_single_points import workflow_single_points
32 | from .workflow_stddft_spectrum import workflow_stddft
33 |
# Short usage string shown in the argparse help output.
msg = "run_workflow.py -i input.yml"

parser = argparse.ArgumentParser(description=msg)
parser.add_argument('-i', required=True, help="Input file in YAML format")

# Map the ``workflow`` keyword from the YAML input onto the callable that
# implements it.
dict_workflows: dict[str, Callable[[Any], object]] = {
    'absorption_spectrum': workflow_stddft,
    'derivative_couplings': workflow_derivative_couplings,
    'single_points': workflow_single_points,
    'ipr_calculation': workflow_ipr,
    'coop_calculation': workflow_crystal_orbital_overlap_population,
}
46 |
47 |
def main() -> None:
    """Parse the command line arguments and run the requested workflow.

    Raises
    ------
    RuntimeError
        If the input file does not declare a ``workflow`` keyword.

    """
    args = parser.parse_args()
    input_file: str = args.i
    with open(input_file, 'r', encoding="utf8") as f:
        dict_input = yaml.load(f, Loader=UniqueSafeLoader)

    # Guard clause: the workflow name is mandatory
    if 'workflow' not in dict_input:
        raise RuntimeError(
            "The name of the workflow is required in the input file")
    workflow_name = dict_input['workflow']

    # Read and process input
    inp = process_input(input_file, workflow_name)

    # run workflow
    function = dict_workflows[workflow_name]

    # (typo fix: previously logged "Running worflow")
    logger.info(f"Running workflow using: {os.path.abspath(input_file)}")
    function(inp)
68 |
69 |
70 | if __name__ == "__main__":
71 | main()
72 |
--------------------------------------------------------------------------------
/nanoqm/workflows/tools.py:
--------------------------------------------------------------------------------
1 | """Common utilities use by the workflows."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import TYPE_CHECKING
6 |
7 | from .. import logger
8 | from ..common import is_data_in_hdf5
9 | from .workflow_single_points import workflow_single_points
10 |
11 | if TYPE_CHECKING:
12 | from .. import _data
13 |
# Helper shared by workflows that need precomputed MOs (e.g. the IPR workflow).
__all__ = ["compute_single_point_eigenvalues_coefficients"]
15 |
16 |
def compute_single_point_eigenvalues_coefficients(config: _data.SinglePoints) -> None:
    """Check if hdf5 contains the required eigenvalues and coefficients.

    If not, it runs the single point calculation.
    """
    required_nodes = (
        f'{config.project_name}/point_0/cp2k/mo/coefficients',
        f'{config.project_name}/point_0/cp2k/mo/eigenvalues',
    )

    if not all(is_data_in_hdf5(config.path_hdf5, node) for node in required_nodes):
        # Run the single point workflow to produce eigenvalues and coefficients
        logger.info("Starting single point calculation.")
        workflow_single_points(config)
    else:
        logger.info("Coefficients and eigenvalues already in hdf5.")
33 |
--------------------------------------------------------------------------------
/nanoqm/workflows/workflow_coupling.py:
--------------------------------------------------------------------------------
1 | """Workflow to compute the derivate coupling between states.
2 |
3 | The ``workflow_derivative_couplings`` expected a file with a trajectory-like
4 | file with the molecular geometries to compute the couplings.
5 |
6 | Index
7 | -----
8 | .. currentmodule:: nanoqm.workflows.workflow_coupling
9 | .. autosummary::
10 |
11 | """
12 |
13 | from __future__ import annotations
14 |
15 | import os
16 | from os.path import join
17 | from typing import List, Tuple, TYPE_CHECKING
18 |
19 | from noodles import gather, schedule, unpack
20 | from noodles.interface import PromisedObject
21 |
22 | from .. import logger
23 | from ..schedule.components import calculate_mos
24 | from ..schedule.scheduleCoupling import (calculate_overlap, lazy_couplings,
25 | write_hamiltonians)
26 | from .orbitals_type import select_orbitals_type
27 |
28 | if TYPE_CHECKING:
29 | from .. import _data
30 |
__all__ = ['workflow_derivative_couplings']

#: Result of the derivative-couplings workflow: the written Hamiltonian
#: files and the HDF5 node paths of the orbital energies.
ResultPaths = Tuple[List[str], List[str]]
35 |
36 |
def workflow_derivative_couplings(
    config: _data.DerivativeCoupling
) -> ResultPaths | tuple[ResultPaths, ResultPaths]:
    """Compute the derivative couplings for a molecular dynamic trajectory.

    Parameters
    ----------
    config
        Dictionary with the configuration to run the workflows

    Return
    ------
    Folders where the Hamiltonians are stored.

    """
    # Delegate to the spin-channel dispatcher; for orbitals_type "both" the
    # result is one (files, energies) pair per spin channel.
    return select_orbitals_type(config, run_workflow_couplings)
53 |
54 |
def run_workflow_couplings(config: _data.DerivativeCoupling) -> PromisedObject:
    """Build the lazy (noodles) graph computing the couplings for `config`.

    The returned promise evaluates to the written Hamiltonian files together
    with the HDF5 node paths of the orbital energies.
    """
    # compute the molecular orbitals
    logger.info("starting couplings calculation!")
    mo_paths_hdf5, energy_paths_hdf5 = unpack(calculate_mos(config), 2)

    # Overlap matrix at two different times
    promised_overlaps = calculate_overlap(config, mo_paths_hdf5)

    # Calculate Non-Adiabatic Coupling
    promised_crossing_and_couplings = lazy_couplings(config, promised_overlaps)

    # Folder where the results are written in PYXAID format
    config.path_hamiltonians = create_path_hamiltonians(config.workdir, config.orbitals_type)

    # Inplace scheduling of write_hamiltonians function.
    # Equivalent to add @schedule on top of the function
    schedule_write_ham = schedule(write_hamiltonians)

    # Number of coupling matrices to compute: two fewer than the number of
    # geometries in the trajectory
    config.npoints = len(config.geometries) - 2

    # Write Hamiltonians in PYXAID format
    promise_files = schedule_write_ham(
        config, promised_crossing_and_couplings, mo_paths_hdf5)

    return gather(promise_files, energy_paths_hdf5)
82 |
83 |
def create_path_hamiltonians(workdir: str | os.PathLike[str], orbitals_type: str) -> str:
    """Create (if needed) and return the folder storing the resulting Hamiltonians.

    Parameters
    ----------
    workdir
        Root working directory of the workflow.
    orbitals_type
        Spin-channel prefix (e.g. "alphas"/"betas"); an empty string yields
        the plain "hamiltonians" folder name.

    Returns
    -------
    str
        Path of the (now existing) Hamiltonians folder.

    """
    prefix = "hamiltonians"
    name = prefix if not orbitals_type else f"{orbitals_type}_{prefix}"
    path_hamiltonians = join(workdir, name)
    # exist_ok avoids the check-then-create race of the previous
    # ``os.path.exists``/``os.makedirs`` pair when jobs share a workdir.
    os.makedirs(path_hamiltonians, exist_ok=True)

    return path_hamiltonians
93 |
--------------------------------------------------------------------------------
/nanoqm/workflows/workflow_ipr.py:
--------------------------------------------------------------------------------
1 | """Inverse Participation Ratio calculation.
2 |
3 | Index
4 | -----
5 | .. currentmodule:: nanoqm.workflows.workflow_ipr
6 | .. autosummary::
7 | workflow_ipr
8 |
9 | """
10 |
11 | from __future__ import annotations
12 |
13 | from typing import TYPE_CHECKING
14 |
15 | import numpy as np
16 | from scipy.linalg import sqrtm
17 | from qmflows.parsers import readXYZ
18 |
19 | from .. import logger
20 | from ..common import h2ev, number_spherical_functions_per_atom, retrieve_hdf5_data
21 | from ..integrals.multipole_matrices import compute_matrix_multipole
22 | from .initialization import initialize
23 | from .tools import compute_single_point_eigenvalues_coefficients
24 |
25 | if TYPE_CHECKING:
26 | from .. import _data
27 |
# Only the workflow entry point is public.
__all__ = ['workflow_ipr']
29 |
30 |
def workflow_ipr(config: _data.IPR) -> np.ndarray:
    """Compute the Inverse Participation Ratio main function.

    Returns
    -------
    np.ndarray
        ``(n_mos, 2)`` array with the orbital energies (eV) in the first
        column and the participation number ``1 / IPR`` in the second.
        The same array is also written to ``IPR.txt``.

    """
    # Dictionary containing the general information
    initialize(config)

    # Checking if hdf5 contains the required eigenvalues and coefficients
    compute_single_point_eigenvalues_coefficients(config)

    # Logger info
    logger.info("Starting IPR calculation.")

    # Get eigenvalues and coefficients from hdf5
    node_path_coefficients = 'coefficients/point_0/'
    node_path_eigenvalues = 'eigenvalues/point_0'
    atomic_orbitals = retrieve_hdf5_data(config.path_hdf5, node_path_coefficients)
    energies = retrieve_hdf5_data(config.path_hdf5, node_path_eigenvalues)
    energies *= h2ev  # To get them from Hartree to eV

    # Converting the xyz-file to a mol-file
    mol = readXYZ(config.path_traj_xyz)

    # Computing the overlap-matrix S and its square root
    overlap = compute_matrix_multipole(mol, config, 'overlap')
    squared_overlap = sqrtm(overlap)

    # Converting the coefficients from AO-basis to MO-basis
    transformed_orbitals = np.dot(squared_overlap, atomic_orbitals)

    # Array with number of spherical orbitals per atom
    sphericals = number_spherical_functions_per_atom(
        mol,
        'cp2k',
        config.cp2k_general_settings.basis,
        config.path_hdf5,
    )

    # Sum the rows belonging to the same atom: new matrix with the atoms on
    # the rows and the MOs on the columns
    indices = np.zeros(len(mol), dtype='int')
    indices[1:] = np.cumsum(sphericals[:-1])
    accumulated_transf_orbitals = np.add.reduceat(transformed_orbitals, indices, 0)

    # IPR_i = sum_a |c_ai|^4 / (sum_a |c_ai|^2)^2, vectorized over all MOs
    # (replaces the previous per-column Python loop with identical math)
    weights = np.absolute(accumulated_transf_orbitals) ** 2
    ipr = np.sum(weights ** 2, axis=0) / np.sum(weights, axis=0) ** 2

    # Lastly, we save the output as a txt-file
    result = np.zeros((accumulated_transf_orbitals.shape[1], 2))
    result[:, 0] = energies
    result[:, 1] = 1.0 / ipr
    np.savetxt('IPR.txt', result)
    return result
85 |
--------------------------------------------------------------------------------
/nanoqm/workflows/workflow_single_points.py:
--------------------------------------------------------------------------------
1 | """Workflow to perform single point calculation in a trajectory.
2 |
3 | Index
4 | -----
5 | .. currentmodule:: nanoqm.workflows.workflow_single_points
6 | .. autosummary::
7 | workflow_single_points
8 |
9 | """
10 |
11 | from __future__ import annotations
12 |
13 | from typing import TYPE_CHECKING
14 |
15 | from qmflows import run
16 |
17 | from .. import logger
18 | from ..schedule.components import calculate_mos
19 | from .initialization import initialize
20 |
21 | if TYPE_CHECKING:
22 | from .. import _data
23 |
# Only the workflow entry point is public.
__all__ = ['workflow_single_points']
25 |
26 |
def workflow_single_points(
    config: _data.SinglePoints,
) -> tuple[list[tuple[str, str, str]], list[str]]:
    """Perform single point calculations for a given trajectory.

    Parameters
    ----------
    config
        Input to run the workflow.

    Returns
    -------
    List with the node path to the molecular orbitals in the HDF5.

    """
    # Set up folders, HDF5 file and general defaults
    initialize(config)

    logger.info("starting!")

    # Schedule the molecular-orbital calculations, then execute the graph
    promised_mos = calculate_mos(config)
    return tuple(run(promised_mos, folder=config.workdir))
53 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | # Minimum requirements for the build system to execute.
3 | requires = [
4 | "setuptools>=61.0",
5 | "wheel>=0.21",
6 | "oldest-supported-numpy",
7 | "setuptools_scm[toml]>=6.2",
8 | ]
9 | build-backend = "setuptools.build_meta"
10 |
11 | [project]
12 | name = "nano-qmflows"
13 | dynamic = [
14 | "dependencies",
15 | "optional-dependencies",
16 | "version",
17 | "readme",
18 | ]
19 | description = "Derivative coupling calculation"
20 | license = { text = "Apache-2.0" }
21 | authors = [
22 | { name = "Felipe Zapata & Ivan Infante", email = "f.zapata@esciencecenter.nl" },
23 | ]
24 | keywords = [
25 | "chemistry",
26 | "Photochemistry",
27 | "Simulation",
28 | ]
29 | classifiers = [
30 | "License :: OSI Approved :: Apache Software License",
31 | "Natural Language :: English",
32 | "Operating System :: MacOS",
33 | "Operating System :: POSIX :: Linux",
34 | "Programming Language :: C++",
35 | "Programming Language :: Python",
36 | "Programming Language :: Python :: 3",
37 | "Programming Language :: Python :: 3 :: Only",
38 | "Programming Language :: Python :: 3.8",
39 | "Programming Language :: Python :: 3.9",
40 | "Programming Language :: Python :: 3.10",
41 | "Programming Language :: Python :: 3.11",
42 | "Programming Language :: Python :: 3.12",
43 | "Programming Language :: Python :: Implementation :: CPython",
44 | "Development Status :: 4 - Beta",
45 | "Intended Audience :: Science/Research",
46 | "Topic :: Scientific/Engineering :: Chemistry",
47 | "Typing :: Typed",
48 | ]
49 | requires-python = ">=3.8"
50 |
51 | [project.urls]
52 | Homepage = "https://github.com/SCM-NV/nano-qmflows"
53 | "Bug Tracker" = "https://github.com/SCM-NV/nano-qmflows/issues"
54 | Documentation = "https://qmflows-namd.readthedocs.io/en/latest/"
55 |
56 | [project.scripts]
57 | "run_workflow.py" = "nanoqm.workflows.run_workflow:main"
58 | "distribute_jobs.py" = "nanoqm.workflows.distribute_jobs:main"
59 |
60 | [tool.setuptools]
61 | license-files = ["LICENSE*.txt"]
62 |
63 | [tool.setuptools.packages.find]
64 | exclude = ["test"]
65 |
66 | [tool.setuptools.package-data]
67 | nanoqm = [
68 | "basis/*.json",
69 | "basis/BASIS*",
70 | "basis/GTH_POTENTIALS",
71 | "py.typed",
72 | "*.pyi",
73 | ]
74 |
75 | [tool.setuptools.dynamic]
76 | dependencies = { file = ["install_requirements.txt"] }
77 | optional-dependencies.test = { file = ["test_requirements.txt"] }
78 | optional-dependencies.doc = { file = ["doc_requirements.txt"] }
79 | optional-dependencies.lint = { file = ["linting_requirements.txt"] }
80 | readme = { file = ["README.rst"], content-type = "text/x-rst" }
81 |
82 | [tool.setuptools_scm]
83 | write_to = "nanoqm/_version.py"
84 |
85 | [tool.mypy]
86 | plugins = "numpy.typing.mypy_plugin"
87 | show_error_codes = true
88 | mypy_path = "typings"
89 | files = ["nanoqm", "typings"]
90 |
91 | [[tool.mypy.overrides]]
92 | module = [
93 | "schema.*",
94 | "matplotlib.*",
95 | ]
96 | ignore_missing_imports = true
97 |
98 | [tool.pydocstyle]
99 | add_ignore = ["D401"]
100 |
101 | [tool.coverage.run]
102 | branch = true
103 | source = ["nanoqm"]
104 |
105 | [tool.pytest.ini_options]
106 | testpaths = "test"
107 | addopts = "--tb=short --cov --cov-report xml --cov-report term --cov-report html --cache-clear --pdbcls=IPython.terminal.debugger:TerminalPdb --durations=6"
108 | markers = [
109 | "slow: A marker for slow tests requiring external quantum-chemical packages."
110 | ]
111 | filterwarnings = [
112 | "error::qmflows.warnings_qmflows.QMFlows_Warning",
113 | "ignore:Generic keyword '_pytestfixturefunction' not implemented for package \\w+:qmflows.warnings_qmflows.Key_Warning",
114 | ]
115 |
116 | [tool.flake8]
117 | max-line-length = 100
118 | per-file-ignores = [
119 | "nanoqm/workflows/input_validation.py: E704,E501",
120 | "nanoqm/analysis/tools.py: F821",
121 | ]
122 |
123 | [tool.cibuildwheel]
124 | build = [
125 | "cp39-manylinux_x86_64",
126 | "cp39-manylinux-aarch64",
127 | "cp39-macosx_x86_64",
128 | "cp39-macosx_arm64",
129 | ]
130 | before-all = "cp licenses/LICENSE_LIBHDF5.txt licenses/LICENSE_LIBINT2.txt ."
131 | build-frontend = "build"
132 |
133 | [tool.cibuildwheel.linux]
134 | environment = { QMFLOWS_INCLUDEDIR="", QMFLOWS_LIBDIR="", CFLAGS="-Werror", LDFLAGS="-Wl,--strip-debug" }
135 | manylinux-x86_64-image = "ghcr.io/nlesc-nano/manylinux2014_x86_64-qmflows:latest"
136 | manylinux-aarch64-image = "ghcr.io/nlesc-nano/manylinux2014_aarch64-qmflows:latest"
137 | repair-wheel-command = [
138 | "auditwheel -v repair -w {dest_dir} {wheel}",
139 | "pipx run abi3audit --strict --verbose --report {wheel}",
140 | ]
141 |
142 | [tool.cibuildwheel.macos]
143 | environment = { QMFLOWS_INCLUDEDIR="", QMFLOWS_LIBDIR="", LDFLAGS="-Wl", MACOSX_DEPLOYMENT_TARGET="10.14" }
144 | repair-wheel-command = [
145 | "delocate-listdeps {wheel}",
146 | "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel}",
147 | "pipx run abi3audit --strict --verbose --report {wheel}",
148 | ]
149 |
150 | [[tool.cibuildwheel.overrides]]
151 | select = "cp39-macosx_arm64"
152 | environment = { QMFLOWS_INCLUDEDIR="", QMFLOWS_LIBDIR="", LDFLAGS="-Wl", MACOSX_DEPLOYMENT_TARGET="11" }
153 |
--------------------------------------------------------------------------------
/scripts/convert_legacy_hdf5.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """Convert old HDF5 files to the new storage layout."""
4 |
5 | from __future__ import annotations
6 |
7 | import argparse
8 | from itertools import chain
9 | from pathlib import Path
10 | from collections.abc import Iterable
11 |
12 | import h5py
13 | import numpy as np
14 |
15 |
def exists(input_file: str) -> Path:
    """Validate that ``input_file`` names an existing path; return it as a :class:`Path`.

    Raises ``argparse.ArgumentTypeError`` when the path is missing, so it can
    be used directly as an argparse ``type=`` converter.
    """
    candidate = Path(input_file)
    if candidate.exists():
        return candidate
    raise argparse.ArgumentTypeError(f"{input_file} doesn't exist!")
23 |
24 |
class LegacyConverter:
    """Convert legacy HDF5 files to the new storage layout.

    The legacy layout nests every dataset under a project-specific root group
    (next to a ``cp2k`` group); the new layout stores the same data under
    top-level groups such as ``coefficients``, ``eigenvalues`` or ``dipole``.
    """

    def __init__(self, source: h5py.File, dest: h5py.File) -> None:
        """Initialize the converter with an open legacy ``source`` and target ``dest`` file."""
        self.source = source
        self.dest = dest
        self.project = self.get_project_name()

    def get_project_name(self) -> str:
        """Get the project root name."""
        # There are only two names under root: cp2k and the project name
        diff = set(self.source.keys()) - {'cp2k'}
        return diff.pop()

    def copy_data_set(self, old_path: str, new_path: str) -> None:
        """Copy a single data set from old ``source`` to new ``dest``.

        Data sets missing from ``source`` are silently skipped so partially
        populated legacy files can still be converted.
        """
        if old_path in self.source:
            data = self.source[old_path][()]
            self.dest.require_dataset(new_path, shape=np.shape(data), data=data, dtype=np.float32)

    def copy_node_values(self, old_names: Iterable[str], new_names: Iterable[str]) -> None:
        """Copy the data set values from the old file to the new one."""
        for old, new in zip(old_names, new_names):
            self.copy_data_set(old, new)

    def copy_orbitals(self) -> None:
        """Copy molecular orbitals from old ``source`` to new ``dest``."""
        points = [k for k in self.source[self.project].keys() if k.startswith("point_")]
        # A tuple (not a set) keeps the two name generators below paired
        # deterministically; both iterate ``keys`` independently.
        keys = ("coefficients", "eigenvalues", "energy")
        # Legacy layout: "project/point_x/cp2k/mo/<key>"
        old_names = chain.from_iterable(
            [f"{self.project}/{point}/cp2k/mo/{k}" for point in points] for k in keys
        )
        new_names = chain.from_iterable([f"{k}/{point}" for point in points] for k in keys)
        self.copy_node_values(old_names, new_names)

    def copy_couplings(self) -> None:
        """Copy couplings and swap matrix."""
        couplings = [k for k in self.source[self.project].keys() if k.startswith("coupling_")]
        old_names = [f"{self.project}/{cs}" for cs in couplings]
        new_names = couplings
        self.copy_node_values(old_names, new_names)

        # The swap matrix moves from "project/swaps" to the root-level "swaps"
        swaps = f"{self.project}/swaps"
        self.copy_node_values([swaps], ["swaps"])

    def copy_overlaps(self) -> None:
        """Copy the overlaps to the new layout."""
        overlaps = [k for k in self.source[self.project].keys() if k.startswith("overlaps_")]
        # A tuple (not a set) keeps old/new name pairs aligned deterministically
        keys = ("mtx_sji_t0", "mtx_sji_t0_corrected")
        old_names = chain.from_iterable(
            [f"{self.project}/{over}/{k}" for over in overlaps] for k in keys
        )
        new_names = chain.from_iterable([f"{over}/{k}" for over in overlaps] for k in keys)
        self.copy_node_values(old_names, new_names)

    def copy_multipoles(self) -> None:
        """Copy the multipoles to the new layout."""
        multipole = f"{self.project}/multipole"
        if multipole in self.source:
            points = [k for k in self.source[multipole].keys() if k.startswith("point_")]
            old_names = [f"{self.project}/multipole/{p}/dipole" for p in points]
            new_names = [f"dipole/{p}" for p in points]
            self.copy_node_values(old_names, new_names)

    def copy_all(self) -> None:
        """Copy everything that has been stored in the legacy format."""
        # A tuple keeps the copy order deterministic (set iteration order is not)
        for fun in ("copy_orbitals", "copy_couplings", "copy_overlaps", "copy_multipoles"):
            method = getattr(self, fun)
            method()
96 |
97 |
def convert(path_hdf5: Path) -> None:
    """Convert ``path_hdf5`` to the new storage format.

    The original file is first renamed with an ``old_`` prefix; a fresh file
    then takes its place and receives the converted data.
    """
    backup = path_hdf5.rename(f'old_{path_hdf5.name}')

    with h5py.File(path_hdf5, 'a') as dest, h5py.File(backup, 'r') as source:
        LegacyConverter(source, dest).copy_all()
106 |
107 |
def main() -> None:
    """Parse the command line and run the conversion."""
    arg_parser = argparse.ArgumentParser("convert_legacy_hdf5")
    arg_parser.add_argument("input", type=exists, help="HDF5 file to convert")
    namespace = arg_parser.parse_args()
    convert(namespace.input)


if __name__ == "__main__":
    main()
119 |
--------------------------------------------------------------------------------
/scripts/download_cp2k.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bash script for downloading the pre-built CP2K binaries from GitHub.
#
# Usage: download_cp2k.sh <arch> <version>
#   arch     CPU architecture of the binary (e.g. x86_64)
#   version  CP2K release (e.g. 9.1 or 2023.1)
#
# A shebang is required: `set -o pipefail` below is bash-specific.

set -euo pipefail

ARCH="$1"
VERSION="$2"

# After the 9.1 release CP2K switched to a `.` version scheme (e.g. `2021.1`)
if [[ $VERSION =~ [0-9][0-9][0-9][0-9]\.[0-9]+ ]]; then
    PLAT="Linux-gnu"
    VERSION_LONG=v"$VERSION"
else
    PLAT="Linux"
    VERSION_LONG=v"$VERSION".0
fi

echo "Installing CP2K $ARCH $VERSION binaries"
# Quote all expansions so unexpected whitespace cannot split the URL.
curl -Lsf "https://github.com/cp2k/cp2k/releases/download/${VERSION_LONG}/cp2k-${VERSION}-${PLAT}-${ARCH}.ssmp" -o /usr/local/bin/cp2k.ssmp
chmod u+rx /usr/local/bin/cp2k.ssmp
--------------------------------------------------------------------------------
/scripts/get_whl_name.py:
--------------------------------------------------------------------------------
1 | """Find the first file in the passed directory matching a given regex pattern."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | import re
7 | import argparse
8 |
9 |
def main(directory: str | os.PathLike[str], pattern: str | re.Pattern[str]) -> str:
    """Return the path of the first file in *directory* whose name matches *pattern*.

    Raises ``FileNotFoundError`` when no entry matches.
    """
    compiled = re.compile(pattern)
    for name in os.listdir(directory):
        if compiled.search(name) is not None:
            return os.path.join(directory, name)
    raise FileNotFoundError(
        f"Failed to identify a file in {os.fspath(directory)!r} "
        f"with the following pattern: {compiled!r}"
    )
20 |
21 |
if __name__ == "__main__":
    # CLI entry point: a positional search directory and a regex pattern;
    # prints the first matching path (see the module docstring for usage).
    parser = argparse.ArgumentParser(
        usage="python get_whl_name.py . manylinux2014_x86_64", description=__doc__
    )
    parser.add_argument("directory", help="The to-be searched directory")
    parser.add_argument("pattern", help="The to-be searched regex pattern")

    args = parser.parse_args()
    print(main(args.directory, args.pattern))
31 |
--------------------------------------------------------------------------------
/scripts/hamiltonians/plot_couplings.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """This programs plots the electronic coupling between two states.
3 |
4 | It reads all Ham_*_im files and cache them in a tensor saved on disk.
5 | Usage:
6 | plot_couplings.py -p . -s1 XX -s2 YY -dt 1.0
7 |
8 | p = path to the hamiltonian files
9 | s1 = state 1 index
10 | s2 = state 2 index
11 | dt = time step in fs
12 | """
13 |
14 | from __future__ import annotations
15 |
16 | import numpy as np
17 | import matplotlib.pyplot as plt
18 | import argparse
19 | import glob
20 | import os.path
21 |
r2meV = 13605.698  # Conversion factor: Rydberg to meV
23 |
24 |
def main(path_output: str, s1: int, s2: int, dt: float) -> None:
    """Plot the imaginary Hamiltonian (coupling) element between states *s1* and *s2*.

    ``path_output`` is the folder holding the ``Ham_*_im`` files; previously it
    was ignored and files were always read from the current working directory.
    The parsed couplings are cached in ``couplings.npy`` (current directory) so
    subsequent invocations skip the slow text parsing.
    """
    # Check if the file with couplings exists
    if not os.path.isfile('couplings.npy'):
        # Count the Hamiltonian files stored in the requested folder
        files_im = glob.glob(os.path.join(path_output, 'Ham_*_im'))
        # Read the couplings; the files are named Ham_0_im, Ham_1_im, ...
        couplings = np.stack(
            [np.loadtxt(os.path.join(path_output, f'Ham_{f}_im')) for f in range(len(files_im))]
        )
        # Save the file for fast reading afterwards
        np.save('couplings', couplings)
    else:
        couplings = np.load('couplings.npy')
    ts = np.arange(couplings.shape[0]) * dt
    plt.plot(ts, couplings[:, s1, s2] * r2meV)
    plt.xlabel('Time (fs)')
    plt.ylabel('Energy (meV)')
    plt.show()
43 |
44 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, int, int, float]:
    """Extract the ``(path, s1, s2, dt)`` options from the command line."""
    opts = parser.parse_args()
    return opts.p, opts.s1, opts.s2, opts.dt
51 |
52 |
if __name__ == "__main__":
    msg = "plot_decho -p -s1 -s2 \
            -dt "

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-p', required=True,
                        help='path to the Hamiltonian files in Pyxaid format')
    parser.add_argument('-s1', required=True, type=int,
                        help='Index of the first state')
    parser.add_argument('-s2', required=True, type=int,
                        help='Index of the second state')
    # Fixed copy-pasted help text: -dt is the MD time step, not a state index.
    parser.add_argument('-dt', type=float, default=1.0,
                        help='Time step in fs')
    main(*read_cmd_line(parser))
67 |
--------------------------------------------------------------------------------
/scripts/hamiltonians/plot_mos_energies.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """This program plots the energies of each kohn-sham state along the MD trajectory
3 |
4 | Note that you have to provide the location of the folder where the NAMD
5 | hamiltonian elements are stored using the -p flag
6 | """
7 |
8 | from __future__ import annotations
9 |
10 | import glob
11 | import argparse
12 | import os
13 | from typing import TYPE_CHECKING
14 |
15 | import numpy as np
16 | import matplotlib.pyplot as plt
17 | from nanoqm.analysis import read_energies
18 |
19 | if TYPE_CHECKING:
20 | from numpy.typing import NDArray
21 | from numpy import float64 as f8
22 |
23 |
def plot_stuff(
    energies: NDArray[f8],
    ts: int,
    ihomo: int,
    nhomos: int,
    nlumos: int,
) -> None:
    """Plot the selected Kohn-Sham orbital energies versus time step.

    Only the orbital window ``[ihomo - nhomos, ihomo + nlumos)`` is drawn.
    The figure is written to ``MOs_energies.png`` before being displayed.
    """
    time_axis = np.arange(ts)
    window = energies[:, ihomo - nhomos: ihomo + nlumos]

    plt.xlabel('Time (fs)')
    plt.ylabel('Energy (eV)')
    plt.plot(time_axis, window)
    plt.savefig("MOs_energies.png", format='png', dpi=300)
    plt.show()
44 |
45 |
def main(path_hams: str, ts: str, ihomo: int, nhomos: int, nlumos: int) -> None:
    """Read the orbital energies and plot the requested window.

    ``ts`` is either the literal string ``'All'`` (use every ``Ham_*_re`` file
    found in *path_hams*) or the number of time steps, given as a string.
    """
    if ts == 'All':
        n_steps = len(glob.glob(os.path.join(path_hams, 'Ham_*_re')))
    else:
        n_steps = int(ts)
    plot_stuff(read_energies(path_hams, n_steps), n_steps, ihomo, nhomos, nlumos)
54 |
55 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, str, int, int, int]:
    """Extract the ``(path, ts, ihomo, nhomos, nlumos)`` options from the command line."""
    opts = parser.parse_args()
    return opts.p, opts.ts, opts.ihomo, opts.nhomos, opts.nlumos
62 |
63 |
64 | if __name__ == "__main__":
65 |
66 | msg = "plot_decho -p -ts \
67 | -nhomos \
68 | -nlumos NDArray[f8]:
32 | inpfile = os.path.join(path, fn)
33 | cols = tuple(range(5, nstates * 2 + 5, 2))
34 | xs = np.loadtxt(f'{inpfile}', usecols=cols)
35 | return xs
36 |
37 |
def main(path_output: str, nstates: int, iconds: list[int], excess: float, delta: float) -> None:
    """Report, per initial condition, the states whose excess energy lies in ``excess ± delta``.

    The result is written to ``initial_conditions.out`` in the current directory.
    """
    # Read the state energies (column 0 of the parsed data is the ground state)
    energies = read_energies(path_output, 'me_energies0', nstates)

    # Lowest excitation (HOMO-LUMO gap) at each time step, kept as a column vector
    gap = np.amin(energies[:, 1:], axis=1).reshape(-1, 1)

    # Energies relative to the lowest excitation at the same time step
    en_scaled = energies[:, 1:] - gap

    # Indices of the states inside the requested excess-energy window
    lo, hi = excess - delta, excess + delta
    matches = [np.where((en_scaled[ic] > lo) & (en_scaled[ic] < hi)) for ic in iconds]

    # Assemble and write the report
    lines = ['Time Init Cond List with State Indexes\n']
    for ic, match in zip(iconds, matches):
        lines.append(f' {ic} {match[0] + 1}\n')

    with open('initial_conditions.out', 'w', encoding="utf8") as f:
        f.write(''.join(lines))
62 |
63 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, int, list[int], float, float]:
    """Extract the ``(path, nstates, iconds, excess, delta)`` options from the command line."""
    opts = parser.parse_args()
    return opts.p, opts.nstates, opts.iconds, opts.excess, opts.delta
70 |
71 |
# ============<>===============
if __name__ == "__main__":
    # CLI entry point: select states within a given excess-energy window.
    msg = "plot_states_pops -p \
    -nstates \
    -iconds \
    -excess \
    -delta "

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-p', required=True,
                        help='path to the Hamiltonian files in Pyxaid format')
    parser.add_argument('-nstates', type=int, required=True,
                        help='Number of states')
    parser.add_argument('-iconds', nargs='+', type=int, required=True,
                        help='List of initial conditions')
    parser.add_argument('-excess', type=float, required=True,
                        help='Excess energy in eV')
    parser.add_argument('-delta', type=float, required=True,
                        help='Delta Energy around excess')
    main(*read_cmd_line(parser))
92 |
--------------------------------------------------------------------------------
/scripts/pyxaid/plot_average_energy.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | This program plots the average electronic energy during a NAMD simulatons
4 | averaged over several initial conditions.
5 | It plots both the SH and SE population based energies.
6 |
7 | Example:
8 |
9 | plot_average_energy.py -p . -nstates 26 -nconds 6
10 |
11 | Note that the number of states is the same as given in the pyxaid output.
12 | It must include the ground state as well.
13 | """
14 |
15 | from __future__ import annotations
16 |
17 | import os
18 | import argparse
19 | from typing import TYPE_CHECKING
20 |
21 | import numpy as np
22 | import matplotlib.pyplot as plt
23 |
24 | if TYPE_CHECKING:
25 | from numpy.typing import NDArray
26 | from numpy import float64 as f8
27 |
28 |
def plot_stuff(outs: NDArray[f8], pops: NDArray[f8]) -> None:
    """Plot the SH (``outs``) and SE (``pops``) averaged energies versus time.

    Both curves are drawn in one figure, which is saved to
    ``Average_Energy.png`` and then displayed.
    """
    dim_x = np.arange(outs.shape[0])

    plot = np.column_stack((outs, pops))
    plt.xlabel('Time (fs)')
    plt.ylabel('Energy (eV)')
    plt.plot(dim_x, plot[:, 0:])

    fileName = "Average_Energy.png"

    # Save BEFORE plt.show(): show() consumes the current figure, so saving
    # afterwards writes an empty image (the sibling scripts save first too).
    plt.savefig(fileName, format='png', dpi=300)
    plt.show()
44 |
45 |
def read_energies(path: str, fn: str, nstates: int, nconds: int) -> NDArray[f8]:
    """Read the state energies of every initial condition from the ``<path>/<fn><j>`` files.

    Rows = timeframes ; Columns = states ; tensor = initial conditions.
    """
    prefix = os.path.join(path, fn)
    # Energies sit in every other column starting at column 5
    wanted = tuple(range(5, nstates * 2 + 5, 2))
    per_cond = [
        np.loadtxt(f'{prefix}{j}', usecols=wanted, dtype=np.float64) for j in range(nconds)
    ]
    stacked = np.stack(per_cond).T
    return stacked.swapaxes(0, 1)
55 |
56 |
def read_pops(path: str, fn: str, nstates: int, nconds: int) -> NDArray[f8]:
    """Read the state populations of every initial condition from the ``<path>/<fn><j>`` files.

    Rows = timeframes ; Columns = states ; tensor = initial conditions.
    """
    prefix = os.path.join(path, fn)
    # Populations sit in every other column starting at column 3
    wanted = tuple(range(3, nstates * 2 + 3, 2))
    per_cond = [
        np.loadtxt(f'{prefix}{j}', usecols=wanted, dtype=np.float64) for j in range(nconds)
    ]
    return np.stack(per_cond).T.swapaxes(0, 1)
66 |
67 |
def main(path_output: str, nstates: int, nconds: int) -> None:
    """Average the population-weighted electronic energy over initial conditions and plot it."""
    sh_weights = read_pops(path_output, 'out', nstates, nconds)
    se_weights = read_pops(path_output, 'me_pop', nstates, nconds)
    energies = read_energies(path_output, 'me_energies', nstates, nconds)

    def ensemble_energy(weights):
        # Sum the weighted state energies, then average over initial conditions
        return np.average(np.sum(energies * weights, axis=1), axis=1)

    el_ene_sh = ensemble_energy(sh_weights)
    el_ene_se = ensemble_energy(se_weights)

    # Reference both curves to the lowest excitation energy so that the
    # cooling converges to 0.
    gap = np.average(np.amin(energies[:, 1:, :], axis=1), axis=1)

    plot_stuff(el_ene_sh - gap, el_ene_se - gap)
87 |
88 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, int, int]:
    """Extract the ``(path, nstates, nconds)`` options from the command line."""
    opts = parser.parse_args()
    return opts.p, opts.nstates, opts.nconds
95 |
96 |
# ============<>===============
if __name__ == "__main__":
    # CLI entry point: average electronic energy over initial conditions.
    msg = "plot_states_pops -p \
    -nstates \
    -nconds "

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-p', required=True,
                        help='path to the Hamiltonian files in Pyxaid format')
    parser.add_argument('-nstates', type=int, required=True,
                        help='Number of states')
    parser.add_argument('-nconds', type=int, required=True,
                        help='Number of initial conditions')
    main(*read_cmd_line(parser))
111 |
--------------------------------------------------------------------------------
/scripts/pyxaid/plot_states_pops.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
"""This program reads the output files, out and me_pop, from a NAMD simulation run with pyxaid.
It averages the populations of each state upon several initial conditions and
4 | It average the populations of each state upon several initial conditions and
5 | also allows to define macrostates.
6 | A macrostate is formed by a group of (micro)states indexed according to pyxaid numbering.
7 | For example, if you have donor-acceptor system, you can group all (micro)states formed by
8 | excited states localized within the donor/acceptor and or charge transfer states
9 | into a few macrostates. This helps the analysis of the population traces when a large
10 | number of excited states is included in the NAMD simulation.
11 |
12 | Example:
13 |
14 | plot_states_pops.py -p . -ms "[ [0], [1,3], [4,6] ]" -nconds 6
15 |
16 | This means that the path with the output files is the current path '.' .
17 | The number of initial conditions over which the populations are averaged are 6.
18 | The macrostates are defined as a list of lists. The [0] is the ground state, [1,3]
19 | form a macrostate of states 1 and 3 indexed as in pyxaid, [4,6] forms
20 | a macrostates of states 4 and 6.
21 | """
22 |
23 | from __future__ import annotations
24 |
25 | import os
26 | import argparse
27 | from typing import TYPE_CHECKING
28 |
29 | import numpy as np
30 | import matplotlib.pyplot as plt
31 | from nanoqm.analysis import parse_list_of_lists
32 |
33 | if TYPE_CHECKING:
34 | from numpy.typing import NDArray
35 | from numpy import float64 as f8
36 |
37 |
def plot_stuff(outs: NDArray[f8], pops: NDArray[f8]) -> None:
    """Plot the SH (left panel) and SE (right panel) populations versus time.

    The figure is saved to ``State Population.png`` and then displayed.
    """
    dim_x = np.arange(outs.shape[0])

    panels = (
        (121, 'SH Population ', outs),
        (122, 'SE Population ', pops),
    )
    for position, title, data in panels:
        axis = plt.subplot(position)
        axis.set_title(title)
        axis.set_xlabel('Time (fs)')
        axis.set_ylabel('Population')
        axis.plot(dim_x, data[0:, :])

    plt.savefig("State Population.png", format='png', dpi=300)

    plt.show()
58 |
59 |
def read_populations(path: str, fn: str, nconds: int, ms: list[list[int]]) -> list[NDArray[f8]]:
    """Read, for every macrostate in *ms*, the populations of all its microstates.

    Each returned entry stacks the ``nconds`` files ``<path>/<fn><j>`` along
    axis 0; microstate index ``x`` maps to text column ``x * 2 + 3``.
    """
    prefix = os.path.join(path, fn)
    result = []
    for macrostate in ms:
        usecols = tuple(x * 2 + 3 for x in macrostate)
        traces = [
            np.loadtxt(f'{prefix}{j}', usecols=usecols, dtype=np.float64)
            for j in range(nconds)
        ]
        result.append(np.stack(traces))
    return result
69 |
70 |
def _collapse_macrostates(avgs: list[NDArray[f8]]) -> NDArray[f8]:
    """Sum each macrostate's microstate populations and stack macrostates column-wise."""
    collapsed = [x if x.ndim == 1 else np.sum(x, axis=1) for x in avgs]
    return np.array(collapsed).T


def main(path_output: str, ms: list[list[int]], nconds: int) -> None:
    """Average the SH/SE populations over initial conditions and plot them per macrostate.

    The identical SH and SE post-processing loops of the original were
    deduplicated into :func:`_collapse_macrostates`.
    """
    outs = read_populations(path_output, 'out', nconds, ms)
    pops = read_populations(path_output, 'me_pop', nconds, ms)

    # Average over initial conditions (axis 0 of every stacked array)
    outs_avg = [np.average(out, axis=0) for out in outs]
    pops_avg = [np.average(pop, axis=0) for pop in pops]

    plot_stuff(_collapse_macrostates(outs_avg), _collapse_macrostates(pops_avg))
96 |
97 |
def read_cmd_line(parser) -> tuple[str, str, int]:
    """Extract the ``(path, macrostates, nconds)`` options from the command line."""
    opts = parser.parse_args()
    return opts.p, opts.ms, opts.nconds
104 |
105 |
# ============<>===============
if __name__ == "__main__":
    # CLI entry point: the -ms string is parsed into a list of lists before use.
    msg = "plot_states_pops -p \
    -ms \
    -nconds "

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-p', required=True,
                        help='path to the Hamiltonian files in Pyxaid format')
    parser.add_argument('-ms', type=str, required=True,
                        help='Macrostate defined as a list of microstates')
    parser.add_argument('-nconds', type=int, required=True,
                        help='Number of initial conditions')

    p, ms, nconds = read_cmd_line(parser)
    main(p, parse_list_of_lists(ms), nconds)
122 |
--------------------------------------------------------------------------------
/scripts/qmflows/README.rst:
--------------------------------------------------------------------------------
1 | Using coordination_ldos.py
2 | --------------------------
3 |
4 | The script prints local PDOS projected on subsets of atoms given through lists.
These lists are obtained using the nano-CAT module ``nanoCAT.recipes.coordination_number`` (see the corresponding documentation_),
which returns a nested dictionary
7 |
8 | ``{'Cd': {4: [0, 1, 2, 3, 4, ...], ...}, ...}``
9 |
10 | with atomic symbol (*e.g.* ``'Cd'``) and coordination number (*e.g.* ``4``) as keys.
11 |
12 |
13 | You thus have to install the nano-CAT package in your conda environment according to the installation instructions, reported here_.
14 |
15 | .. _documentation: https://cat.readthedocs.io/en/latest/12_5_recipes.html
16 | .. _here: https://github.com/nlesc-nano/nano-CAT
17 |
--------------------------------------------------------------------------------
/scripts/qmflows/convolution.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
""" This script convolutes the calculated oscillator strengths for different snapshots \
in a MD trajectory and plots the average spectrum.
4 |
5 | Usage:
6 | convolution.py -sigma 0.05 -n 2 -nm True -write True
7 | Use the sigma flag to change the sigma parameter of the gaussian functions
8 | used in the convolution. If you use the n flag you imply that you want to plot
only the spectrum of snapshot number n (starting from 0) instead of an average
over all structures.
11 | Use the nm to obtain a plot in nm scale.
12 | You can choose to write the coordinates obtained from the convolution with the write flag.
13 | """
14 |
15 | from __future__ import annotations
16 |
17 | import argparse
18 | import glob
19 |
20 | import matplotlib.pyplot as plt
21 | import numpy as np
22 |
23 | from nanoqm.analysis import convolute
24 |
25 |
def main(sigma: float, n: int, nm: bool, write: bool) -> None:
    """Convolute and plot the absorption spectra found in ``output_*.txt``.

    sigma -- width of the gaussians used in the convolution
    n     -- if given, plot only the spectrum of snapshot *n*; otherwise
             average over all snapshots
    nm    -- plot the x axis in nm instead of eV
    write -- write the convoluted spectrum to ``convolution.txt``
    """
    # Check output files
    files = sorted(glob.glob('output_*.txt'))

    if n is None:
        # Define energy window for the plot
        energy = np.loadtxt(files[0], usecols=1)
        emax = energy[-1] + 0.5
        emin = energy[0] - 0.5
        x_grid = np.linspace(emin, emax, 800)
        y_grid = np.empty([x_grid.size, 0])
        for f in files:
            # Read transition energies and oscillator strengths
            data = np.loadtxt(f, usecols=(1, 2))
            # Convolute each spectrum at a time
            ys = convolute(data[:, 0], data[:, 1], x_grid, sigma)
            # Stack the spectra in a matrix
            y_grid = np.column_stack((y_grid, ys))
        # Average them
        y_grid = np.sum(y_grid, axis=1) / len(files)

    else:
        # Read transition energies and oscillator strengths
        data = np.loadtxt(files[n], usecols=(1, 2))
        # Define energy window for the plot
        emax = data[-1, 0] + 0.5
        emin = data[0, 0] - 0.5
        x_grid = np.linspace(emin, emax, 800)
        # Convolute
        y_grid = convolute(data[:, 0], data[:, 1], x_grid, sigma)

    # Convert to nm if requested
    if nm:
        x_grid = 1240/x_grid

    # plot
    plt.plot(x_grid, y_grid)
    if nm:
        # Fixed typo in the user-facing axis label ("Wavelenght")
        plt.xlabel('Wavelength[nm]')
    else:
        plt.xlabel('Energy[eV]')
    plt.ylabel('Oscillator strength')
    plt.show()

    # Write convoluted coordinates if requested
    if write:
        output = np.empty((len(x_grid), 2))
        output[:, 0] = x_grid
        output[:, 1] = y_grid
        fmt = '{:^10s}{:^10s}'
        if nm:
            header = fmt.format(
                'nm', 'f')
            # Reverse so the written wavelengths are in ascending order
            output = output[::-1]
        else:
            header = fmt.format(
                'eV', 'f')
        np.savetxt('convolution.txt', output,
                   fmt='%11.3f %10.5f', header=header)
85 |
86 |
def read_cmd_line(parser) -> tuple[float, int, bool, bool]:
    """Extract the ``(sigma, n, nm, write)`` options from the command line."""
    opts = parser.parse_args()
    return opts.sigma, opts.n, opts.nm, opts.write
93 |
94 |
# ============<>===============
if __name__ == "__main__":

    msg = """convolution.py -sigma \
    -n \
    -nm -write """

    def _str2bool(value: str) -> bool:
        """Parse a CLI boolean such as '-nm True'.

        ``type=bool`` is broken in argparse: ``bool('False')`` is True because
        any non-empty string is truthy, so '-nm False' used to ENABLE the flag.
        """
        return value.strip().lower() in {'true', '1', 'yes'}

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-sigma', default=0.1, type=float,
                        help='Sigma parameter of the gaussian functions')
    parser.add_argument('-n', default=None, type=int,
                        help='Plot only the spectrum of the structure number n')
    parser.add_argument('-nm', default=False, type=_str2bool,
                        help='Convert the x axis in nm')
    parser.add_argument('-write', default=False, type=_str2bool,
                        help='Write the coordinates from the convolution')

    main(*read_cmd_line(parser))
113 |
--------------------------------------------------------------------------------
/scripts/qmflows/dos_cp2k.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """This programs performs a density of states (DOS) calculation with cp2k \
3 | using generic settings, i.e. DFT/PBE
4 |
Note that it is mandatory to define a cell_parameter and an xyz structure.
If you have a restart file or a non-default basis set, you can also define
them on the command line.
8 | It assumes that the basis and pot files are in $HOME/cp2k_basis folder
9 | in your home, which can be changed)
10 | It assumes a DZVP by default, which can be also changed
11 |
12 | It is always advised to submit the script using a JOB Manager like Slurm
13 | """
14 |
15 | from __future__ import annotations
16 |
17 | import argparse
18 | import os
19 | from os.path import join
20 |
21 | from nanoqm import logger
22 | from qmflows import cp2k, run, templates
23 | from scm.plams import Molecule
24 |
25 |
def main(file_xyz: str, cell: str, restart: str, basis: str, basis_folder: str) -> None:
    """Run a CP2K single-point calculation configured for a PDOS printout.

    file_xyz     -- path to the xyz geometry
    cell         -- cell parameters of the periodic box
    restart      -- path to a wavefunction restart file ('' for none)
    basis        -- basis-set name (e.g. DZVP-MOLOPT-SR-GTH)
    basis_folder -- folder holding the BASIS_MOLOPT and GTH_POTENTIALS files
    """
    molecule = Molecule(file_xyz)

    # Location of the basis-set and pseudopotential files
    basis_file = join(basis_folder, "BASIS_MOLOPT")
    potential_file = join(basis_folder, "GTH_POTENTIALS")

    # Start from the generic single-point template and tailor it; the MO
    # printout is dropped since only the PDOS output is of interest here.
    # NOTE: the assignment order below is kept as in the original, since it
    # determines the order of the generated CP2K input sections.
    settings = templates.singlepoint
    del settings.specific.cp2k.force_eval.dft.print.mo
    settings.basis = basis
    settings.potential = "GTH-PBE"
    settings.cell_parameters = cell
    dft = settings.specific.cp2k.force_eval.dft
    dft.basis_set_file_name = basis_file
    dft.potential_file_name = potential_file
    dft.print.pdos.nlumo = '1000'
    dft.wfn_restart_file_name = f'{restart}'
    dft.scf.ot.minimizer = 'DIIS'
    dft.scf.ot.n_diis = 7
    dft.scf.ot.preconditioner = 'FULL_SINGLE_INVERSE'
    settings.specific.cp2k['global']['run_type'] = 'energy'

    # Run the calculation with CP2K and report the resulting energy
    result = run(cp2k(settings, molecule))
    logger.info(result.energy)
62 |
63 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, str, str, str, str]:
    """Extract the ``(xyz, cell, restart, basis, bas_fold)`` options from the command line."""
    opts = parser.parse_args()
    return opts.xyz, opts.cell, opts.restart, opts.basis, opts.bas_fold
70 |
71 |
# ============<>===============
if __name__ == "__main__":
    # CLI entry point: defaults assume the basis files live in ~/cp2k_basis.

    msg = "plot_decho -xyz -cell \
    -restart \
    -basis \
    -bas_fold "

    home = os.path.expanduser('~')

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-xyz', required=True, help='path to the xyz file')
    parser.add_argument('-cell', required=True, help='Size of the cell')
    parser.add_argument('-restart', type=str, default='', help='path to restart file name')
    parser.add_argument('-basis', type=str, default='DZVP-MOLOPT-SR-GTH',
                        help='Basis-set name')
    parser.add_argument('-bas_fold', type=str, default=join(home, 'cp2k_basis'),
                        help='Location of basis set files')

    main(*read_cmd_line(parser))
92 |
--------------------------------------------------------------------------------
/scripts/qmflows/mergeHDF5.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 |
3 | """
4 | This program merges the HDF5 files obtained from the SCF calculations when
5 | the MD trajectory has been split in more than one block.
6 | Example:
7 |
8 | mergeHDF5.py -i chunk_a.hdf5 chunk_b.hdf5 chunk_c.hdf5 -o total.hdf5
9 |
10 | An empty total.hdf5 file should be already available before using the script.
11 | """
12 |
13 | from __future__ import annotations
14 |
15 | import argparse
16 | import h5py
17 | import os
18 |
# ====================================<>=======================================
msg = " script -i -o "

# Module-level CLI parser, shared by read_cmd_line(): -i takes one or more
# input HDF5 paths, -o the target file.
parser = argparse.ArgumentParser(description=msg)
parser.add_argument('-i', required=True,
                    help='Path(s) to the HDF5 to merge', nargs='+')
# Fixed typo in user-facing help text: "were" -> "where"
parser.add_argument('-o', required=True,
                    help='Path to the HDF5 where the merge is going to be stored')
27 |
28 |
def read_cmd_line() -> tuple[list[str], str]:
    """Parse the command line via the module-level ``parser``.

    Returns the list of input HDF5 paths (``-i``, declared with ``nargs='+'``
    and therefore a list — the previous ``tuple[str, str]`` annotation was
    wrong) and the output path (``-o``).
    """
    args = parser.parse_args()
    inp = args.i
    out = args.o

    return inp, out
# ===============><==================
39 |
40 |
def mergeHDF5(inp: str, out: str) -> None:
    """Recursively merge the HDF5 file *inp* into the existing file *out*."""
    with h5py.File(inp, 'r') as source, h5py.File(out, 'r+') as target:
        merge_recursively(source, target)
45 |
46 |
def merge_recursively(f: h5py.File, g: h5py.File) -> None:
    """Walk the group tree of *f* and copy into *g* every node missing there.

    Groups that already exist in *g* are descended into; datasets already
    present in *g* are left untouched.
    """
    for key in f.keys():
        node = f[key]
        if key in g:
            # Already present: only groups need a recursive descent
            if isinstance(node, h5py.Group):
                merge_recursively(node, g[key])
        elif isinstance(node, h5py.Dataset):
            f.copy(key, g)
        else:
            g.create_group(key)
            merge_recursively(node, g[key])
58 |
59 |
def main() -> None:
    """Merge every input HDF5 file into the output file, creating it if needed."""
    inps, out = read_cmd_line()
    if not os.path.exists(out):
        # A 0-byte file created with ``touch`` is not a valid HDF5 container
        # and ``h5py.File(out, 'r+')`` in ``mergeHDF5`` fails on it ("unable
        # to read superblock"); create a proper empty HDF5 file instead.
        with h5py.File(out, 'w'):
            pass
    for i in inps:
        mergeHDF5(i, out)
66 |
67 |
def touch(fname: str, times: tuple[float, float] | None = None) -> None:
    """Equivalent to the unix ``touch`` command.

    Create *fname* when it does not exist and update its access and
    modification times — to *times* when given, otherwise to "now".
    """
    handle = open(fname, 'a', encoding="utf8")
    with handle:
        os.utime(fname, times)
72 |
73 |
74 | if __name__ == "__main__":
75 | main()
76 |
--------------------------------------------------------------------------------
/scripts/qmflows/opt_anion_cp2k.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | This programs performs a geometry optimization on the anion of a closed-shell
4 | molecule with cp2k using generic settings: DFT/PBE.
5 |
Note that it is mandatory to define a cell_parameter and an xyz structure.
A restart file and a basis set can also be defined on the command line.
8 | It assumes that the basis and pot files are in $HOME/cp2k_basis
9 | folder in your home, which can be changed)
10 | It assumes a DZVP by default, which can be also changed
11 |
12 | It is always advised to submit the script using a JOB Manager like Slurm
13 | """
14 |
15 | from __future__ import annotations
16 |
17 | from qmflows import (cp2k, run, templates)
18 | from scm.plams import Molecule
19 | import argparse
20 | from os.path import join
21 | import os
22 |
23 | from nanoqm import logger
24 |
25 |
def main(file_xyz: str, cell: str, restart: str, basis: str, basis_folder: str) -> None:
    """Optimize the geometry of the -1 anion (UKS doublet) of the given molecule.

    Parameters
    ----------
    file_xyz : path to the xyz file with the initial geometry.
    cell : cell parameters for the periodic CP2K calculation.
    restart : path to a CP2K wavefunction restart file ('' for none).
    basis : basis-set name, e.g. ``DZVP-MOLOPT-SR-GTH``.
    basis_folder : folder containing the BASIS_MOLOPT and GTH_POTENTIALS files.
    """
    system = Molecule(file_xyz)

    # Set path for basis set
    basisCP2K = join(basis_folder, "BASIS_MOLOPT")
    potCP2K = join(basis_folder, "GTH_POTENTIALS")

    # Settings specifics
    s = templates.geometry
    s.basis = basis
    s.potential = "GTH-PBE"
    s.cell_parameters = cell
    s.specific.cp2k.force_eval.dft.basis_set_file_name = basisCP2K
    s.specific.cp2k.force_eval.dft.potential_file_name = potCP2K
    # Unrestricted KS with charge -1 and doublet multiplicity: the anion.
    s.specific.cp2k.force_eval.dft.uks = ''
    s.specific.cp2k.force_eval.dft.charge = '-1'
    s.specific.cp2k.force_eval.dft.multiplicity = '2'
    s.specific.cp2k.force_eval.dft.wfn_restart_file_name = f'{restart}'

    # =======================
    # Compute OPT files with CP2k
    # =======================

    result = run(cp2k(s, system))

    # ======================
    # Output the results
    # ======================

    logger.info(result.energy)
57 |
58 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, str, str, str, str]:
    """Extract the command-line options from *parser*.

    Returns the xyz path, cell parameters, restart file, basis-set name and
    basis-set folder, in that order.
    """
    opts = parser.parse_args()
    return (opts.xyz, opts.cell, opts.restart, opts.basis, opts.bas_fold)
65 |
66 |
67 | # ============<>===============
if __name__ == "__main__":

    # Help/description text shown by argparse. It previously named the
    # unrelated ``plot_decho`` script; use this script's actual name.
    msg = "opt_anion_cp2k.py -xyz -cell -restart -basis -bas_fold"

    home = os.path.expanduser('~')

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument(
        '-xyz', required=True, help='path to the xyz file')
    parser.add_argument(
        '-cell', required=True, help='Size of the cell')
    parser.add_argument(
        '-restart', type=str, default='', help='path to restart file name')
    parser.add_argument(
        '-basis', type=str, default='DZVP-MOLOPT-SR-GTH', help='Basis-set name')
    parser.add_argument(
        '-bas_fold', type=str, default=join(home, 'cp2k_basis'),
        help='Location of basis set files')

    main(*read_cmd_line(parser))
91 |
--------------------------------------------------------------------------------
/scripts/qmflows/opt_cation_cp2k.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | This programs performs a geometry optimization of the cation of a closed-shell
4 | molecule with cp2k using generic settings: DFT/PBE.
5 |
Note that it is mandatory to define a cell_parameter and an xyz structure.
A restart file and a basis set can also be defined
on the command line.
9 | It assumes that the basis and pot files are in $HOME/cp2k_basis
10 | folder in your home, which can be changed)
11 | It assumes a DZVP by default, which can be also changed
12 |
13 | It is always advised to submit the script using a JOB Manager like Slurm
14 | """
15 |
16 | from __future__ import annotations
17 |
18 | from qmflows import (cp2k, run, templates)
19 | from scm.plams import Molecule
20 | import argparse
21 | from os.path import join
22 | import os
23 |
24 | from nanoqm import logger
25 |
26 |
def main(file_xyz: str, cell: str, restart: str, basis: str, basis_folder: str) -> None:
    """Optimize the geometry of the +1 cation (UKS doublet) of the given molecule.

    Parameters
    ----------
    file_xyz : path to the xyz file with the initial geometry.
    cell : cell parameters for the periodic CP2K calculation.
    restart : path to a CP2K wavefunction restart file ('' for none).
    basis : basis-set name, e.g. ``DZVP-MOLOPT-SR-GTH``.
    basis_folder : folder containing the BASIS_MOLOPT and GTH_POTENTIALS files.
    """
    system = Molecule(file_xyz)

    # Set path for basis set
    basisCP2K = join(basis_folder, "BASIS_MOLOPT")
    potCP2K = join(basis_folder, "GTH_POTENTIALS")

    # Settings specifics
    s = templates.geometry
    s.basis = basis
    s.potential = "GTH-PBE"
    s.cell_parameters = cell
    s.specific.cp2k.force_eval.dft.basis_set_file_name = basisCP2K
    s.specific.cp2k.force_eval.dft.potential_file_name = potCP2K
    # Unrestricted KS with charge +1 and doublet multiplicity: the cation.
    s.specific.cp2k.force_eval.dft.uks = ''
    s.specific.cp2k.force_eval.dft.charge = '1'
    s.specific.cp2k.force_eval.dft.multiplicity = '2'
    s.specific.cp2k.force_eval.dft.wfn_restart_file_name = f'{restart}'

    # =======================
    # Compute OPT files with CP2k
    # =======================

    result = run(cp2k(s, system))

    # ======================
    # Output the results
    # ======================

    logger.info(result.energy)
58 |
59 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, str, str, str, str]:
    """Parse the command line options.

    Returns the xyz path, cell parameters, restart file, basis-set name and
    basis-set folder, in that order.
    """
    args = parser.parse_args()
    return (args.xyz, args.cell, args.restart, args.basis, args.bas_fold)
66 |
67 |
68 | # ============<>===============
if __name__ == "__main__":

    # Help/description text shown by argparse. It previously named the
    # unrelated ``plot_decho`` script; use this script's actual name.
    msg = "opt_cation_cp2k.py -xyz -cell -restart -basis -bas_fold"

    home = os.path.expanduser('~')

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument(
        '-xyz', required=True, help='path to the xyz file')
    parser.add_argument(
        '-cell', required=True, help='Size of the cell')
    parser.add_argument(
        '-restart', type=str, default='', help='path to restart file name')
    parser.add_argument(
        '-basis', type=str, default='DZVP-MOLOPT-SR-GTH', help='Basis-set name')
    parser.add_argument(
        '-bas_fold', type=str, default=join(home, 'cp2k_basis'),
        help='Location of basis set files')

    main(*read_cmd_line(parser))
92 |
--------------------------------------------------------------------------------
/scripts/qmflows/opt_cp2k.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | This programs performs a geometry optimization with cp2k using generic settings.
4 |
Note that it is mandatory to define a cell_parameter and an xyz structure.
A restart file and a basis set can also be defined
on the command line.
8 | It assumes that the basis and pot files are in $HOME/cp2k_basis folder
9 | in your home, which can be changed)
10 | It assumes a DZVP by default, which can be also changed
11 |
12 | It is always advised to submit the script using a JOB Manager like Slurm
13 | """
14 |
15 | from __future__ import annotations
16 |
17 | import argparse
18 | import os
19 | from os.path import join
20 |
21 | from qmflows import cp2k, run, templates
22 | from scm.plams import Molecule
23 | from nanoqm import logger
24 |
25 |
def main(file_xyz: str, cell: str, restart: str, basis: str, basis_folder: str) -> None:
    """Optimize the geometry of the neutral molecule with CP2K (DFT/PBE).

    Parameters
    ----------
    file_xyz : path to the xyz file with the initial geometry.
    cell : cell parameters for the periodic CP2K calculation.
    restart : path to a CP2K wavefunction restart file ('' for none).
    basis : basis-set name, e.g. ``DZVP-MOLOPT-SR-GTH``.
    basis_folder : folder containing the BASIS_MOLOPT and GTH_POTENTIALS files.
    """
    system = Molecule(file_xyz)

    # Set path for basis set
    basisCP2K = join(basis_folder, "BASIS_MOLOPT")
    potCP2K = join(basis_folder, "GTH_POTENTIALS")

    # Settings specifics
    s = templates.geometry
    s.basis = basis
    s.potential = "GTH-PBE"
    s.cell_parameters = cell
    s.specific.cp2k.force_eval.dft.basis_set_file_name = basisCP2K
    s.specific.cp2k.force_eval.dft.potential_file_name = potCP2K
    s.specific.cp2k.force_eval.dft.wfn_restart_file_name = f'{restart}'

    # =======================
    # Compute OPT files with CP2k
    # =======================

    result = run(cp2k(s, system))

    # ======================
    # Output the results
    # ======================

    logger.info(result.energy)
54 |
55 |
def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, str, str, str, str]:
    """Parse the command line options.

    Returns the xyz path, cell parameters, restart file, basis-set name and
    basis-set folder, in that order.
    """
    args = parser.parse_args()
    return (args.xyz, args.cell, args.restart, args.basis, args.bas_fold)
62 |
63 |
if __name__ == "__main__":

    # Help/description text shown by argparse. It previously named the
    # unrelated ``plot_decho`` script; use this script's actual name.
    msg = "opt_cp2k.py -xyz -cell -restart -basis -bas_fold"

    home = os.path.expanduser('~')

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-xyz', required=True, help='path to the xyz file')
    parser.add_argument('-cell', required=True, help='Size of the cell')
    parser.add_argument(
        '-restart', type=str, default='', help='path to restart file name')
    parser.add_argument(
        '-basis', type=str, default='DZVP-MOLOPT-SR-GTH', help='Basis-set name')
    parser.add_argument(
        '-bas_fold', type=str, default=join(home, 'cp2k_basis'),
        help='Location of basis set files')

    main(*read_cmd_line(parser))
85 |
--------------------------------------------------------------------------------
/scripts/qmflows/removeHDF5folders.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | import argparse
3 | from os.path import join
4 |
5 | import h5py
6 | from nanoqm import logger
7 |
8 |
def main(path_hdf5: str, remove_overlaps: bool) -> None:
    """Remove unused arrays from the HDF5 file at *path_hdf5*.

    Deletes every coupling group, all corrected overlap matrices, the
    ``swaps`` dataset and — when *remove_overlaps* is true — the raw
    overlap matrices too. Only paths actually present in the file are
    deleted.
    """
    path_swaps = ['swaps']
    # Candidate names for up to 10000 trajectory points; the filter below
    # keeps only the ones present in the file. ``join`` with a single
    # argument was a no-op, so plain f-strings suffice.
    paths_overlaps_corrected = [
        f'overlaps_{i}/mtx_sji_t0_corrected' for i in range(10000)]
    if remove_overlaps:
        paths_overlaps = [
            f'overlaps_{i}/mtx_sji_t0' for i in range(10000)]
    else:
        paths_overlaps = []

    with h5py.File(path_hdf5, 'r+') as f5:
        paths_css = list(filter(lambda x: 'coupling' in x, f5.keys()))
        paths = paths_css + paths_overlaps_corrected + path_swaps + paths_overlaps
        for p in (p for p in paths if p in f5):
            # Lazy %-formatting: the previous ``logger.info("removing: ", p)``
            # passed ``p`` as a %-argument to a format string without a
            # placeholder, which raises a logging error and drops the path.
            logger.info("removing: %s", p)
            del f5[p]
26 |
27 |
if __name__ == "__main__":
    parser = argparse.ArgumentParser("removeHDF5folders.py")
    parser.add_argument('-hdf5', required=True,
                        help='Path to the HDF5 file')
    # Optional flag: when given, the raw overlap matrices are removed too.
    parser.add_argument(
        '-o', help='Remove the overlap matrices', action='store_true')
    args = parser.parse_args()
    main(args.hdf5, args.o)
36 |
--------------------------------------------------------------------------------
/scripts/qmflows/remove_mos_hdf5.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 |
3 | from __future__ import annotations
4 |
5 | from os.path import join
6 | import argparse
7 | import h5py
8 | import numpy as np
9 |
10 | from nanoqm import logger
11 |
12 |
def main(
    project_name: str,
    path_hdf5: str,
    indices: list[int],
    overlap_flag: bool,
    mo_flag: bool,
) -> None:
    """Remove the selected MO and/or overlap groups from the HDF5 file.

    *indices* are 1-based point numbers; they are shifted to the 0-based
    numbering used inside the HDF5 file.
    """
    # Shift indices to start from 0
    indices_ar = np.array(indices) - 1

    # Paths to the failed points (molecular orbitals and/or overlaps)
    if mo_flag:
        mos = [join(project_name, f'point_{i}') for i in indices_ar]
    else:
        mos = []
    if overlap_flag:
        overlaps = [join(project_name, f'overlaps_{i}') for i in indices_ar]
    else:
        overlaps = []

    # Concatenate both Molecular orbitals and Overlaps
    paths = mos + overlaps

    with h5py.File(path_hdf5, 'r+') as f5:
        for p in paths:
            if p in f5:
                # Lazy %-formatting: the previous ``logger.info("removing: ", p)``
                # passed ``p`` without a placeholder in the format string,
                # raising a logging error and omitting the path.
                logger.info("removing: %s", p)
                del f5[p]
42 |
43 |
def read_cmd_line(parser) -> tuple[str, str, list[int], bool, bool]:
    """Extract the command-line options from *parser*.

    Returns the project name, HDF5 path, point indices, overlap flag and
    MO flag, in that order.
    """
    opts = parser.parse_args()
    return (opts.pn, opts.hdf5, opts.i, opts.o, opts.mo)
50 |
51 |
if __name__ == "__main__":

    msg = " remove_couplings -pn -hdf5 -o False"

    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-pn', required=True,
                        help='project name')
    parser.add_argument('-hdf5', required=True,
                        help='Path to the HDF5')
    parser.add_argument(
        '-o', help='flag to remove the overlaps', action='store_true')
    # ``-mo`` removes the molecular-orbital groups (``point_*``); the help
    # text previously said "molecular overlaps", which was misleading.
    parser.add_argument('-mo', help='flag to remove the molecular orbitals',
                        action='store_true')
    parser.add_argument('-i', help='Indices of the Molecular orbitals', required=True,
                        nargs='+', type=int)
    main(*read_cmd_line(parser))
68 |
--------------------------------------------------------------------------------
/scripts/reenumerate.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
"""Command line interface to reenumerate a bunch of HDF5 files."""
3 |
4 | from __future__ import annotations
5 |
6 | import argparse
7 | from pathlib import Path
8 | from collections.abc import Iterable
9 |
10 | import h5py
11 |
12 | from nanoqm import logger
13 | from nanoqm.workflows.distribute_jobs import compute_number_of_geometries
14 |
# Module-level CLI definition, consumed by ``main`` below.
msg = "reenumerate.py -n name_project -d directory"

parser = argparse.ArgumentParser(description=msg)
parser.add_argument('-n', required=True,
                    help="Name of the project")
parser.add_argument('-d', help="work directory", default='.')
21 |
22 |
def create_new_group_names(groups: Iterable[str], index: int) -> list[str]:
    """Shift the numeric suffix of every ``<root>_<number>`` name by *index*."""
    renamed = []
    for name in groups:
        prefix, suffix = name.split('_')
        renamed.append(f"{prefix}_{index + int(suffix)}")
    return renamed
31 |
32 |
def rename_groups_in_hdf5(path_hdf5: Path, project: str, index: int) -> None:
    """Shift the numeric suffix of every group under *project* by *index*.

    The rename is done in two passes via temporary ``000_``-prefixed names
    because a direct move could collide with a not-yet-renamed sibling
    (e.g. ``point_0`` -> ``point_1`` while ``point_1`` still exists).
    """
    with h5py.File(path_hdf5, 'r+') as f5:
        groups = list(f5[project].keys())
        new_names = create_new_group_names(groups, index)
        root = f5[project]
        # Move all the groups to some intermediate names
        # to avoid collisions
        for old, new in zip(groups, new_names):
            if old != new:
                root.move(old, f"000_{new}")

        # Finally rename everything
        for old, new in zip(groups, new_names):
            if old != new:
                root.move(f"000_{new}", new)
49 |
50 |
def reenumerate(project: str, folder_and_hdf5: tuple[str, str], acc: int) -> int:
    """Shift all group indices of one chunk's HDF5 by *acc*.

    Returns *acc* advanced by the number of geometries in this chunk, so the
    next chunk continues the numbering.
    """
    folder, hdf5 = folder_and_hdf5
    logger.info(f"Renaming {hdf5} by adding {acc} to the index")
    rename_groups_in_hdf5(Path(hdf5), project, acc)

    # Count the number of geometries in the chunk
    # NOTE: ``next`` raises StopIteration when no "chunk_xyz_*" file exists
    # in *folder* — the trajectory file is assumed to be present.
    path_to_trajectory = next(Path(folder).glob("chunk_xyz_*"))
    number_of_geometries = compute_number_of_geometries(path_to_trajectory)
    return acc + number_of_geometries
61 |
62 |
def main() -> None:
    """Parse the command line arguments and run workflow."""
    args = parser.parse_args()
    project = args.n
    directory = Path(args.d)

    # Get the folders where the trajectories are stored
    folders = [x for x in directory.glob("chunk_*") if x.is_dir()]
    folders.sort()

    # Get the hdf5 files
    hdf5_files = list(directory.glob("*.hdf5"))
    hdf5_files.sort()

    # NOTE(review): folders and HDF5 files are paired by plain lexicographic
    # sort; with more than 9 chunks "chunk_10" sorts before "chunk_2", so the
    # pairing relies on zero-padded or single-digit names — verify upstream.
    acc = 0
    for folder_and_hdf5 in zip(folders, hdf5_files):
        acc = reenumerate(project, folder_and_hdf5, acc)
80 |
81 |
82 | if __name__ == "__main__":
83 | main()
84 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """Installation recipe."""
2 |
3 | import os
4 | import sys
5 | import platform
6 | from os.path import join
7 |
8 | import setuptools # noqa: F401
9 |
10 | import numpy as np
11 | from setuptools import Extension, setup
12 | from setuptools.command.build_ext import build_ext
13 | from wheel.bdist_wheel import bdist_wheel
14 |
15 |
def is_macos_arm64() -> bool:
    """Return whether nano-qmflows is being (cross) compiled for macosx_arm64."""
    if sys.platform != "darwin":
        return False
    cibw_archs = os.environ.get("CIBW_ARCHS_MACOS", "")
    return "arm64" in cibw_archs or platform.machine() == "arm64"
21 |
22 |
class BuildExt(build_ext):
    """A custom build extension for adding compiler-specific options."""

    # Extra compile flags per compiler type.
    c_opts: "dict[str, list[str]]" = {
        'msvc': ['/EHsc'],
        'unix': ['-Wall'],
    }
    # Extra link flags per compiler type.
    l_opts: "dict[str, list[str]]" = {
        'msvc': [],
        'unix': ['-Wall'],
    }

    if sys.platform == 'darwin':
        # macOS deployment target: 11 on arm64, 10.14 on x86_64.
        min_version = "11" if is_macos_arm64() else "10.14"
        darwin_opts = [
            '-stdlib=libc++',
            '-mmacosx-version-min={}'.format(min_version),
            '-fno-sized-deallocation',
        ]
        c_opts['unix'] += darwin_opts
        l_opts['unix'] += darwin_opts

    def build_extensions(self) -> None:
        """Actual compilation: inject the per-compiler flags, then delegate."""
        ct = self.compiler.compiler_type
        opts = self.c_opts.get(ct, [])
        link_opts = self.l_opts.get(ct, [])
        if ct == 'unix':
            opts += ["-std=c++11", "-fvisibility=hidden"]
        for ext in self.extensions:
            ext.extra_compile_args = opts
            ext.extra_link_args = link_opts
        super().build_extensions()
56 |
57 |
class BDistWheelABI3(bdist_wheel):
    """Ensure that wheels are built with the ``abi3`` tag."""

    def get_tag(self) -> "tuple[str, str, str]":
        """Return the ``(python, abi, platform)`` wheel tag triple."""
        python, abi, plat = super().get_tag()

        if python.startswith("cp"):
            # on CPython, our wheels are abi3 and compatible back to 3.8
            return "cp38", "abi3", plat
        else:
            return python, abi, plat
69 |
70 |
def get_paths() -> "tuple[list[str], list[str]]":
    """Get the paths specified in the ``QMFLOWS_INCLUDEDIR`` and ``QMFLOWS_LIBDIR`` \
    environment variables.

    When those variables are not both set, fall back to ``CONDA_PREFIX``.
    Multiple include and/or lib paths must be specified with the standard
    (OS-specific) path separator, *e.g.* ``":"`` for POSIX.

    Examples
    --------
    .. code-block:: bash

        export QMFLOWS_INCLUDEDIR="/libint/include:/eigen3/include"
        export QMFLOWS_LIBDIR="/hdf5/lib:/libint/lib"

    .. code-block:: python

        >>> get_paths()
        (['/libint/include', '/eigen3/include'], ['/hdf5/lib', '/libint/lib'])

    Returns
    -------
    tuple[list[str], list[str]]
        Lists of include- and library-directories used in compiling the
        ``compute_integrals`` extension module.

    """
    env = os.environ
    include_env = env.get("QMFLOWS_INCLUDEDIR")
    lib_env = env.get("QMFLOWS_LIBDIR")
    conda_prefix = env.get("CONDA_PREFIX")

    # Explicit env vars take precedence; an empty string means "no paths".
    if include_env is not None and lib_env is not None:
        includes = include_env.split(os.pathsep) if include_env else []
        libs = lib_env.split(os.pathsep) if lib_env else []
        return includes, libs

    if conda_prefix is not None:
        includes = [
            join(conda_prefix, "include"),
            join(conda_prefix, "include", "eigen3"),
        ]
        return includes, [join(conda_prefix, "lib")]

    raise RuntimeError(
        "No conda module found. A Conda environment is required "
        "or one must set both the `QMFLOWS_INCLUDEDIR` and `QMFLOWS_LIBDIR` "
        "environment variables"
    )
118 |
119 |
# Resolve the include/lib directories once at import time; this fails fast
# when neither the env vars nor a conda environment are available.
include_list, lib_list = get_paths()

# C++ extension that wraps libint2 for the integral computation.
libint_ext = Extension(
    'nanoqm.compute_integrals',
    sources=['libint/compute_integrals.cc', 'libint/py_compute_integrals.cc'],
    include_dirs=[
        "libint/include",
        *include_list,
        np.get_include(),
    ],
    libraries=['hdf5', 'int2'],
    library_dirs=lib_list,
    language='c++',
    py_limited_api=True,  # needed for the abi3 wheel tag set below
)

setup(
    cmdclass={'build_ext': BuildExt, "bdist_wheel": BDistWheelABI3},
    ext_modules=[libint_ext],

    # Equivalent to `script-files` in pyproject.toml,
    # but that keyword is deprecated so keep things here for now
    scripts=[
        'scripts/convert_legacy_hdf5.py',
        'scripts/hamiltonians/plot_mos_energies.py',
        'scripts/hamiltonians/plot_spectra.py',
        'scripts/pyxaid/plot_average_energy.py',
        'scripts/pyxaid/plot_cooling.py',
        'scripts/pyxaid/plot_spectra_pyxaid.py',
        'scripts/pyxaid/plot_states_pops.py',
        'scripts/qmflows/mergeHDF5.py',
        'scripts/qmflows/plot_dos.py',
        'scripts/qmflows/removeHDF5folders.py',
        'scripts/qmflows/remove_mos_hdf5.py',
        'scripts/qmflows/convolution.py'
    ],
)
156 |
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 | """Testing functions."""
2 |
--------------------------------------------------------------------------------
/test/test_absorption_spectrum.py:
--------------------------------------------------------------------------------
"""Test the absorption spectrum workflows."""
2 | import shutil
3 | from os.path import join
4 | from pathlib import Path
5 | from typing import Literal
6 |
7 | import h5py
8 | import pytest
9 | import numpy as np
10 | from qmflows.warnings_qmflows import Orbital_Warning
11 | from assertionlib import assertion
12 |
13 | from nanoqm.common import retrieve_hdf5_data, DictConfig
14 | from nanoqm.workflows import workflow_stddft
15 | from nanoqm.workflows.input_validation import process_input
16 | from nanoqm.workflows.workflow_stddft_spectrum import validate_active_space
17 | from .utilsTest import PATH_TEST, remove_files, requires_cp2k
18 |
19 |
@requires_cp2k
class TestComputeOscillators:
    """Integration tests for the absorption-spectrum (oscillator) workflow."""

    # Map test-id -> (project name, input yml, orbitals type).
    _PARAMS = {
        "MOLOPT": ("Cd", "input_test_absorption_spectrum.yml", ""),
        "ALL_BASIS_SETS": ("He", "input_test_absorption_spectrum_all.yml", ""),
        "unrestricted": ("oxygen", "input_test_absorption_spectrum_unrestricted.yml", "alphas"),
    }
    # Prepend each key so every parametrized case also receives its own name.
    PARAMS = {k: (k, *v) for k, v in _PARAMS.items()}
    del _PARAMS

    @pytest.mark.parametrize("approx", ["sing_orb", "stda"])
    @pytest.mark.parametrize("name,project,inp,orbital_type", PARAMS.values(), ids=PARAMS.keys())
    def test(
        self,
        tmp_path: Path,
        name: str,
        project: str,
        inp: str,
        orbital_type: str,
        approx: Literal["sing_orb", "stda"],
    ) -> None:
        """Compute the oscillator strength and check the results."""
        name += f"-{approx}"
        path_original_hdf5 = PATH_TEST / f'{project}.hdf5'
        shutil.copy(path_original_hdf5, tmp_path)
        try:
            # Run the actual test
            path = Path(tmp_path) / f"{project}_{approx}.hdf5"
            shutil.copyfile(path_original_hdf5, path)
            self.calculate_oscillators(path, tmp_path, approx, inp)
            self.check_properties(path, orbital_type, name)

            # Run again the workflow to check that the data is read from the hdf5
            self.calculate_oscillators(path, tmp_path, approx, inp)
            self.check_properties(path, orbital_type, name)
        finally:
            remove_files()

    def calculate_oscillators(
        self, path: Path, scratch: Path, approx: Literal["sing_orb", "stda"], inp: str
    ) -> None:
        """Compute a couple of couplings with the Levine algorithm using precalculated MOs."""
        input_file = PATH_TEST / inp
        config = process_input(input_file, 'absorption_spectrum')
        # Point the workflow at the per-test copies of the HDF5 and scratch dir.
        config.path_hdf5 = path.absolute().as_posix()
        config.scratch_path = scratch
        config.workdir = scratch
        config.tddft = approx
        config.path_traj_xyz = Path(config.path_traj_xyz).absolute().as_posix()
        workflow_stddft(config)

    def check_properties(self, path: Path, orbitals_type: str, name: str) -> None:
        """Check that the tensor stored in the HDF5 are correct."""
        path_dipole = join(orbitals_type, 'dipole', 'point_0')
        dipole_matrices = retrieve_hdf5_data(path, path_dipole)

        # The diagonals of each component of the matrix must be zero
        # for a single atom
        trace = dipole_matrices.trace(axis1=1, axis2=2)
        np.testing.assert_allclose(trace[1:], 0.0)

        # Compare with reference data
        with h5py.File(PATH_TEST / "test_files.hdf5", "r") as f:
            ref = f[f"test_absorption_spectrum/TestComputeOscillators/{name}/dipole"][...]
            np.testing.assert_allclose(dipole_matrices, ref, rtol=0, atol=1e-08)
85 |
86 |
def test_active_space_readjustment() -> None:
    """Check that an oversized active space is readjusted with a warning."""
    config = DictConfig(
        active_space=(6, 8),
        multiplicity=2,
        orbitals_type="betas",
    )
    # The requested (6, 8) space must be clipped to (4, 6) — presumably the
    # available occupied/virtual counts passed below — and warn about it.
    with pytest.warns(Orbital_Warning):
        out = validate_active_space(config, 4, 6)
    assertion.eq(out, (4, 6))
96 |
--------------------------------------------------------------------------------
/test/test_analysis_tools.py:
--------------------------------------------------------------------------------
1 | """Test the analysis tools."""
2 |
3 | from nanoqm.analysis.tools import parse_list_of_lists
4 |
5 |
def test_list_parser():
    """Check that a string representing a list of list is parsed correctly."""
    raw = '[[1,2,3,4]]'
    parsed = parse_list_of_lists(raw)
    assert parsed[0] == [1, 2, 3, 4]
11 |
--------------------------------------------------------------------------------
/test/test_citation_cff.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from pathlib import Path
3 |
4 | import yaml
5 | from packaging.version import Version
6 | from assertionlib import assertion
7 |
8 | import nanoqm
9 |
# Load the citation metadata once at import time; shared by the tests below.
CITATION_FILE = Path(__file__).parents[1] / "CITATION.cff"
with open(CITATION_FILE, "r", encoding="utf8") as f:
    CITATION_DCT = yaml.load(f.read(), Loader=yaml.SafeLoader)
13 |
14 |
def test_date(is_release: bool) -> None:
    """Check that CITATION.cff ``date-released`` matches today on releases."""
    date = datetime.strptime(CITATION_DCT["date-released"], "%Y-%m-%d").date()
    today = datetime.today().date()
    # Only enforced when the ``is_release`` fixture marks a release build.
    if is_release:
        assertion.eq(date, today, message="CITATION.cff date-released mismatch")
20 |
21 |
def test_version(is_release: bool) -> None:
    """Check that the CITATION.cff version agrees with the package version."""
    version = Version(CITATION_DCT["version"])
    nanoqm_version = Version(nanoqm.__version__)
    # The citation file must never advertise a dev release.
    assertion.not_(version.is_devrelease, message="CITATION.cff version dev release")
    if is_release:
        assertion.eq(version, nanoqm_version, message="CITATION.cff version mismatch")
28 |
--------------------------------------------------------------------------------
/test/test_cli.py:
--------------------------------------------------------------------------------
1 | """Test the run_runworkflow script."""
2 |
3 | import argparse
4 | from pathlib import Path
5 |
6 | import pytest
7 | import yaml
8 | from nanoqm.common import UniqueSafeLoader
9 | from nanoqm.workflows.run_workflow import main
10 | from pytest_mock import MockFixture
11 |
12 | from .utilsTest import PATH_TEST
13 |
14 |
def call_main(mocker: MockFixture, path_input: Path, scratch_path: Path):
    """Run ``main`` with argparse and the workflow machinery mocked out.

    NOTE(review): ``scratch_path`` is currently unused; kept for call-site
    symmetry with the tests that pass ``tmp_path``.
    """
    # Mock argparse
    mocker.patch("argparse.ArgumentParser.parse_args", return_value=argparse.Namespace(
        i=path_input))

    # Stub out input validation and the workflow dispatch table.
    mocker.patch("nanoqm.workflows.run_workflow.process_input", return_value={})
    mocker.patch("nanoqm.workflows.run_workflow.dict_workflows", return_value=len)
    main()
24 |
25 |
def test_run_workflow(mocker: MockFixture, tmp_path: Path):
    """Test that the CLI main command is called correctly."""
    # A valid input file; ``call_main`` mocks the actual workflow execution.
    path_input = PATH_TEST / "input_fast_test_derivative_couplings.yml"
    call_main(mocker, path_input, tmp_path)
30 |
31 |
def test_run_workflow_no_workflow(mocker: MockFixture, tmp_path: Path):
    """Check that an error is raised if no workflow is provided."""
    # Load a valid input and drop the ``workflow`` keyword. The local was
    # previously named ``input``, shadowing the builtin.
    with open(
        PATH_TEST / "input_fast_test_derivative_couplings.yml", 'r', encoding="utf8"
    ) as handler:
        config = yaml.load(handler, UniqueSafeLoader)
    config.pop('workflow')
    path_input = tmp_path / "wrong_input.yml"
    with open(path_input, 'w', encoding="utf8") as handler:
        yaml.dump(config, handler)

    with pytest.raises(RuntimeError) as info:
        call_main(mocker, path_input, tmp_path)

    error = info.value.args[0]
    assert "is required" in error
49 |
--------------------------------------------------------------------------------
/test/test_cpk2_error_call.py:
--------------------------------------------------------------------------------
1 | """Check that the cp2k error is print to the user."""
2 |
3 | from pathlib import Path
4 |
5 | import pytest
6 | from assertionlib import assertion
7 | from nanoqm.schedule.scheduleCP2K import try_to_read_wf
8 |
9 |
def test_cp2k_call_error(tmp_path: Path):
    """Check that the content of ``cp2k.err`` surfaces in the raised error."""
    path_err = tmp_path / "cp2k.err"
    with open(path_err, 'w', encoding="utf8") as handler:
        handler.write("Some CP2K error")

    # Reading the (missing) wavefunction must fail and report the err file.
    with pytest.raises(RuntimeError) as info:
        try_to_read_wf(tmp_path)

    error = info.value.args[0]
    assertion.contains(error, "CP2K error")
21 |
--------------------------------------------------------------------------------
/test/test_distribute.py:
--------------------------------------------------------------------------------
1 | """Test the distribution script."""
2 |
3 | import fnmatch
4 | import shutil
5 | import os
6 | import pytest
7 | from pathlib import Path
8 | from typing import Literal
9 |
10 | from nanoqm.workflows.distribute_jobs import distribute_jobs
11 | from nanoqm.workflows.input_validation import process_input
12 |
13 | _WorkflowKind = Literal["derivative_couplings", "absorption_spectrum"]
14 |
15 | JOBS = {
16 | "derivative_couplings": "test/test_files/input_test_distribute_derivative_couplings.yml",
17 | "absorption_spectrum": "test/test_files/input_test_distribute_absorption_spectrum.yml",
18 | }
19 |
20 |
@pytest.mark.parametrize("workflow,file", JOBS.items(), ids=JOBS)
def test_distribute(workflow: _WorkflowKind, file: str) -> None:
    """Execute the distribute script and check that it finishes successfully."""
    try:
        distribute_jobs(file)
        check_scripts(workflow)
    finally:
        # Always clean up the generated chunk* folders, even on failure.
        remove_chunk_folder()
29 |
30 |
def check_scripts(workflow: _WorkflowKind) -> None:
    """Check that the distribution scripts were created correctly."""
    paths = fnmatch.filter(os.listdir('.'), "chunk*")
    cwd_old = os.getcwd()

    # Check that the files are created correctly
    files = ["launch.sh", "chunk_xyz*", "input.yml"]
    for _p in paths:
        p = Path(_p)
        for f in files:
            try:
                next(p.glob(f))
            except StopIteration:
                msg = f"There is no such file: {f!r}"
                raise RuntimeError(msg) from None
            # Validate input.yml from inside the chunk folder so the relative
            # file name resolves; always restore the previous cwd.
            if f == "input.yml":
                os.chdir(p)
                try:
                    process_input(f, workflow)
                finally:
                    os.chdir(cwd_old)
52 |
53 |
def remove_chunk_folder() -> None:
    """Delete every ``chunk*`` entry left in the current working directory."""
    for entry in os.listdir('.'):
        if fnmatch.fnmatch(entry, "chunk*"):
            shutil.rmtree(entry, ignore_errors=True)
58 |
--------------------------------------------------------------------------------
/test/test_files/C.xyz:
--------------------------------------------------------------------------------
1 | 1
2 |
3 | C 0.000 0.000 0.000
4 |
--------------------------------------------------------------------------------
/test/test_files/Cd.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/Cd.hdf5
--------------------------------------------------------------------------------
/test/test_files/Cd.xyz:
--------------------------------------------------------------------------------
1 | 1
2 |
3 | Cd 0.000 0.000 0.000
4 |
--------------------------------------------------------------------------------
/test/test_files/Cd33Se33.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/Cd33Se33.hdf5
--------------------------------------------------------------------------------
/test/test_files/Cd33Se33.xyz:
--------------------------------------------------------------------------------
1 | 66
2 | i = 4, time = 4.000, E = -1832.3234095166
3 | cd 18.5932469428 17.4199615541 15.8194399582
4 | cd 15.3516593705 18.3522787576 18.2016948627
5 | cd 15.4980246750 13.8202322036 20.2171508695
6 | cd 15.7754735656 10.7437314232 19.3429723304
7 | cd 12.0630575183 11.6626611409 19.2183568154
8 | cd 9.4484686261 10.4514390013 17.4990621565
9 | cd 9.7390182423 14.8524216564 17.9871195727
10 | cd 8.9724990145 16.0289905012 13.8417866810
11 | cd 10.0365818788 17.1521308056 10.3754733218
12 | cd 12.2981139448 14.4560947158 8.0524522864
13 | cd 15.4753524876 12.6512868994 8.2690338633
14 | cd 16.1734355907 9.8694725103 11.2387990197
15 | cd 13.5516798322 12.9811205227 11.5633649726
16 | cd 12.3364716908 16.5917258441 13.3438745267
17 | cd 14.3840750295 19.3966413356 11.1814161382
18 | cd 17.2328494173 18.6242611103 12.0391987034
19 | cd 15.1120975592 19.9906589805 15.3360559451
20 | cd 11.6981640496 18.2457318209 16.6194834515
21 | cd 13.4784945179 15.4324339967 17.1753931848
22 | cd 10.4286756211 12.8101748843 14.8289865054
23 | cd 8.7447349403 10.3628902544 12.4383675018
24 | cd 10.9636692121 10.4517608420 9.8916788881
25 | cd 15.5629398772 8.3655823011 14.3539734404
26 | cd 18.4635242687 11.4152356260 13.9346137377
27 | cd 18.1056787701 12.9564526470 10.8698876775
28 | cd 15.2201828018 16.3078824092 9.2400370660
29 | cd 19.8528291854 14.5124640560 14.0486055413
30 | cd 16.1933843619 15.2758621724 13.8067958296
31 | cd 18.1049711810 13.8909772067 17.5775486722
32 | cd 14.5826682185 11.4879238698 15.5499311622
33 | cd 8.9999488283 14.2276162899 10.4268142786
34 | cd 13.0149199710 8.3330357111 17.0980631030
35 | cd 12.0547054274 8.9554316339 13.1058823483
36 | se 17.3060006136 19.2650336002 16.8316670011
37 | se 15.9183523074 15.6268597656 18.4571687528
38 | se 17.8140516985 12.5131820156 19.8023555030
39 | se 14.0182589638 11.6968697872 21.1056447753
40 | se 11.1532196489 8.9477788595 18.7573236041
41 | se 9.5993724020 12.4763463707 19.4622920031
42 | se 8.3780566637 14.4605960631 15.7370640847
43 | se 7.6831176451 16.2271397911 11.4697263828
44 | se 10.0099074026 15.5746249034 8.0954619954
45 | se 14.7282756048 14.8770819010 6.8969220283
46 | se 17.3287287544 11.0643073260 9.1826691871
47 | se 14.2332996051 10.6558849350 12.8628689112
48 | se 11.2613607807 14.1436326770 12.5531658351
49 | se 12.7767188458 17.5087346061 10.6451312759
50 | se 16.5119278485 18.4588551451 9.6506507683
51 | se 15.8477715415 20.8057433506 12.9475827582
52 | se 13.4653518528 19.9977296648 17.4001135390
53 | se 11.4249995752 16.6985370694 18.6760741756
54 | se 12.6978140405 12.9700321015 16.5138025880
55 | se 9.8600968620 10.3220935382 14.8276702740
56 | se 10.5535222393 8.2239766318 11.1831237877
57 | se 12.8599689910 12.0229993998 9.0092408750
58 | se 15.4217357209 9.3736622141 17.1309475563
59 | se 17.9452424951 8.9248027131 13.2898718794
60 | se 20.3115531390 12.5997932731 12.3674645314
61 | se 15.8459412994 14.2849114110 11.1855028004
62 | se 18.4973647847 16.8826826799 13.3044279144
63 | se 14.4546466792 16.8860525380 14.9672955741
64 | se 20.1984418952 15.3449228651 16.5106255804
65 | se 16.9242562089 13.1013587342 15.2119817232
66 | se 10.3652455852 18.3092783681 14.2821915498
67 | se 8.5480040472 11.6493116080 10.0839748022
68 | se 13.2380822430 7.1974858885 14.7002550021
69 |
--------------------------------------------------------------------------------
/test/test_files/F2.xyz:
--------------------------------------------------------------------------------
1 | 2
2 |
3 | F 0.000 0.000 0.000
4 | F 1.412 0.000 0.000
5 |
--------------------------------------------------------------------------------
/test/test_files/HF.xyz:
--------------------------------------------------------------------------------
1 | 2
2 |
3 | F 0.0000 0.0000 0.0000
4 | H 0.9168 0.0000 0.0000
5 |
--------------------------------------------------------------------------------
/test/test_files/He.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/He.hdf5
--------------------------------------------------------------------------------
/test/test_files/He.xyz:
--------------------------------------------------------------------------------
1 | 1
2 |
3 | He 0.000 0.000 0.000
4 |
--------------------------------------------------------------------------------
/test/test_files/O2_coupling.xyz:
--------------------------------------------------------------------------------
1 | 2
2 |
3 | O 0.0 0.0 0.0
4 | O 0.0 0.0 1.208
5 | 2
6 |
7 | O 0.0 0.0 0.0
8 | O 0.0 0.0 1.2
9 | 2
10 |
11 | O 0.0 0.0 0.0
12 | O 0.0 0.0 1.19
13 | 2
14 |
15 | O 0.0 0.0 0.0
16 | O 0.0 0.0 1.17
17 | 2
18 |
19 | O 0.0 0.0 0.0
20 | O 0.0 0.0 1.18
--------------------------------------------------------------------------------
/test/test_files/ethylene.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/ethylene.hdf5
--------------------------------------------------------------------------------
/test/test_files/ethylene.xyz:
--------------------------------------------------------------------------------
1 | 6
2 | molecule
3 | C -2.580 0.068 0.000
4 | H -2.047 -0.859 0.000
5 | H -3.650 0.068 0.000
6 | C -1.905 1.243 0.000
7 | H -2.438 2.171 0.000
8 | H -0.835 1.243 0.000
9 |
--------------------------------------------------------------------------------
/test/test_files/ethylene_couplings.xyz:
--------------------------------------------------------------------------------
1 | 6
2 |
3 | C -2.57800061 0.07220118 0.00153830
4 | H -2.08123635 -0.85936428 -0.02119101
5 | H -3.64453696 0.04818665 0.00073504
6 | C -1.90584678 1.23858459 0.00270511
7 | H -2.43387817 2.20817201 0.00126375
8 | H -0.81772716 1.24095467 -0.03136666
9 | 6
10 |
11 | C -2.57406955 0.07570384 -0.00119895
12 | H -2.06578987 -0.89490749 -0.01871043
13 | H -3.68764000 0.04763059 -0.00808794
14 | C -1.90729353 1.24117495 0.00441364
15 | H -2.42959414 2.18667389 0.02590060
16 | H -0.82395409 1.22595565 -0.03740409
17 | 6
18 |
19 | C -2.56851640 0.08010703 -0.00287812
20 | H -2.05056967 -0.88668614 -0.03177197
21 | H -3.69592060 0.04382792 -0.01670436
22 | C -1.91159611 1.23956516 0.00794838
23 | H -2.43542943 2.16436901 0.02990065
24 | H -0.83995860 1.21055959 -0.04183487
25 | 6
26 |
27 | C -2.56965596 0.07851234 0.00031301
28 | H -2.06211801 -0.91443092 -0.02303383
29 | H -3.69587054 0.05733088 -0.01869065
30 | C -1.91091871 1.24170251 0.00609275
31 | H -2.45747866 2.17394312 0.02053482
32 | H -0.80090471 1.20876143 -0.05513298
33 | 6
34 |
35 | C -2.56534454 0.08139029 -0.00268843
36 | H -2.06135210 -0.92021317 -0.01903618
37 | H -3.69014211 0.05673962 -0.01332161
38 | C -1.91214471 1.23738138 0.01072452
39 | H -2.48751269 2.19942340 -0.00454747
40 | H -0.81412670 1.20684970 -0.05884216
41 |
--------------------------------------------------------------------------------
/test/test_files/file_cell_parameters.txt:
--------------------------------------------------------------------------------
1 | # Step Time [fs] Ax [Angstrom] Ay [Angstrom] Az [Angstrom] Bx [Angstrom] By [Angstrom] Bz [Angstrom] Cx [Angstrom] Cy [Angstrom] Cz [Angstrom] Volume [Angstrom^3]
2 | 0 0.000 5.0000000 0.0000000000 0.0000000000 0.000000 5.000000000 0.0000000000 0.0000000000 0.0000000000 5.0000000 125.0
3 | 1 0.000 5.0000000 0.0000000000 0.0000000000 0.000000 5.000000000 0.0000000000 0.0000000000 0.0000000000 5.0000000 125.0
4 |
--------------------------------------------------------------------------------
/test/test_files/file_distribute_cell_parameters.txt:
--------------------------------------------------------------------------------
1 | # Step Time [fs] Ax [Angstrom] Ay [Angstrom] Az [Angstrom] Bx [Angstrom] By [Angstrom] Bz [Angstrom] Cx [Angstrom] Cy [Angstrom] Cz [Angstrom] Volume [Angstrom^3]
2 | 0 0.000 28.0000000 0.0000000000 0.0000000000 0.000000 28.000000000 0.0000000000 0.0000000000 0.0000000000 28.0000000 784.0
3 | 1 1.000 28.0000000 0.0000000000 0.0000000000 0.000000 28.000000000 0.0000000000 0.0000000000 0.0000000000 28.0000000 784.0
4 | 2 2.000 28.0000000 0.0000000000 0.0000000000 0.000000 28.000000000 0.0000000000 0.0000000000 0.0000000000 28.0000000 784.0
5 | 3 3.000 28.0000000 0.0000000000 0.0000000000 0.000000 28.000000000 0.0000000000 0.0000000000 0.0000000000 28.0000000 784.0
6 | 4 4.000 28.0000000 0.0000000000 0.0000000000 0.000000 28.000000000 0.0000000000 0.0000000000 0.0000000000 28.0000000 784.0
7 |
--------------------------------------------------------------------------------
/test/test_files/guanine_distribution.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | distribute_absorption_spectrum
3 | project_name:
4 | guanine_distribution
5 | xc_dft:
6 | pbe
7 | tddft:
8 | stda
9 | active_space: [20, 20]
10 | stride:
11 | 10
12 | path_hdf5:
13 | "/test/test_files/guanine.hdf5"
14 | path_traj_xyz:
15 | "test/test_files/trajectory.xyz"
16 | scratch_path:
17 | "test/test_files/"
18 | workdir: "."
19 | blocks: 5
20 |
21 | job_scheduler:
22 | scheduler: SLURM
23 | nodes: 1
24 | tasks: 24
25 | wall_time: "1:00:00"
26 | queue_name: "normal"
27 | load_modules: "source activate qmflows\nmodule load eb\nmodule load CP2K/5.1-foss-2017b"
28 |
29 |
30 | cp2k_general_settings:
31 | basis: "DZVP-MOLOPT-SR-GTH"
32 | potential: "GTH-PBE"
33 | path_basis: "/home/user/cp2k_basis"
34 | periodic: "xyz"
35 | charge: 0
36 | cell_parameters: 25.0
37 | cell_angles: [90.0,90.0,90.0]
38 | executable: cp2k.ssmp
39 |
40 | cp2k_settings_main:
41 | specific:
42 | template: pbe_main
43 |
44 | cp2k_settings_guess:
45 | specific:
46 | template: pbe_guess
47 |
--------------------------------------------------------------------------------
/test/test_files/input_couplings_alphas.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | derivative_couplings
3 | project_name: oxygen
4 | active_space: [2, 2]
5 | path_hdf5: "test/test_files/oxygen.hdf5"
6 | path_traj_xyz: "test/test_files/O2_coupling.xyz"
7 | scratch_path: "/tmp/namd"
8 | orbitals_type: alphas
9 |
10 | cp2k_general_settings:
11 | basis: "DZVP-MOLOPT-SR-GTH"
12 | potential: "GTH-PBE"
13 | multiplicity: 3
14 | cell_parameters: 4.0
15 | periodic: none
16 | executable: cp2k.ssmp
17 |
18 | cp2k_settings_main:
19 | specific:
20 | template: pbe_main
21 |
22 | cp2k_settings_guess:
23 | specific:
24 | template:
25 | pbe_guess
26 |
--------------------------------------------------------------------------------
/test/test_files/input_couplings_both.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | derivative_couplings
3 | project_name: oxygen
4 | active_space: [2, 2]
5 | path_hdf5: "test/test_files/oxygen.hdf5"
6 | path_traj_xyz: "test/test_files/O2_coupling.xyz"
7 | scratch_path: "/tmp/namd"
8 | orbitals_type: both
9 |
10 | cp2k_general_settings:
11 | basis: "DZVP-MOLOPT-SR-GTH"
12 | potential: "GTH-PBE"
13 | multiplicity: 3
14 | cell_parameters: 4.0
15 | periodic: none
16 | executable: cp2k.ssmp
17 |
18 | cp2k_settings_main:
19 | specific:
20 | template: pbe_main
21 |
22 | cp2k_settings_guess:
23 | specific:
24 | template:
25 | pbe_guess
26 |
--------------------------------------------------------------------------------
/test/test_files/input_fast_test_derivative_couplings.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | derivative_couplings
3 |
4 | project_name:
5 | ethylene
6 |
7 | # Step of the MD (Femtoseconds)
8 | dt: 1
9 |
10 | # Algorithm to compute the derivative couplings
11 | algorithm:
12 | "levine"
13 |
14 | # Track crossing between orbitals
15 | tracking:
16 | True
17 |
18 | # Occupied and Virtual orbitals to compute the couplings
19 | active_space:
20 | [4, 4]
21 |
22 | # Path to the file to store all the intermediate tensors
23 | path_hdf5:
24 | "test/test_files/ethylene.hdf5"
25 |
26 | # MD trajectory
27 | path_traj_xyz:
28 | "test/test_files/ethylene_couplings.xyz"
29 |
30 | # Path where all the temporary files are stored
31 | scratch_path:
32 | "/tmp/namd/fast_derivative_couplings"
33 |
34 | # CP2K Configuration
35 | cp2k_general_settings:
36 | potential: "GTH-PBE"
37 | basis: "DZVP-MOLOPT-SR-GTH"
38 | cell_parameters: 10.0
39 | cell_angles: null
40 | periodic: none
41 | executable: cp2k.ssmp
42 |
43 | # Configuration to compute the molecular orbitals
44 | cp2k_settings_main:
45 | specific:
46 | template: pbe_main
47 |
48 | # Configuration to compute an initial guess of the wave function
49 | cp2k_settings_guess:
50 | specific:
51 | template:
52 | pbe_guess
53 |
--------------------------------------------------------------------------------
/test/test_files/input_test_IPR.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | ipr_calculation
3 | project_name: F2
4 |
5 | active_space: [5, 1]
6 | path_hdf5: "test/test_files/F2.hdf5"
7 | path_traj_xyz: "test/test_files/F2.xyz"
8 | scratch_path: "/tmp/IPR"
9 |
10 |
11 | cp2k_general_settings:
12 | basis: "SZV-MOLOPT-SR-GTH"
13 | potential: "GTH-PBE"
14 | cell_parameters: 6.0
15 | periodic: none
16 | executable: cp2k.ssmp
17 |
18 | cp2k_settings_main:
19 | specific:
20 | template: pbe_main
21 |
22 | cp2k_settings_guess:
23 | specific:
24 | template:
25 | pbe_guess
26 |
--------------------------------------------------------------------------------
/test/test_files/input_test_absorption_spectrum.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | absorption_spectrum
3 | project_name:
4 | Cd
5 | xc_dft:
6 | pbe
7 | tddft:
8 | stda
9 | active_space: [6, 15]
10 |
11 | path_hdf5:
12 | "test/test_files/Cd.hdf5"
13 | path_traj_xyz:
14 | "test/test_files/Cd.xyz"
15 | scratch_path:
16 | "/tmp/namd/absorption_spectrum"
17 |
18 | cp2k_general_settings:
19 | basis: "DZVP-MOLOPT-SR-GTH"
20 | potential: "GTH-PBE"
21 | cell_parameters: 5.0
22 | cell_angles: [90.0, 90.0, 90.0]
23 | periodic: none
24 | executable: cp2k.ssmp
25 |
26 | cp2k_settings_main:
27 | specific:
28 | template: pbe_main
29 |
30 | cp2k_settings_guess:
31 | specific:
32 | template: pbe_guess
33 |
--------------------------------------------------------------------------------
/test/test_files/input_test_absorption_spectrum_all.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | absorption_spectrum
3 | project_name:
4 | He
5 | xc_dft:
6 | pbe
7 | tddft:
8 | stda
9 | active_space: [1, 1]
10 |
11 | path_hdf5:
12 | "test/test_files/He.hdf5"
13 | path_traj_xyz:
14 | "test/test_files/He.xyz"
15 | scratch_path:
16 | "/tmp/namd/absorption_spectrum_admm"
17 |
18 | cp2k_general_settings:
19 | basis: DZVP-ALL
20 | potential: "GTH-PBE"
21 | basis_file_name: ALL_BASIS_SETS
22 | path_basis: test/test_files
23 | cell_parameters: 5.0
24 | cell_angles: [90.0, 90.0, 90.0]
25 | periodic: none
26 | executable: cp2k.ssmp
27 |
28 | cp2k_settings_main:
29 | specific:
30 | template: pbe_main
31 |
32 | cp2k_settings_guess:
33 | specific:
34 | template: pbe_guess
35 |
--------------------------------------------------------------------------------
/test/test_files/input_test_absorption_spectrum_unrestricted.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | absorption_spectrum
3 | project_name: oxygen
4 | xc_dft: pbe
5 | tddft: stda
6 | active_space: [2, 2]
7 | orbitals_type: alphas
8 |
9 | path_hdf5:
10 | "test/test_files/oxygen.hdf5"
11 | path_traj_xyz:
12 | "test/test_files/O2_coupling.xyz"
13 | scratch_path:
14 | "/tmp/namd/absorption_spectrum_unrestricted"
15 |
16 | cp2k_general_settings:
17 | basis: "DZVP-MOLOPT-SR-GTH"
18 | potential: "GTH-PBE"
19 | multiplicity: 3
20 | cell_parameters: 4.0
21 | periodic: none
22 | executable: cp2k.ssmp
23 |
24 | cp2k_settings_main:
25 | specific:
26 | template: pbe_main
27 |
28 | cp2k_settings_guess:
29 | specific:
30 | template:
31 | pbe_guess
32 |
--------------------------------------------------------------------------------
/test/test_files/input_test_b3lyp.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | single_points
3 | project_name: b3lyp_C
4 | path_hdf5: "b3lyp_C.hdf5"
5 | path_traj_xyz: "test/test_files/C.xyz"
6 | scratch_path: "/tmp/namd"
7 |
8 |
9 | cp2k_general_settings:
10 | basis: "DZVP-MOLOPT-SR-GTH"
11 | potential: "GTH-PBE"
12 | cell_parameters: 5.0
13 | periodic: none
14 | executable: cp2k.ssmp
15 |
16 | cp2k_settings_main:
17 | specific:
18 | template: b3lyp_main
19 |
20 | cp2k_settings_guess:
21 | specific:
22 | template: b3lyp_guess
23 |
--------------------------------------------------------------------------------
/test/test_files/input_test_coop.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | coop_calculation
3 | project_name: Cd33Se33
4 |
5 | active_space: [4, 1]
6 | path_hdf5: "test/test_files/HF.hdf5"
7 | path_traj_xyz: "test/test_files/HF.xyz"
8 | scratch_path: "/tmp/COOP"
9 |
10 | coop_elements: ["F", "H"]
11 |
12 | cp2k_general_settings:
13 | basis: "SZV-MOLOPT-SR-GTH"
14 | potential: "GTH-PBE"
15 | cell_parameters: 6.0
16 | periodic: none
17 | executable: cp2k.ssmp
18 |
19 | cp2k_settings_main:
20 | specific:
21 | template: pbe_main
22 |
23 | cp2k_settings_guess:
24 | specific:
25 | template:
26 | pbe_guess
27 |
--------------------------------------------------------------------------------
/test/test_files/input_test_distribute_absorption_spectrum.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | distribute_absorption_spectrum
3 |
4 | project_name: Cd33Se33
5 | xc_dft: pbe
6 | tddft: stda
7 | active_space:
8 | [10, 10]
9 | stride:
10 | 50
11 | path_hdf5:
12 | "test/test_files/Cd33Se33.hdf5"
13 | path_traj_xyz:
14 | "test/test_files/Cd33Se33_fivePoints.xyz"
15 | scratch_path:
16 | "/tmp/namd"
17 | workdir: "."
18 | blocks: 5
19 | calculate_guesses: "all"
20 |
21 | job_scheduler:
22 | scheduler: SLURM
23 | nodes: 1
24 | tasks: 24
25 | wall_time: "24:00:00"
26 | load_modules: "source activate qmflows\nmodule load cp2k/3.0"
27 |
28 |
29 | cp2k_general_settings:
30 | basis: "DZVP-MOLOPT-SR-GTH"
31 | potential: "GTH-PBE"
32 | cell_parameters: 28.0
33 | periodic: xyz
34 | executable: cp2k.ssmp
35 |
36 | cp2k_settings_main:
37 | specific:
38 | template: pbe_main
39 |
40 | cp2k_settings_guess:
41 | specific:
42 | template:
43 | pbe_guess
44 |
--------------------------------------------------------------------------------
/test/test_files/input_test_distribute_derivative_couplings.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | distribute_derivative_couplings
3 |
4 | project_name: Cd33Se33
5 | dt: 1
6 | active_space: [10, 10]
7 | algorithm: "levine"
8 | tracking: False
9 | path_hdf5: "test/test_files/Cd33Se33.hdf5"
10 | path_traj_xyz: "test/test_files/Cd33Se33_fivePoints.xyz"
11 | scratch_path: "/tmp/namd"
12 | workdir: "."
13 | blocks: 2
14 |
15 | job_scheduler:
16 | free_format: "
17 | #! /bin/bash\n
18 | #SBATCH --job-name=Cd33Se33\n
19 | #SBATCH -N 1\n
20 | #SBATCH -t 00:15:00\n
21 | #SBATCH -p short\n
22 |
23 | source activate qmflows\n
24 | module load cp2k/3.0\n\n"
25 |
26 | cp2k_general_settings:
27 | basis: "DZVP-MOLOPT-SR-GTH"
28 | potential: "GTH-PBE"
29 | cell_parameters: 28.0
30 | file_cell_parameters: "test/test_files/file_distribute_cell_parameters.txt"
31 | periodic: none
32 | executable: cp2k.ssmp
33 |
34 | cp2k_settings_main:
35 | specific:
36 | template: pbe_main
37 |
38 | cp2k_settings_guess:
39 | specific:
40 | template:
41 | pbe_guess
42 |
--------------------------------------------------------------------------------
/test/test_files/input_test_pbe.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | derivative_couplings
3 | project_name: pbe_C
4 | dt: 1
5 | active_space: [2, 2]
6 | algorithm: "levine"
7 | tracking: False
8 | path_hdf5: "test/test_files/C.hdf5"
9 | path_traj_xyz: "test/test_files/C.xyz"
10 | scratch_path: "/tmp/namd"
11 |
12 |
13 | cp2k_general_settings:
14 | basis: "DZVP-MOLOPT-SR-GTH"
15 | potential: "GTH-PBE"
16 | cell_parameters: 5.0
17 | periodic: none
18 | file_cell_parameters: "test/test_files/file_cell_parameters.txt"
19 | executable: cp2k.ssmp
20 |
21 | cp2k_settings_main:
22 | specific:
23 | template: pbe_main
24 |
25 | cp2k_settings_guess:
26 | specific:
27 | template:
28 | pbe_guess
29 |
--------------------------------------------------------------------------------
/test/test_files/input_test_pbe0.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | derivative_couplings
3 | project_name: pbe0_C
4 | dt: 1
5 | active_space: [2, 2]
6 | algorithm: "levine"
7 | tracking: False
8 | path_hdf5: "test/test_files/C.hdf5"
9 | path_traj_xyz: "test/test_files/ethylene.xyz"
10 | scratch_path: "/tmp/namd"
11 |
12 |
13 | cp2k_general_settings:
14 | basis: "DZVP-MOLOPT-SR-GTH"
15 | potential: "GTH-PBE"
16 | cell_parameters: 4.0
17 | cell_angles: [90.0, 90.0, 90.0]
18 | periodic: none
19 | aux_fit: good
20 | executable: cp2k.ssmp
21 |
22 | cp2k_settings_main:
23 | specific:
24 | template: pbe0_main
25 |
26 | cp2k_settings_guess:
27 | specific:
28 | template:
29 | pbe0_guess
30 |
--------------------------------------------------------------------------------
/test/test_files/input_test_single_points.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | single_points
3 | project_name: ethylene
4 | compute_orbitals: True
5 | active_space: [2, 2]
6 | path_hdf5: "test/test_files/ethylene.hdf5"
7 | path_traj_xyz: "test/test_files/ethylene.xyz"
8 | scratch_path: "/tmp/namd"
9 | remove_log_file: true
10 |
11 | cp2k_general_settings:
12 | basis: "DZVP-MOLOPT-SR-GTH"
13 | potential: "GTH-PBE"
14 | cell_parameters: 5.0
15 | periodic: none
16 | file_cell_parameters: "test/test_files/file_cell_parameters.txt"
17 | executable: cp2k.ssmp
18 |
19 | cp2k_settings_main:
20 | specific:
21 | template: pbe_main
22 |
23 | cp2k_settings_guess:
24 | specific:
25 | template:
26 | pbe_guess
27 |
--------------------------------------------------------------------------------
/test/test_files/input_test_single_points_hybrid_functional.yml:
--------------------------------------------------------------------------------
1 | workflow:
2 | single_points
3 | project_name: ethylene
4 | compute_orbitals: True
5 | active_space: [2, 2]
6 | path_hdf5: "C.hdf5"
7 | path_traj_xyz: "test/test_files/C.xyz"
8 | scratch_path: "/tmp/namd"
9 | remove_log_file: false
10 |
11 | cp2k_general_settings:
12 | basis: "DZVP-MOLOPT-SR-GTH"
13 | potential: "GTH-PBE"
14 | cell_parameters: 2.0
15 | periodic: none
16 | executable: cp2k.ssmp
17 |
18 | cp2k_settings_main:
19 | specific:
20 | template: pbe0_main
21 |
22 | cp2k_settings_guess:
23 | specific:
24 | template:
25 | pbe0_guess
26 |
--------------------------------------------------------------------------------
/test/test_files/legacy.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/legacy.hdf5
--------------------------------------------------------------------------------
/test/test_files/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | ignore_missing_imports = True
--------------------------------------------------------------------------------
/test/test_files/oxygen.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/oxygen.hdf5
--------------------------------------------------------------------------------
/test/test_files/test_files.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/test/test_files/test_files.hdf5
--------------------------------------------------------------------------------
/test/test_initialization.py:
--------------------------------------------------------------------------------
1 | """Test that the path are created propery."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | from pathlib import Path
7 | from typing import TYPE_CHECKING
8 |
9 | import h5py
10 | import pytest
11 | import yaml
12 | from nanoutils import RecursiveKeysView
13 | from assertionlib import assertion
14 |
15 | from nanoqm.common import UniqueSafeLoader, DictConfig
16 | from nanoqm.workflows.initialization import initialize, save_basis_to_hdf5
17 | from nanoqm.workflows.input_validation import process_input
18 | from .utilsTest import PATH_TEST
19 |
20 | if TYPE_CHECKING:
21 | import _pytest
22 |
23 |
def test_run_workflow(tmp_path: Path) -> None:
    """Check that all the paths are initialized.

    Exercises both branches of ``create_config``: with ``scratch_path``
    set to ``None`` and with an explicit scratch directory.
    """
    create_config(tmp_path, True)
    create_config(tmp_path, False)
28 |
29 |
def create_config(tmp_path: Path, scrath_is_None: bool) -> None:
    """Write a modified input file into *tmp_path* and run it through ``initialize``.

    When *scrath_is_None* is true both the scratch path and the HDF5 path are
    left for the workflow to pick; otherwise both point into *tmp_path*.
    """
    template = PATH_TEST / "input_fast_test_derivative_couplings.yml"
    with open(template, 'r', encoding="utf8") as stream:
        settings = yaml.load(stream, UniqueSafeLoader)

    # Adjust the scratch (and derived HDF5) location
    if scrath_is_None:
        settings["scratch_path"] = None
        del settings["path_hdf5"]
    else:
        scratch = tmp_path / "level0" / "level1"
        settings["scratch_path"] = scratch.as_posix()
        settings["path_hdf5"] = (scratch / "test_init.hdf5").as_posix()

    input_path = tmp_path / "test_init.yml"
    with open(input_path, 'w', encoding="utf8") as stream:
        yaml.dump(settings, stream)

    config = process_input(input_path, 'derivative_couplings')
    initialize(config)
    assert Path(config.path_hdf5).exists()
51 |
52 |
class TestSaveBasisToHDF5:
    """Test ``save_basis_to_hdf5``."""

    # Parameter-set name -> list of basis file names (``None`` uses the default)
    PARAM = {
        "None": None,
        "MOLOPT": ["BASIS_MOLOPT"],
        "MOLOPT_UZH": ["BASIS_MOLOPT", "BASIS_MOLOPT_UZH"],
        "ADMM": ["BASIS_MOLOPT", "BASIS_ADMM", "BASIS_ADMM_MOLOPT"],
    }

    @pytest.fixture(scope="function", params=PARAM.items(), ids=PARAM, name="input")
    def get_input(
        self,
        request: _pytest.fixtures.SubRequest,
        tmp_path: Path,
    ) -> tuple[DictConfig, set[str]]:
        """Yield a workflow config plus the set of keys expected in the HDF5 file."""
        param_name, basis_files = request.param

        # Construct the settings
        hdf5_path = tmp_path / f"{param_name}.hdf5"
        settings = DictConfig(
            path_hdf5=hdf5_path,
            cp2k_general_settings=DictConfig(
                path_basis=PATH_TEST,
                basis_file_name=basis_files,
            ),
        )

        # Ensure that a fresh .hdf5 file is created
        if os.path.isfile(hdf5_path):
            os.remove(hdf5_path)
        h5py.File(hdf5_path, "w-").close()

        # Collect all keys that are supposed to end up in the .hdf5 file
        with open(PATH_TEST / "test_initialization.yaml", "r", encoding="utf8") as stream:
            expected_keys = set(yaml.load(stream, Loader=yaml.SafeLoader)[param_name])
        return settings, expected_keys

    def test_pass(self, input: tuple[DictConfig, set[str]]) -> None:
        """Store the basis and compare the resulting HDF5 keys with the reference."""
        settings, expected_keys = input

        save_basis_to_hdf5(settings)

        with h5py.File(settings.path_hdf5, "r") as f:
            assertion.eq(RecursiveKeysView(f), expected_keys)
99 |
--------------------------------------------------------------------------------
/test/test_multipole.py:
--------------------------------------------------------------------------------
1 | """Test quadrupole calculation."""
2 |
3 | import shutil
4 | from pathlib import Path
5 |
6 | import numpy as np
7 | from assertionlib import assertion
8 | from qmflows.parsers import readXYZ
9 |
10 | from nanoqm.integrals.multipole_matrices import compute_matrix_multipole
11 | from nanoqm.workflows.input_validation import process_input
12 |
13 | from .utilsTest import PATH_TEST
14 |
15 |
def test_quadropole(tmp_path):
    """Test the calculation of a quadrupole."""
    config = process_input(PATH_TEST / "input_test_single_points.yml", 'single_points')

    # Work on a copy of the precomputed HDF5 data inside the temporary folder
    hdf5_copy = (Path(tmp_path) / "multipoles.hdf5").as_posix()
    shutil.copyfile(config.path_hdf5, hdf5_copy)
    config.path_hdf5 = hdf5_copy

    mol = readXYZ((PATH_TEST / "ethylene.xyz").as_posix())
    matrix = compute_matrix_multipole(mol, config, "quadrupole")

    # The matrix contains the overlap + dipole + quadrupole
    assertion.shape_eq(matrix, (10, 46, 46))

    # Every component must be a symmetric matrix
    for component_index in range(10):
        component = matrix[component_index].reshape(46, 46)
        assertion.truth(np.allclose(component, component.T))
35 |
--------------------------------------------------------------------------------
/test/test_read_cp2k_basis.py:
--------------------------------------------------------------------------------
1 | """Read basis in CP2K format."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | import shutil
7 | from pathlib import Path
8 |
9 | import numpy as np
10 | import yaml
11 | import h5py
12 | from nanoutils import RecursiveKeysView, RecursiveItemsView
13 | from packaging.version import Version
14 | from assertionlib import assertion
15 |
16 | import nanoqm
17 | from nanoqm.workflows.initialization import store_cp2k_basis
18 | from .utilsTest import PATH_TEST
19 |
20 |
class TestRedCP2KBasis:
    def test_pass(self, tmp_path: Path) -> None:
        """Read Basis set in CP2K format."""
        hdf5_path = Path(tmp_path) / 'cp2k_basis.hdf5'
        h5py.File(hdf5_path, "x").close()

        # The BASIS_MOLOPT file shipped with the nanoqm package
        basis_file = os.path.join(
            os.path.dirname(nanoqm.__file__),
            'basis',
            'BASIS_MOLOPT',
        )

        store_cp2k_basis(hdf5_path, basis_file)

        with h5py.File(hdf5_path, 'r') as f5:
            coefficients = f5["cp2k/basis/c/DZVP-MOLOPT-GTH/0/coefficients"]

            # Check that the basis format is stored as an attribute
            expected_format = [2, 0, 2, 7, 2, 2, 1]
            np.testing.assert_array_equal(
                coefficients.attrs['basisFormat'], expected_format)

            # Check the shape of the coefficients
            assertion.eq(coefficients.shape, (5, 7))

    def test_legacy(self, tmp_path: Path) -> None:
        """Check that a legacy-layout HDF5 file is upgraded correctly."""
        legacy_hdf5 = tmp_path / "legacy.hdf5"
        shutil.copy2(PATH_TEST / "legacy.hdf5", legacy_hdf5)

        store_cp2k_basis(legacy_hdf5, PATH_TEST / "BASIS_MOLOPT")
        with open(PATH_TEST / "test_initialization.yaml", "r", encoding="utf8") as f1:
            expected_keys = set(yaml.load(f1, Loader=yaml.SafeLoader)["MOLOPT"])

        with h5py.File(legacy_hdf5, "r") as f2:
            assertion.eq(RecursiveKeysView(f2), expected_keys)
            assertion.assert_(Version, f2.attrs.get("__version__"))

            # Every coefficients dataset must carry a 1-D int64 ``basisFormat``
            for dset_name, dset in RecursiveItemsView(f2):
                if dset_name.endswith("coefficients"):
                    basis_fmt = dset.attrs.get("basisFormat")
                    assertion.isinstance(basis_fmt, np.ndarray, message=dset_name)
                    assertion.eq(basis_fmt.ndim, 1, message=dset_name)
                    assertion.eq(basis_fmt.dtype, np.int64, message=dset_name)
65 |
--------------------------------------------------------------------------------
/test/test_schemas.py:
--------------------------------------------------------------------------------
1 | """Check the schemas."""
2 | from assertionlib import assertion
3 |
4 | from nanoqm.workflows.input_validation import process_input
5 | from nanoqm import _data
6 |
7 | from .utilsTest import PATH_TEST
8 |
9 |
def test_input_validation():
    """Test the input validation schema."""
    cases = (
        ("absorption_spectrum", "input_test_absorption_spectrum.yml"),
        ("derivative_couplings", "input_fast_test_derivative_couplings.yml"),
    )
    for schema_name, file_name in cases:
        # Each input file must validate against its workflow schema
        result = process_input(PATH_TEST / file_name, schema_name)
        assertion.isinstance(result, _data.GeneralOptions)
18 |
--------------------------------------------------------------------------------
/test/test_tools.py:
--------------------------------------------------------------------------------
1 | """Test the workflows tools."""
2 | import numpy as np
3 | from qmflows.parsers import parse_string_xyz
4 |
5 | from nanoqm.common import number_spherical_functions_per_atom
6 |
7 | from .utilsTest import PATH_TEST
8 |
9 |
def test_calc_sphericals():
    """Test the calculation of spherical functions."""
    with open(PATH_TEST / 'Cd33Se33.xyz', 'r', encoding="utf8") as f:
        molecule = parse_string_xyz(f.read())

    counts = number_spherical_functions_per_atom(
        molecule, "cp2k", "DZVP-MOLOPT-SR-GTH", PATH_TEST / "Cd33Se33.hdf5")

    # The first 33 atoms contribute 25 spherical functions each,
    # the remaining 33 atoms contribute 13 each.
    reference = np.concatenate((np.repeat(25, 33), np.repeat(13, 33)))
    assert np.array_equal(counts, reference)
21 |
--------------------------------------------------------------------------------
/test/test_version.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from packaging.version import Version
3 | from assertionlib import assertion
4 |
5 | import nanoqm
6 |
7 |
def test_version() -> None:
    """Check that the nano-qmflows version is PEP 440 compliant."""
    # `Version(...)` raises `InvalidVersion` for non-PEP 440 strings, which
    # `assertion.assert_` surfaces as a test failure.
    assertion.assert_(Version, nanoqm.__version__)
11 |
12 |
def test_dev_version(is_release: bool) -> None:
    """Check that release builds do not carry a development version.

    Skipped unless the ``is_release`` fixture reports a nano-qmflows release.
    """
    if not is_release:
        # `pytest.skip` raises `Skipped`, so nothing after it runs;
        # the previous explicit `return None` was dead code.
        pytest.skip("Requires a nano-qmflows release")
    version = Version(nanoqm.__version__)
    assertion.not_(version.is_devrelease)
19 |
--------------------------------------------------------------------------------
/test/test_workflow_IPR.py:
--------------------------------------------------------------------------------
1 | """Test the IPR workflow."""
2 |
3 | import os
4 | from os.path import join
5 | from pathlib import Path
6 |
7 | import numpy as np
8 | from nanoqm.workflows.input_validation import process_input
9 | from nanoqm.workflows.workflow_ipr import workflow_ipr
10 |
11 | from .utilsTest import PATH_TEST, requires_cp2k
12 |
13 |
@requires_cp2k
def test_workflow_IPR(tmp_path: Path) -> None:
    """Test the Inverse Participation Ratio workflow."""
    config = process_input(PATH_TEST / 'input_test_IPR.yml', 'ipr_calculation')

    # Redirect all output into the pytest-managed scratch directory
    config.path_hdf5 = join(tmp_path, "F2.hdf5")
    config.workdir = tmp_path

    # Reference workflow output (two columns per orbital)
    expected = np.array([
        [-14.944306373596191, 1.4802774436230284],
        [-12.682127952575684, 1.9999967522621063],
        [-12.682127952575684, 1.9999969940206397],
        [-9.349729537963867, 1.9999969936606203],
        [-9.349729537963867, 1.999997231558098],
        [-5.862362384796143, 1.7524837324869675],
    ])
    try:
        # First run computes everything from scratch
        result = workflow_ipr(config)
        np.testing.assert_allclose(result, expected, rtol=5e-04)

        # A second run must restart from stored data and reproduce the result
        restarted = workflow_ipr(config)
        np.testing.assert_allclose(restarted, expected, rtol=5e-04)
    finally:
        # The workflow writes IPR.txt into the cwd; clean it up
        if os.path.isfile("IPR.txt"):
            os.remove("IPR.txt")
42 |
--------------------------------------------------------------------------------
/test/test_workflow_coop.py:
--------------------------------------------------------------------------------
1 | """Test the COOP workflow."""
2 |
3 | import os
4 | from pathlib import Path
5 | from os.path import join
6 |
7 | import numpy as np
8 |
9 | from nanoqm.workflows.input_validation import process_input
10 | from nanoqm.workflows.workflow_coop import workflow_crystal_orbital_overlap_population
11 |
12 | from .utilsTest import PATH_TEST, requires_cp2k
13 |
14 |
@requires_cp2k
def test_workflow_coop(tmp_path: Path) -> None:
    """Test the Crystal Orbital Overlap Population workflow."""
    config = process_input(PATH_TEST / 'input_test_coop.yml', 'coop_calculation')

    # Redirect all output into the pytest-managed scratch directory
    config.path_hdf5 = join(tmp_path, "HF.hdf5")
    config.workdir = tmp_path

    # Reference workflow output (two columns per orbital)
    expected = np.array([
        [-29.309432983398438, 0.07755493190601538],
        [-11.921581268310547, 0.03888611797746827],
        [-8.125808715820312, 5.685036211368225e-20],
        [-8.125808715820312, 1.564841425458713e-21],
        [6.837515830993652, -0.7486973445492844],
    ])
    try:
        # First run computes everything from scratch
        result = workflow_crystal_orbital_overlap_population(config)
        np.testing.assert_allclose(result, expected, atol=5e-04)

        # A second run must restart from stored data and reproduce the result
        restarted = workflow_crystal_orbital_overlap_population(config)
        np.testing.assert_allclose(restarted, expected, atol=5e-04)
    finally:
        # The workflow writes COOP.txt into the cwd; clean it up
        if os.path.isfile("COOP.txt"):
            os.remove("COOP.txt")
42 |
--------------------------------------------------------------------------------
/test/test_workflow_single_points.py:
--------------------------------------------------------------------------------
1 | """Test a single point calculation using CP2K."""
2 | import os
3 | from pathlib import Path
4 |
5 | import h5py
6 | import pytest
7 | from assertionlib import assertion
8 |
9 | from nanoqm.common import is_data_in_hdf5
10 | from nanoqm.workflows.input_validation import process_input
11 | from nanoqm.workflows.workflow_single_points import workflow_single_points
12 |
13 | from .utilsTest import PATH_TEST, remove_files
14 |
15 |
def run_single_point(tmp_path: Path, input_file: str):
    """Run a single point calculation using cp2k."""
    config = process_input(PATH_TEST / input_file, 'single_points')
    config.scratch_path = tmp_path
    config.path_hdf5 = os.path.join(tmp_path, 'single_points.hdf5')

    # Start from a fresh, empty HDF5 file ("x" mode fails if it already exists)
    with h5py.File(config.path_hdf5, "x"):
        pass

    try:
        path_orbitals, path_energies = workflow_single_points(config)
        # When orbitals are requested they must end up inside the HDF5 file
        if config.compute_orbitals:
            assertion.truth(is_data_in_hdf5(config.path_hdf5, path_orbitals[0]))
    finally:
        # Drop the plams/cache scratch artifacts left in the cwd
        remove_files()
31 |
32 |
@pytest.mark.slow
def test_single_point(tmp_path: Path):
    """Check that the single-point calculation workflow runs."""
    run_single_point(tmp_path, "input_test_single_points.yml")
37 |
--------------------------------------------------------------------------------
/test/utilsTest.py:
--------------------------------------------------------------------------------
1 | """Functions use for testing."""
2 |
3 | import re
4 | import subprocess
5 | import textwrap
6 | import fnmatch
7 | import os
8 | import shutil
9 | from pathlib import Path
10 |
11 | import pytest
12 | from qmflows.packages import Result
13 | from qmflows.test_utils import find_executable
14 | from packaging.version import Version
15 |
16 | __all__ = [
17 | "PATH_TEST",
18 | "CP2K_VERSION",
19 | "remove_files",
20 | "validate_status",
21 | "requires_cp2k",
22 | ]
23 |
24 | # Environment data
25 | CP2K_EXEC = "cp2k.ssmp"
26 | ROOT = Path(__file__).parents[1]
27 | PATH_TEST = ROOT / "test" / "test_files"
28 |
29 | #: A mark for skipping tests if CP2K is not installed
30 | requires_cp2k = pytest.mark.skipif(
31 | find_executable(CP2K_EXEC) is None,
32 | reason="Requires CP2K",
33 | )
34 |
35 |
def _get_cp2K_version(executable: str) -> Version:
    """Return the CP2K version reported by ``executable --version``.

    Returns ``Version("0.0")`` when the executable cannot be found.

    Raises
    ------
    ValueError
        If the version string cannot be parsed from the ``--version`` output.
    subprocess.CalledProcessError
        If the executable exits with a non-zero status (``check=True``).
    """
    path = find_executable(executable)
    if path is None:
        return Version("0.0")

    # Invoke the executable directly with list arguments instead of
    # `shell=True` string interpolation: no shell quoting/injection issues
    # if the resolved path contains spaces or metacharacters.
    out = subprocess.run(
        [path, "--version"],
        check=True, capture_output=True, text=True,
    )

    match = re.search(r"CP2K version\s+(\S+)", out.stdout)
    if match is None:
        raise ValueError(f"Failed to parse the `{path!r} --version` output:\n\n{out.stdout}")
    return Version(match[1])
50 |
51 |
52 | # Environment data
53 | CP2K_VERSION = _get_cp2K_version(CP2K_EXEC)
54 |
55 |
def remove_files() -> None:
    """Remove tmp files in cwd."""
    # Scratch directories produced by the workflow runs (plams_workdir*)
    workdirs = fnmatch.filter(os.listdir('.'), "plams_workdir*")
    for workdir in workdirs:
        shutil.rmtree(workdir)

    # Cache files left behind in the working directory
    for ext in ("hdf5", "db", "lock"):
        cache_file = f"cache.{ext}"
        if os.path.exists(cache_file):
            os.remove(cache_file)
64 |
65 |
def _read_result_file(result: Result, extension: str, max_line: int = 100) -> "None | str":
    """Find and read the first file in ``result`` with the provided file extension.

    At most the last ``max_line`` lines are returned, prefixed with ``"..."``
    when the file is longer. Returns ``None`` if no such file can be found.
    """
    root = result.archive["plams_dir"]
    if root is None:
        return None

    matches = [os.path.join(root, name) for name in os.listdir(root)
               if os.path.splitext(name)[1] == extension]
    if not matches:
        return None

    with open(matches[0], "r", encoding="utf8") as f:
        lines = f.readlines()
    content = "..." if len(lines) > max_line else ""
    content += "".join(lines[-max_line:])
    return textwrap.indent(content, 4 * " ")
85 |
86 |
def validate_status(result: Result, *, print_out: bool = True, print_err: bool = True) -> None:
    """Validate the status of the ``qmflows.Result`` object is set to ``"successful"``.

    Parameters
    ----------
    result : qmflows.Result
        The to-be validated ``Result`` object.
    print_out : bool
        Whether to include the content of the ``Result`` objects' .out file in the exception.
    print_err : bool
        Whether to include the content of the ``Result`` objects' .err file in the exception.

    Raises
    ------
    AssertionError
        Raised when :code:`result.status != "successful"`.

    """
    # TODO: Import `validate_status` from qmflows once 0.11.2 has been release
    if result.status == "successful":
        return None

    # Collect the failure message plus any available job output for context
    parts = [f"Unexpected {result.job_name} status: {result.status!r}"]
    if print_out:
        out = _read_result_file(result, ".out")
        if out is not None:
            parts.append(f"out_file:\n{out}")
    if print_err:
        err = _read_result_file(result, ".err")
        if err is not None:
            parts.append(f"err_file:\n{err}")
    raise AssertionError("\n\n".join(parts))
120 |
--------------------------------------------------------------------------------
/test_requirements.txt:
--------------------------------------------------------------------------------
1 | assertionlib>=2.2.3
2 | pytest>=5.4.0
3 | pytest-cov>=2.3.1
4 | pytest-mock>=0.4.0
5 | ipython>=5.0.0
6 |
--------------------------------------------------------------------------------
/typings/README.rst:
--------------------------------------------------------------------------------
1 | Stub files with partial type annotations for third-party packages.
2 |
--------------------------------------------------------------------------------
/typings/h5py/__init__.pyi:
--------------------------------------------------------------------------------
1 | import types
2 | from typing import Any, Literal as L
3 | from collections.abc import KeysView, MutableMapping, Generator, Sequence
4 |
5 | from numpy.typing import DTypeLike, ArrayLike
6 |
class AttributeManager(MutableMapping[str, Any]):
    # Partial stub of ``h5py.AttributeManager``: the mapping-like object
    # returned by ``.attrs`` on groups/datasets.
    def __setitem__(self, __key: str, __value: ArrayLike) -> None: ...
    def __getitem__(self, __key: str) -> Any: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Generator[str, None, None]: ...
    def __delitem__(self, __key: str) -> None: ...
13 |
class Group(MutableMapping[str, Any]):
    # Partial stub of ``h5py.Group``: a mutable mapping from names to
    # sub-groups and datasets.
    def __getitem__(self, key: str) -> Any: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Generator[str, None, None]: ...
    def __setitem__(self, __key: str, __value: Any) -> None: ...
    def __delitem__(self, __key: str) -> None: ...
    def require_dataset(self, name: str, shape: Sequence[int], dtype: DTypeLike, exact: bool = ..., **kwds: Any) -> Any: ...
    def keys(self) -> KeysView[str]: ...
    def move(self, source: str, dest: str) -> None: ...
    @property
    def attrs(self) -> AttributeManager: ...
25 |
class File(Group):
    # Partial stub of ``h5py.File``; usable as a context manager.
    # Only the file modes actually used by this project are declared.
    def __init__(
        self,
        name,
        mode: L["r", "r+", "w", "w-", "x", "a"] = ...,
    ) -> None: ...
    def __enter__(self) -> File: ...
    def __exit__(
        self,
        __exc_type: type[BaseException] | None,
        __exc_val: BaseException | None,
        __exc_tb: types.TracebackType | None,
    ) -> None: ...
39 |
--------------------------------------------------------------------------------
/typings/mendeleev/__init__.pyi:
--------------------------------------------------------------------------------
1 | from .mendeleev import element as element
2 |
--------------------------------------------------------------------------------
/typings/mendeleev/mendeleev.pyi:
--------------------------------------------------------------------------------
1 | from .models import Element
2 |
# Look up an ``Element`` by atomic number (int) or string identifier.
def element(ids: int | str) -> Element: ...
4 |
--------------------------------------------------------------------------------
/typings/mendeleev/models.pyi:
--------------------------------------------------------------------------------
class Element:
    # Partial stub of ``mendeleev``'s Element model; only the attribute
    # used by this project is declared.
    mass_number: int
3 |
--------------------------------------------------------------------------------
/typings/noodles/__init__.pyi:
--------------------------------------------------------------------------------
1 | from .interface import gather, schedule, unpack
2 |
3 | __all__ = ["gather", "schedule", "unpack"]
4 |
--------------------------------------------------------------------------------
/typings/noodles/interface/__init__.pyi:
--------------------------------------------------------------------------------
1 | from .functions import gather, unpack
2 | from .decorator import schedule, PromisedObject
3 |
4 | __all__ = ["gather", "schedule", "PromisedObject", "unpack"]
5 |
--------------------------------------------------------------------------------
/typings/noodles/interface/decorator.pyi:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from typing import Any
3 | from typing_extensions import ParamSpec
4 |
5 | _P = ParamSpec("_P")
6 |
# Wrap ``f`` so calls return a lazy ``PromisedObject`` (a workflow node)
# instead of executing immediately; ``hints`` are scheduling hints.
def schedule(f: Callable[_P, object], **hints: Any) -> Callable[_P, PromisedObject]: ...
8 |
class PromisedObject(Any):
    # Placeholder for a not-yet-computed workflow result. Nearly every
    # operator/dunder returns another ``PromisedObject`` so that expressions
    # involving promised values stay lazy. Inheriting from ``Any`` is a
    # stub-only trick: it makes type checkers accept arbitrary usage.
    def __init__(self, workflow: Any) -> None: ...
    def __call__(self, *args: Any, **kwargs: Any) -> PromisedObject: ...
    def __getattr__(self, attr: str) -> PromisedObject: ...
    def __setattr__(self, attr: str, value: Any) -> None: ...
    def __result__(self) -> Any: ...
    def __lt__(self, other) -> PromisedObject: ...
    def __gt__(self, other) -> PromisedObject: ...
    def __eq__(self, other) -> PromisedObject: ...
    def __ne__(self, other) -> PromisedObject: ...
    def __ge__(self, other) -> PromisedObject: ...
    def __le__(self, other) -> PromisedObject: ...
    def __bool__(self) -> PromisedObject: ...
    def __abs__(self) -> PromisedObject: ...
    def __sub__(self, other) -> PromisedObject: ...
    def __add__(self, other) -> PromisedObject: ...
    def __mul__(self, other) -> PromisedObject: ...
    def __rmul__(self, other) -> PromisedObject: ...
    def __truediv__(self, other) -> PromisedObject: ...
    def __floordiv__(self, other) -> PromisedObject: ...
    def __mod__(self, other) -> PromisedObject: ...
    def __pow__(self, other) -> PromisedObject: ...
    def __pos__(self) -> PromisedObject: ...
    def __neg__(self) -> PromisedObject: ...
    def __matmul__(self, other) -> PromisedObject: ...
    def __index__(self) -> PromisedObject: ...
    def __inv__(self) -> PromisedObject: ...
    def __lshift__(self, n) -> PromisedObject: ...
    def __rshift__(self, n) -> PromisedObject: ...
    def __and__(self, other) -> PromisedObject: ...
    def __or__(self, other) -> PromisedObject: ...
    def __xor__(self, other) -> PromisedObject: ...
    def __contains__(self, item: Any) -> PromisedObject: ...
    def __getitem__(self, name: Any) -> PromisedObject: ...
    def __setitem__(self, attr: str, value: Any) -> None: ...
44 |
--------------------------------------------------------------------------------
/typings/noodles/interface/functions.pyi:
--------------------------------------------------------------------------------
1 | from typing import TypeVar, Protocol, SupportsIndex
2 | from collections.abc import Generator
3 |
4 | from .decorator import schedule
5 |
6 | _T = TypeVar("_T")
7 | _T_co = TypeVar("_T_co", covariant=True)
8 |
class _SupportsGetItem(Protocol[_T_co]):
    # Structural type: anything indexable by an integer.
    def __getitem__(self, __key: SupportsIndex) -> _T_co: ...

# Scheduled function: gathers its (possibly promised) arguments into a list.
@schedule
def gather(*a: _T) -> list[_T]: ...
# Yields ``n`` items taken from the indexable object ``t``
# (presumably ``t[0] ... t[n-1]`` — see the noodles documentation).
def unpack(t: _SupportsGetItem[_T], n: SupportsIndex) -> Generator[_T, None, None]: ...
15 |
--------------------------------------------------------------------------------
/typings/noodles/serial/__init__.pyi:
--------------------------------------------------------------------------------
1 | from .registry import Serialiser
2 |
3 | __all__ = ["Serialiser"]
4 |
--------------------------------------------------------------------------------
/typings/noodles/serial/dataclass.pyi:
--------------------------------------------------------------------------------
1 | import dataclasses
2 | from typing import TypeVar, Any
3 | from collections.abc import Callable
4 | from noodles.serial import Serialiser
5 |
# Any dataclass instance; there is no precise static type for "a dataclass".
_DataClass = Any
_T = TypeVar("_T")
_Self = TypeVar("_Self", bound=_DataClass)


class SerDataClass(Serialiser):
    # Noodles (de)serialization handler for ``dataclasses`` instances.
    def __init__(self) -> None: ...
    def encode(self, obj: _DataClass, make_rec: Callable[[dict[str, Any]], _T]) -> _T: ...
    def decode(self, cls: type[_Self], data: dict[str, Any]) -> _Self: ...
15 |
--------------------------------------------------------------------------------
/typings/noodles/serial/registry.pyi:
--------------------------------------------------------------------------------
1 | import abc
2 | from collections.abc import Callable
3 | from typing import Any, TypeVar
4 |
5 | _T = TypeVar("_T")
6 |
7 |
class Serialiser(abc.ABC):
    # Abstract base for noodles serialization handlers; subclasses implement
    # ``encode``/``decode`` for one family of objects.
    name: str
    def __init__(self, name: str | Callable[..., Any] = ...) -> None: ...
    @abc.abstractmethod
    def encode(self, obj: Any, make_rec: Callable[[Any], _T]) -> _T: ...
    @abc.abstractmethod
    def decode(self, cls: type[_T], data: Any) -> _T: ...
15 |
--------------------------------------------------------------------------------
/typings/scipy/__init__.pyi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/typings/scipy/__init__.pyi
--------------------------------------------------------------------------------
/typings/scipy/constants.pyi:
--------------------------------------------------------------------------------
# Maps a constant's name to its ``(value, unit, uncertainty)`` triple.
physical_constants: dict[str, tuple[float, str, float]]
2 |
--------------------------------------------------------------------------------
/typings/scipy/linalg.pyi:
--------------------------------------------------------------------------------
1 | from typing import overload, Any, Literal as L
2 |
3 | import numpy as np
4 | from numpy.typing import ArrayLike, NDArray
5 |
# With ``disp=True`` (the default) only the matrix square root is returned.
@overload
def sqrtm(
    A: ArrayLike,
    disp: L[True] = ...,
    blocksize: int = ...,
) -> NDArray[np.floating[Any]]: ...
# With ``disp=False`` scipy also returns the error estimate as a float.
@overload
def sqrtm(
    A: ArrayLike,
    disp: L[False],
    blocksize: int = ...,
) -> tuple[NDArray[np.floating[Any]], float]: ...
18 |
--------------------------------------------------------------------------------
/typings/scipy/optimize.pyi:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from typing import Any
3 |
4 | import numpy as np
5 | from numpy.typing import ArrayLike, NDArray
6 |
# Partial stub of ``scipy.optimize.curve_fit``. Returns ``(popt, pcov)``:
# the optimal parameter values and their estimated covariance matrix.
def curve_fit(
    f: Callable[..., Any],
    xdata: ArrayLike,
    ydata: ArrayLike,
    p0: None | ArrayLike = ...,
    sigma: None | ArrayLike = ...,
    absolute_sigma: bool = ...,
    check_finite: bool = ...,
    bounds: tuple[ArrayLike, ArrayLike] = ...,
    method: None | str = ...,
    jac: Callable[..., Any] | str | None = ...,
    **kwargs: Any,
) -> tuple[NDArray[np.floating[Any]], NDArray[np.floating[Any]]]: ...
20 |
# ``scipy.optimize.linear_sum_assignment`` returns a ``(row_ind, col_ind)``
# pair of index arrays, not a single array; the previous single-array
# return type was inaccurate.
def linear_sum_assignment(
    cost_matrix: ArrayLike,
    maximize: bool = ...,
) -> tuple[NDArray[np.intp], NDArray[np.intp]]: ...
25 |
--------------------------------------------------------------------------------
/typings/scipy/spatial/__init__.pyi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/typings/scipy/spatial/__init__.pyi
--------------------------------------------------------------------------------
/typings/scipy/spatial/distance.pyi:
--------------------------------------------------------------------------------
1 | from typing import overload, Any, Protocol, Literal, SupportsIndex, SupportsFloat
2 |
3 | import numpy as np
4 | from numpy.typing import ArrayLike, NDArray
5 |
# Metric names accepted by ``cdist`` (canonical names plus their aliases).
_MetricKind = Literal[
    'braycurtis',
    'canberra',
    'chebychev', 'chebyshev', 'cheby', 'cheb', 'ch',
    'cityblock', 'cblock', 'cb', 'c',
    'correlation', 'co',
    'cosine', 'cos',
    'dice',
    'euclidean', 'euclid', 'eu', 'e',
    'hamming', 'hamm', 'ha', 'h',
    'minkowski', 'mi', 'm', 'pnorm',
    'jaccard', 'jacc', 'ja', 'j',
    'jensenshannon', 'js',
    'kulsinski', 'kulczynski1',
    'mahalanobis', 'mahal', 'mah',
    'rogerstanimoto',
    'russellrao',
    'seuclidean', 'se', 's',
    'sokalmichener',
    'sokalsneath',
    'sqeuclidean', 'sqe', 'sqeuclid',
    'yule',
]

# Callable metric without extra keyword arguments.
class _MetricCallback1(Protocol):
    def __call__(
        self, __XA: NDArray[Any], __XB: NDArray[Any]
    ) -> None | str | bytes | SupportsFloat | SupportsIndex: ...

# Callable metric accepting extra keyword arguments.
class _MetricCallback2(Protocol):
    def __call__(
        self, __XA: NDArray[Any], __XB: NDArray[Any], **kwargs: Any
    ) -> None | str | bytes | SupportsFloat | SupportsIndex: ...

# String-named metric: the metric-specific tuning knobs are keyword-only.
@overload
def cdist(
    XA: ArrayLike,
    XB: ArrayLike,
    metric: _MetricKind = ...,
    *,
    out: None | NDArray[np.floating[Any]] = ...,
    p: float = ...,
    w: None | ArrayLike = ...,
    V: None | ArrayLike = ...,
    VI: None | ArrayLike = ...,
) -> NDArray[np.floating[Any]]: ...
# Callable metric: arbitrary keyword arguments are forwarded to it.
@overload
def cdist(
    XA: ArrayLike,
    XB: ArrayLike,
    metric: _MetricCallback1 | _MetricCallback2,
    *,
    out: None | NDArray[np.floating[Any]] = ...,
    **kwargs: Any,
) -> NDArray[np.floating[Any]]: ...
61 |
--------------------------------------------------------------------------------
/typings/scm/__init__.pyi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/typings/scm/__init__.pyi
--------------------------------------------------------------------------------
/typings/scm/plams/__init__.pyi:
--------------------------------------------------------------------------------
1 | from scm.plams.core.settings import Settings
2 | from scm.plams.core.functions import add_to_class
3 | from scm.plams.mol.atom import Atom
4 | from scm.plams.mol.bond import Bond
5 | from scm.plams.mol.molecule import Molecule
6 | from scm.plams.mol.pdbtools import PDBHandler, PDBRecord
7 |
8 | __all__ = [
9 | "Atom",
10 | "Bond",
11 | "Molecule",
12 | "Settings",
13 | "PDBHandler",
14 | "PDBRecord",
15 | "add_to_class",
16 | ]
17 |
--------------------------------------------------------------------------------
/typings/scm/plams/core/__init__.pyi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/typings/scm/plams/core/__init__.pyi
--------------------------------------------------------------------------------
/typings/scm/plams/core/functions.pyi:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from typing import Any, TypeVar
3 |
_FT = TypeVar("_FT", bound=Callable[..., Any])

# Decorator factory (PLAMS ``add_to_class``): attaches the decorated
# function to ``classname`` as a new method.
def add_to_class(classname: type[Any]) -> Callable[[_FT], _FT]: ...
7 |
--------------------------------------------------------------------------------
/typings/scm/plams/core/settings.pyi:
--------------------------------------------------------------------------------
1 | import sys
2 | import types
3 | from collections.abc import Callable, Iterable, Mapping, Sequence
4 | from typing import Any, Generic, TypeVar, overload, Protocol
5 |
_KT = TypeVar("_KT")
_VT = TypeVar("_VT")
_MT = TypeVar("_MT", bound=_SupportsMissing)
_ST = TypeVar("_ST", bound=Settings[Any, Any])

class _SupportsMissing(Protocol):
    # Structural type: any mapping that defines ``__missing__``.
    def __missing__(self, __key: Any) -> Any: ...

class Settings(dict[_KT, _VT]):
    # Partial stub of ``scm.plams.core.settings.Settings``: a nested dict
    # with attribute-style access; ``__missing__`` auto-creates branches.
    def copy(self: _ST) -> _ST: ...
    def soft_update(self: _ST, other: Mapping[_KT, _VT]) -> _ST: ...
    def update(self, other: Mapping[_KT, _VT]) -> None: ...  # type: ignore[override]
    def merge(self: _ST, other: Mapping[_KT, _VT]) -> _ST: ...
    def find_case(self, key: _KT) -> _KT: ...
    def as_dict(self) -> dict[_KT, _VT]: ...
    @classmethod
    def suppress_missing(cls: type[_MT]) -> SuppressMissing[_MT]: ...
    def get_nested(self, key_tuple: Iterable[Any], suppress_missing: bool = False) -> Any: ...
    def set_nested(self, key_tuple: Sequence[Any], value: Any, suppress_missing: bool = False) -> None: ...
    def flatten(self, flatten_list: bool = ...) -> Settings[tuple[Any, ...], Any]: ...
    def unflatten(self, unflatten_list: bool = ...) -> Settings[Any, Any]: ...
    @classmethod  # type: ignore[override]
    @overload
    def fromkeys(cls, __iterable: Iterable[_KT]) -> Settings[_KT, Any]: ...
    @classmethod
    @overload
    def fromkeys(cls, __iterable: Iterable[_KT], __value: _VT) -> Settings[_KT, _VT]: ...
    def __missing__(self, __key: _KT) -> Settings[Any, Any]: ...
    def __getattr__(self, name: _KT) -> _VT: ...  # type: ignore[misc]
    def __setattr__(self, name: _KT, value: _VT) -> None: ...  # type: ignore[misc,override]
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    def __add__(self: _ST, other: Mapping[_KT, _VT]) -> _ST: ...
    def __iadd__(self: _ST, other: Mapping[_KT, _VT]) -> _ST: ...
    def __copy__(self: _ST) -> _ST: ...

class SuppressMissing(Generic[_MT]):
    # Context manager returned by ``Settings.suppress_missing``.
    obj: _MT
    missing: Callable[[Any, _MT, Any], Any]
    def __init__(self, obj: type[_MT]) -> None: ...
    def __enter__(self) -> None: ...
    def __exit__(
        self, exc_type: None | type[BaseException], exc_value: None | BaseException, traceback: None | types.TracebackType
    ) -> None: ...
50 |
--------------------------------------------------------------------------------
/typings/scm/plams/mol/__init__.pyi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/typings/scm/plams/mol/__init__.pyi
--------------------------------------------------------------------------------
/typings/scm/plams/mol/atom.pyi:
--------------------------------------------------------------------------------
1 | import builtins
2 | from collections.abc import Iterable, Iterator, Mapping, Sequence
3 | from typing import Any, SupportsFloat
4 |
5 | import numpy as np
6 | from scm.plams import Bond, Molecule, Settings
7 |
class Atom:
    # Partial stub of ``scm.plams.mol.atom.Atom``.
    atnum: int
    mol: Molecule
    bonds: list[Bond]
    properties: Settings[Any, Any]
    coords: tuple[float, float, float]
    id: int  # Only available after calling `Molecule.set_atoms_id`
    def __init__(
        self,
        atnum: int = ...,
        symbol: None | builtins.str = ...,
        coords: None | Iterable[builtins.str | bytes | SupportsFloat] = ...,
        unit: builtins.str = ...,
        bonds: None | list[Bond] = ...,
        mol: None | Molecule = ...,
        **other: Any,
    ) -> None: ...
    def __iter__(self) -> Iterator[float]: ...
    @property
    def symbol(self) -> builtins.str: ...
    @symbol.setter
    def symbol(self, symbol: builtins.str) -> None: ...
    @property
    def x(self) -> float: ...
    @x.setter
    def x(self, value: float) -> None: ...
    @property
    def y(self) -> float: ...
    @y.setter
    def y(self, value: float) -> None: ...
    @property
    def z(self) -> float: ...
    @z.setter
    def z(self, value: float) -> None: ...
    @property
    def mass(self) -> float: ...
    @property
    def radius(self) -> float: ...
    @property
    def connectors(self) -> float: ...
    @property
    def is_metallic(self) -> int: ...
    @property
    def is_electronegative(self) -> int: ...
    def str(
        self,
        symbol: bool,
        suffix: str = ...,
        suffix_dict: Mapping[str, Any] = ...,
        unit: str = ...,
        space: int = ...,
        decimal: int = ...,
    ) -> str: ...
    def translate(self, vector: Iterable[float], unit: builtins.str = ...) -> None: ...
    def move_to(self, point: Iterable[float], unit: builtins.str = ...) -> None: ...
    # PLAMS returns a scalar distance here, not a vector (the previous
    # 3-tuple annotation was swapped with ``vector_to``).
    def distance_to(
        self, point: Iterable[float], unit: builtins.str = ..., result_unit: builtins.str = ...
    ) -> float: ...
    # PLAMS returns the (x, y, z) translation vector here, not a scalar.
    def vector_to(
        self, point: Iterable[float], unit: builtins.str = ..., result_unit: builtins.str = ...
    ) -> tuple[float, float, float]: ...
    # PLAMS returns the angle as a number, not a string.
    def angle(
        self,
        point1: Iterable[float],
        point2: Iterable[float],
        point1unit: builtins.str = ...,
        point2unit: builtins.str = ...,
        result_unit: builtins.str = ...,
    ) -> float: ...
    def rotate(self, matrix: np.ndarray | Sequence[Sequence[float]]) -> None: ...
    def neighbors(self) -> list[Atom]: ...
77 |
--------------------------------------------------------------------------------
/typings/scm/plams/mol/bond.pyi:
--------------------------------------------------------------------------------
1 | from collections.abc import Generator, Iterable
2 | from typing import Any, ClassVar
3 |
4 | from scm.plams import Atom, Molecule, Settings
5 |
class Bond:
    # Partial stub of ``scm.plams.mol.bond.Bond``: a bond between two atoms,
    # optionally attached to a molecule.
    AR: ClassVar[float]
    atom1: Atom
    atom2: Atom
    order: float
    mol: Molecule
    properties: Settings[Any, Any]
    def __init__(
        self, atom1: None | Atom = ..., atom2: None | Atom = ..., order: float = ..., mol: None | Molecule = ..., **other: Any
    ) -> None: ...
    def __iter__(self) -> Generator[Atom, None, None]: ...
    def is_aromatic(self) -> bool: ...
    def length(self, unit: str = ...) -> float: ...
    def as_vector(self, start: None | Atom = ..., unit: str = ...) -> tuple[float, float, float]: ...
    def other_end(self, atom: Atom) -> Atom: ...
    def resize(self, moving_atom: Atom, length: float, unit: str = ...) -> None: ...
    def rotate(self, moving_atom: Atom, angle: float, unit: str = ...) -> None: ...
23 |
--------------------------------------------------------------------------------
/typings/scm/plams/mol/pdbtools.pyi:
--------------------------------------------------------------------------------
1 | from typing import IO, Any
2 |
class PDBRecord:
    # Partial stub of a single PDB-file record (one tagged line, possibly
    # continued over multiple lines).
    name: str
    value: list[str]
    model: list[Any]
    def __init__(self, s: str) -> None: ...
    def __str__(self) -> str: ...
    def is_multiline(self) -> bool: ...
    def extend(self, s: str) -> bool: ...

class PDBHandler:
    # Partial stub of PLAMS' PDB file reader/writer.
    records: dict[str, list[PDBRecord]]
    def __init__(self, textfile: None | str | IO[str] = ...) -> None: ...
    def singlemodel(self) -> bool: ...
    def read(self, f: IO[str]) -> None: ...
    def write(self, f: IO[str]) -> None: ...
    def calc_master(self) -> PDBRecord: ...
    def check_master(self) -> bool: ...
    def get_models(self) -> list[list[Any]]: ...
    def add_record(self, record: PDBRecord) -> None: ...
    def add_model(self, model: list[PDBRecord]) -> None: ...
23 |
--------------------------------------------------------------------------------
/typings/wheel/__init__.pyi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/typings/wheel/__init__.pyi
--------------------------------------------------------------------------------
/typings/wheel/bdist_wheel.pyi:
--------------------------------------------------------------------------------
1 | from setuptools import Command
2 |
class bdist_wheel(Command):
    # Partial stub of the ``wheel`` setuptools command; only the members
    # overridden/used by this project are declared.
    def get_tag(self) -> tuple[str, str, str]: ...
    def finalize_options(self) -> None: ...
    def initialize_options(self) -> None: ...
    def run(self) -> None: ...
8 |
--------------------------------------------------------------------------------