├── .gitattributes ├── .github ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── 00_bug_report.md │ ├── 10_feature_request.md │ └── 20_help_request.md ├── PUll_REQUEST_TEMPLATE │ └── pull_request_template.md ├── dependabot.yml └── workflows │ ├── pypi.yaml │ └── pythonapp.yml ├── .gitignore ├── .readthedocs.yml ├── CHANGELOG.md ├── CITATION.cff ├── LICENSE.txt ├── MANIFEST.in ├── README.rst ├── codecov.yaml ├── conftest.py ├── data └── carboxylic_acids │ └── dataset.csv ├── doc_environment.yml ├── doc_requirements.txt ├── docs ├── Makefile ├── _images │ ├── default_tree.jpg │ ├── merged_tree.jpg │ ├── nac_worflow.png │ └── tree.png ├── _static │ └── default.css ├── _templates │ └── layout.html ├── absorption_spectrum.rst ├── conf.py ├── coop.rst ├── derivative_couplings.rst ├── distribute_absorption_spectrum.rst ├── docs_command_line.rst ├── docs_cp2k_interface.rst ├── docs_derivative_coupling.rst ├── docs_integrals.rst ├── docs_molecular_orbitals.rst ├── docs_workflows.rst ├── documentation.rst ├── includereadme.rst ├── index.rst ├── intro.rst ├── ipr.rst ├── make.bat ├── single_points.rst └── theory.rst ├── install_requirements.txt ├── libint ├── compute_integrals.cc ├── include │ ├── compute_integrals.hpp │ └── namd.hpp └── py_compute_integrals.cc ├── licenses ├── LICENSE_LIBHDF5.txt ├── LICENSE_LIBINT2.txt └── README.rst ├── linting_requirements.txt ├── nanoqm ├── __init__.py ├── _data.py ├── _logger.py ├── _monkey_patch.py ├── analysis │ ├── __init__.py │ └── tools.py ├── basis │ ├── BASIS_ADMM │ ├── BASIS_ADMM_MOLOPT │ ├── BASIS_MOLOPT │ ├── GTH_POTENTIALS │ ├── aux_fit.json │ └── valence_electrons.json ├── common.py ├── compute_integrals.pyi ├── integrals │ ├── __init__.py │ ├── multipole_matrices.py │ └── nonAdiabaticCoupling.py ├── py.typed ├── schedule │ ├── __init__.py │ ├── components.py │ ├── scheduleCP2K.py │ └── scheduleCoupling.py └── workflows │ ├── __init__.py │ ├── distribute_jobs.py │ ├── initialization.py │ ├── 
input_validation.py │ ├── orbitals_type.py │ ├── run_workflow.py │ ├── schemas.py │ ├── templates.py │ ├── tools.py │ ├── workflow_coop.py │ ├── workflow_coupling.py │ ├── workflow_ipr.py │ ├── workflow_single_points.py │ └── workflow_stddft_spectrum.py ├── pyproject.toml ├── scripts ├── convert_legacy_hdf5.py ├── download_cp2k.sh ├── get_whl_name.py ├── hamiltonians │ ├── plot_couplings.py │ ├── plot_mos_energies.py │ └── plot_spectra.py ├── prepare_test_dir.sh ├── pyxaid │ ├── iconds_excess_energy.py │ ├── plot_average_energy.py │ ├── plot_cooling.py │ ├── plot_spectra_pyxaid.py │ └── plot_states_pops.py ├── qmflows │ ├── README.rst │ ├── convolution.py │ ├── coordination_ldos.py │ ├── dos_cp2k.py │ ├── mergeHDF5.py │ ├── opt_anion_cp2k.py │ ├── opt_cation_cp2k.py │ ├── opt_cp2k.py │ ├── plot_dos.py │ ├── removeHDF5folders.py │ └── remove_mos_hdf5.py └── reenumerate.py ├── setup.py ├── test ├── __init__.py ├── test_absorption_spectrum.py ├── test_analysis_tools.py ├── test_citation_cff.py ├── test_cli.py ├── test_coupling.py ├── test_cpk2_error_call.py ├── test_distribute.py ├── test_files │ ├── ALL_BASIS_SETS │ ├── BASIS_ADMM │ ├── BASIS_ADMM_MOLOPT │ ├── BASIS_MOLOPT │ ├── BASIS_MOLOPT_UZH │ ├── C.xyz │ ├── Cd.hdf5 │ ├── Cd.xyz │ ├── Cd33Se33.hdf5 │ ├── Cd33Se33.xyz │ ├── Cd33Se33_fivePoints.xyz │ ├── F2.xyz │ ├── GTH_POTENTIALS │ ├── HF.xyz │ ├── He.hdf5 │ ├── He.xyz │ ├── O2_coupling.xyz │ ├── ethylene.hdf5 │ ├── ethylene.xyz │ ├── ethylene_couplings.xyz │ ├── file_cell_parameters.txt │ ├── file_distribute_cell_parameters.txt │ ├── guanine_distribution.yml │ ├── input_couplings_alphas.yml │ ├── input_couplings_both.yml │ ├── input_fast_test_derivative_couplings.yml │ ├── input_test_IPR.yml │ ├── input_test_absorption_spectrum.yml │ ├── input_test_absorption_spectrum_all.yml │ ├── input_test_absorption_spectrum_unrestricted.yml │ ├── input_test_b3lyp.yml │ ├── input_test_coop.yml │ ├── input_test_distribute_absorption_spectrum.yml │ ├── 
input_test_distribute_derivative_couplings.yml │ ├── input_test_pbe.yml │ ├── input_test_pbe0.yml │ ├── input_test_single_points.yml │ ├── input_test_single_points_hybrid_functional.yml │ ├── legacy.hdf5 │ ├── mypy.ini │ ├── oxygen.hdf5 │ ├── test_files.hdf5 │ └── test_initialization.yaml ├── test_initialization.py ├── test_input_validation.py ├── test_multipole.py ├── test_read_cp2k_basis.py ├── test_schemas.py ├── test_tools.py ├── test_version.py ├── test_workflow_IPR.py ├── test_workflow_coop.py ├── test_workflow_single_points.py └── utilsTest.py ├── test_requirements.txt └── typings ├── README.rst ├── h5py └── __init__.pyi ├── mendeleev ├── __init__.pyi ├── mendeleev.pyi └── models.pyi ├── noodles ├── __init__.pyi ├── interface │ ├── __init__.pyi │ ├── decorator.pyi │ └── functions.pyi └── serial │ ├── __init__.pyi │ ├── dataclass.pyi │ └── registry.pyi ├── scipy ├── __init__.pyi ├── constants.pyi ├── linalg.pyi ├── optimize.pyi └── spatial │ ├── __init__.pyi │ └── distance.pyi ├── scm ├── __init__.pyi └── plams │ ├── __init__.pyi │ ├── core │ ├── __init__.pyi │ ├── functions.pyi │ └── settings.pyi │ └── mol │ ├── __init__.pyi │ ├── atom.pyi │ ├── bond.pyi │ ├── molecule.pyi │ └── pdbtools.pyi └── wheel ├── __init__.pyi └── bdist_wheel.pyi /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | tests/test_files/** linguist-generated 3 | -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, gender identity and expression, 
level of experience, 9 | education, socio-economic status, nationality, personal appearance, race, 10 | religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. 
Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at f.zapata@esciencecenter.nl. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing guidelines 2 | 3 | We welcome any kind of contribution to our software, from simple comment or question to a full fledged [pull request](https://help.github.com/articles/about-pull-requests/). Please read and follow our [Code of Conduct](CODE_OF_CONDUCT.md). 4 | 5 | A contribution can be one of the following cases: 6 | 7 | 1. you have a question; 8 | 1. you think you may have found a bug (including unexpected behavior); 9 | 1. 
you want to make some kind of change to the code base (e.g. to fix a bug, to add a new feature, to update documentation). 10 | 11 | The sections below outline the steps in each case. 12 | 13 | ## You have a question 14 | 15 | 1. use the search functionality [here](https://github.com/SCM-NV/nano-qmflows/issues) to see if someone already filed the same issue; 16 | 1. if your issue search did not yield any relevant results, make a new issue; 17 | 1. apply the "Question" label; apply other labels when relevant. 18 | 19 | ## You think you may have found a bug 20 | 21 | 1. use the search functionality [here](https://github.com/SCM-NV/nano-qmflows/issues) to see if someone already filed the same issue; 22 | 1. if your issue search did not yield any relevant results, make a new issue, making sure to provide enough information to the rest of the community to understand the cause and context of the problem. Depending on the issue, you may want to include: 23 | - the [SHA hashcode](https://help.github.com/articles/autolinked-references-and-urls/#commit-shas) of the commit that is causing your problem; 24 | - some identifying information (name and version number) for dependencies you're using; 25 | - information about the operating system; 26 | 1. apply relevant labels to the newly created issue. 27 | 28 | ## You want to make some kind of change to the code base 29 | 30 | 1. (**important**) announce your plan to the rest of the community _before you start working_. This announcement should be in the form of a (new) issue; 31 | 1. (**important**) wait until some kind of consensus is reached about your idea being a good idea; 32 | 1. if needed, fork the repository to your own Github profile and create your own feature branch off of the latest master commit. 
While working on your feature branch, make sure to stay up to date with the master branch by pulling in changes, possibly from the 'upstream' repository (follow the instructions [here](https://help.github.com/articles/configuring-a-remote-for-a-fork/) and [here](https://help.github.com/articles/syncing-a-fork/)); 33 | 1. make sure the existing tests still work by running ``pytest test``; 34 | 1. add your own tests (if necessary); 35 | 1. update or expand the documentation; 36 | 1. [push](http://rogerdudler.github.io/git-guide/) your feature branch to (your fork of) the [nano-qmflows](https://github.com/SCM-NV/nano-qmflows) repository on GitHub; 37 | 1. create the pull request, e.g. following the instructions [here](https://help.github.com/articles/creating-a-pull-request/). 38 | 39 | In case you feel like you've made a valuable contribution, but you don't know how to write or run tests for it, or how to generate the documentation: don't let this discourage you from making the pull request; we can help you! Just go ahead and submit the pull request, but keep in mind that you might be asked to append additional commits to your pull request. 40 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/00_bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Something doesn't work like I expected. 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | Use your best judgment to provide a useful level of information. Depending on the nature of the issue, consider including, e.g. 
11 | 12 | - Which version of the software you're running 13 | - Any logging or error output you see 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/10_feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: I would like a new feature to be included in the library 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | Tell us what would be a nice feature to make the **nano-qmflows** library even better! 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/20_help_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Help wanted 3 | about: I need some help with the code 4 | title: '' 5 | labels: help wanted 6 | assignees: '' 7 | 8 | --- 9 | -------------------------------------------------------------------------------- /.github/PUll_REQUEST_TEMPLATE/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ### All Submissions: 2 | 3 | * [ ] Have you followed the guidelines in our Contributing document? 4 | * [ ] Have you checked to ensure there aren't other open [Pull Requests](https://github.com/SCM-NV/nano-qmflows/pulls) for the same update/change? 5 | 6 | 7 | ### New Feature Submissions: 8 | 9 | 1. [ ] Does your submission pass tests? 10 | 2. [ ] Have you check your code quality (e.g. using [flake8](http://flake8.pycqa.org/en/latest/)) prior to submission? 11 | 12 | ### Changes to Core Features: 13 | 14 | * [ ] Have you added an explanation of what your changes do and why you'd like us to include them? 15 | * [ ] Have you written new tests for your core changes, as applicable? 16 | * [ ] Have you successfully ran tests with your changes locally? 
17 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "github-actions" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "monthly" 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | x# Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | /tmp 26 | nanoqm/_version.py 27 | wheelhouse/ 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *,cover 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | 56 | # Sphinx documentation 57 | docs/_build/ 58 | 59 | # PyBuilder 60 | target/ 61 | 62 | # emacs 63 | *~ 64 | 65 | #vi 66 | *.swp 67 | 68 | #Profiling 69 | *.lprof 70 | *.prof 71 | *.out 72 | 73 | # pytest 74 | .pytest* 75 | 76 | # VScode 77 | .vscode 78 | 79 | # Mypy 80 | .mypy_cache/ 81 | 82 | # MacOS 83 | .DS_Store 84 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Optionally build your docs in additional formats such as PDF 9 | formats: all 10 | 11 | # Optionally set the version of Python and requirements required to build your docs 12 | conda: 13 | environment: doc_environment.yml 14 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | # YAML 1.2 2 | # Metadata for citation of this software according to the CFF format (https://citation-file-format.github.io/) 3 | cff-version: 1.2.0 4 | message: If you use this software, please cite it as below. 5 | title: nano-qmflows 6 | abstract: Nano-QMFlows is a generic python library for computing (numerically) electronic properties for nanomaterials like the non-adiabatic coupling vectors (NACV) using several quantum chemical (QM) packages. 
7 | authors: 8 | - given-names: Felipe 9 | family-names: Zapata 10 | orcid: "https://orcid.org/0000-0001-8286-677X" 11 | - given-names: Bas 12 | family-names: van Beek 13 | orcid: "https://orcid.org/0000-0003-2463-6559" 14 | 15 | keywords: 16 | - computational-chemistry 17 | - materials-science 18 | - python 19 | - Workflows 20 | version: '0.14.2' 21 | date-released: "2023-10-11" # yyyy-mm-dd 22 | repository-code: https://github.com/SCM-NV/nano-qmflows 23 | license: "Apache-2.0" 24 | doi: 10.5281/zenodo.2576893 25 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | exclude test/* 2 | include *_requirements.txt 3 | 4 | include nanoqm/py.typed 5 | recursive-include nanoqm *.pyi 6 | include nanoqm/basis/GTH_POTENTIALS 7 | include nanoqm/basis/*.json 8 | include nanoqm/basis/BASIS_* 9 | 10 | include libint/include/*.hpp 11 | -------------------------------------------------------------------------------- /codecov.yaml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | target: 80 6 | patch: 7 | default: 8 | target: 0 9 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | 5 | import os 6 | import shutil 7 | from collections.abc import Generator 8 | 9 | import pytest 10 | from nanoqm._logger import logger as nanoqm_logger 11 | from scm.plams import add_to_class, Cp2kJob 12 | 13 | 14 | @add_to_class(Cp2kJob) 15 | def get_runscript(self) -> str: 16 | """Run a parallel version of CP2K without mpirun or srun, \ 17 | as this can cause issues with some executables. 18 | 19 | This method is monkey-patched into the PLAMS ``Cp2kJob`` class. 
20 | 21 | """ 22 | cp2k_command = self.settings.get("executable", "cp2k.ssmp") 23 | return f"{cp2k_command} -i {self._filename('inp')} -o {self._filename('out')}" 24 | 25 | 26 | @pytest.fixture(autouse=True, scope="session") 27 | def is_release() -> Generator[bool, None, None]: 28 | """Yield whether the test suite is run for a nano-qmflows release or not.""" 29 | env_var = os.environ.get("IS_RELEASE", 0) 30 | try: 31 | yield bool(int(env_var)) 32 | except ValueError as ex: 33 | raise ValueError("The `IS_RELEASE` environment variable expected an integer") from ex 34 | 35 | 36 | @pytest.fixture(autouse=True, scope="function") 37 | def cleunup_files() -> Generator[None, None, None]: 38 | # TODO: Investigate if these files can be removed by their respective test(s) 39 | yield None 40 | if os.path.isfile("quantum.hdf5"): 41 | os.remove("quantum.hdf5") 42 | if os.path.isfile("input_parameters.yml"): 43 | os.remove("input_parameters.yml") 44 | if os.path.isdir("overlaps"): 45 | shutil.rmtree("overlaps") 46 | 47 | 48 | @pytest.fixture(autouse=True, scope="session") 49 | def prepare_logger() -> Generator[None, None, None]: 50 | """Remove the logging output to stdout while running tests.""" 51 | import noodles 52 | import qmflows 53 | noodles_logger = logging.getLogger("noodles") 54 | qmflows_logger = logging.getLogger("qmflows") 55 | 56 | nanoqm_handlers = nanoqm_logger.handlers.copy() 57 | noodles_handlers = noodles_logger.handlers.copy() 58 | qmflows_handlers = qmflows_logger.handlers.copy() 59 | 60 | for handler in nanoqm_handlers: 61 | nanoqm_logger.removeHandler(handler) 62 | for handler in noodles_handlers: 63 | noodles_logger.removeHandler(handler) 64 | for handler in qmflows_handlers: 65 | qmflows_logger.removeHandler(handler) 66 | 67 | yield None 68 | 69 | for handler in nanoqm_handlers: 70 | nanoqm_logger.addHandler(handler) 71 | for handler in noodles_handlers: 72 | noodles_logger.addHandler(handler) 73 | for handler in qmflows_handlers: 74 | 
qmflows_logger.addHandler(handler) 75 | -------------------------------------------------------------------------------- /doc_environment.yml: -------------------------------------------------------------------------------- 1 | name: nanoqm-doc 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | - boost 7 | - eigen 8 | - "libint>=2.6.0" 9 | - highfive 10 | - pip 11 | - pip: 12 | - -r doc_requirements.txt 13 | - . 14 | -------------------------------------------------------------------------------- /doc_requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=2.1 2 | sphinx_rtd_theme 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = . 8 | BUILDDIR = _build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/_images/default_tree.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/default_tree.jpg -------------------------------------------------------------------------------- /docs/_images/merged_tree.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/merged_tree.jpg -------------------------------------------------------------------------------- /docs/_images/nac_worflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/nac_worflow.png -------------------------------------------------------------------------------- /docs/_images/tree.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SCM-NV/nano-qmflows/522f8e0dae70b8431e499c3f5c7d50f259edf11b/docs/_images/tree.png -------------------------------------------------------------------------------- /docs/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | 3 | {%- block sidebarlogo %} 4 | {%- if logo %} 5 | 8 | {%- endif %} 9 | {%- endblock %} -------------------------------------------------------------------------------- /docs/coop.rst: -------------------------------------------------------------------------------- 1 | Crystal Orbital Overlap Population (COOP) calculation 2 | ===================================================== 3 | 4 | The workflow coop_calculation 
allows to compute the crystal orbital overlap population between two selected elements. 5 | 6 | Preparing the input 7 | ------------------- 8 | 9 | The following is an example of input file to perform the COOP calculation between Cd and Se for the Cd33Se33 system. 10 | 11 | .. code-block:: yaml 12 | 13 | workflow: 14 | coop_calculation 15 | 16 | project_name: Cd33Se33 17 | active_space: [50, 50] 18 | path_hdf5: "Cd33Se33.hdf5" 19 | path_traj_xyz: "Cd33Se33.xyz" 20 | scratch_path: "/tmp/COOP" 21 | 22 | coop_elements: ["Cd", "Se"] 23 | 24 | cp2k_general_settings: 25 | basis: "DZVP-MOLOPT-SR-GTH" 26 | potential: "GTH-PBE" 27 | cell_parameters: 28.0 28 | periodic: none 29 | executable: cp2k.popt 30 | 31 | cp2k_settings_main: 32 | specific: 33 | template: pbe_main 34 | cp2k: 35 | force_eval: 36 | dft: 37 | scf: 38 | eps_scf: 1e-6 39 | 40 | cp2k_settings_guess: 41 | specific: 42 | template: 43 | pbe_guess 44 | 45 | 46 | In your working directory, copy the previous input into an *input_test_coop.yml* file. 47 | Also copy locally the file containing the coordinates of the relaxed Cd33Se33 system, Cd33Se33.xyz_. 48 | 49 | Your *input_test_coop.yml* input file now contains all settings to perform the coop calculations and needs to be edited according to your system and preferences. 50 | Please note that this input is very similar to the basic example of single point calculation provided in a previous tutorial_ (please refer to it for a more extensive description of the above options) 51 | except for the following options: **workflow**, **coop_elements**. 52 | 53 | - **workflow**: The workflow you need for your calculations, in this case set to coop_calculation is this case. 54 | - **coop_elements**: List of the two elements to calculate the COOP for, here Cd and Se. 55 | 56 | In the cp2k_general_settings, you can customize the settings used to generate the cp2k input. 
To help you creating your custom input requirements, please consult the cp2k manual_ and the templates_ available in nano-qmflows. 57 | 58 | .. _Cd33Se33.xyz: https://github.com/SCM-NV/nano-qmflows/blob/master/test/test_files/Cd33Se33.xyz 59 | .. _tutorial: https://qmflows-namd.readthedocs.io/en/latest/single_points.html 60 | .. _manual: https://manual.cp2k.org/ 61 | .. _templates: https://github.com/SCM-NV/nano-qmflows/blob/master/nanoqm/workflows/templates.py 62 | 63 | Setting up the calculation 64 | --------------------------- 65 | 66 | Once all settings of your yml input have been customized, can to launch your coop calculation. 67 | 68 | - First, activate the conda environment with QMFlows: 69 | 70 | ``conda activate qmflows`` 71 | 72 | - Then, load the module with your version of cp2k, for example: 73 | 74 | ``module load CP2K/7.1.0`` 75 | 76 | - Finally, use the command run_workflow.py to submit your calculation. 77 | 78 | ``run_workflow.py -i input_test_coop.yml`` 79 | 80 | Results 81 | ------- 82 | 83 | Once your calculation has finished successfully, you will find a *COOP.txt* file in your working directory. 84 | The two columns of this file contain, respectively, the orbitals’ energies and the corresponding COOP values for the selected atoms pair. 85 | -------------------------------------------------------------------------------- /docs/docs_command_line.rst: -------------------------------------------------------------------------------- 1 | Command line interface 2 | ---------------------- 3 | Running a workflow 4 | ################## 5 | .. automodule:: nanoqm.workflows.run_workflow 6 | 7 | Workflows distribution 8 | ###################### 9 | .. automodule:: nanoqm.workflows.distribute_jobs -------------------------------------------------------------------------------- /docs/docs_cp2k_interface.rst: -------------------------------------------------------------------------------- 1 | CP2K Interface 2 | -------------- 3 | .. 
automodule:: nanoqm.schedule.scheduleCP2K -------------------------------------------------------------------------------- /docs/docs_derivative_coupling.rst: -------------------------------------------------------------------------------- 1 | Derivative Couplings 2 | -------------------- 3 | .. automodule:: nanoqm.integrals.nonAdiabaticCoupling -------------------------------------------------------------------------------- /docs/docs_integrals.rst: -------------------------------------------------------------------------------- 1 | Integrals 2 | --------- 3 | .. automodule:: nanoqm.integrals.multipole_matrices -------------------------------------------------------------------------------- /docs/docs_molecular_orbitals.rst: -------------------------------------------------------------------------------- 1 | Molecular Orbitals 2 | ------------------ 3 | .. automodule:: nanoqm.schedule.components -------------------------------------------------------------------------------- /docs/docs_workflows.rst: -------------------------------------------------------------------------------- 1 | Workflows 2 | --------- 3 | 4 | The following workflows are available: 5 | 6 | .. autofunction:: nanoqm.workflows.workflow_coop.workflow_crystal_orbital_overlap_population 7 | .. autofunction:: nanoqm.workflows.workflow_coupling.workflow_derivative_couplings 8 | .. autofunction:: nanoqm.workflows.workflow_ipr.workflow_ipr 9 | .. autofunction:: nanoqm.workflows.workflow_single_points.workflow_single_points 10 | .. autofunction:: nanoqm.workflows.workflow_stddft_spectrum.workflow_stddft 11 | -------------------------------------------------------------------------------- /docs/documentation.rst: -------------------------------------------------------------------------------- 1 | 2 | For a more detailed description of **nano-qmflows** read the documentation 3 | 4 | .. 
toctree:: 5 | docs_command_line 6 | docs_cp2k_interface 7 | docs_derivative_coupling 8 | docs_molecular_orbitals 9 | docs_integrals 10 | docs_workflows -------------------------------------------------------------------------------- /docs/includereadme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | 2 | Welcome to nano-qmflows's documentation! 3 | ======================================== 4 | 5 | Contents: 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | :caption: Introduction 10 | 11 | includereadme 12 | theory 13 | 14 | .. toctree:: 15 | :maxdepth: 2 16 | :caption: Tutorials 17 | 18 | intro 19 | single_points 20 | coop 21 | ipr 22 | derivative_couplings 23 | absorption_spectrum 24 | distribute_absorption_spectrum 25 | 26 | .. toctree:: 27 | :maxdepth: 2 28 | :caption: Library Documentation 29 | 30 | documentation 31 | 32 | 33 | 34 | Indices and tables 35 | ================== 36 | 37 | * :ref:`genindex` 38 | * :ref:`modindex` 39 | * :ref:`search` 40 | 41 | -------------------------------------------------------------------------------- /docs/intro.rst: -------------------------------------------------------------------------------- 1 | Introduction to the Tutorials 2 | ============================= 3 | 4 | The *nano-qmflows* packages offers the following set of workflows to compute different properties: 5 | * single_points 6 | * coop_calculation 7 | * ipr_calculation 8 | * derivative_coupling 9 | * absorption_spectrum 10 | * distribute_absorption_spectrum 11 | 12 | Known Issues 13 | ------------ 14 | 15 | Distribution of the workflow over multiple nodes 16 | ################################################ 17 | 18 | `CP2K` can uses multiple nodes to perform the computation of the molecular orbitals using the **MPI** 
protocol. Unfortunately, the `MPI` implementation for the computation of the *derivative coupling matrix* is experimental and unstable. The practical consequence of the aforementioned issue is that **the calculation of the coupling matrices is carried out in only 1 computational node**. It means that if you ask for more than 1 node to compute the molecular orbitals with `CP2K`, once the workflow starts to compute the *derivative couplings* only 1 node will be used at a time and the rest will remain idle, waiting for computational resources.
code-block:: yaml 14 | 15 | workflow: 16 | ipr_calculation 17 | 18 | project_name: Cd33Se33 19 | active_space: [50, 50] 20 | path_hdf5: "Cd33Se33.hdf5" 21 | path_traj_xyz: "Cd33Se33.xyz" 22 | scratch_path: "/tmp/IPR" 23 | 24 | cp2k_general_settings: 25 | basis: "DZVP-MOLOPT-SR-GTH" 26 | potential: "GTH-PBE" 27 | cell_parameters: 28.0 28 | periodic: none 29 | executable: cp2k.popt 30 | 31 | cp2k_settings_main: 32 | specific: 33 | template: pbe_main 34 | cp2k: 35 | force_eval: 36 | dft: 37 | scf: 38 | eps_scf: 1e-6 39 | 40 | cp2k_settings_guess: 41 | specific: 42 | template: 43 | pbe_guess 44 | 45 | 46 | In your working directory, copy the previous input into an *input_test_ipr.yml* file. 47 | Also copy locally the file containing the coordinates of the relaxed Cd33Se33 system, Cd33Se33.xyz_. 48 | 49 | Your *input_test_ipr.yml* input file now contains all settings to perform the coop calculations and needs to be edited according to your system and preferences. 50 | Please note that this input is very similar to the basic example of single point calculation provided in a previous tutorial_ (please refer to it for a more extensive description of the above options) 51 | except for the **workflow** option, set in this case to *ipr_calculation*. 52 | 53 | Here again you can customize the settings used to generate the cp2k input in the cp2k_general_settings. To help you creating your custom input requirements, please consult the cp2k manual_ and the templates_ available in nano-qmflows. 54 | 55 | .. _Cd33Se33.xyz: https://github.com/SCM-NV/nano-qmflows/blob/master/test/test_files/Cd33Se33.xyz 56 | .. _tutorial: https://qmflows-namd.readthedocs.io/en/latest/single_points.html 57 | .. _manual: https://manual.cp2k.org/ 58 | .. 
_templates: https://github.com/SCM-NV/nano-qmflows/blob/master/nanoqm/workflows/templates.py 59 | 60 | Setting up the calculation 61 | --------------------------- 62 | 63 | Once all settings of your yml input have been customized, can to launch your ipr calculation. 64 | 65 | - First, activate the conda environment with QMFlows: 66 | 67 | ``conda activate qmflows`` 68 | 69 | - Then, load the module with your version of cp2k, for example: 70 | 71 | ``module load CP2K/7.1.0`` 72 | 73 | - Finally, use the command run_workflow.py to submit your calculation. 74 | 75 | ``run_workflow.py -i input_test_ipr.yml`` 76 | 77 | Results 78 | ------- 79 | 80 | Once your calculation has finished successfully, you will find a *IPR.txt* file in your working directory. 81 | The two columns of this file contain, respectively, the orbitals’ energies and the corresponding IPR values. 82 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/theory.rst: -------------------------------------------------------------------------------- 1 | Theory 2 | ========== 3 | 4 | Nonadiabatic coupling matrix 5 | ----------------------------- 6 | 7 | The current implementation of the nonadiabatic coupling is based on: 8 | Plasser, F.; Granucci, G.; Pittner, j.; Barbatti, M.; Persico, M.; 9 | Lischka. *Surface hopping dynamics using a locally diabatic formalism: 10 | Charge transfer in the ethylene dimer cation and excited state dynamics 11 | in the 2-pyridone dimer*. **J. Chem. Phys. 2012, 137, 22A514.** 12 | 13 | The total time-dependent wave function :math:`\Psi(\mathbf{R}, t)` can be 14 | expressed in terms of a linear combination of ``N`` adiabatic electronic 15 | eigenstates :math:`\phi_{i}(\mathbf{R}(t))`, 16 | 17 | .. math:: 18 | \Psi(\mathbf{R}, t) = \sum^{N}_{i=1} c_i(t)\phi_{i}(\mathbf{R}(t)) \quad \mathbf(1) 19 | 20 | The time-dependent coefficients are propagated according to 21 | 22 | .. math:: 23 | 24 | \frac{dc_j(t)}{dt} = -i\hbar^2 c_j(t) E_j(t) - \sum^{N}_{i=1}c_i(t)\sigma_{ji}(t) \quad \mathbf(2) 25 | 26 | where :math:`E_j(t)` is the energy of the jth adiabatic state and :math:`\sigma_{ji}(t)` the nonadiabatic matrix, which elements are given by the expression 27 | 28 | .. math:: 29 | \sigma_{ji}(t) = \langle \phi_{j}(\mathbf{R}(t)) \mid \frac{\partial}{\partial t} \mid \phi_{i}(\mathbf{R}(t)) \rangle \quad \mathbf(3) 30 | 31 | that can be approximate using three consecutive molecular geometries 32 | 33 | .. 
math:: 34 | \sigma_{ji}(t) \approx \frac{1}{4 \Delta t} (3\mathbf{S}{ji}(t) - 3\mathbf{S}{ij}(t) - \mathbf{S}{ji}(t-\Delta t) + \mathbf{S}{ij}(t-\Delta t)) \quad \mathbf(4) 35 | 36 | where :math:`\mathbf{S}_{ji}(t)` is the overlap matrix between two consecutive time steps 37 | 38 | .. math:: 39 | \mathbf{S}{ij}(t) = \langle \phi{j}(\mathbf{R}(t-\Delta t)) \mid \phi_{i}(\mathbf{R}(t)) \rangle \quad \mathbf(5) 40 | 41 | and the overlap matrix is calculated in terms of atomic orbitals 42 | 43 | .. math:: 44 | \mathbf{S}{ji}(t) = \sum{\mu} C^{*}{\mu i}(t) \sum{\nu} C_{\nu j}(t - \Delta t) \mathbf{S}_{\mu \nu}(t) \quad \mathbf(6) 45 | 46 | Where :math:C_{\mu i} are the Molecular orbital coefficients and :math:`\mathbf{S}_{\mu \nu}` The atomic orbitals overlaps. 47 | 48 | .. math:: 49 | \mathbf{S}{\mu \nu}(\mathbf{R}(t), \mathbf{R}(t - \Delta t)) = \langle \chi{\mu}(\mathbf{R}(t)) \mid \chi_{\nu}(\mathbf{R}(t - \Delta t)\rangle \quad \mathbf(7) 50 | 51 | 52 | Nonadiabatic coupling algorithm implementation 53 | ---------------------------------------------- 54 | 55 | The figure belows shows schematically the workflow for calculating the Nonadiabatic 56 | coupling matrices from a molecular dynamic trajectory. The uppermost node represent 57 | a molecular dynamics 58 | trajectory that is subsequently divided in its components andfor each geometry the molecular 59 | orbitals are computed. These molecular orbitals are stored in a HDF5_. 60 | binary file and subsequently calculations retrieve sets of three molecular orbitals that are 61 | use to calculate the nonadiabatic coupling matrix using equations **4** to **7**. 62 | These coupling matrices are them feed to the PYXAID_ package to carry out nonadiabatic molecular dynamics. 63 | 64 | The Overlap between primitives are calculated using the Obara-Saika recursive scheme and has been implemented using the C++ libint2_ library for efficiency reasons. 
65 | The libint2_ library uses either OpenMP_ or C++ threads to distribute the integrals among the available CPUs. 66 | Also, all the heavy numerical processing is carried out by the highly optimized functions in NumPy_. 67 | 68 | The **nonadiabaticCoupling** package relies on *QMWorks* to run the Quantum mechanical simulations using the [CP2K](https://www.cp2k.org/) package. Also, the noodles_ is used 69 | to schedule expensive numerical computations that are required to calculate the nonadiabatic coupling matrix. 70 | 71 | 72 | .. _OpenMP: https://www.openmp.org/ 73 | .. _libint2: https://github.com/evaleev/libint/wiki 74 | .. _HDF5: http://www.h5py.org/ 75 | .. _PYXAID: https://www.acsu.buffalo.edu/~alexeyak/pyxaid/overview.html 76 | .. _multiprocessing: https://docs.python.org/3.6/library/multiprocessing.html 77 | .. _NumPy: http://www.numpy.org 78 | .. _noodles: http://nlesc.github.io/noodles/ 79 | -------------------------------------------------------------------------------- /install_requirements.txt: -------------------------------------------------------------------------------- 1 | h5py>=2.9.0 2 | mendeleev>=0.1.0 3 | more-itertools>=2.4.0 4 | noodles>=0.3.4 5 | numpy>=1.17.3,<2 6 | scipy>=1.3.2 7 | schema>=0.6.0,!=0.7.5 8 | pyyaml>=5.1 9 | plams==1.5.1 10 | qmflows>=0.12.1 11 | packaging>=17.1 12 | Nano-Utils>=2.0.0 13 | -------------------------------------------------------------------------------- /libint/include/compute_integrals.hpp: -------------------------------------------------------------------------------- 1 | /* 2 | * This module contains the implementation of several 3 | * kind of integrals used for non-adiabatic molecular dynamics, 4 | * including the overlaps integrals between different geometries 5 | * And the dipoles and quadrupoles to compute absorption spectra. 6 | * This module is based on libint and Eigen. 7 | * Copyright (C) 2018-2022 the Netherlands eScience Center. 
8 | */ 9 | 10 | #ifndef INT_H_ 11 | #define INT_H_ 12 | 13 | #include "namd.hpp" 14 | 15 | namd::Matrix compute_integrals_couplings( 16 | const std::string &path_xyz_1, 17 | const std::string &path_xyz_2, 18 | const std::string &path_hdf5, 19 | const std::string &basis_name 20 | ); 21 | 22 | namd::Matrix compute_integrals_multipole( 23 | const std::string &path_xyz, 24 | const std::string &path_hdf5, 25 | const std::string &basis_name, 26 | const std::string &multipole 27 | ); 28 | 29 | #endif // INT_H_ 30 | -------------------------------------------------------------------------------- /libint/include/namd.hpp: -------------------------------------------------------------------------------- 1 | /* 2 | * This module contains the implementation of several 3 | * kind of integrals used for non-adiabatic molecular dynamics, 4 | * including the overlaps integrals between different geometries 5 | * And the dipoles and quadrupoles to compute absorption spectra. 6 | * This module is based on libint and Eigen. 7 | * Copyright (C) 2018-2022 the Netherlands eScience Center. 
8 | */ 9 | 10 | #ifndef NAMD_H_ 11 | #define NAMD_H_ 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | 23 | // integrals library 24 | #include 25 | 26 | // Eigen matrix algebra library 27 | #include 28 | 29 | // HDF5 funcionality 30 | #include 31 | #include 32 | #include 33 | 34 | #if defined(_OPENMP) 35 | #include 36 | #endif 37 | 38 | namespace namd { 39 | 40 | using real_t = libint2::scalar_type; 41 | // import dense, dynamically sized Matrix type from Eigen; 42 | // this is a matrix with row-major storage 43 | // (http://en.wikipedia.org/wiki/Row-major_order) to meet the layout of the 44 | // integrals returned by the Libint integral library 45 | using Matrix = 46 | Eigen::Matrix; 47 | 48 | struct CP2K_Contractions { 49 | int l; // Angular momentum quantum number for a given shell-type 50 | int count; // Number of contractions for a given shell-type 51 | }; 52 | 53 | struct CP2K_Basis_Atom { 54 | // Contains the basis specificationf for a given atom 55 | std::string symbol; 56 | libint2::svector> coefficients; 57 | libint2::svector> exponents; 58 | libint2::svector> basis_format; 59 | }; 60 | 61 | // Map from atomic_number to symbol 62 | const std::unordered_map map_elements = { 63 | {1, "h"}, {2, "he"}, {3, "li"}, {4, "be"}, {5, "b"}, {6, "c"}, 64 | {7, "n"}, {8, "o"}, {9, "f"}, {10, "ne"}, {11, "na"}, {12, "mg"}, 65 | {13, "al"}, {14, "si"}, {15, "p"}, {16, "s"}, {17, "cl"}, {18, "ar"}, 66 | {19, "k"}, {20, "ca"}, {21, "sc"}, {22, "ti"}, {23, "v"}, {24, "cr"}, 67 | {25, "mn"}, {26, "fe"}, {27, "co"}, {28, "ni"}, {29, "cu"}, {30, "zn"}, 68 | {31, "ga"}, {32, "ge"}, {33, "as"}, {34, "se"}, {35, "br"}, {36, "kr"}, 69 | {37, "rb"}, {38, "sr"}, {39, "y"}, {40, "zr"}, {41, "nb"}, {42, "mo"}, 70 | {43, "tc"}, {44, "ru"}, {45, "rh"}, {46, "pd"}, {47, "ag"}, {48, "cd"}, 71 | {49, "in"}, {50, "sn"}, {51, "sb"}, {52, "te"}, {53, "i"}, {54, "xe"}, 72 | {55, "cs"}, {56, "ba"}, {57, 
"la"}, {58, "ce"}, {59, "pr"}, {60, "nd"}, 73 | {61, "pm"}, {62, "sm"}, {63, "eu"}, {64, "gd"}, {65, "tb"}, {66, "dy"}, 74 | {67, "ho"}, {68, "er"}, {69, "tm"}, {70, "yb"}, {71, "lu"}, {72, "hf"}, 75 | {73, "ta"}, {74, "w"}, {75, "re"}, {76, "os"}, {77, "ir"}, {78, "pt"}, 76 | {79, "au"}, {80, "hg"}, {81, "tl"}, {82, "pb"}, {83, "bi"}, {84, "po"}, 77 | {85, "at"}, {86, "rn"}, {87, "fr"}, {88, "ra"}, {89, "ac"}, {90, "th"}, 78 | {91, "pa"}, {92, "u"}, {93, "np"}, {94, "pu"}, {95, "am"}, {96, "cm"}}; 79 | 80 | // Map from atomic symbols to the number of valence electrons 81 | // See also `nanoqm.workflows.templates.valence_electrons` 82 | const std::unordered_map valence_electrons = { 83 | {"ag", 11}, 84 | {"al", 3}, 85 | {"ar", 8}, 86 | {"as", 5}, 87 | {"at", 7}, 88 | {"au", 11}, 89 | {"b", 3}, 90 | {"ba", 10}, 91 | {"be", 4}, 92 | {"bi", 5}, 93 | {"br", 7}, 94 | {"c", 4}, 95 | {"ca", 10}, 96 | {"cd", 12}, 97 | {"cl", 7}, 98 | {"co", 17}, 99 | {"cr", 4}, 100 | {"cs", 9}, 101 | {"cu", 11}, 102 | {"f", 7}, 103 | {"fe", 16}, 104 | {"ga", 13}, 105 | {"ge", 4}, 106 | {"h", 1}, 107 | {"he", 2}, 108 | {"hf", 12}, 109 | {"hg", 12}, 110 | {"i", 7}, 111 | {"in", 13}, 112 | {"ir", 17}, 113 | {"k", 9}, 114 | {"kr", 8}, 115 | {"li", 3}, 116 | {"mg", 10}, 117 | {"mn", 15}, 118 | {"mo", 14}, 119 | {"n", 5}, 120 | {"na", 9}, 121 | {"nb", 13}, 122 | {"ne", 8}, 123 | {"ni", 18}, 124 | {"o", 6}, 125 | {"os", 16}, 126 | {"p", 5}, 127 | {"pb", 4}, 128 | {"pd", 18}, 129 | {"po", 6}, 130 | {"pt", 18}, 131 | {"rb", 9}, 132 | {"re", 15}, 133 | {"rh", 17}, 134 | {"rn", 8}, 135 | {"ru", 16}, 136 | {"s", 6}, 137 | {"sb", 5}, 138 | {"sc", 11}, 139 | {"se", 6}, 140 | {"si", 4}, 141 | {"sn", 4}, 142 | {"sr", 10}, 143 | {"ta", 13}, 144 | {"tc", 15}, 145 | {"te", 6}, 146 | {"ti", 12}, 147 | {"tl", 13}, 148 | {"u", 14}, 149 | {"v", 13}, 150 | {"w", 14}, 151 | {"xe", 8}, 152 | {"y", 11}, 153 | {"zn", 12}, 154 | {"zr", 12}, 155 | }; 156 | 157 | } // namespace namd 158 | #endif // NAMD_H_ 159 | 
-------------------------------------------------------------------------------- /licenses/LICENSE_LIBHDF5.txt: -------------------------------------------------------------------------------- 1 | Copyright Notice and License Terms for 2 | HDF5 (Hierarchical Data Format 5) Software Library and Utilities 3 | ----------------------------------------------------------------------------- 4 | 5 | HDF5 (Hierarchical Data Format 5) Software Library and Utilities 6 | Copyright 2006 by The HDF Group. 7 | 8 | NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities 9 | Copyright 1998-2006 by The Board of Trustees of the University of Illinois. 10 | 11 | All rights reserved. 12 | 13 | Redistribution and use in source and binary forms, with or without modification, are permitted for any 14 | purpose (including commercial purposes) provided that the following conditions are met: 15 | 16 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions, and 17 | the following disclaimer. 18 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions, 19 | and the following disclaimer in the documentation and/or materials provided with the distribution. 20 | 3. Neither the name of The HDF Group, the name of the University, nor the name of any Contributor 21 | may be used to endorse or promote products derived from this software without specific prior 22 | written permission from The HDF Group, the University, or the Contributor, respectively. 23 | 24 | DISCLAIMER: 25 | THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS "AS IS" WITH NO 26 | WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. IN NO EVENT SHALL THE HDF GROUP OR 27 | THE CONTRIBUTORS BE LIABLE FOR ANY DAMAGES SUFFERED BY THE USERS ARISING OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29 | 30 | You are under no obligation whatsoever to provide any bug fixes, patches, or upgrades to the features, 31 | functionality or performance of the source code ("Enhancements") to anyone; however, if you choose 32 | to make your Enhancements available either publicly, or directly to The HDF Group, without imposing a 33 | separate written license agreement for such Enhancements, then you hereby grant the following 34 | license: a non-exclusive, royalty-free perpetual license to install, use, modify, prepare derivative works, 35 | incorporate into other computer software, distribute, and sublicense such enhancements or derivative 36 | works thereof, in binary and source code form. 37 | 38 | Limited portions of HDF5 were developed by Lawrence Berkeley National Laboratory (LBNL). LBNL's 39 | Copyright Notice and Licensing Terms can be found here: COPYING_LBNL_HDF5 file in this directory or 40 | at http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5. 41 | 42 | Contributors: National Center for Supercomputing Applications (NCSA) at the University of Illinois, 43 | Fortner Software, Unidata Program Center (netCDF), The Independent JPEG Group (JPEG), Jean-loup 44 | Gailly and Mark Adler (gzip), and Digital Equipment Corporation (DEC). 45 | 46 | Portions of HDF5 were developed with support from the Lawrence Berkeley National Laboratory (LBNL) 47 | and the United States Department of Energy under Prime Contract No. DE-AC02-05CH11231. 48 | 49 | Portions of HDF5 were developed with support from Lawrence Livermore National Laboratory and the 50 | United States Department of Energy under Prime Contract No. DE-AC52-07NA27344. 51 | 52 | Portions of HDF5 were developed with support from the University of California, Lawrence Livermore 53 | National Laboratory (UC LLNL). 
The following statement applies to those portions of the product and 54 | must be retained in any redistribution of source code, binaries, documentation, and/or accompanying 55 | materials: 56 | 57 | This work was partially produced at the University of California, Lawrence Livermore National 58 | Laboratory (UC LLNL) under contract no. W-7405-ENG-48 (Contract 48) between the U.S. Department 59 | of Energy (DOE) and The Regents of the University of California (University) for the operation of UC 60 | LLNL. 61 | 62 | DISCLAIMER: 63 | THIS WORK WAS PREPARED AS AN ACCOUNT OF WORK SPONSORED BY AN AGENCY OF THEUNITED 64 | STATES GOVERNMENT. NEITHER THE UNITED STATES GOVERNMENT NOR THE UNIVERSITY OF 65 | CALIFORNIA NOR ANY OF THEIR EMPLOYEES, MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR 66 | ASSUMES ANY LIABILITY OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF 67 | ANY INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS THAT ITS USE 68 | WOULD NOT INFRINGE PRIVATELY- OWNED RIGHTS. REFERENCE HEREIN TO ANY SPECIFIC 69 | COMMERCIAL PRODUCTS, PROCESS, OR SERVICE BY TRADE NAME, TRADEMARK, MANUFACTURER, OR 70 | OTHERWISE, DOES NOT NECESSARILY CONSTITUTE OR IMPLY ITS ENDORSEMENT, RECOMMENDATION, 71 | OR FAVORING BY THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA. THE VIEWS 72 | AND OPINIONS OF AUTHORS EXPRESSED HEREIN DO NOT NECESSARILY STATE OR REFLECT THOSE OF 73 | THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA, AND SHALL NOT BE USED FOR 74 | ADVERTISING OR PRODUCT ENDORSEMENT PURPOSES. 75 | -------------------------------------------------------------------------------- /licenses/LICENSE_LIBINT2.txt: -------------------------------------------------------------------------------- 1 | Libint - a library for the evaluation of molecular integrals of many-body operators over Gaussian functions 2 | 3 | Copyright (C) 2004-2020 Edward F. Valeev 4 | 5 | Libint is free software. 
Most of the source code of the Libint compiler (code generator) 6 | can be redistributed and/or modified under the terms of the GNU General Public License, version 3, 7 | as published by the Free Software Foundation (see the accompanying file COPYING; 8 | if you did not receive this file refer to ). 9 | The rest of Libint, including the generated source, can be distributed under 10 | the GNU Lesser General Public License, version 3, (see the file COPYING.LESSER, 11 | or ). The applicable license is specified in each 12 | source file. If not indicated, the applicable license is GNU LGPL, version 3 (not GPL). 13 | 14 | Libint is distributed in the hope that it will be useful, 15 | but WITHOUT ANY WARRANTY; without even the implied warranty of 16 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 | GNU General Public License for more details. 18 | 19 | See https://github.com/evaleev/libint for the Libint source code. 20 | -------------------------------------------------------------------------------- /licenses/README.rst: -------------------------------------------------------------------------------- 1 | Licenses for various third-party binaries distributed in the Nano-QMFlows wheels. 2 | 3 | Note that aforementioned binaries are absent from source-only Nano-QMFlows distributions. 
4 | -------------------------------------------------------------------------------- /linting_requirements.txt: -------------------------------------------------------------------------------- 1 | pydocstyle[toml]>=6.1 2 | flake8>=5 3 | Flake8-pyproject>=1.0.1 4 | 5 | # Mypy-related stuff 6 | mypy 7 | qmflows 8 | numpy>=1.21 9 | types-pyyaml 10 | types-setuptools 11 | pyparsing>=3.0.8 12 | Nano-Utils>=2.0.0 13 | Nano-CAT 14 | -------------------------------------------------------------------------------- /nanoqm/__init__.py: -------------------------------------------------------------------------------- 1 | """Nano-QMFlows is a generic python library for computing (numerically) electronic properties \ 2 | for nanomaterials like the non-adiabatic coupling vectors (NACV) using several quantum \ 3 | chemical (QM) packages.""" 4 | 5 | # Monkey patch noodles with support for slots-containing dataclasses 6 | from . import _monkey_patch 7 | 8 | from ._version import __version__ as __version__, __version_tuple__ as __version_tuple__ 9 | from ._logger import logger as logger 10 | 11 | from .analysis import ( 12 | autocorrelate, dephasing, convolute, func_conv, gauss_function, 13 | parse_list_of_lists, read_couplings, read_energies, 14 | read_energies_pyxaid, read_pops_pyxaid, spectral_density 15 | ) 16 | 17 | from .integrals import (calculate_couplings_levine, compute_overlaps_for_coupling) 18 | 19 | from .schedule import (calculate_mos, lazy_couplings) 20 | 21 | from .workflows import (workflow_derivative_couplings, workflow_stddft) 22 | 23 | del _monkey_patch 24 | 25 | __all__ = [ 26 | '__version__', '__version_tuple__', 'logger', 27 | 'autocorrelate', 'calculate_couplings_levine', 'calculate_mos', 28 | 'compute_overlaps_for_coupling', 'convolute', 'dephasing', 29 | 'func_conv', 'gauss_function', 'lazy_couplings', 30 | 'parse_list_of_lists', 'read_couplings', 'read_energies', 31 | 'read_energies_pyxaid', 'read_pops_pyxaid', 'spectral_density', 32 | 
'workflow_derivative_couplings', 'workflow_stddft', 33 | ] 34 | -------------------------------------------------------------------------------- /nanoqm/_logger.py: -------------------------------------------------------------------------------- 1 | """The Nano-QMFlows logger.""" 2 | 3 | from __future__ import annotations 4 | 5 | import os 6 | import sys 7 | import types 8 | import logging 9 | import contextlib 10 | from typing import ClassVar 11 | 12 | from qmflows.type_hints import PathLike 13 | 14 | __all__ = ["logger", "stdout_handler", "EnableFileHandler"] 15 | 16 | #: The Nano-QMFlows logger. 17 | logger = logging.getLogger("nanoqm") 18 | logger.setLevel(logging.DEBUG) 19 | 20 | qmflows_logger = logging.getLogger("qmflows") 21 | noodles_logger = logging.getLogger("noodles") 22 | noodles_logger.setLevel(logging.WARNING) 23 | 24 | #: The Nano-QMFlows stdout handler. 25 | stdout_handler = logging.StreamHandler(stream=sys.stdout) 26 | stdout_handler.setLevel(logging.DEBUG) 27 | stdout_handler.setFormatter(logging.Formatter( 28 | fmt='[%(asctime)s] %(levelname)s: %(message)s', 29 | datefmt='%H:%M:%S', 30 | )) 31 | logger.addHandler(stdout_handler) 32 | 33 | 34 | class EnableFileHandler(contextlib.ContextDecorator): 35 | """Add a file handler to the noodles, qmflows and nanoqm loggers. 36 | 37 | Attributes 38 | ---------- 39 | handler : logging.FileHandler 40 | The relevant titular handler. 41 | 42 | """ 43 | 44 | __slots__ = ("handler",) 45 | 46 | LOGGERS: ClassVar = (logger, qmflows_logger, noodles_logger) 47 | 48 | def __init__(self, path: PathLike) -> None: 49 | """Initialize the context manager. 50 | 51 | Parameters 52 | ---------- 53 | path : path-like object 54 | Path to the log file. 
55 | 56 | """ 57 | self.handler = logging.FileHandler(os.fsdecode(path)) 58 | self.handler.setLevel(logging.DEBUG) 59 | self.handler.setFormatter(logging.Formatter( 60 | fmt='%(asctime)s---%(levelname)s\n%(message)s\n', 61 | datefmt='%H:%M:%S', 62 | )) 63 | 64 | def __enter__(self) -> None: 65 | """Add the file handler.""" 66 | for logger in self.LOGGERS: 67 | if self.handler not in logger.handlers: 68 | logger.addHandler(self.handler) 69 | 70 | def __exit__( 71 | self, 72 | exc_type: type[BaseException] | None, 73 | exc_value: BaseException | None, 74 | exc_traceback: types.TracebackType | None, 75 | ) -> None: 76 | """Remove the file handler.""" 77 | for logger in self.LOGGERS: 78 | if self.handler in logger.handlers: 79 | logger.removeHandler(self.handler) 80 | -------------------------------------------------------------------------------- /nanoqm/_monkey_patch.py: -------------------------------------------------------------------------------- 1 | """Monkey patch noodles with support for slots-containing dataclasses.""" 2 | 3 | import dataclasses 4 | 5 | from noodles.serial.dataclass import SerDataClass 6 | from scm.plams import add_to_class 7 | 8 | 9 | @add_to_class(SerDataClass) 10 | def encode(self, obj, make_rec): 11 | """Encode the passed dataclass.""" 12 | if hasattr(obj, "__dict__"): 13 | return make_rec(obj.__dict__) 14 | else: 15 | return make_rec(dataclasses.asdict(obj)) 16 | -------------------------------------------------------------------------------- /nanoqm/analysis/__init__.py: -------------------------------------------------------------------------------- 1 | """Tools for postprocessing.""" 2 | from .tools import (autocorrelate, convolute, dephasing, func_conv, 3 | gauss_function, parse_list_of_lists, read_couplings, 4 | read_energies, read_energies_pyxaid, read_pops_pyxaid, 5 | spectral_density) 6 | 7 | __all__ = [ 8 | 'autocorrelate', 'dephasing', 'convolute', 'func_conv', 'gauss_function', 9 | 'parse_list_of_lists', 'read_couplings', 
'read_energies', 10 | 'read_energies_pyxaid', 'read_pops_pyxaid', 'spectral_density'] 11 | -------------------------------------------------------------------------------- /nanoqm/basis/aux_fit.json: -------------------------------------------------------------------------------- 1 | { 2 | "Ag": [9, 9, 10, 11, 12], 3 | "Al": [6, 6, 7, 8, 9], 4 | "Ar": [], 5 | "As": [6, 6, 7, 9, 9], 6 | "At": [6, 6, 7, 9, 9], 7 | "Au": [9, 9, 10, 11, 12], 8 | "B": [3, 3, 3, 3, 3], 9 | "Ba": [8, 8, 9, 10, 11], 10 | "Be": [3, 3, 3, 3, 3], 11 | "Bi": [6, 6, 7, 9, 9], 12 | "Br": [6, 6, 7, 9, 9], 13 | "C": [3, 3, 3, 3, 3], 14 | "Ca": [7, 8, 9, 10, 11], 15 | "Cd": [9, 9, 10, 11, 12], 16 | "Cl": [3, 3, 3, 3, 3, 3], 17 | "Co": [10, 10, 10, 11, 12], 18 | "Cr": [10, 10, 11, 12, 13], 19 | "Cs": [7, 8, 9, 10, 11], 20 | "Cu": [9, 9, 10, 11, 12], 21 | "F": [3, 3, 3, 3, 3], 22 | "Fe": [10, 10, 11, 12, 13], 23 | "Ga": [9, 10, 11, 12, 13], 24 | "Ge": [6, 6, 6, 7, 9], 25 | "H": [3, 3, 3, 3, 3], 26 | "He": [3, 3, 3, 3, 3], 27 | "Hf": [10, 10, 11, 12, 13], 28 | "Hg": [9, 9, 10, 11, 12], 29 | "I": [6, 6, 6, 7, 9], 30 | "In": [9, 10, 11, 12, 13], 31 | "Ir": [10, 10, 11, 12, 13], 32 | "K": [7, 8, 9, 10, 11], 33 | "Kr": [], 34 | "Li": [4, 4, 4, 4, 4], 35 | "Mg": [3, 3, 3, 3, 3], 36 | "Mn": [10, 10, 11, 12, 13], 37 | "Mo": [10, 10, 11, 12, 13], 38 | "N": [3, 3, 3, 3, 3], 39 | "Na": [3, 3, 3, 3, 3], 40 | "Nb": [10, 10, 11, 12, 13], 41 | "Ne": [], 42 | "Ni": [10, 10, 11, 12, 13], 43 | "O": [3, 3, 3, 3, 3], 44 | "Os": [10, 10, 11, 12, 13], 45 | "P": [3, 3, 3, 3, 3], 46 | "Pb": [6, 6, 6, 7, 9], 47 | "Pd": [10, 10, 11, 12, 13], 48 | "Po": [6, 6, 6, 7, 9], 49 | "Pt": [10, 10, 11, 12, 13], 50 | "Rb": [7, 8, 9, 10, 11], 51 | "Re": [10, 10, 11, 12, 13], 52 | "Rh": [10, 10, 11, 12, 13], 53 | "Rn": [], 54 | "Ru": [10, 10, 11, 12, 13], 55 | "S": [3, 3, 3, 3, 3], 56 | "Sb": [6, 6, 6, 7, 9], 57 | "Sc": [10, 10, 11, 12, 13], 58 | "Se": [6, 6, 6, 7, 9], 59 | "Si": [3, 3, 3, 3, 3], 60 | "Sn": [6, 6, 6, 7, 9], 61 | "Sr": [8, 
8, 9, 10, 11], 62 | "Ta": [10, 10, 11, 12, 13], 63 | "Tc": [10, 10, 11, 12, 13], 64 | "Te": [6, 6, 6, 7, 9], 65 | "Ti": [10, 10, 11, 12, 13], 66 | "Tl": [9, 10, 11, 12, 13], 67 | "V": [10, 10, 11, 12, 13], 68 | "W": [10, 10, 11, 12, 13], 69 | "Xe": [], 70 | "Y": [10, 10, 11, 12, 13], 71 | "Zn": [9, 9, 10, 11, 12], 72 | "Zr": [10, 10, 11, 12, 13] 73 | } 74 | -------------------------------------------------------------------------------- /nanoqm/basis/valence_electrons.json: -------------------------------------------------------------------------------- 1 | { 2 | "Ag": 11, 3 | "Al": 3, 4 | "Ar": 8, 5 | "As": 5, 6 | "At": 7, 7 | "Au": 11, 8 | "B": 3, 9 | "Ba": 10, 10 | "Be": 4, 11 | "Bi": 5, 12 | "Br": 7, 13 | "C": 4, 14 | "Ca": 10, 15 | "Cd": 12, 16 | "Cl": 7, 17 | "Co": 17, 18 | "Cr": 4, 19 | "Cs": 9, 20 | "Cu": 11, 21 | "F": 7, 22 | "Fe": 16, 23 | "Ga": 13, 24 | "Ge": 4, 25 | "H": 1, 26 | "He": 2, 27 | "Hf": 12, 28 | "Hg": 12, 29 | "I": 7, 30 | "In": 13, 31 | "Ir": 17, 32 | "K": 9, 33 | "Kr": 8, 34 | "Li": 3, 35 | "Mg": 10, 36 | "Mn": 15, 37 | "Mo": 14, 38 | "N": 5, 39 | "Na": 9, 40 | "Nb": 13, 41 | "Ne": 8, 42 | "Ni": 18, 43 | "O": 6, 44 | "Os": 16, 45 | "P": 5, 46 | "Pb": 4, 47 | "Pd": 18, 48 | "Po": 6, 49 | "Pt": 18, 50 | "Rb": 9, 51 | "Re": 15, 52 | "Rh": 17, 53 | "Rn": 8, 54 | "Ru": 16, 55 | "S": 6, 56 | "Sb": 5, 57 | "Sc": 11, 58 | "Se": 6, 59 | "Si": 4, 60 | "Sn": 4, 61 | "Sr": 10, 62 | "Ta": 13, 63 | "Tc": 15, 64 | "Te": 6, 65 | "Ti": 12, 66 | "Tl": 13, 67 | "U": 14, 68 | "V": 13, 69 | "W": 14, 70 | "Xe": 8, 71 | "Y": 11, 72 | "Zn": 12, 73 | "Zr": 12 74 | } 75 | -------------------------------------------------------------------------------- /nanoqm/compute_integrals.pyi: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import numpy.typing as npt 3 | 4 | def compute_integrals_couplings( 5 | __path_xyz_1: str, 6 | __path_xyz_2: str, 7 | __path_hdf5: str, 8 | __basis_name: str, 9 | ) -> 
npt.NDArray[np.float64]: ... 10 | 11 | def compute_integrals_multipole( 12 | __path_xyz: str, 13 | __path_hdf5: str, 14 | __basis_name: str, 15 | __multipole: str, 16 | ) -> npt.NDArray[np.float64]: ... 17 | 18 | def get_thread_count() -> int: ... 19 | 20 | def get_thread_type() -> str: ... 21 | -------------------------------------------------------------------------------- /nanoqm/integrals/__init__.py: -------------------------------------------------------------------------------- 1 | """Nonadiabatic coupling implementation.""" 2 | from .nonAdiabaticCoupling import (calculate_couplings_3points, 3 | calculate_couplings_levine, 4 | compute_overlaps_for_coupling, 5 | correct_phases) 6 | 7 | __all__ = ['calculate_couplings_3points', 'calculate_couplings_levine', 8 | 'calculate_couplings_levine', 'compute_overlaps_for_coupling', 9 | 'correct_phases'] 10 | -------------------------------------------------------------------------------- /nanoqm/integrals/multipole_matrices.py: -------------------------------------------------------------------------------- 1 | """Compute multipole integrals using `Libint2 `. 2 | 3 | The interface to the C++ Libint2 library is located at the parent folder, 4 | in the `libint` folder. 5 | 6 | Index 7 | ----- 8 | .. currentmodule:: nanoqm.integrals.multipole_matrices 9 | .. autosummary:: 10 | get_multipole_matrix 11 | compute_matrix_multipole 12 | 13 | API 14 | --- 15 | .. autofunction:: get_multipole_matrix 16 | .. autofunction:: compute_matrix_multipole 17 | """ 18 | 19 | from __future__ import annotations 20 | 21 | import os 22 | import uuid 23 | from os.path import join 24 | from pathlib import Path 25 | from typing import TYPE_CHECKING, Literal 26 | 27 | from qmflows.common import AtomXYZ 28 | 29 | from .. 
def get_multipole_matrix(
    config: _data.AbsorptionSpectrum,
    inp: _data.AbsorptionData,
    multipole: Literal["overlap", "dipole", "quadrupole"],
) -> NDArray[f8]:
    """Retrieve the `multipole` number `i` from the trajectory. Otherwise compute it.

    Parameters
    ----------
    config
        Global configuration to run a workflow
    inp
        Information about the current point, e.g. molecular geometry.
    multipole
        Either overlap, dipole or quadrupole.

    Returns
    -------
    np.ndarray
        Tensor containing the multipole.

    """
    point = f'point_{inp.i + config.enumerate_from}'
    path_hdf5 = config.path_hdf5
    path_multipole_hdf5 = join(config.orbitals_type, multipole, point)
    matrix_multipole = search_multipole_in_hdf5(
        path_hdf5, path_multipole_hdf5, multipole)

    if matrix_multipole is None:
        matrix_multipole = compute_matrix_multipole(inp.mol, config, multipole)
        # Cache the freshly computed tensor so later calls hit the HDF5 instead.
        store_arrays_in_hdf5(path_hdf5, path_multipole_hdf5, matrix_multipole)

    return matrix_multipole


def search_multipole_in_hdf5(
    path_hdf5: str | Path,
    path_multipole_hdf5: str,
    multipole: str,
) -> None | NDArray[f8]:
    """Search if the multipole is already stored in the HDF5.

    Returns the stored tensor, or ``None`` if it must still be computed.
    """
    if is_data_in_hdf5(path_hdf5, path_multipole_hdf5):
        logger.info(f"retrieving multipole: {multipole} from the hdf5")
        return retrieve_hdf5_data(path_hdf5, path_multipole_hdf5)

    logger.info(f"computing multipole: {multipole}")
    return None


def compute_matrix_multipole(
    mol: list[AtomXYZ],
    config: _data.GeneralOptions,
    multipole: Literal["overlap", "dipole", "quadrupole"],
) -> NDArray[f8]:
    """Compute a `multipole` matrix: overlap, dipole, etc. for a given geometry `mol`.

    The multipole is computed in spherical coordinates.

    Note: for the dipole and quadrupole the super_matrix contains all the matrices
    stacked along the 0-axis.

    Parameters
    ----------
    mol
        Molecule to compute the dipole
    config
        Dictionary with the current configuration
    multipole
        kind of multipole to compute

    Returns
    -------
    np.ndarray
        Matrix with entries <ψi | x^i y^j z^k | ψj>

    Raises
    ------
    ValueError
        If ``multipole`` is not one of the supported kinds.

    """
    path_hdf5 = config.path_hdf5

    # Write molecule in a uniquely-named temporary file
    path = join(config.scratch_path, f"molecule_{uuid.uuid4()}.xyz")
    mol_plams = tuplesXYZ_to_plams(mol)
    mol_plams.write(path)

    # name of the basis set
    basis_name = config.cp2k_general_settings.basis
    thread_count = get_thread_count()
    thread_type = get_thread_type()
    logger.info(f"Will scale over {thread_count} {thread_type} threads")

    # The try/finally guarantees the temporary molecule file is removed even
    # when the integral computation or the reshape raises.
    try:
        if multipole == 'overlap':
            matrix_multipole = compute_integrals_multipole(
                path, path_hdf5, basis_name, multipole)
        elif multipole == 'dipole':
            # The tensor contains the overlap + {x, y, z} dipole matrices
            super_matrix = compute_integrals_multipole(
                path, path_hdf5, basis_name, multipole)
            dim = super_matrix.shape[1]
            matrix_multipole = super_matrix.reshape(4, dim, dim)
        elif multipole == 'quadrupole':
            # The tensor contains the overlap + {x, y, z} dipole +
            # {xx, xy, xz, yy, yz, zz} quadrupole matrices (10 in total)
            super_matrix = compute_integrals_multipole(
                path, path_hdf5, basis_name, multipole)
            dim = super_matrix.shape[1]
            matrix_multipole = super_matrix.reshape(10, dim, dim)
        else:
            # Previously an unknown kind crashed with NameError on the
            # unbound local; fail explicitly instead.
            raise ValueError(f"Unknown multipole kind: {multipole!r}")
    finally:
        # Delete the tmp molecule file
        os.remove(path)

    return matrix_multipole
def try_to_read_wf(path_dir: str | os.PathLike[str]) -> str:
    """Try to get a wave function file from ``path_dir``.

    Returns
    -------
    str
        Path to the wave function file.

    Raises
    ------
    RuntimeError
        If there is not a wave function file.

    """
    xs = os.listdir(path_dir)
    files = list(filter(lambda x: fnmatch.fnmatch(x, '*wfn'), xs))
    if files:
        return join(path_dir, files[0])
    else:
        # No restart file found: include the CP2K stderr/stdout in the error
        # message so the failure is diagnosable from the traceback alone.
        msg = f"There are no wave function file in path: {os.fspath(path_dir)!r}\n"
        msg += print_cp2k_error(path_dir, "err")
        msg += print_cp2k_error(path_dir, "out")
        raise RuntimeError(msg)


def prepare_cp2k_settings(
    settings: Settings,
    dict_input: _data.ComponentsData,
    guess_job: None | CP2K_Result,
) -> Settings:
    """Fill in the parameters for running a single job in CP2K.

    Parameters
    ----------
    settings
        Input for CP2K
    dict_input
        Input for the current molecular geometry
    guess_job
        Previous job to read the guess wave function

    Returns
    -------
    Settings
        Input settings for the CP2K job to run.

    """
    dft = settings.specific.cp2k.force_eval.dft
    dft['print']['mo']['filename'] = dict_input.job_files.get_MO

    # Global parameters for CP2K
    settings.specific.cp2k['global']['project'] = f'point_{dict_input.k}'

    if guess_job is not None:
        plams_dir = guess_job.archive['plams_dir']
        if plams_dir is None:
            raise RuntimeError("There are no wave function file in path: None\n")
        # Reuse the previous point's wave function as the SCF initial guess
        dft.wfn_restart_file_name = try_to_read_wf(plams_dir)

    input_args = templates.singlepoint.overlay(settings)

    return input_args


@schedule
def prepare_job_cp2k(
    settings: Settings,
    dict_input: _data.ComponentsData,
    guess_job: None | PromisedObject,
) -> CP2K:
    """Generate a :class:`qmflows.packages.CP2K` job.

    Parameters
    ----------
    settings
        Input for CP2K
    dict_input
        Input for the current molecular geometry
    guess_job
        Previous job to read the guess wave function

    Returns
    -------
    :class:`qmflows.packages.CP2K`
        job to run

    """
    job_settings = prepare_cp2k_settings(settings, dict_input, guess_job)

    # remove keywords not used on the next translation phase
    for x in ('basis', 'potential'):
        if x in job_settings:
            del job_settings[x]

    return cp2k(
        job_settings, string_to_plams_Molecule(dict_input.geometry),
        work_dir=dict_input.point_dir,
    )


def print_cp2k_error(path_dir: str | os.PathLike[str], prefix: str) -> str:
    """Search for error in the CP2K output files.

    Returns the formatted file contents ("" when no ``*err``/``*out`` file is
    found); any non-empty message is also logged as an error.
    """
    err_file = next(Path(path_dir).glob(f"*{prefix}"), None)
    msg = ""
    if err_file is not None:
        with open(err_file, 'r', encoding="utf8") as handler:
            err = handler.read()
        msg = f"CP2K {prefix} file:\n{err}\n"
        logger.error(msg)

    return msg
-------------------------------------------------------------------------------- /nanoqm/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | """Simulation workflows.""" 2 | from .initialization import initialize 3 | from .workflow_coop import workflow_crystal_orbital_overlap_population 4 | from .workflow_coupling import workflow_derivative_couplings 5 | from .workflow_single_points import workflow_single_points 6 | from .workflow_stddft_spectrum import workflow_stddft 7 | 8 | __all__ = [ 9 | 'initialize', 'workflow_crystal_orbital_overlap_population', 10 | 'workflow_derivative_couplings', 'workflow_single_points', 'workflow_stddft'] 11 | -------------------------------------------------------------------------------- /nanoqm/workflows/orbitals_type.py: -------------------------------------------------------------------------------- 1 | """Module to run restricted and unrestricted calculations.""" 2 | 3 | from __future__ import annotations 4 | 5 | import copy 6 | from typing import Any, Callable, TypeVar, TYPE_CHECKING 7 | 8 | from noodles import gather 9 | from qmflows import run 10 | 11 | from .. import logger 12 | from .initialization import initialize 13 | 14 | if TYPE_CHECKING: 15 | from qmflows.type_hints import PromisedObject 16 | from .. 
def select_orbitals_type(config: _T, workflow: Callable[[_T], PromisedObject]) -> Any:
    """Run ``workflow`` with restricted orbitals, or once per spin channel for "both"."""
    # Dictionary containing the general configuration
    initialize(config)

    if config.orbitals_type == "both":
        # Unrestricted case: schedule the workflow once for each spin channel
        # and run both promises together.
        spin_promises = []
        for spin in ("alphas", "betas"):
            spin_config = copy.copy(config)
            spin_config.orbitals_type = spin
            spin_promises.append(workflow(spin_config))
        alphas, betas = run(
            gather(*spin_promises), folder=config.workdir, always_cache=False)
        return alphas, betas

    logger.info("starting workflow calculation!")
    promises = workflow(config)
    return run(promises, folder=config.workdir, always_cache=False)
def main() -> None:
    """Parse the command line arguments and run the requested workflow.

    Raises
    ------
    RuntimeError
        If the input file does not declare a ``workflow`` entry.

    """
    args = parser.parse_args()
    input_file: str = args.i
    with open(input_file, 'r', encoding="utf8") as f:
        dict_input = yaml.load(f, Loader=UniqueSafeLoader)
    if 'workflow' not in dict_input:
        raise RuntimeError(
            "The name of the workflow is required in the input file")
    workflow_name = dict_input['workflow']

    # Read and process input
    inp = process_input(input_file, workflow_name)

    # run workflow
    function = dict_workflows[workflow_name]

    # Fixed typo in the log message: "worflow" -> "workflow"
    logger.info(f"Running workflow using: {os.path.abspath(input_file)}")
    function(inp)
def compute_single_point_eigenvalues_coefficients(config: _data.SinglePoints) -> None:
    """Check if hdf5 contains the required eigenvalues and coefficients.

    If not, it runs the single point calculation.
    """
    mo_root = f'{config.project_name}/point_0/cp2k/mo'
    node_paths = (f'{mo_root}/coefficients', f'{mo_root}/eigenvalues')

    if all(is_data_in_hdf5(config.path_hdf5, node) for node in node_paths):
        logger.info("Coefficients and eigenvalues already in hdf5.")
        return

    # Missing data: run the single point workflow to compute the
    # eigenvalues and coefficients.
    logger.info("Starting single point calculation.")
    workflow_single_points(config)
#: Type defining the derivative couplings calculation
ResultPaths = Tuple[List[str], List[str]]


def workflow_derivative_couplings(
    config: _data.DerivativeCoupling
) -> ResultPaths | tuple[ResultPaths, ResultPaths]:
    """Compute the derivative couplings for a molecular dynamic trajectory.

    Parameters
    ----------
    config
        Dictionary with the configuration to run the workflows

    Return
    ------
    Folders where the Hamiltonians are stored.

    """
    return select_orbitals_type(config, run_workflow_couplings)


def run_workflow_couplings(config: _data.DerivativeCoupling) -> PromisedObject:
    """Run the derivative coupling workflow using `config`."""
    # compute the molecular orbitals
    logger.info("starting couplings calculation!")
    mo_paths_hdf5, energy_paths_hdf5 = unpack(calculate_mos(config), 2)

    # Overlap matrix at two different times
    promised_overlaps = calculate_overlap(config, mo_paths_hdf5)

    # Calculate Non-Adiabatic Coupling
    promised_crossing_and_couplings = lazy_couplings(config, promised_overlaps)

    # Write the results in PYXAID format
    config.path_hamiltonians = create_path_hamiltonians(config.workdir, config.orbitals_type)

    # Inplace scheduling of write_hamiltonians function.
    # Equivalent to add @schedule on top of the function
    schedule_write_ham = schedule(write_hamiltonians)

    # Number of matrix computed; couplings use 3 consecutive geometries
    config.npoints = len(config.geometries) - 2

    # Write Hamiltonians in PYXAID format
    promise_files = schedule_write_ham(
        config, promised_crossing_and_couplings, mo_paths_hdf5)

    return gather(promise_files, energy_paths_hdf5)


def create_path_hamiltonians(workdir: str | os.PathLike[str], orbitals_type: str) -> str:
    """Create the path where the resulting Hamiltonians are stored.

    Parameters
    ----------
    workdir
        Working directory under which the folder is created.
    orbitals_type
        Spin-channel prefix ("" for restricted, e.g. "alphas"/"betas" otherwise).

    Returns
    -------
    str
        Path of the (possibly pre-existing) Hamiltonians folder.

    """
    prefix = "hamiltonians"
    name = prefix if not orbitals_type else f"{orbitals_type}_{prefix}"
    path_hamiltonians = join(workdir, name)
    # exist_ok avoids the check-then-create race of the previous
    # `if not exists: makedirs` pattern.
    os.makedirs(path_hamiltonians, exist_ok=True)

    return path_hamiltonians
def workflow_ipr(config: _data.IPR) -> np.ndarray:
    """Compute the Inverse Participation Ratio main function.

    Parameters
    ----------
    config
        Input to run the workflow.

    Returns
    -------
    np.ndarray
        Two-column array with the MO energies (eV) and their
        participation ratios (1/IPR); also written to ``IPR.txt``.

    """
    # Dictionary containing the general information
    initialize(config)

    # Checking if hdf5 contains the required eigenvalues and coefficients
    compute_single_point_eigenvalues_coefficients(config)

    # Logger info
    logger.info("Starting IPR calculation.")

    # Get eigenvalues and coefficients from hdf5
    node_path_coefficients = 'coefficients/point_0/'
    node_path_eigenvalues = 'eigenvalues/point_0'
    atomic_orbitals = retrieve_hdf5_data(config.path_hdf5, node_path_coefficients)
    energies = retrieve_hdf5_data(config.path_hdf5, node_path_eigenvalues)
    energies *= h2ev  # To get them from Hartree to eV

    # Converting the xyz-file to a mol-file
    mol = readXYZ(config.path_traj_xyz)

    # Computing the overlap-matrix S and its square root
    overlap = compute_matrix_multipole(mol, config, 'overlap')
    squared_overlap = sqrtm(overlap)

    # Converting the coefficients from AO-basis to MO-basis
    transformed_orbitals = np.dot(squared_overlap, atomic_orbitals)

    # Now we add up the rows of the c_MO that belong to the same atom
    sphericals = number_spherical_functions_per_atom(
        mol,
        'cp2k',
        config.cp2k_general_settings.basis,
        config.path_hdf5,
    )  # Array with number of spherical orbitals per atom

    # New matrix with the atoms on the rows and the MOs on the columns
    indices = np.zeros(len(mol), dtype='int')
    indices[1:] = np.cumsum(sphericals[:-1])
    accumulated_transf_orbitals = np.add.reduceat(transformed_orbitals, indices, 0)

    # Finally, we can calculate the IPR, vectorized over the MO axis
    # (replaces the previous per-column Python loop):
    #   IPR_i = sum_a |c_ai|^4 / (sum_a |c_ai|^2)^2
    abs_orbitals = np.absolute(accumulated_transf_orbitals)
    ipr = np.sum(abs_orbitals ** 4, axis=0) / np.sum(abs_orbitals ** 2, axis=0) ** 2

    # Lastly, we save the output as a txt-file
    result = np.zeros((accumulated_transf_orbitals.shape[1], 2))
    result[:, 0] = energies
    result[:, 1] = 1.0 / ipr
    np.savetxt('IPR.txt', result)
    return result
40 | 41 | """ 42 | # Dictionary containing the general configuration 43 | initialize(config) 44 | 45 | logger.info("starting!") 46 | 47 | # compute the molecular orbitals 48 | # Unpack 49 | mo_paths_hdf5 = calculate_mos(config) 50 | 51 | # Pack 52 | return tuple(run(mo_paths_hdf5, folder=config.workdir)) 53 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | # Minimum requirements for the build system to execute. 3 | requires = [ 4 | "setuptools>=61.0", 5 | "wheel>=0.21", 6 | "oldest-supported-numpy", 7 | "setuptools_scm[toml]>=6.2", 8 | ] 9 | build-backend = "setuptools.build_meta" 10 | 11 | [project] 12 | name = "nano-qmflows" 13 | dynamic = [ 14 | "dependencies", 15 | "optional-dependencies", 16 | "version", 17 | "readme", 18 | ] 19 | description = "Derivative coupling calculation" 20 | license = { text = "Apache-2.0" } 21 | authors = [ 22 | { name = "Felipe Zapata & Ivan Infante", email = "f.zapata@esciencecenter.nl" }, 23 | ] 24 | keywords = [ 25 | "chemistry", 26 | "Photochemistry", 27 | "Simulation", 28 | ] 29 | classifiers = [ 30 | "License :: OSI Approved :: Apache Software License", 31 | "Natural Language :: English", 32 | "Operating System :: MacOS", 33 | "Operating System :: POSIX :: Linux", 34 | "Programming Language :: C++", 35 | "Programming Language :: Python", 36 | "Programming Language :: Python :: 3", 37 | "Programming Language :: Python :: 3 :: Only", 38 | "Programming Language :: Python :: 3.8", 39 | "Programming Language :: Python :: 3.9", 40 | "Programming Language :: Python :: 3.10", 41 | "Programming Language :: Python :: 3.11", 42 | "Programming Language :: Python :: 3.12", 43 | "Programming Language :: Python :: Implementation :: CPython", 44 | "Development Status :: 4 - Beta", 45 | "Intended Audience :: Science/Research", 46 | "Topic :: Scientific/Engineering :: Chemistry", 47 | "Typing 
:: Typed", 48 | ] 49 | requires-python = ">=3.8" 50 | 51 | [project.urls] 52 | Homepage = "https://github.com/SCM-NV/nano-qmflows" 53 | "Bug Tracker" = "https://github.com/SCM-NV/nano-qmflows/issues" 54 | Documentation = "https://qmflows-namd.readthedocs.io/en/latest/" 55 | 56 | [project.scripts] 57 | "run_workflow.py" = "nanoqm.workflows.run_workflow:main" 58 | "distribute_jobs.py" = "nanoqm.workflows.distribute_jobs:main" 59 | 60 | [tool.setuptools] 61 | license-files = ["LICENSE*.txt"] 62 | 63 | [tool.setuptools.packages.find] 64 | exclude = ["test"] 65 | 66 | [tool.setuptools.package-data] 67 | nanoqm = [ 68 | "basis/*.json", 69 | "basis/BASIS*", 70 | "basis/GTH_POTENTIALS", 71 | "py.typed", 72 | "*.pyi", 73 | ] 74 | 75 | [tool.setuptools.dynamic] 76 | dependencies = { file = ["install_requirements.txt"] } 77 | optional-dependencies.test = { file = ["test_requirements.txt"] } 78 | optional-dependencies.doc = { file = ["doc_requirements.txt"] } 79 | optional-dependencies.lint = { file = ["linting_requirements.txt"] } 80 | readme = { file = ["README.rst"], content-type = "text/x-rst" } 81 | 82 | [tool.setuptools_scm] 83 | write_to = "nanoqm/_version.py" 84 | 85 | [tool.mypy] 86 | plugins = "numpy.typing.mypy_plugin" 87 | show_error_codes = true 88 | mypy_path = "typings" 89 | files = ["nanoqm", "typings"] 90 | 91 | [[tool.mypy.overrides]] 92 | module = [ 93 | "schema.*", 94 | "matplotlib.*", 95 | ] 96 | ignore_missing_imports = true 97 | 98 | [tool.pydocstyle] 99 | add_ignore = ["D401"] 100 | 101 | [tool.coverage.run] 102 | branch = true 103 | source = ["nanoqm"] 104 | 105 | [tool.pytest.ini_options] 106 | testpaths = "test" 107 | addopts = "--tb=short --cov --cov-report xml --cov-report term --cov-report html --cache-clear --pdbcls=IPython.terminal.debugger:TerminalPdb --durations=6" 108 | markers = [ 109 | "slow: A marker for slow tests requiring external quantum-chemical packages." 
110 | ] 111 | filterwarnings = [ 112 | "error::qmflows.warnings_qmflows.QMFlows_Warning", 113 | "ignore:Generic keyword '_pytestfixturefunction' not implemented for package \\w+:qmflows.warnings_qmflows.Key_Warning", 114 | ] 115 | 116 | [tool.flake8] 117 | max-line-length = 100 118 | per-file-ignores = [ 119 | "nanoqm/workflows/input_validation.py: E704,E501", 120 | "nanoqm/analysis/tools.py: F821", 121 | ] 122 | 123 | [tool.cibuildwheel] 124 | build = [ 125 | "cp39-manylinux_x86_64", 126 | "cp39-manylinux-aarch64", 127 | "cp39-macosx_x86_64", 128 | "cp39-macosx_arm64", 129 | ] 130 | before-all = "cp licenses/LICENSE_LIBHDF5.txt licenses/LICENSE_LIBINT2.txt ." 131 | build-frontend = "build" 132 | 133 | [tool.cibuildwheel.linux] 134 | environment = { QMFLOWS_INCLUDEDIR="", QMFLOWS_LIBDIR="", CFLAGS="-Werror", LDFLAGS="-Wl,--strip-debug" } 135 | manylinux-x86_64-image = "ghcr.io/nlesc-nano/manylinux2014_x86_64-qmflows:latest" 136 | manylinux-aarch64-image = "ghcr.io/nlesc-nano/manylinux2014_aarch64-qmflows:latest" 137 | repair-wheel-command = [ 138 | "auditwheel -v repair -w {dest_dir} {wheel}", 139 | "pipx run abi3audit --strict --verbose --report {wheel}", 140 | ] 141 | 142 | [tool.cibuildwheel.macos] 143 | environment = { QMFLOWS_INCLUDEDIR="", QMFLOWS_LIBDIR="", LDFLAGS="-Wl", MACOSX_DEPLOYMENT_TARGET="10.14" } 144 | repair-wheel-command = [ 145 | "delocate-listdeps {wheel}", 146 | "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel}", 147 | "pipx run abi3audit --strict --verbose --report {wheel}", 148 | ] 149 | 150 | [[tool.cibuildwheel.overrides]] 151 | select = "cp39-macosx_arm64" 152 | environment = { QMFLOWS_INCLUDEDIR="", QMFLOWS_LIBDIR="", LDFLAGS="-Wl", MACOSX_DEPLOYMENT_TARGET="11" } 153 | -------------------------------------------------------------------------------- /scripts/convert_legacy_hdf5.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Convert old HDF5 
class LegacyConverter:
    """Convert legacy HDF5 files to the new storage layout."""

    def __init__(self, source: h5py.File, dest: h5py.File) -> None:
        """Initialize the converter."""
        self.source = source
        self.dest = dest
        self.project = self.get_project_name()

    def get_project_name(self) -> str:
        """Get the project root name."""
        # The legacy root holds exactly two groups: 'cp2k' and the project name.
        names = set(self.source.keys())
        names.discard('cp2k')
        return names.pop()

    def copy_data_set(self, old_path: str, new_path: str) -> None:
        """Copy data set from old ``source`` to new ``dest``."""
        if old_path not in self.source:
            return
        data = self.source[old_path][()]
        self.dest.require_dataset(new_path, shape=np.shape(data), data=data, dtype=np.float32)

    def copy_node_values(self, old_names: Iterable[str], new_names: Iterable[str]) -> None:
        """Copy the data set values from the old file to the new one."""
        for pair in zip(old_names, new_names):
            self.copy_data_set(*pair)

    def copy_orbitals(self) -> None:
        """Copy orbitals from old ``source`` to new ``dest``."""
        points = [k for k in self.source[self.project].keys() if k.startswith("point_")]
        old_names = []
        new_names = []
        # Legacy layout: "project/point_x/cp2k/mo/<key>"  ->  new: "<key>/point_x"
        for key in ("coefficients", "eigenvalues", "energy"):
            for point in points:
                old_names.append(f"{self.project}/{point}/cp2k/mo/{key}")
                new_names.append(f"{key}/{point}")
        self.copy_node_values(old_names, new_names)

    def copy_couplings(self) -> None:
        """Copy couplings and swap matrix."""
        couplings = [k for k in self.source[self.project].keys() if k.startswith("coupling_")]
        self.copy_node_values(
            [f"{self.project}/{cs}" for cs in couplings], couplings)

        # The swaps matrix moves from "project/swaps" to the root
        self.copy_node_values([f"{self.project}/swaps"], ["swaps"])

    def copy_overlaps(self) -> None:
        """Copy the overlaps to the new layout."""
        overlaps = [k for k in self.source[self.project].keys() if k.startswith("overlaps_")]
        old_names = []
        new_names = []
        for key in ("mtx_sji_t0", "mtx_sji_t0_corrected"):
            for over in overlaps:
                old_names.append(f"{self.project}/{over}/{key}")
                new_names.append(f"{over}/{key}")
        self.copy_node_values(old_names, new_names)

    def copy_multipoles(self) -> None:
        """Copy the multipoles to the new layout."""
        multipole = f"{self.project}/multipole"
        if multipole not in self.source:
            return
        points = [k for k in self.source[multipole].keys() if k.startswith("point_")]
        self.copy_node_values(
            [f"{self.project}/multipole/{p}/dipole" for p in points],
            [f"dipole/{p}" for p in points],
        )

    def copy_all(self) -> None:
        """Copy all that has been stored in the legacy format."""
        # The copy steps are independent of each other, so order is irrelevant.
        self.copy_orbitals()
        self.copy_couplings()
        self.copy_overlaps()
        self.copy_multipoles()
def main(directory: str | os.PathLike[str], pattern: str | re.Pattern[str]) -> str:
    """Return the path of the first entry in *directory* matching *pattern*.

    The pattern is applied with ``re.search`` to each name produced by
    ``os.listdir``; the first hit (in listing order) wins.

    Raises
    ------
    FileNotFoundError
        When no entry in *directory* matches the pattern.
    """
    regex = re.compile(pattern)
    hit = next((name for name in os.listdir(directory) if regex.search(name) is not None), None)
    if hit is not None:
        return os.path.join(directory, hit)
    raise FileNotFoundError(
        f"Failed to identify a file in {os.fspath(directory)!r} "
        f"with the following pattern: {regex!r}"
    )
# Conversion factor from Rydberg to meV (1 Ry = 13605.698 meV).
r2meV = 13605.698


def main(path_output: str, s1: int, s2: int, dt: float) -> None:
    """Plot the electronic coupling between states *s1* and *s2* over time.

    Parameters
    ----------
    path_output
        Directory containing the ``Ham_*_im`` files and the cache file.
    s1, s2
        Indices of the two states whose coupling is plotted.
    dt
        Time step in femtoseconds.
    """
    # BUG FIX: the original ignored ``path_output`` and always read/wrote the
    # current working directory, so the ``-p`` option had no effect.
    cache = os.path.join(path_output, 'couplings.npy')
    if not os.path.isfile(cache):
        # Count the stored Hamiltonian snapshots.
        files_im = glob.glob(os.path.join(path_output, 'Ham_*_im'))
        # Files are assumed to be numbered consecutively from 0 — TODO confirm
        # against the workflow that writes them.
        couplings = np.stack([
            np.loadtxt(os.path.join(path_output, f'Ham_{i}_im'))
            for i in range(len(files_im))
        ])
        # Cache the tensor for fast reading afterwards (np.save adds ".npy").
        np.save(os.path.join(path_output, 'couplings'), couplings)
    else:
        couplings = np.load(cache)
    ts = np.arange(couplings.shape[0]) * dt
    plt.plot(ts, couplings[:, s1, s2] * r2meV)
    plt.xlabel('Time (fs)')
    plt.ylabel('Energy (meV)')
    plt.show()


def read_cmd_line(parser: argparse.ArgumentParser) -> tuple[str, int, int, float]:
    """Parse the command line into ``(path, state1, state2, time_step)``."""
    args = parser.parse_args()
    return (args.p, args.s1, args.s2, args.dt)