├── .coveragerc
├── .gitattributes
├── .github
└── workflows
│ ├── publish-to-pypi.yml
│ └── tests.yml
├── .gitignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── docs
├── Makefile
├── make.bat
└── source
│ ├── api.rst
│ ├── conf.py
│ └── index.rst
├── examples
├── 01_engines
│ └── gromacs
│ │ ├── GmxEngine.ipynb
│ │ └── SlurmGmxEngine.ipynb
├── 02_TrajectoryFunctionWrappers
│ ├── PyTrajectoryFunctionWrapper.ipynb
│ └── SlurmTrajectoryFunctionWrapper.ipynb
├── 03_trajectory_propagation_and_subtrajectory_extraction
│ ├── ConditionalTrajectoryPropagator.ipynb
│ ├── FrameExtractors.ipynb
│ └── InPartsTrajectoryPropagator.ipynb
├── 04_application_examples
│ └── WeightedEnsemble.ipynb
├── 05_developer_topics
│ └── slurm
│ │ └── SlurmProcess.ipynb
├── README.md
└── resources
│ ├── ala_calc_cvs.slurm
│ ├── ala_cv_funcs.py
│ └── gromacs
│ └── capped_alanine_dipeptide
│ ├── conf.gro
│ ├── conf_in_C7eq.trr
│ ├── conf_in_alphaR.trr
│ ├── index.ndx
│ ├── md.mdp
│ ├── mdrun.slurm
│ └── topol_amber99sbildn.top
├── pyproject.toml
├── pytest.ini
├── src
└── asyncmd
│ ├── __init__.py
│ ├── _config.py
│ ├── _version.py
│ ├── config.py
│ ├── gromacs
│ ├── __init__.py
│ ├── mdconfig.py
│ ├── mdengine.py
│ └── utils.py
│ ├── mdconfig.py
│ ├── mdengine.py
│ ├── slurm.py
│ ├── tools.py
│ ├── trajectory
│ ├── __init__.py
│ ├── convert.py
│ ├── functionwrapper.py
│ ├── propagate.py
│ └── trajectory.py
│ └── utils.py
└── tests
├── conftest.py
├── gromacs
├── test_mdengine.py
├── test_mdp.py
└── test_utils.py
├── test_data
├── gromacs
│ ├── conf.gro
│ ├── empty.mdp
│ ├── gen-vel-continuation.mdp
│ ├── index.ndx
│ ├── md_compressed_out.mdp
│ ├── md_full_prec_out.mdp
│ └── topol_amber99sbildn.top
├── mdconfig
│ └── dummy_mdconfig.dat
└── trajectory
│ ├── ala.gro
│ ├── ala.tpr
│ ├── ala_traj.trr
│ └── ala_traj.xtc
├── test_mdconfig.py
├── test_slurm.py
└── trajectory
├── test_convert.py
├── test_propagate.py
└── test_trajectory.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | # .coveragerc to control coverage.py runs for asyncmd
2 | # for use with pytest and pytest-cov
3 |
4 | [run]
5 | # see if all possible branches were visited
6 | branch = True
7 | # enable cython file coverage
8 | #plugins = Cython.Coverage
9 | source =
10 | asyncmd
11 |
12 | [report]
13 | exclude_lines =
14 | # exclude lines that contain one of:
15 | pragma: no cover
16 | raise NotImplementedError
17 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.trr filter=lfs diff=lfs merge=lfs -text
2 | *.pdf filter=lfs diff=lfs merge=lfs -text
3 | *.xtc filter=lfs diff=lfs merge=lfs -text
4 | *.tpr filter=lfs diff=lfs merge=lfs -text
5 |
--------------------------------------------------------------------------------
/.github/workflows/publish-to-pypi.yml:
--------------------------------------------------------------------------------
1 | name: Publish asyncmd to PyPI and TestPyPI
2 |
3 | on:
4 | push:
5 | tags: ["v*"] # run only on version-tag pushes
6 |
7 | jobs:
8 | build:
9 | name: Build distribution
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v4
14 | with:
15 | persist-credentials: false
16 | lfs: true
17 | - name: Set up Python
18 | uses: actions/setup-python@v5
19 | with:
20 | python-version: "3.x"
21 | - name: Install pypa/build
22 | run: >-
23 | python3 -m
24 | pip install
25 | build
26 | --user
27 | - name: Build a binary wheel and a source tarball
28 | run: python3 -m build
29 | - name: Store the distribution packages
30 | uses: actions/upload-artifact@v4
31 | with:
32 | name: python-package-distributions
33 | path: dist/
34 |
35 | publish-to-pypi:
36 | name: >-
37 | Publish asyncmd to PyPI
38 | needs:
39 | - build
40 | runs-on: ubuntu-latest
41 | environment:
42 | name: pypi
43 | url: https://pypi.org/p/asyncmd
44 | permissions:
45 | id-token: write # IMPORTANT: mandatory for trusted publishing
46 |
47 | steps:
48 | - name: Download all the dists
49 | uses: actions/download-artifact@v4
50 | with:
51 | name: python-package-distributions
52 | path: dist/
53 | - name: Publish asyncmd to PyPI
54 | uses: pypa/gh-action-pypi-publish@release/v1
55 |
56 | github-release:
57 | name: >-
58 | Sign asyncmd distribution with Sigstore
59 | and upload them to GitHub Release
60 | needs:
61 | - publish-to-pypi
62 | runs-on: ubuntu-latest
63 |
64 | permissions:
65 | contents: write # IMPORTANT: mandatory for making GitHub Releases
66 | id-token: write # IMPORTANT: mandatory for sigstore
67 |
68 | steps:
69 | - name: Download all the dists
70 | uses: actions/download-artifact@v4
71 | with:
72 | name: python-package-distributions
73 | path: dist/
74 | - name: Sign the dists with Sigstore
75 | uses: sigstore/gh-action-sigstore-python@v3.0.0
76 | with:
77 | inputs: >-
78 | ./dist/*.tar.gz
79 | ./dist/*.whl
80 | - name: Create GitHub Release
81 | env:
82 | GITHUB_TOKEN: ${{ github.token }}
83 | run: >-
84 | gh release create
85 | "$GITHUB_REF_NAME"
86 | --repo "$GITHUB_REPOSITORY"
87 | --notes ""
88 | - name: Upload artifact signatures to GitHub Release
89 | env:
90 | GITHUB_TOKEN: ${{ github.token }}
91 | # Upload to GitHub Release using the `gh` CLI.
92 | # `dist/` contains the built packages, and the
93 | # sigstore-produced signatures and certificates.
94 | run: >-
95 | gh release upload
96 | "$GITHUB_REF_NAME" dist/**
97 | --repo "$GITHUB_REPOSITORY"
98 |
99 | publish-to-testpypi:
100 | name: Publish asyncmd to TestPyPI
101 | needs:
102 | - build
103 | runs-on: ubuntu-latest
104 |
105 | environment:
106 | name: testpypi
107 | url: https://test.pypi.org/p/asyncmd
108 |
109 | permissions:
110 | id-token: write # IMPORTANT: mandatory for trusted publishing
111 |
112 | steps:
113 | - name: Download all the dists
114 | uses: actions/download-artifact@v4
115 | with:
116 | name: python-package-distributions
117 | path: dist/
118 | - name: Publish asyncmd to TestPyPI
119 | uses: pypa/gh-action-pypi-publish@release/v1
120 | with:
121 | repository-url: https://test.pypi.org/legacy/
122 |
123 | test-testpypi-install:
124 | name: Test TestPyPi Installation
125 | needs:
126 | - publish-to-testpypi
127 | runs-on: ubuntu-latest
128 |
129 | steps:
130 | - name: Set up Python
131 | uses: actions/setup-python@v5
132 | with:
133 | python-version: "3.x"
134 | - name: Install asyncmd from TestPyPi (no-deps)
135 | run: >-
136 | python3 -m
137 | pip install
138 | --index-url https://test.pypi.org/simple/
139 | --no-deps
140 | asyncmd
141 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 | on:
3 | pull_request:
4 | types: [opened, synchronize, reopened, ready_for_review]
5 | branches: ["main"]
6 | push:
7 | branches: ["main"]
8 | tags: ["v*"]
9 | schedule:
10 | # run test automatically every day at 3
11 | - cron: "0 3 * * *"
12 |
13 | defaults:
14 | run:
15 | shell: bash -el {0}
16 |
17 | jobs:
18 | tests:
19 | name: Tests (${{ matrix.python-version }}, ${{ matrix.os }})
20 | runs-on: ${{ matrix.os }}
21 | strategy:
22 | matrix:
23 | os: ["ubuntu-latest", # "macos-latest", "windows-latest"
24 | ]
25 | python-version: ["3.10", "3.11", "3.12", "3.13"]
26 | fail-fast: false
27 |
28 | steps:
29 | - uses: actions/checkout@v4
30 | with:
31 | lfs: true
32 | - uses: conda-incubator/setup-miniconda@v3
33 | with:
34 | auto-update-conda: true
35 | python-version: ${{ matrix.python-version }}
36 | miniforge-version: latest
37 | - name: Install
38 | run: |
39 | conda install gromacs
40 | python -m pip install -e .\[tests-all\]
41 | - name: List versions
42 | run: |
43 | conda list
44 | python -c "import asyncmd; print('asyncmd version is: ', asyncmd.__version__)"
45 | - name: Unit tests
46 | env:
47 | PY_COLORS: "1"
48 | run: pytest -vv --runall --cov=asyncmd --cov-report=xml
49 | - name: Upload coverage reports to Codecov
50 | uses: codecov/codecov-action@v5
51 | with:
52 | token: ${{ secrets.CODECOV_TOKEN }}
53 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # MacOS finder/search subfolders
2 | .DS_Store
3 |
4 | # Visualstudio Code per project settings folder
5 | .vscode
6 |
7 | # ---> Python
8 | # Byte-compiled / optimized / DLL files
9 | __pycache__/
10 | *.py[cod]
11 | *$py.class
12 |
13 | # C extensions
14 | *.so
15 |
16 | # Distribution / packaging
17 | .Python
18 | build/
19 | develop-eggs/
20 | dist/
21 | downloads/
22 | eggs/
23 | .eggs/
24 | lib/
25 | lib64/
26 | parts/
27 | sdist/
28 | var/
29 | wheels/
30 | share/python-wheels/
31 | *.egg-info/
32 | .installed.cfg
33 | *.egg
34 | MANIFEST
35 |
36 | # PyInstaller
37 | # Usually these files are written by a python script from a template
38 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
39 | *.manifest
40 | *.spec
41 |
42 | # Installer logs
43 | pip-log.txt
44 | pip-delete-this-directory.txt
45 |
46 | # Unit test / coverage reports
47 | htmlcov/
48 | .tox/
49 | .nox/
50 | .coverage
51 | .coverage.*
52 | .cache
53 | nosetests.xml
54 | coverage.xml
55 | *.cover
56 | *.py,cover
57 | .hypothesis/
58 | .pytest_cache/
59 | cover/
60 |
61 | # Translations
62 | *.mo
63 | *.pot
64 |
65 | # Django stuff:
66 | *.log
67 | local_settings.py
68 | db.sqlite3
69 | db.sqlite3-journal
70 |
71 | # Flask stuff:
72 | instance/
73 | .webassets-cache
74 |
75 | # Scrapy stuff:
76 | .scrapy
77 |
78 | # Sphinx documentation
79 | docs/_build/
80 | docs/build/
81 |
82 | # PyBuilder
83 | .pybuilder/
84 | target/
85 |
86 | # Jupyter Notebook
87 | .ipynb_checkpoints
88 |
89 | # IPython
90 | profile_default/
91 | ipython_config.py
92 |
93 | # pyenv
94 | # For a library or package, you might want to ignore these files since the code is
95 | # intended to run in multiple environments; otherwise, check them in:
96 | # .python-version
97 |
98 | # pipenv
99 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
100 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
101 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
102 | # install all needed dependencies.
103 | #Pipfile.lock
104 |
105 | # poetry
106 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
107 | # This is especially recommended for binary packages to ensure reproducibility, and is more
108 | # commonly ignored for libraries.
109 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
110 | #poetry.lock
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 |
162 | # ---> JupyterNotebooks
163 | # gitignore template for Jupyter Notebooks
164 | # website: http://jupyter.org/
165 |
166 | .ipynb_checkpoints
167 | */.ipynb_checkpoints/*
168 |
169 | # IPython
170 | profile_default/
171 | ipython_config.py
172 |
173 | # Remove previous ipynb_checkpoints
174 | # git rm -r .ipynb_checkpoints/
175 |
176 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [Unreleased]
9 |
10 | ### Added
11 |
12 | - `SlurmGmxEngine` (and `GmxEngine`) now expose the `mdrun_time_conversion_factor` to enable users to control the expected time it takes to set up the environment inside the slurm job. Both engines also have improved consistency checks for mdp options when performing energy minimization.
13 | - `MDEngine`, `GmxEngine`, and `SlurmGmxEngine`: removed unused `running` property
14 |
15 | ## [0.3.3] - 2025-05-06
16 |
17 | ### Added
18 |
19 | - Add `CHANGELOG.md` file
20 | - `SlurmProcess` now supports arbitrary sbatch options via `sbatch_options`, this is also true for the `SlurmGmxEngine` and `SlurmTrajectoryFunctionWrapper` classes via the added keyword argument `sbatch_options`
21 | - Example IPython notebook on `FrameExtractors`
22 | - Some tests for `slurm` module
23 |
24 | ### Fixed
25 |
26 | - `ConditionalTrajectoryPropagator`, `InPartsTrajectoryPropagator`, and weighted ensemble example IPython notebooks now use the correct paths to input files
27 | - No more f-strings in logging statements
28 | - Add scipy to dependencies
29 |
30 | ## [0.3.2] - 2025-01-10
31 |
32 | ### Added
33 |
34 | - First release on PyPi
35 |
36 | [unreleased]: https://github.com/bio-phys/asyncmd/compare/v0.3.3...HEAD
37 | [0.3.3]: https://github.com/bio-phys/asyncmd/compare/v0.3.2...v0.3.3
38 | [0.3.2]: https://github.com/bio-phys/asyncmd/releases/tag/v0.3.2
39 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # asyncmd
2 |
3 | ## Synopsis
4 |
5 | asyncmd is a library to write **concurrent** code to run and analyze molecular dynamics simulations using Python's **async/await** syntax.
6 |
7 | ## Motivation
8 |
9 | Molecular dynamics simulations are fun and we can learn a lot about the simulated system. Running many molecular dynamics simulations of the same system concurrently is tedious, error-prone and boring but we can learn even more about the simulated system and are more efficient in doing so.
10 | This library addresses the tedious, error-prone and boring part of setting up many similar simulations, but it leaves you with the fun part of understanding the simulated system.
11 |
12 | ## Code Example
13 |
14 | Run `N` [GROMACS] engines concurrently from configurations randomly picked along a trajectory (`traj.trr`) for `n_steps` integration steps each, drawing random Maxwell-Boltzmann velocities for each configuration on the way. Finally turn the python function `func` (which acts on `Trajectory` objects) into an asynchronous and cached function by wrapping it and applying it to all generated trajectories concurrently:
15 |
16 | ```python
17 | import asyncio
18 | import numpy as np
19 | import asyncmd
20 | import asyncmd.gromacs as asyncgmx
21 |
22 | in_traj = asyncmd.Trajectory(trajectory_files="traj.trr", structure_file="conf.gro")
23 | # get a random number generator and draw N random frames (with replacement)
24 | rng = np.random.default_rng()
25 | frame_idxs = rng.choice(len(in_traj), size=N)
26 | # use the RandomVelocitiesFrameExtractor to directly get the frames with MB-vels
27 | extractor = asyncmd.trajectory.convert.RandomVelocitiesFrameExtractor(T=303)
28 | mdps = [asyncgmx.MDP("config.mdp") for _ in range(N)]
29 | # MDConfig objects (like MDP) behave like dictionaries and are easy to modify
30 | for i, mdp in enumerate(mdps):
31 | # here we just modify the output frequency for every engine separately
32 | # but you can set any mdp option like this
33 | # Note how the values are in the correct types? I.e. that they are ints?
34 | mdp["nstxout"] *= (i + 1)
35 | mdp["nstvout"] *= (i + 1)
36 | # create N gromacs engines
37 | engines = [asyncgmx.GmxEngine(mdp=mdp, gro_file="conf.gro", top_file="topol.top",
38 | # optional (can be omitted or None), however naturally without an index file
39 | # you can not reference custom groups in the .mdp-file or MDP object
40 | ndx_file="index.ndx",
41 | )
42 | for mdp in mdps]
43 | # extract starting configurations with MB-vels and save them to current directory
44 | start_confs = await asyncio.gather(*(extractor.extract_async(
45 | outfile=f"start_conf{i}.trr",
46 | traj_in=in_traj, idx=idx)
47 | for i, idx in enumerate(frame_idxs)))
48 | # prepare the MD (for gromacs this is essentially a `grompp` call)
49 | await asyncio.gather(*(e.prepare(starting_configuration=conf,
50 | workdir=".", deffnm=f"engine{i}")
51 | for i, (conf, e) in enumerate(zip(start_confs, engines))
52 | )
53 | )
54 | # and run the molecular dynamics
55 | out_trajs = await asyncio.gather(*(e.run_steps(nsteps=n_steps) for e in engines))
56 | # wrap `func` and apply it on all output trajectories concurrently
57 | wrapped_func = asyncmd.trajectory.PyTrajectoryFunctionWrapper(function=func)
58 | cv_vals = await asyncio.gather(*(wrapped_func(traj) for traj in out_trajs))
59 | ```
60 |
61 | Note that running via the [SLURM] queueing system is as easy as replacing the `GmxEngine` with a `SlurmGmxEngine` and the `PyTrajectoryFunctionWrapper` with a `SlurmTrajectoryFunctionWrapper` (and supplying them both with sbatch script skeletons).
62 |
63 | For an in-depth introduction see also the `examples` folder in this repository which contains jupyter notebooks on various topics.
64 |
65 | ## Installation
66 |
67 | ### pip install from PyPi
68 |
69 | asyncmd is published on [PyPi] (since v0.3.2), installing is as easy as:
70 |
71 | ```bash
72 | pip install asyncmd
73 | ```
74 |
75 | ### pip install directly from the repository
76 |
77 | Please note that you need to have [git-lfs] (an open source git extension) setup to get all input files needed to run the notebooks in the `examples` folder. However, no [git-lfs] is needed to get a working version of the library.
78 |
79 | ```bash
80 | git clone https://github.com/bio-phys/asyncmd.git
81 | cd asyncmd
82 | pip install .
83 | ```
84 |
85 | ## Documentation and API Reference
86 |
87 | The documentation can be built with [sphinx], use e.g. the following to build it in html format:
88 |
89 | ```bash
90 | cd asyncmd # Need to be at the top folder of the repository for the next line to work
91 | sphinx-build -b html docs/source docs/build/html
92 | ```
93 |
94 | Use ```pip install .\[docs\]``` to install the requirements needed to build the documentation.
95 |
96 | ## Tests
97 |
98 | Tests use [pytest]. To run them just install asyncmd with the test requirements
99 |
100 | ```bash
101 | git clone https://github.com/bio-phys/asyncmd.git
102 | cd asyncmd
103 | pip install .\[tests\]
104 | # or use
105 | pip install .\[tests-all\]
106 | # to also install optional dependencies needed to run all tests
107 | ```
108 |
109 | And then run the tests (against the installed version) as
110 |
111 | ```bash
112 | pytest
113 | ```
114 |
115 | ## Contribute
116 |
117 | If you discover any issues or want to propose a new feature please feel free to open an [issue](https://github.com/bio-phys/asyncmd/issues) or a [pull request](https://github.com/bio-phys/asyncmd/pulls)!
118 |
119 | ### Developer install
120 |
121 | For the developer install I recommend:
122 |
123 | ```bash
124 | git clone https://github.com/bio-phys/asyncmd.git
125 | cd asyncmd
126 | pip install -e .\[dev\]
127 | ```
128 |
129 | This will in addition to the requirements to run the tests and to build the documentation install [coverage] and its [pytest-cov] plugin such that you have an idea of the test coverage for your newly added code. To get a nice html coverage report you can run the tests as
130 |
131 | ```bash
132 | pytest --cov=asyncmd --cov-report=html
133 | ```
134 |
135 | ### Contributors
136 |
137 | This project was originally conceived and started by Hendrik Jung in 2021/2022. For more check the `pyproject.toml` file. When you contribute code don't forget to add your name there to claim the credit for your work!
138 |
139 | ## License
140 |
141 | asyncmd is under the terms of the GNU general public license version 3 or later, i.e. SPDX identifier "GPL-3.0-or-later".
142 |
143 | ---
144 | This README.md is printed from 100% recycled electrons.
145 |
146 | [coverage]: https://pypi.org/project/coverage/
147 | [git-lfs]: https://git-lfs.com/
148 | [GROMACS]: https://www.gromacs.org/
149 | [PyPi]: https://pypi.org/project/asyncmd/
150 | [pytest]: https://docs.pytest.org/en/latest/
151 | [pytest-cov]: https://pypi.org/project/pytest-cov/
152 | [SLURM]: https://slurm.schedmd.com/documentation.html
153 | [sphinx]: https://www.sphinx-doc.org/en/master/index.html
154 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | %SPHINXBUILD% >NUL 2>NUL
14 | if errorlevel 9009 (
15 | echo.
16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17 | echo.installed, then set the SPHINXBUILD environment variable to point
18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
19 | echo.may add the Sphinx directory to PATH.
20 | echo.
21 | echo.If you don't have Sphinx installed, grab it from
22 | echo.https://www.sphinx-doc.org/
23 | exit /b 1
24 | )
25 |
26 | if "%1" == "" goto help
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | Overview for users
2 | ==================
3 |
4 | trajectory
5 | **********
6 | The :py:mod:`asyncmd.trajectory` module contains a
7 | :py:class:`asyncmd.Trajectory` class which is the return object for all MD
8 | engines. These objects enable easy access to a number of properties of the
9 | underlying trajectory, like the length in frames or time, the integration step
10 | and many more. Note :py:class:`asyncmd.Trajectory` objects are unique in the
11 | sense that every combination of underlying ``trajectory_files`` will give you the
12 | same object back even if you instantiate it multiple times, i.e. ``is`` will be
13 | ``True`` for the two objects (in addition to ``==`` being ``True``).
14 | Also note that it is possible to pickle and unpickle :py:class:`asyncmd.Trajectory`
15 | objects. You can even change the filepath of the underlying trajectories, i.e.
16 | copy/move them to another location (consider also moving the npz cache files)
17 | and still unpickle to get a working :py:class:`asyncmd.Trajectory` object as
18 | long as the relative path between your python workdir and the trajectory files
19 | does not change. Or you can change the workdir of the python interpreter as long
20 | as the trajectory files remain at the same location in the filesystem.
21 |
22 | ..
23 | TODO: reference to the dev section where we explain the two hidden funcs
24 | to forget traj objects
25 |
26 | There are also a number of ``TrajectoryFunctionWrapper`` classes which
27 | can be used to wrap (python) functions or arbitrary executables for easy
28 | asynchronous application on :py:class:`asyncmd.Trajectory`, either submitted
29 | via slurm or ran locally. The benefit of the wrapped functions is that the
30 | calculated values will be cached automatically. The caching is even persistent
31 | over multiple reloads and invocations of the python interpreter. To this end
32 | the default caching mechanism creates hidden numpy npz files for every
33 | :py:class:`asyncmd.Trajectory` (named after the trajectory) in which the values
34 | are stored. Other caching mechanisms are an in-memory cache and the option to
35 | store all cached values in a :py:class:`h5py.File` or :py:class:`h5py.Group`.
36 | You can set the default caching mechanism for all
37 | :py:class:`asyncmd.Trajectory` centrally via
38 | :py:func:`asyncmd.config.set_default_trajectory_cache_type` or overwrite it for
39 | each :py:class:`asyncmd.Trajectory` at init by passing ``cache_type``. See also
40 | :py:func:`asyncmd.config.register_h5py_cache` to register the h5py cache.
41 |
42 | .. py:currentmodule:: asyncmd.trajectory.convert
43 |
44 | It also contains a number of classes to extract frames from
45 | :py:class:`asyncmd.Trajectory` objects in the module
46 | :py:mod:`asyncmd.trajectory.convert`:
47 |
48 | - :py:class:`NoModificationFrameExtractor`
49 |
50 | - :py:class:`InvertedVelocitiesFrameExtractor`
51 |
52 | - :py:class:`RandomVelocitiesFrameExtractor`
53 |
54 | Note that implementing your own ``FrameExtractor`` with a custom modification
55 | is as easy as subclassing the abstract base class :py:class:`FrameExtractor`
56 | and overwriting its :py:meth:`FrameExtractor.apply_modification` method.
57 |
58 | The :py:mod:`asyncmd.trajectory.convert` module furthermore contains a class to
59 | concatenate :py:class:`asyncmd.Trajectory` segments, the
60 | :py:class:`TrajectoryConcatenator`. It can be used to concatenate lists of
61 | trajectory segments in any order (and possibly with inverted momenta) by
62 | passing a list of trajectory segments and a list of tuples (slices) that
63 | specify the frames to use in the concatenated output trajectory. Note that this
64 | class gives you all customizability at the cost of complexity, if you just want
65 | to construct a transition from trajectory segments the
66 | :py:func:`asyncmd.trajectory.construct_TP_from_plus_and_minus_traj_segments` is
67 | most probably easier to use and what you want (it uses the
68 | :py:class:`TrajectoryConcatenator` under the hood anyway).
69 |
70 | Note that both the `FrameExtractor`s and the `TrajectoryConcatenator` have an
71 | async version of their functions doing the work (`extract` and `concatenate`
72 | respectively). The awaitable versions do exactly the same as their sync
73 | counterparts, just that they do so in a separate thread.
74 |
75 | .. py:currentmodule:: asyncmd
76 |
77 | Another notable part of the :py:mod:`asyncmd.trajectory` module are the
78 | :py:class:`asyncmd.trajectory.InPartsTrajectoryPropagator` and
79 | :py:class:`asyncmd.trajectory.ConditionalTrajectoryPropagator`, which
80 | can both be used to propagate trajectories in chunks of a given walltime. While
81 | the former is used to propagate a trajectory until a given total number of
82 | integration steps, the latter one can be used to propagate a trajectory until
83 | any of the given conditions is fulfilled. The latter is especially useful for
84 | pathsampling and committor simulations (here the conditions would be the state
85 | functions), but can be used in general for any situation where the time
86 | integration should be stopped on given criteria (as opposed to after a fixed
87 | number of integration steps or when a given walltime is reached).
88 | There is also a handy function to create a transition, i.e. a trajectory that
89 | connects two (different) conditions from two conditional propagation runs ending
90 | in different conditions, the
91 | :py:func:`asyncmd.trajectory.construct_TP_from_plus_and_minus_traj_segments`
92 | function. It is most likely useful for users implementing some form of
93 | pathsampling.
94 |
95 | Trajectory
96 | ----------
97 | .. autoclass:: asyncmd.Trajectory
98 | :members:
99 | :special-members:
100 | :inherited-members:
101 |
102 | TrajectoryFunctionWrappers
103 | --------------------------
104 |
105 | .. autoclass:: asyncmd.trajectory.PyTrajectoryFunctionWrapper
106 | :members:
107 | :special-members:
108 | :inherited-members:
109 |
110 | .. autoclass:: asyncmd.trajectory.SlurmTrajectoryFunctionWrapper
111 | :members:
112 | :special-members:
113 | :inherited-members:
114 |
115 | FrameExtractors
116 | ---------------
117 |
118 | .. autoclass:: asyncmd.trajectory.convert.FrameExtractor
119 | :members:
120 | :special-members:
121 | :inherited-members:
122 |
123 | .. autoclass:: asyncmd.trajectory.convert.NoModificationFrameExtractor
124 | :members:
125 | :inherited-members:
126 |
127 | .. autoclass:: asyncmd.trajectory.convert.InvertedVelocitiesFrameExtractor
128 | :members:
129 | :inherited-members:
130 |
131 | .. autoclass:: asyncmd.trajectory.convert.RandomVelocitiesFrameExtractor
132 | :members:
133 | :special-members:
134 | :inherited-members:
135 |
136 | Trajectory propagation
137 | ----------------------------------
138 |
139 | .. autoclass:: asyncmd.trajectory.InPartsTrajectoryPropagator
140 | :members:
141 | :special-members:
142 | :inherited-members:
143 |
144 | .. autoclass:: asyncmd.trajectory.ConditionalTrajectoryPropagator
145 | :members:
146 | :special-members:
147 | :inherited-members:
148 |
149 | Trajectory concatenation
150 | ------------------------
151 |
152 | .. autofunction:: asyncmd.trajectory.construct_TP_from_plus_and_minus_traj_segments
153 |
154 | .. autoclass:: asyncmd.trajectory.convert.TrajectoryConcatenator
155 | :members:
156 | :special-members:
157 | :inherited-members:
158 |
159 | gromacs
160 | *******
161 |
162 | The :py:mod:`asyncmd.gromacs` module contains all classes and functions to
163 | control gromacs engines from python. Most notably the
164 | :py:class:`asyncmd.gromacs.MDP` (which provides a dictionary-like interface to
165 | read, modify and write gromacs mdp files), and the two gromacs engines
166 | :py:class:`asyncmd.gromacs.GmxEngine` and
167 | :py:class:`asyncmd.gromacs.SlurmGmxEngine` which share most of their interface
168 | with the important difference that the
169 | :py:class:`asyncmd.gromacs.SlurmGmxEngine` submits all MD simulations via
170 | slurm while the :py:class:`asyncmd.gromacs.GmxEngine` runs locally on the same
171 | machine as the python process.
172 |
173 | MDP
174 | ---
175 | .. autoclass:: asyncmd.gromacs.MDP
176 | :members:
177 |
178 | Engine classes
179 | --------------
180 | .. autoclass:: asyncmd.gromacs.GmxEngine
181 | :members:
182 | :special-members:
183 | :inherited-members:
184 |
185 | .. autoclass:: asyncmd.gromacs.SlurmGmxEngine
186 | :members:
187 | :special-members:
188 | :inherited-members:
189 |
190 | config
191 | ******
192 |
193 | Various functions for configuring :py:mod:`asyncmd` behaviour during runtime.
194 | Most notably are probably the functions to limit resource use (i.e. number of
195 | concurrent SLURM jobs, number of open files, number of processes, etc.) and
196 | functions to influence the :py:class:`asyncmd.Trajectory` CV value caching
197 | like setting the default cache type for all :py:class:`asyncmd.Trajectory` or
198 | registering a ``h5py`` file (or group) for caching.
199 |
200 | General resource usage
201 | ----------------------
202 |
203 | .. autofunction:: asyncmd.config.set_max_process
204 |
205 | .. autofunction:: asyncmd.config.set_max_files_open
206 |
207 | SLURM settings and resource usage
208 | ---------------------------------
209 |
210 | .. autofunction:: asyncmd.config.set_slurm_max_jobs
211 |
212 | .. autofunction:: asyncmd.config.set_slurm_settings
213 |
214 | CV value caching
215 | ----------------
216 |
217 | .. autofunction:: asyncmd.config.set_default_trajectory_cache_type
218 |
219 | .. autofunction:: asyncmd.config.register_h5py_cache
220 |
221 | Overview for developers
222 | =======================
223 |
224 | This section is relevant for developers of :py:mod:`asyncmd`, e.g. when you
225 | want to add the option to steer an additional molecular dynamics engine (like
226 | NAMD or LAMMPS) or add additional ways to wrap functions acting on
227 | :py:class:`asyncmd.Trajectory`.
228 |
229 | .. py:currentmodule:: asyncmd.slurm
230 |
231 | This section also contains the interface of the classes, which are used under
232 | the hood by various user-facing classes in :py:mod:`asyncmd` to interact with
233 | the SLURM queueing system.
234 | Namely there is the :py:class:`SlurmProcess`, which emulates the interface of
235 | :py:class:`asyncio.subprocess.Process` and which is used to submit and wait for
236 | single SLURM jobs. Additionally (one level deeper under the hood) there is the
237 | :py:class:`SlurmClusterMediator`, which is a singleton class acting as the
238 | central communication point between the single :py:class:`SlurmProcess` and the
239 | SLURM commands ("sacct", "sbatch", etc.).
240 |
241 | SLURM interface classes
242 | ***********************
243 |
244 | .. autoclass:: asyncmd.slurm.SlurmProcess
245 | :member-order: bysource
246 | :members:
247 | :private-members:
248 | :special-members:
249 | :inherited-members:
250 |
251 | .. autoclass:: asyncmd.slurm.SlurmClusterMediator
252 | :member-order: bysource
253 | :members:
254 | :private-members:
255 | :special-members:
256 | :inherited-members:
257 |
258 | .. :undoc-members:
259 |
260 | Wrapper classes for functions acting on trajectories
261 | ****************************************************
262 |
263 | .. py:currentmodule:: asyncmd.trajectory.functionwrapper
264 |
265 | All wrapper classes for functions acting on :py:class:`asyncmd.Trajectory`
266 | should subclass :py:class:`TrajectoryFunctionWrapper` to make full and easy use
267 | of the caching mechanism already implemented. You then only need to implement
268 | :py:meth:`TrajectoryFunctionWrapper._get_id_str` and
269 | :py:meth:`TrajectoryFunctionWrapper.get_values_for_trajectory` to get a fully
270 | functional TrajectoryFunctionWrapper class. See also the (reference)
271 | implementation of the other wrapper classes,
272 | :py:class:`PyTrajectoryFunctionWrapper` and
273 | :py:class:`SlurmTrajectoryFunctionWrapper`.
274 |
275 | .. autoclass:: asyncmd.trajectory.functionwrapper.TrajectoryFunctionWrapper
276 | :member-order: bysource
277 | :members: __call__, _get_id_str, get_values_for_trajectory
278 | :special-members:
279 | :private-members:
280 | :inherited-members:
281 |
282 | .. :undoc-members:
283 |
284 | Molecular dynamics configuration file parsing and writing
285 | *********************************************************
286 |
287 | All molecular dynamics configuration file wrappers should subclass
288 | :py:class:`asyncmd.mdconfig.MDConfig`. This class defines the two abstract
289 | methods ``parse()`` and ``write()`` as well as the dictionary-like interface by
290 | subclassing from :class:`collections.abc.MutableMapping`.
291 |
292 | Most often you can probably subclass
293 | :py:class:`asyncmd.mdconfig.LineBasedMDConfig` directly. This has the advantage
294 | that you will only need to define the datatypes of the values (if you want
295 | them to be typed) and define a function that knows how to parse single lines of
296 | the config file format. To this end you should overwrite the abstract method
297 | :py:func:`asyncmd.mdconfig.LineBasedMDConfig._parse_line` in your subclass.
298 | The function will get single lines to parse and is expected to return the key, list
299 | of value(s) pair as a :py:class:`dict` with one item, e.g.
300 | ``{key: list of value(s)}``. If the line is parsed as comment the returned dict
301 | must be empty, e.g. ``{}``. If the option/key is present but without associated
302 | value(s) the list in the dict must be empty, e.g. ``{key: []}``.
303 |
304 | .. autoclass:: asyncmd.mdconfig.MDConfig
305 | :member-order: bysource
306 | :members: write, parse
307 | :inherited-members:
308 |
309 | .. autoclass:: asyncmd.mdconfig.LineBasedMDConfig
310 | :member-order: bysource
311 | :members:
312 | :private-members:
313 | :inherited-members:
314 |
315 | Molecular dynamics simulation engine wrappers
316 | *********************************************
317 |
318 | All molecular dynamics engines should subclass the abstract base class
319 | :py:class:`asyncmd.mdengine.MDEngine`, which defines the common interface
320 | expected from all :py:mod:`asyncmd` engine classes.
321 |
322 | In addition the module :py:mod:`asyncmd.mdengine` defines exceptions that the
323 | engines should raise when applicable. Currently defined are:
324 |
325 | - :py:class:`asyncmd.mdengine.EngineError` (a generic error, should be
326 | raised when no more specific error applies)
327 |
328 | - :py:class:`asyncmd.mdengine.EngineCrashedError` (should be raised when the
329 | wrapped MD engine code raises an exception during the MD integration)
330 |
331 | .. autoclass:: asyncmd.mdengine.MDEngine
332 | :members:
333 | :member-order: bysource
334 | :inherited-members:
335 | :undoc-members:
336 |
337 | .. autoclass:: asyncmd.mdengine.EngineError
338 |
339 | .. autoclass:: asyncmd.mdengine.EngineCrashedError
340 |
341 | API (Hierarchical module layout plan)
342 | =====================================
343 |
344 | .. autosummary::
345 | :recursive:
346 | :toctree: generated
347 |
348 | asyncmd
349 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = 'asyncmd'
21 | copyright = '2022, Hendrik Jung'
22 | author = 'Hendrik Jung'
23 |
24 | # The full version, including alpha/beta/rc tags
25 | release = '0.1.0'
26 |
27 |
28 | # -- General configuration ---------------------------------------------------
29 |
30 | # Add any Sphinx extension module names here, as strings. They can be
31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
32 | # ones.
33 | extensions = [
34 | 'sphinx.ext.autosummary',
35 | 'sphinx.ext.autodoc',
36 | 'sphinx.ext.napoleon'
37 | ]
38 |
39 | # Add any paths that contain templates here, relative to this directory.
40 | templates_path = ['_templates']
41 |
42 | # List of patterns, relative to source directory, that match files and
43 | # directories to ignore when looking for source files.
44 | # This pattern also affects html_static_path and html_extra_path.
45 | exclude_patterns = []
46 |
47 |
48 | # -- Options for HTML output -------------------------------------------------
49 |
50 | # The theme to use for HTML and HTML Help pages. See the documentation for
51 | # a list of builtin themes.
52 | #
53 | html_theme = 'alabaster'
54 |
55 | # Add any paths that contain custom static files (such as style sheets) here,
56 | # relative to this directory. They are copied after the builtin static files,
57 | # so a file named "default.css" will overwrite the builtin "default.css".
58 | html_static_path = ['_static']
59 |
60 |
61 | # -- Options for autosummary extension----------------------------------------
62 | autosummary_imported_members = False # default = False
63 | autosummary_ignore_module_all = True # default = True
64 |
65 | # -- Options for autodoc extension -------------------------------------------
66 | autodoc_default_options = {
67 | # document members (default = False)
68 | "members": False,
69 | }
70 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. asyncmd documentation master file, created by
2 | sphinx-quickstart on Sun Apr 24 02:14:01 2022.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to asyncmd's documentation!
7 | ===================================
8 |
9 | `asyncmd` is a library to write **concurrent** code to run and analyze
10 | molecular dynamics simulations using Python's **async/await** syntax.
11 |
12 |
13 | .. toctree::
14 | :maxdepth: 3
15 | :caption: Contents:
16 |
17 | api
18 |
19 |
20 | Indices and tables
21 | ==================
22 |
23 | * :ref:`genindex`
24 | * :ref:`modindex`
25 | * :ref:`search`
26 |
--------------------------------------------------------------------------------
/examples/03_trajectory_propagation_and_subtrajectory_extraction/FrameExtractors.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# `asyncmd.trajectory.convert.FrameExtractor`\n",
8 | "\n",
9 | "The `asyncmd.trajectory.convert` module contains various predefined classes to extract single frames from Trajectories, possibly after applying a modification. It is also very easy to write your own `FrameExtractor` class with a custom modification. This is as easy as subclassing from `asyncmd.trajectory.convert.FrameExtractor` and implementing the `apply_modification` method (see below).\n",
10 | "\n",
11 | "The `asyncmd.trajectory.convert` module also contains a `TrajectoryConcatenator`, which can be used to concatenate/write out trajectories from a list of trajectories and slices (it is explained in more detail in the notebook `ConditionalTrajectoryPropagator.ipynb`)."
12 | ]
13 | },
14 | {
15 | "cell_type": "markdown",
16 | "metadata": {},
17 | "source": [
18 | "### Imports and some basic checks that everything is available"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 1,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "%%bash\n",
28 | "# if using the module system to make gromacs and friends available:\n",
29 | "# check that they are loaded!\n",
30 | "#module list"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": 2,
36 | "metadata": {},
37 | "outputs": [
38 | {
39 | "name": "stdout",
40 | "output_type": "stream",
41 | "text": [
42 | "/usr/local/gromacs-2022.4/bin/gmx\n"
43 | ]
44 | }
45 | ],
46 | "source": [
47 | "%%bash\n",
48 | "# unix only, check that gmx is available\n",
49 | "which gmx"
50 | ]
51 | },
52 | {
53 | "cell_type": "code",
54 | "execution_count": 3,
55 | "metadata": {},
56 | "outputs": [],
57 | "source": [
58 | "%matplotlib inline"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": 4,
64 | "metadata": {},
65 | "outputs": [],
66 | "source": [
67 | "import os\n",
68 | "import asyncio\n",
69 | "import matplotlib.pyplot as plt\n",
70 | "import numpy as np\n",
71 | "import MDAnalysis as mda"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 5,
77 | "metadata": {},
78 | "outputs": [
79 | {
80 | "name": "stderr",
81 | "output_type": "stream",
82 | "text": [
83 | "Could not initialize SLURM cluster handling. If you are sure SLURM (sinfo/sacct/etc) is available try calling `asyncmd.config.set_slurm_settings()` with the appropriate arguments.\n"
84 | ]
85 | }
86 | ],
87 | "source": [
88 | "import asyncmd\n",
89 | "from asyncmd import gromacs as asyncgmx\n",
90 | "from asyncmd import trajectory as asynctraj"
91 | ]
92 | },
93 | {
94 | "cell_type": "markdown",
95 | "metadata": {},
96 | "source": [
97 | "### Setup working directory\n",
98 | "We will write the trajectory output to it."
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": 6,
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "workdir = \".\""
108 | ]
109 | },
110 | {
111 | "cell_type": "markdown",
112 | "metadata": {},
113 | "source": [
114 | "### Load and modify the parameter file (mdp file) for the molecular dynamics simulations"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 7,
120 | "metadata": {},
121 | "outputs": [
122 | {
123 | "name": "stdout",
124 | "output_type": "stream",
125 | "text": [
126 | "C-rescale\n"
127 | ]
128 | }
129 | ],
130 | "source": [
131 | "# Pcoupl = C-rescale needs gromacs version >= 2021\n",
132 | "mdp = asyncgmx.MDP(\"../resources/gromacs/capped_alanine_dipeptide/md.mdp\")\n",
133 | "print(mdp[\"Pcoupl\"])\n",
134 | "# set nstxout-compressed, such that the engines will produce XTC trajectories\n",
135 | "mdp[\"nstxout-compressed\"] = 20\n",
136 | "# and deactivate trr trajectory output\n",
137 | "mdp[\"nstxout\"] = mdp[\"nstvout\"] = 0"
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": 8,
143 | "metadata": {},
144 | "outputs": [],
145 | "source": [
146 | "# if your gmx version is >= 2021 you should comment the next line since C-rescale gives the correct ensemble (and Berendsen doesn't!)\n",
147 | "#mdp[\"Pcoupl\"] = \"Berendsen\""
148 | ]
149 | },
150 | {
151 | "cell_type": "markdown",
152 | "metadata": {},
153 | "source": [
154 | "### Initialize a `GmxEngine` to create a short trajectory\n",
155 | "We will use this trajectory to extract frames from using the various FrameExtractor classes."
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": 9,
161 | "metadata": {},
162 | "outputs": [
163 | {
164 | "name": "stderr",
165 | "output_type": "stream",
166 | "text": [
167 | "/home/think/.conda/envs/asyncmd_dev/lib/python3.10/site-packages/MDAnalysis/coordinates/XDR.py:240: UserWarning: Reload offsets from trajectory\n",
168 | " ctime or size or n_atoms did not match\n",
169 | " warnings.warn(\"Reload offsets from trajectory\\n \"\n"
170 | ]
171 | }
172 | ],
173 | "source": [
174 | "engine = asyncgmx.GmxEngine(mdconfig=mdp,\n",
175 | " gro_file=\"../resources/gromacs/capped_alanine_dipeptide/conf.gro\",\n",
176 | " top_file=\"../resources/gromacs/capped_alanine_dipeptide/topol_amber99sbildn.top\",\n",
177 | " mdrun_extra_args=\"-ntomp 2\", # for gmx sans (thread)MPI\n",
178 | "                            #mdrun_extra_args=\"-nt 2\", # for gmx with (thread)MPI\n",
179 | " )\n",
180 | "\n",
181 | "await engine.prepare(starting_configuration=None, workdir=workdir, deffnm=\"traj_to_extract_from\")\n",
182 | "traj = await engine.run_steps(nsteps=1e4)"
183 | ]
184 | },
185 | {
186 | "cell_type": "markdown",
187 | "metadata": {},
188 | "source": [
189 | "### Extract Frames using the predefined FrameExtractor classes\n",
190 | "\n",
191 | "Each `FrameExtractor` takes (one of) the arguments `mda_transformations` and `mda_transformations_setup_func` which allow you to pass/setup MDAnalysis on-the-fly transformations to e.g. center on a given molecule and wrap all molecules/atoms back into the simulation box while extracting and writing out the frame. See the `FrameExtractor` docstrings for when to use `mda_transformations` and when `mda_transformations_setup_func` and see https://docs.mdanalysis.org/stable/documentation_pages/trajectory_transformations.html for more on MDAnalysis transformations."
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 10,
197 | "metadata": {},
198 | "outputs": [],
199 | "source": [
200 | "# these are all the FrameExtractors,\n",
201 | "# note that FrameExtractor is an abstract base class, i.e. you can not instantiate it (or its subclasses without implementing the apply_modification method)\n",
202 | "from asyncmd.trajectory.convert import (FrameExtractor, NoModificationFrameExtractor,\n",
203 | " InvertedVelocitiesFrameExtractor, RandomVelocitiesFrameExtractor)"
204 | ]
205 | },
206 | {
207 | "cell_type": "code",
208 | "execution_count": 11,
209 | "metadata": {},
210 | "outputs": [
211 | {
212 | "name": "stdout",
213 | "output_type": "stream",
214 | "text": [
215 | "Trajectory(trajectory_files=frame_0.trr, structure_file=traj_to_extract_from.tpr)\n",
216 | "Trajectory(trajectory_files=frame_1.trr, structure_file=traj_to_extract_from.tpr)\n",
217 | "Trajectory(trajectory_files=frame_2.trr, structure_file=traj_to_extract_from.tpr)\n",
218 | "Trajectory(trajectory_files=frame_3.trr, structure_file=traj_to_extract_from.tpr)\n",
219 | "Trajectory(trajectory_files=frame_4.trr, structure_file=traj_to_extract_from.tpr)\n",
220 | "Trajectory(trajectory_files=frame_5.trr, structure_file=traj_to_extract_from.tpr)\n",
221 | "Trajectory(trajectory_files=frame_6.trr, structure_file=traj_to_extract_from.tpr)\n",
222 | "Trajectory(trajectory_files=frame_7.trr, structure_file=traj_to_extract_from.tpr)\n",
223 | "Trajectory(trajectory_files=frame_8.trr, structure_file=traj_to_extract_from.tpr)\n",
224 | "Trajectory(trajectory_files=frame_9.trr, structure_file=traj_to_extract_from.tpr)\n"
225 | ]
226 | }
227 | ],
228 | "source": [
229 | "# extract a number of frames, each FrameExtractor works the same, so we will only use the RandomVelocitiesFrameExtractor\n",
230 | "extractor = RandomVelocitiesFrameExtractor(T=303, # temperature for Maxwell-Boltzmann velocities in Kelvin\n",
231 | " )\n",
232 | "n_frames = 10\n",
233 | "for i in range(n_frames):\n",
234 | " # the extract method returns the frame as an asyncmd.Trajectory\n",
235 | " print(extractor.extract(outfile=f\"frame_{i}.trr\", # where to write the frame to\n",
236 | " traj_in=traj, # the trajectory from which we take the original frame\n",
237 | " idx=np.random.randint(len(traj)), # the index of the frame in traj_in\n",
238 | " )\n",
239 | " )"
240 | ]
241 | },
242 | {
243 | "cell_type": "markdown",
244 | "metadata": {},
245 | "source": [
246 | "### The extract method also has an async counterpart"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": 12,
252 | "metadata": {},
253 | "outputs": [
254 | {
255 | "data": {
256 | "text/plain": [
257 | "[Trajectory(trajectory_files=frame_0.trr, structure_file=traj_to_extract_from.tpr),\n",
258 | " Trajectory(trajectory_files=frame_1.trr, structure_file=traj_to_extract_from.tpr),\n",
259 | " Trajectory(trajectory_files=frame_2.trr, structure_file=traj_to_extract_from.tpr),\n",
260 | " Trajectory(trajectory_files=frame_3.trr, structure_file=traj_to_extract_from.tpr),\n",
261 | " Trajectory(trajectory_files=frame_4.trr, structure_file=traj_to_extract_from.tpr),\n",
262 | " Trajectory(trajectory_files=frame_5.trr, structure_file=traj_to_extract_from.tpr),\n",
263 | " Trajectory(trajectory_files=frame_6.trr, structure_file=traj_to_extract_from.tpr),\n",
264 | " Trajectory(trajectory_files=frame_7.trr, structure_file=traj_to_extract_from.tpr),\n",
265 | " Trajectory(trajectory_files=frame_8.trr, structure_file=traj_to_extract_from.tpr),\n",
266 | " Trajectory(trajectory_files=frame_9.trr, structure_file=traj_to_extract_from.tpr)]"
267 | ]
268 | },
269 | "execution_count": 12,
270 | "metadata": {},
271 | "output_type": "execute_result"
272 | }
273 | ],
274 | "source": [
275 | "# it has exactly the same arguments as the extract method\n",
276 | "await asyncio.gather(*(extractor.extract_async(outfile=f\"frame_{i}.trr\", # where to write the frame to\n",
277 | " traj_in=traj, # the trajectory from which we take the original frame\n",
278 | " idx=np.random.randint(len(traj)), # the index of the frame in traj_in\n",
279 | " overwrite=True, # overwrite=True makes sure we overwrite the existing outfiles (from the cell above) without asking/error\n",
280 | " )\n",
281 | " for i in range(n_frames)\n",
282 | " )\n",
283 | " )"
284 | ]
285 | },
286 | {
287 | "cell_type": "markdown",
288 | "metadata": {},
289 | "source": [
290 | "### Writing your own `FrameExtractor` subclass"
291 | ]
292 | },
293 | {
294 | "cell_type": "code",
295 | "execution_count": 13,
296 | "metadata": {},
297 | "outputs": [],
298 | "source": [
299 | "# it is as easy as this:\n",
300 | "class CustomFrameExtractor(FrameExtractor):\n",
301 | " def apply_modification(self, universe, ts):\n",
302 | "        # universe is the mdanalysis universe of the Trajectory/Frame that is being extracted\n",
303 | "        # ts is the timestep of the Frame that is being extracted\n",
304 | "        \n",
305 | "        # Here you can now apply your desired modifications to the timestep\n",
306 | "        ts.positions *= 100 ## don't do this in real life ;)\n",
307 | "\n",
308 | "        # the function does not (need to) return anything. Any return will be ignored\n",
309 | "        # But the changes to the timestep and universe will naturally be written out with the extracted frame\n",
310 | "\n",
311 | "\n",
312 | "# see also the implementations of the InvertedVelocitiesFrameExtractor and the RandomVelocitiesFrameExtractor in asyncmd/trajectory/convert.py"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": 14,
318 | "metadata": {},
319 | "outputs": [],
320 | "source": [
321 | "custom_extract = CustomFrameExtractor()"
322 | ]
323 | },
324 | {
325 | "cell_type": "code",
326 | "execution_count": 15,
327 | "metadata": {},
328 | "outputs": [
329 | {
330 | "data": {
331 | "text/plain": [
332 | "Trajectory(trajectory_files=frame_custom.trr, structure_file=traj_to_extract_from.tpr)"
333 | ]
334 | },
335 | "execution_count": 15,
336 | "metadata": {},
337 | "output_type": "execute_result"
338 | }
339 | ],
340 | "source": [
341 | "custom_extract.extract(outfile=\"frame_custom.trr\",\n",
342 | " traj_in=traj,\n",
343 | " idx=0,\n",
344 | " )"
345 | ]
346 | },
347 | {
348 | "cell_type": "code",
349 | "execution_count": null,
350 | "metadata": {},
351 | "outputs": [],
352 | "source": []
353 | }
354 | ],
355 | "metadata": {
356 | "kernelspec": {
357 | "display_name": "Python 3 (ipykernel)",
358 | "language": "python",
359 | "name": "python3"
360 | },
361 | "language_info": {
362 | "codemirror_mode": {
363 | "name": "ipython",
364 | "version": 3
365 | },
366 | "file_extension": ".py",
367 | "mimetype": "text/x-python",
368 | "name": "python",
369 | "nbconvert_exporter": "python",
370 | "pygments_lexer": "ipython3",
371 | "version": "3.13.2"
372 | }
373 | },
374 | "nbformat": 4,
375 | "nbformat_minor": 4
376 | }
377 |
--------------------------------------------------------------------------------
/examples/03_trajectory_propagation_and_subtrajectory_extraction/InPartsTrajectoryPropagator.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# `asyncmd.trajectory.InPartsTrajectoryPropagator`\n",
8 | "\n",
9 | "Useful for making efficient use of backfilling and/or running simulations that are longer than the timelimit, i.e. when using slurm or another queuing system. Use it together with e.g. the `SlurmGmxEngine` and not locally as done here for demonstration purposes."
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "### Imports and some basic checks that everything is available"
17 | ]
18 | },
19 | {
20 | "cell_type": "code",
21 | "execution_count": 1,
22 | "metadata": {},
23 | "outputs": [],
24 | "source": [
25 | "%%bash\n",
26 | "# if using the module system to make gromacs and friends available:\n",
27 | "# check that they are loaded!\n",
28 | "#module list"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 2,
34 | "metadata": {},
35 | "outputs": [
36 | {
37 | "name": "stdout",
38 | "output_type": "stream",
39 | "text": [
40 | "/usr/local/gromacs-2022.4/bin/gmx\n"
41 | ]
42 | }
43 | ],
44 | "source": [
45 | "%%bash\n",
46 | "# unix only, check that gmx is available\n",
47 | "which gmx"
48 | ]
49 | },
50 | {
51 | "cell_type": "code",
52 | "execution_count": 3,
53 | "metadata": {},
54 | "outputs": [],
55 | "source": [
56 | "%matplotlib inline"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 4,
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "import os\n",
66 | "import asyncio\n",
67 | "import matplotlib.pyplot as plt\n",
68 | "import numpy as np\n",
69 | "import MDAnalysis as mda"
70 | ]
71 | },
72 | {
73 | "cell_type": "code",
74 | "execution_count": 5,
75 | "metadata": {},
76 | "outputs": [
77 | {
78 | "name": "stderr",
79 | "output_type": "stream",
80 | "text": [
81 | "Could not initialize SLURM cluster handling. If you are sure SLURM (sinfo/sacct/etc) is available try calling `asyncmd.config.set_slurm_settings()` with the appropriate arguments.\n"
82 | ]
83 | }
84 | ],
85 | "source": [
86 | "import asyncmd\n",
87 | "from asyncmd import gromacs as asyncgmx\n",
88 | "from asyncmd import trajectory as asynctraj"
89 | ]
90 | },
91 | {
92 | "cell_type": "markdown",
93 | "metadata": {},
94 | "source": [
95 | "### Setup working directory\n",
96 | "We will write the trajectory output to it."
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": 6,
102 | "metadata": {},
103 | "outputs": [],
104 | "source": [
105 | "workdir = \".\""
106 | ]
107 | },
108 | {
109 | "cell_type": "markdown",
110 | "metadata": {},
111 | "source": [
112 | "### Load two different configurations as `asyncmd.Trajectory`"
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": 7,
118 | "metadata": {},
119 | "outputs": [],
120 | "source": [
121 | "# create an asyncmd.Trajectory of the initial configuration from the `GmxEngine.ipynb` notebook\n",
122 | "conf_in_alphaR = asyncmd.Trajectory(trajectory_files=\"../resources/gromacs/capped_alanine_dipeptide/conf_in_alphaR.trr\",\n",
123 | " structure_file=\"../resources/gromacs/capped_alanine_dipeptide/conf.gro\",\n",
124 | " )\n",
125 | "# create a second asyncmd.Trajectory of another configuration (in another state)\n",
126 | "conf_in_C7eq = asyncmd.Trajectory(trajectory_files=\"../resources/gromacs/capped_alanine_dipeptide/conf_in_C7eq.trr\",\n",
127 | " structure_file=\"../resources/gromacs/capped_alanine_dipeptide/conf.gro\",\n",
128 | " )"
129 | ]
130 | },
131 | {
132 | "cell_type": "markdown",
133 | "metadata": {},
134 | "source": [
135 | "### Load and potentially modify the parameter file (mdp file) for the molecular dynamics simulations"
136 | ]
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": 8,
141 | "metadata": {},
142 | "outputs": [
143 | {
144 | "name": "stdout",
145 | "output_type": "stream",
146 | "text": [
147 | "C-rescale\n"
148 | ]
149 | }
150 | ],
151 | "source": [
152 | "# Pcoupl = C-rescale needs gromacs version >= 2021\n",
153 | "mdp = asyncgmx.MDP(\"../resources/gromacs/capped_alanine_dipeptide/md.mdp\")\n",
154 | "print(mdp[\"Pcoupl\"])\n",
155 | "# set nstxout-compressed, such that the engines will produce XTC trajectories\n",
156 | "mdp[\"nstxout-compressed\"] = 20\n",
157 | "# and deactivate trr trajectory output\n",
158 | "mdp[\"nstxout\"] = mdp[\"nstvout\"] = 0"
159 | ]
160 | },
161 | {
162 | "cell_type": "code",
163 | "execution_count": 9,
164 | "metadata": {},
165 | "outputs": [],
166 | "source": [
167 | "# if your gmx version is >= 2021 you should comment the next line since C-rescale gives the correct ensemble (and Berendsen doesn't!)\n",
168 | "#mdp[\"Pcoupl\"] = \"Berendsen\""
169 | ]
170 | },
171 | {
172 | "cell_type": "markdown",
173 | "metadata": {},
174 | "source": [
175 | "### Initialize the `InPartsTrajectoryPropagator`s\n",
176 | "The `InPartsTrajectoryPropagator` produces a Trajectory of a given total length (`n_steps`) in parts of a given walltime (`walltime_per_part`). This is useful to make full use of SLURMs backfilling and also to generate Trajectories of a given total length that exceeds the queues timelimit."
177 | ]
178 | },
179 | {
180 | "cell_type": "code",
181 | "execution_count": 10,
182 | "metadata": {},
183 | "outputs": [],
184 | "source": [
185 | "# The walltime per trajectory part determines how long each of the parts of the trajectory will be\n",
186 | "\n",
187 | "walltime = 10 / (60 * 60) # walltime is measured in hours, so this will be 10 s per part!\n",
188 | "\n",
189 | "propas = [asynctraj.InPartsTrajectoryPropagator(\n",
190 | " n_steps=2e4,\n",
191 | " engine_cls=asyncgmx.GmxEngine,\n",
192 | " engine_kwargs={\"mdconfig\": mdp,\n",
193 | " \"gro_file\": \"../resources/gromacs/capped_alanine_dipeptide/conf.gro\",\n",
194 | " \"top_file\": \"../resources/gromacs/capped_alanine_dipeptide/topol_amber99sbildn.top\",\n",
195 | " \"mdrun_extra_args\": \"-ntomp 2\", # for gmx sans (thread)MPI\n",
196 | "                                             #\"mdrun_extra_args\": \"-nt 2\", # for gmx with (thread)MPI\n",
197 | " },\n",
198 | " walltime_per_part=walltime,\n",
199 | " )\n",
200 | " for _ in range(2)]"
201 | ]
202 | },
203 | {
204 | "cell_type": "markdown",
205 | "metadata": {},
206 | "source": [
207 | "### We will use the `propagate_and_concatenate()` method which directly concatenates the generated trajectory parts into one trajectory\n",
208 | "\n",
209 | "The `propagate` method returns the list of trajectory parts and `cut_and_concatenate` can make the list into one continuous trajectory. The `propagate_and_concatenate` method just calls both of them in order for convenience."
210 | ]
211 | },
212 | {
213 | "cell_type": "code",
214 | "execution_count": 11,
215 | "metadata": {},
216 | "outputs": [],
217 | "source": [
218 | "# the `propagate_and_concatenate` method returns the concatenated trajectory of the requested (total) length\n",
219 | "# Using asyncio.gather as usual to do both MD runs in parallel\n",
220 | "wdir_alphaR = os.path.join(workdir, \"from_alphaR\")\n",
221 | "os.mkdir(wdir_alphaR)\n",
222 | "wdir_C7eq = os.path.join(workdir, \"from_C7eq\")\n",
223 | "os.mkdir(wdir_C7eq)\n",
224 | "traj_from_alphaR, traj_from_C7eq = await asyncio.gather(propas[0].propagate_and_concatenate(\n",
225 | " starting_configuration=conf_in_alphaR,\n",
226 | " workdir=wdir_alphaR,\n",
227 | " deffnm=\"from_alphaR\",\n",
228 | " tra_out=os.path.join(wdir_alphaR, \"traj_from_alphaR.xtc\")\n",
229 | " ),\n",
230 | " propas[1].propagate_and_concatenate(\n",
231 | " starting_configuration=conf_in_C7eq,\n",
232 | " workdir=wdir_C7eq,\n",
233 | " deffnm=\"from_C7_eq\",\n",
234 | " tra_out=os.path.join(wdir_C7eq, \"traj_from_C7_eq.xtc\")\n",
235 | " )\n",
236 | " )"
237 | ]
238 | },
239 | {
240 | "cell_type": "code",
241 | "execution_count": 12,
242 | "metadata": {},
243 | "outputs": [
244 | {
245 | "name": "stdout",
246 | "output_type": "stream",
247 | "text": [
248 | "The trajectory from alphaR has 1001 frames, the one from C7_eq has 1001 frames.\n"
249 | ]
250 | }
251 | ],
252 | "source": [
253 | "print(f\"The trajectory from alphaR has {len(traj_from_alphaR)} frames, the one from C7_eq has {len(traj_from_C7eq)} frames.\")"
254 | ]
255 | },
256 | {
257 | "cell_type": "markdown",
258 | "metadata": {},
259 | "source": [
260 | "## You can easily extend your simulations\n",
261 | "Here we will just reset the number of steps for the existing propagator objects, but it would work the same if we would have initialized two new ones (using `engine_cls` and `engine_kwargs` compatible with our previous run, you can change the walltime)."
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": 13,
267 | "metadata": {},
268 | "outputs": [],
269 | "source": [
270 | "# double the number of integration steps we want to do\n",
271 | "propas[0].n_steps *= 2\n",
272 | "propas[1].n_steps *= 2"
273 | ]
274 | },
275 | {
276 | "cell_type": "code",
277 | "execution_count": 14,
278 | "metadata": {},
279 | "outputs": [],
280 | "source": [
281 | "# and run again, this time passing continuation=True and overwrite=True such that we overwrite the old concatenated trajectory\n",
282 | "traj_from_alphaR, traj_from_C7eq = await asyncio.gather(propas[0].propagate_and_concatenate(\n",
283 | " starting_configuration=conf_in_alphaR,\n",
284 | " workdir=wdir_alphaR,\n",
285 | " deffnm=\"from_alphaR\",\n",
286 | " tra_out=os.path.join(wdir_alphaR, \"traj_from_alphaR.xtc\"),\n",
287 | " continuation=True,\n",
288 | " overwrite=True,\n",
289 | " ),\n",
290 | " propas[1].propagate_and_concatenate(\n",
291 | " starting_configuration=conf_in_C7eq,\n",
292 | " workdir=wdir_C7eq,\n",
293 | " deffnm=\"from_C7_eq\",\n",
294 | " tra_out=os.path.join(wdir_C7eq, \"traj_from_C7_eq.xtc\"),\n",
295 | " continuation=True,\n",
296 | " overwrite=True,\n",
297 | " )\n",
298 | " )"
299 | ]
300 | },
301 | {
302 | "cell_type": "code",
303 | "execution_count": 15,
304 | "metadata": {},
305 | "outputs": [
306 | {
307 | "name": "stdout",
308 | "output_type": "stream",
309 | "text": [
310 | "The trajectory from alphaR has 2001 frames, the one from C7_eq has 2001 frames.\n"
311 | ]
312 | },
313 | {
314 | "name": "stderr",
315 | "output_type": "stream",
316 | "text": [
317 | "/home/think/.conda/envs/asyncmd_dev/lib/python3.10/site-packages/MDAnalysis/coordinates/XDR.py:240: UserWarning: Reload offsets from trajectory\n",
318 | " ctime or size or n_atoms did not match\n",
319 | " warnings.warn(\"Reload offsets from trajectory\\n \"\n"
320 | ]
321 | }
322 | ],
323 | "source": [
324 | "print(f\"The trajectory from alphaR has {len(traj_from_alphaR)} frames, the one from C7_eq has {len(traj_from_C7eq)} frames.\")"
325 | ]
326 | },
327 | {
328 | "cell_type": "code",
329 | "execution_count": null,
330 | "metadata": {},
331 | "outputs": [],
332 | "source": []
333 | }
334 | ],
335 | "metadata": {
336 | "kernelspec": {
337 | "display_name": "Python 3 (ipykernel)",
338 | "language": "python",
339 | "name": "python3"
340 | },
341 | "language_info": {
342 | "codemirror_mode": {
343 | "name": "ipython",
344 | "version": 3
345 | },
346 | "file_extension": ".py",
347 | "mimetype": "text/x-python",
348 | "name": "python",
349 | "nbconvert_exporter": "python",
350 | "pygments_lexer": "ipython3",
351 | "version": "3.13.2"
352 | }
353 | },
354 | "nbformat": 4,
355 | "nbformat_minor": 4
356 | }
357 |
--------------------------------------------------------------------------------
/examples/05_developer_topics/slurm/SlurmProcess.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "e4e10720-3bb5-463f-b9b0-53b0fd8867d4",
6 | "metadata": {},
7 | "source": [
8 | "# `SlurmProcess`\n",
9 | "## A wrapper around SLURM with the same interface as `asyncio.subprocess`"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "id": "2a0f1a0b-f7ad-4f33-9819-13479276d38d",
16 | "metadata": {},
17 | "outputs": [
18 | {
19 | "name": "stderr",
20 | "output_type": "stream",
21 | "text": [
22 | "/u/hejung/conda-envs/asyncmd_dev/lib/python3.10/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
23 | " from .autonotebook import tqdm as notebook_tqdm\n"
24 | ]
25 | }
26 | ],
27 | "source": [
28 | "import os\n",
29 | "import numpy as np\n",
30 | "import asyncmd"
31 | ]
32 | },
33 | {
34 | "cell_type": "markdown",
35 | "id": "f1669109-6813-44b9-80c8-ee8d2243b16a",
36 | "metadata": {},
37 | "source": [
38 | "## Define a simple sbatch script\n",
39 | "\n",
40 | "The script just prints whatever input it gets via stdin.\n",
41 | "\n",
42 |     "### NOTE: You might need to change the partition and memory to adapt it to the cluster you are using."
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 2,
48 | "id": "f5c15ec3-6f5a-4468-ac46-0f9c590ab49f",
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "slurm_script_content = \"\"\"#!/bin/bash -l\n",
53 | "# Standard output and error will be determined from `jobname` variable passed to SlurmProcess\n",
54 | "# Initial working directory will be determined by `workdir` variable passed to SlurmProcess\n",
55 | "# Queue (Partition):\n",
56 | "#SBATCH --partition=s.bio ## ADOPT TO YOUR CLUSTER!\n",
57 | "#SBATCH --mem=4750\n",
58 | "#\n",
59 | "# Number of nodes and MPI tasks per node:\n",
60 | "#SBATCH --nodes=1\n",
61 | "#SBATCH --ntasks-per-node=1\n",
62 | "#SBATCH --cpus-per-task=1\n",
63 | "# Wall clock limit:\n",
64 | "#SBATCH --time=24:00:00\n",
65 | "\n",
66 | "export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK\n",
67 | "export MPI_NUM_RANKS=$SLURM_NTASKS_PER_NODE\n",
68 | "export OMP_PLACES=cores\n",
69 | "\n",
70 | "# sleep for 10 seconds such that we can actually pipe some input to stdin before the job finishes\n",
71 | "sleep 10\n",
72 | "\n",
73 | "# print whatever we got via stdin\n",
74 | "printf \"$( 0
52 | # psi: > -50 but smaller 30 degree
53 | deg = 180/np.pi
54 | state[(phi <= 0) & (-50/deg <= psi) & (psi <= 30/deg)] = True
55 | return state
56 |
57 |
def C7_eq(traj, skip=1):
    """
    Evaluate the C7_eq state function for a trajectory.

    A configuration belongs to the C7_eq state if its two backbone
    dihedrals fall into the ranges:
        phi: -pi < phi < 0
        psi: 120 degree < psi < 200 degree

    Parameters
    ----------
    traj : asyncmd.Trajectory
        The trajectory for which the state function is calculated.
    skip : int, optional
        stride for trajectory iteration, by default 1

    Returns
    -------
    numpy.ndarray, shape=(n_frames,)
        Boolean array indicating for every considered configuration on the
        trajectory whether it falls into the state or not.
    """
    universe = mda.Universe(traj.structure_file, *traj.trajectory_files)
    # build the dihedral atomgroups atom by atom so their order is fixed
    psi_atoms = universe.select_atoms("resname ALA and name N")    # idx 6
    psi_atoms += universe.select_atoms("resname ALA and name CA")  # idx 8
    psi_atoms += universe.select_atoms("resname ALA and name C")   # idx 14
    psi_atoms += universe.select_atoms("resname NME and name N")   # idx 16
    phi_atoms = universe.select_atoms("resname ACE and name C")    # idx 4
    phi_atoms += universe.select_atoms("resname ALA and name N")   # idx 6
    phi_atoms += universe.select_atoms("resname ALA and name CA")  # idx 8
    phi_atoms += universe.select_atoms("resname ALA and name C")   # idx 14
    n_frames = len(universe.trajectory[::skip])
    phi = np.empty((n_frames,), dtype=np.float64)
    psi = np.empty((n_frames,), dtype=np.float64)
    for idx, ts in enumerate(universe.trajectory[::skip]):
        phi[idx] = calc_dihedrals(*(at.position for at in phi_atoms),
                                  box=ts.dimensions)
        psi[idx] = calc_dihedrals(*(at.position for at in psi_atoms),
                                  box=ts.dimensions)
    # make sure MDAnalysis closes the underlying trajectory files directly
    universe.trajectory.close()
    # divide degrees by this factor to get radians
    deg = 180/np.pi
    # the psi interval 120..200 degree wraps around +pi, i.e. 200 == -160
    state = (phi <= 0) & ((120/deg <= psi) | (psi <= -160/deg))
    return state
103 |
104 |
def descriptor_func_psi_phi(traj, skip=1):
    """
    Calculate the psi and phi dihedral angles as internal coordinates.

    Parameters
    ----------
    traj : asyncmd.Trajectory
        Input trajectory.
    skip : int, optional
        stride for trajectory iteration, by default 1

    Returns
    -------
    np.ndarray
        psi, phi values for trajectory, shape=(n_frames, 2)
    """
    universe = mda.Universe(traj.structure_file, *traj.trajectory_files)
    psi_atoms = universe.select_atoms("index 6 or index 8 or index 14 or index 16")
    phi_atoms = universe.select_atoms("index 4 or index 6 or index 8 or index 14")
    n_frames = len(universe.trajectory[::skip])
    # one row per considered frame: first column psi, second column phi
    values = np.empty((n_frames, 2), dtype=np.float64)
    for idx, ts in enumerate(universe.trajectory[::skip]):
        values[idx, 0] = calc_dihedrals(*(at.position for at in psi_atoms),
                                        box=ts.dimensions)
        values[idx, 1] = calc_dihedrals(*(at.position for at in phi_atoms),
                                        box=ts.dimensions)
    # make sure MDAnalysis closes the underlying trajectory files directly
    universe.trajectory.close()
    return values
133 |
134 |
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Calculate CV values for alanine dipeptide",
    )
    parser.add_argument("structure_file", type=str)
    parser.add_argument("trajectory_files", type=str, nargs="+")
    parser.add_argument("output_file", type=str)
    # NOTE: the default must be one of the choices! argparse does not
    #       validate defaults against `choices`, so the previous default
    #       ("descriptors") silently matched no branch below and left
    #       `vals` unbound when -f was not given.
    parser.add_argument("-f", "--function", type=str,
                        default="descriptors_psi_phi",
                        choices=["alphaR", "C7eq", "descriptors_psi_phi"])
    parser.add_argument("-s", "--skip", type=int, default=1)
    args = parser.parse_args()
    # NOTE: since args is a namespace, args.structure_file and
    #       args.trajectory_files are set, i.e. we can pass args instead
    #       of an asyncmd.Trajectory to the functions above
    if args.function == "descriptors_psi_phi":
        vals = descriptor_func_psi_phi(args, skip=args.skip)
    elif args.function == "alphaR":
        vals = alpha_R(args, skip=args.skip)
    elif args.function == "C7eq":
        vals = C7_eq(args, skip=args.skip)
    else:
        # unreachable with the choices above, but fail loudly if it happens
        raise ValueError(f"Unknown function: {args.function}")

    np.save(args.output_file, vals)
158 |
--------------------------------------------------------------------------------
/examples/resources/gromacs/capped_alanine_dipeptide/conf_in_C7eq.trr:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:7a19ee0e2e2bf2e58e980c86b6735f09e0326b51f40c0878abcb3f838da8e489
3 | size 39744
4 |
--------------------------------------------------------------------------------
/examples/resources/gromacs/capped_alanine_dipeptide/conf_in_alphaR.trr:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:3e9678bdb4cc38d78de9c49e8b9ac7ac315f528f91c4278831d582ba5feb3a20
3 | size 39744
4 |
--------------------------------------------------------------------------------
/examples/resources/gromacs/capped_alanine_dipeptide/md.mdp:
--------------------------------------------------------------------------------
1 | ;integrator              = md-vv    ; velocity-verlet
2 | integrator = md ; leap-frog (not perfectly time reversible because we start with v_{t+1/2} and x_{t})
3 | ;integrator = sd ; leap-frog langevin dynamics, inverse friction is set via tau-t
4 | dt = 0.002
5 | nsteps = -1 ; unlimited steps
6 | nstxout = 20
7 | nstvout = 20
8 | nstlog = 20
9 | ;nstenergy = 20
10 | nstxout-compressed = 0
11 | nstlist = 50
12 | ns-type = grid
13 | cutoff-scheme = Verlet
14 | ;verlet-buffer-tolerance = 0.001 ; kJ/mol/ps (default=0.005)
15 | rlist = 1.1
16 | coulombtype = PME
17 | rcoulomb = 1.1
18 | rvdw = 1.1
19 | Tcoupl = v-rescale
20 | tc-grps = Protein SOL
21 | ;tau-t = 10. 1. ; very large (almost no) tcouple for protein, large tau for solvent (docs say \Tau~0.5 for production)
22 | tau-t = 0.5 0.5
23 | ref-t = 300 300
24 | Pcoupl = C-rescale ; requires gromacs 2021?
25 | ;Pcoupl = Berendsen
26 | tau-p = 1.0
27 | compressibility = 4.5e-5
28 | ref-p = 1.0
29 | gen-vel = no
30 | constraints = h-bonds
31 | ;lincs-iter = 1 ; 1 is the default
32 |
--------------------------------------------------------------------------------
/examples/resources/gromacs/capped_alanine_dipeptide/mdrun.slurm:
--------------------------------------------------------------------------------
1 | #!/bin/bash -l
2 | #SBATCH --ntasks=2
3 | #SBATCH --cpus-per-task=1
4 | #SBATCH --mem=4500
5 | ### Things you might want to set to run resource-efficient (non-exhaustive)
6 | ##SBATCH --partition=
7 | ##SBATCH --time=
8 | ##SBATCH --nodes=
9 |
10 | # Note: make sure that you activate the correct environment, preferably the same you run asyncmd from
11 | source ~/asyncmd_workshop_test/source_modules_phys.sh
12 |
13 | srun {mdrun_cmd}
14 |
--------------------------------------------------------------------------------
/examples/resources/gromacs/capped_alanine_dipeptide/topol_amber99sbildn.top:
--------------------------------------------------------------------------------
1 | ;
2 | ; File 'topol_amber99sbildn.top' was generated
3 | ; By user: think (1000)
4 | ; On host: Kruemel
5 | ; At date: Sun Aug 29 15:06:35 2021
6 | ;
7 | ; This is a standalone topology file
8 | ;
9 | ; Created by:
10 | ; :-) GROMACS - gmx pdb2gmx, 2020.4 (-:
11 | ;
12 | ; Executable: /usr/local/gromacs-2020.4/bin/gmx
13 | ; Data prefix: /usr/local/gromacs-2020.4
14 | ; Working dir: /home/think/Documents/sources/OPS/aimmd/examples/distributed/gmx_infiles/new_from_scratch
15 | ; Command line:
16 | ; gmx pdb2gmx -ignh -f AD_initial_frame.pdb -p topol_amber99sbildn.top
17 | ; Force field was read from the standard GROMACS share directory.
18 | ;
19 |
20 | ; Include forcefield parameters
21 | #include "amber99sb-ildn.ff/forcefield.itp"
22 |
23 | [ moleculetype ]
24 | ; Name nrexcl
25 | Protein_chain_A 3
26 |
27 | [ atoms ]
28 | ; nr type resnr residue atom cgnr charge mass typeB chargeB massB
29 | ; residue 1 ACE rtp ACE q 0.0
30 | 1 CT 1 ACE CH3 1 -0.3662 12.01
31 | 2 HC 1 ACE HH31 2 0.1123 1.008
32 | 3 HC 1 ACE HH32 3 0.1123 1.008
33 | 4 HC 1 ACE HH33 4 0.1123 1.008
34 | 5 C 1 ACE C 5 0.5972 12.01
35 | 6 O 1 ACE O 6 -0.5679 16 ; qtot 0
36 | ; residue 2 ALA rtp ALA q 0.0
37 | 7 N 2 ALA N 7 -0.4157 14.01
38 | 8 H 2 ALA H 8 0.2719 1.008
39 | 9 CT 2 ALA CA 9 0.0337 12.01
40 | 10 H1 2 ALA HA 10 0.0823 1.008
41 | 11 CT 2 ALA CB 11 -0.1825 12.01
42 | 12 HC 2 ALA HB1 12 0.0603 1.008
43 | 13 HC 2 ALA HB2 13 0.0603 1.008
44 | 14 HC 2 ALA HB3 14 0.0603 1.008
45 | 15 C 2 ALA C 15 0.5973 12.01
46 | 16 O 2 ALA O 16 -0.5679 16 ; qtot 0
47 | ; residue 3 NME rtp NME q 0.0
48 | 17 N 3 NME N 17 -0.4157 14.01
49 | 18 H 3 NME H 18 0.2719 1.008
50 | 19 CT 3 NME CH3 19 -0.149 12.01
51 | 20 H1 3 NME HH31 20 0.0976 1.008
52 | 21 H1 3 NME HH32 21 0.0976 1.008
53 | 22 H1 3 NME HH33 22 0.0976 1.008 ; qtot 0
54 |
55 | [ bonds ]
56 | ; ai aj funct c0 c1 c2 c3
57 | 1 2 1
58 | 1 3 1
59 | 1 4 1
60 | 1 5 1
61 | 5 6 1
62 | 5 7 1
63 | 7 8 1
64 | 7 9 1
65 | 9 10 1
66 | 9 11 1
67 | 9 15 1
68 | 11 12 1
69 | 11 13 1
70 | 11 14 1
71 | 15 16 1
72 | 15 17 1
73 | 17 18 1
74 | 17 19 1
75 | 19 20 1
76 | 19 21 1
77 | 19 22 1
78 |
79 | [ pairs ]
80 | ; ai aj funct c0 c1 c2 c3
81 | 1 8 1
82 | 1 9 1
83 | 2 6 1
84 | 2 7 1
85 | 3 6 1
86 | 3 7 1
87 | 4 6 1
88 | 4 7 1
89 | 5 10 1
90 | 5 11 1
91 | 5 15 1
92 | 6 8 1
93 | 6 9 1
94 | 7 12 1
95 | 7 13 1
96 | 7 14 1
97 | 7 16 1
98 | 7 17 1
99 | 8 10 1
100 | 8 11 1
101 | 8 15 1
102 | 9 18 1
103 | 9 19 1
104 | 10 12 1
105 | 10 13 1
106 | 10 14 1
107 | 10 16 1
108 | 10 17 1
109 | 11 16 1
110 | 11 17 1
111 | 12 15 1
112 | 13 15 1
113 | 14 15 1
114 | 15 20 1
115 | 15 21 1
116 | 15 22 1
117 | 16 18 1
118 | 16 19 1
119 | 18 20 1
120 | 18 21 1
121 | 18 22 1
122 |
123 | [ angles ]
124 | ; ai aj ak funct c0 c1 c2 c3
125 | 2 1 3 1
126 | 2 1 4 1
127 | 2 1 5 1
128 | 3 1 4 1
129 | 3 1 5 1
130 | 4 1 5 1
131 | 1 5 6 1
132 | 1 5 7 1
133 | 6 5 7 1
134 | 5 7 8 1
135 | 5 7 9 1
136 | 8 7 9 1
137 | 7 9 10 1
138 | 7 9 11 1
139 | 7 9 15 1
140 | 10 9 11 1
141 | 10 9 15 1
142 | 11 9 15 1
143 | 9 11 12 1
144 | 9 11 13 1
145 | 9 11 14 1
146 | 12 11 13 1
147 | 12 11 14 1
148 | 13 11 14 1
149 | 9 15 16 1
150 | 9 15 17 1
151 | 16 15 17 1
152 | 15 17 18 1
153 | 15 17 19 1
154 | 18 17 19 1
155 | 17 19 20 1
156 | 17 19 21 1
157 | 17 19 22 1
158 | 20 19 21 1
159 | 20 19 22 1
160 | 21 19 22 1
161 |
162 | [ dihedrals ]
163 | ; ai aj ak al funct c0 c1 c2 c3 c4 c5
164 | 2 1 5 6 9
165 | 2 1 5 7 9
166 | 3 1 5 6 9
167 | 3 1 5 7 9
168 | 4 1 5 6 9
169 | 4 1 5 7 9
170 | 1 5 7 8 9
171 | 1 5 7 9 9
172 | 6 5 7 8 9
173 | 6 5 7 9 9
174 | 5 7 9 10 9
175 | 5 7 9 11 9
176 | 5 7 9 15 9
177 | 8 7 9 10 9
178 | 8 7 9 11 9
179 | 8 7 9 15 9
180 | 7 9 11 12 9
181 | 7 9 11 13 9
182 | 7 9 11 14 9
183 | 10 9 11 12 9
184 | 10 9 11 13 9
185 | 10 9 11 14 9
186 | 15 9 11 12 9
187 | 15 9 11 13 9
188 | 15 9 11 14 9
189 | 7 9 15 16 9
190 | 7 9 15 17 9
191 | 10 9 15 16 9
192 | 10 9 15 17 9
193 | 11 9 15 16 9
194 | 11 9 15 17 9
195 | 9 15 17 18 9
196 | 9 15 17 19 9
197 | 16 15 17 18 9
198 | 16 15 17 19 9
199 | 15 17 19 20 9
200 | 15 17 19 21 9
201 | 15 17 19 22 9
202 | 18 17 19 20 9
203 | 18 17 19 21 9
204 | 18 17 19 22 9
205 |
206 | [ dihedrals ]
207 | ; ai aj ak al funct c0 c1 c2 c3
208 | 1 7 5 6 4
209 | 5 9 7 8 4
210 | 9 17 15 16 4
211 | 15 19 17 18 4
212 |
213 | ; Include Position restraint file
214 | #ifdef POSRES
215 | #include "posre.itp"
216 | #endif
217 |
218 | ; Include water topology
219 | #include "amber99sb-ildn.ff/tip3p.itp"
220 |
221 | #ifdef POSRES_WATER
222 | ; Position restraint for each water oxygen
223 | [ position_restraints ]
224 | ; i funct fcx fcy fcz
225 | 1 1 1000 1000 1000
226 | #endif
227 |
228 | ; Include topology for ions
229 | #include "amber99sb-ildn.ff/ions.itp"
230 |
231 | [ system ]
232 | ; Name
233 | MDANALYSIS FRAMES FROM 0, STEP 1: Created by PDBWriter
234 |
235 | [ molecules ]
236 | ; Compound #mols
237 | Protein_chain_A 1
238 | SOL 543
239 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools >= 64"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "asyncmd"
7 | version = "0.3.3"
8 | dependencies = ["aiofiles",
9 | "mdanalysis",
10 | "numpy",
11 | "scipy",
12 | ]
13 | requires-python = ">=3.10"
14 | authors = [{ name = "Hendrik Jung", email = "hendrik.jung@biophys.mpg.de"}]
15 | maintainers = [{ name = "Hendrik Jung", email = "hendrik.jung@biophys.mpg.de"}]
16 | description = """asyncmd is a library to write concurrent code to run and \
17 | analyze molecular dynamics simulations using Python's async/await syntax."""
18 | readme = "README.md"
19 | keywords = ["molecular dynamics", "molecular-dynamics", "MD",
20 | "high performance computing", "HPC",
21 | "slurm", "SLURM",
22 | "gromacs", "GROMACS",
23 | ]
24 | classifiers = [
25 | "Development Status :: 4 - Beta",
26 | "Intended Audience :: Science/Research",
27 | "Natural Language :: English",
28 | "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
29 | "Operating System :: OS Independent",
30 | "Programming Language :: Python",
31 | "Programming Language :: Python :: 3",
32 | "Topic :: Scientific/Engineering",
33 | "Topic :: Scientific/Engineering :: Chemistry",
34 | "Topic :: Scientific/Engineering :: Physics",
35 | "Topic :: Software Development :: Libraries :: Python Modules",
36 | ]
37 |
38 | [project.optional-dependencies]
39 | docs = ["sphinx"]
40 | tests = ["pytest", "pytest-asyncio"]
41 | tests-all = ["asyncmd[tests]", "h5py", "coverage", "pytest-cov"]
42 | dev = ["asyncmd[docs,tests-all]"]
43 |
44 | [project.urls]
45 | #Documentation =
46 | Repository = "https://github.com/bio-phys/asyncmd.git"
47 | Issues = "https://github.com/bio-phys/asyncmd/issues"
48 |
49 | [tool.setuptools.packages.find]
50 | where = ["src"]
51 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | asyncio_mode = strict
3 | addopts = -ra
4 |
--------------------------------------------------------------------------------
/src/asyncmd/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | from ._version import __version__, __git_hash__
16 |
17 | from . import config
18 | from .trajectory.trajectory import Trajectory
19 |
--------------------------------------------------------------------------------
/src/asyncmd/_config.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 |
16 |
17 | # NOTE: This file **only** contains the dictionaries with the values
18 | # and **no** functions to set them, the funcs all live in 'config.py'.
19 | # The idea here is that we can then without any issues import additional
20 | # stuff (like the config functions from 'slurm.py') in 'config.py'
21 | #       without risking circular imports because all asyncmd files should only
22 | #       need to import the _GLOBALS and _SEMAPHORES dicts from '_config.py'.
23 |
24 |
# global configuration values, filled/mutated by the functions in 'config.py'
_GLOBALS = {}
# semaphores limiting concurrent resource usage (processes, open files, jobs)
_SEMAPHORES = {}
27 |
--------------------------------------------------------------------------------
/src/asyncmd/_version.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import os
16 | import subprocess
17 |
18 |
19 | def _get_version_from_pyproject():
20 | """Get version string from pyproject.toml file."""
21 | pyproject_toml = os.path.join(os.path.dirname(__file__),
22 | "../../pyproject.toml")
23 | with open(pyproject_toml) as f:
24 | line = f.readline()
25 | while line:
26 | if line.startswith("version ="):
27 | version_line = line
28 | break
29 | line = f.readline()
30 | version = version_line.strip().split(" = ")[1]
31 | version = version.replace('"', '').replace("'", "")
32 | return version
33 |
34 |
35 | def _get_git_hash_and_tag():
36 | """Get git hash, date, and tag from git log."""
37 | git_hash = ""
38 | git_date = ""
39 | git_tag = ""
40 | p = subprocess.Popen(
41 | ["git", "log", "-1", "--format='%H || %as || %(describe:tags=true,match=v*)'"],
42 | stdout=subprocess.PIPE,
43 | stderr=subprocess.PIPE,
44 | cwd=os.path.dirname(__file__),
45 | )
46 | stdout, stderr = p.communicate()
47 | if p.returncode == 0:
48 | git_hash, git_date, git_describe = (stdout.decode("utf-8")
49 | .replace("'", "").replace('"', '')
50 | .strip().split("||"))
51 | git_date = git_date.strip().replace("-", "")
52 | git_describe = git_describe.strip()
53 | if "-" not in git_describe and git_describe != "":
54 | # git-describe returns either the git-tag or (if we are not exactly
55 | # at a tag) something like
56 | # $GITTAG-$NUM_COMMITS_DISTANCE-$CURRENT_COMMIT_HASH
57 | git_tag = git_describe[1:] # strip of the 'v'
58 | return git_hash, git_date, git_tag
59 |
# determine __version__ (and __git_hash__) at import time: prefer the
# version from pyproject.toml (source checkout), otherwise fall back to
# the installed package metadata
try:
    _version = _get_version_from_pyproject()
except FileNotFoundError:
    # pyproject.toml not found, i.e. we are an installed package:
    # ask importlib.metadata for the installed version instead
    import importlib.metadata
    __version__ = importlib.metadata.version("asyncmd")
    __git_hash__ = ""
else:
    _git_hash, _git_date, _git_tag = _get_git_hash_and_tag()
    __git_hash__ = _git_hash
    if _version == _git_tag or _git_hash == "":
        # dont append git_hash to version, if it is a version-tagged commit or if
        # git_hash is empty (happens if git is installed but we are not in a repo)
        __version__ = _version
    else:
        # development build: append commit date and abbreviated commit hash
        __version__ = _version + f"+git{_git_date}.{_git_hash[:7]}"
76 |
--------------------------------------------------------------------------------
/src/asyncmd/config.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import os
16 | import asyncio
17 | import logging
18 | import resource
19 | import typing
20 |
21 |
22 | from ._config import _GLOBALS, _SEMAPHORES
23 | from .slurm import set_slurm_settings, set_all_slurm_settings
24 | # TODO: Do we want to set the _GLOBALS defaults here? E.g. CACHE_TYPE="npz"?
25 |
26 |
27 | logger = logging.getLogger(__name__)
28 |
29 |
30 | # can be called by the user to (re) set maximum number of processes used
def set_max_process(num=None, max_num=None):
    """
    Set the maximum number of concurrent python processes.

    If num is None, default to os.cpu_count() / 4 (but at least 1).

    Parameters
    ----------
    num : int, optional
        Number of processes, if None will default to 1/4 of the CPU count.
    max_num : int, optional
        If given the number of processes can not exceed this number independent
        of the value of CPU count. Useful mostly for code that runs on multiple
        different machines (with different CPU counts) but still wants to avoid
        spawning hundreds of processes.
    """
    # NOTE: I think we should use a conservative default, e.g. 0.25*cpu_count()
    # TODO: limit to 30-40?, i.e never higher even if we have 1111 cores?
    global _SEMAPHORES
    if num is None:
        logical_cpu_count = os.cpu_count()
        if logical_cpu_count is not None:
            # allow at least one process: on machines with fewer than 4
            # CPUs int(cpu_count / 4) would be 0 and a BoundedSemaphore(0)
            # could never be acquired (deadlock for every user)
            num = max(1, logical_cpu_count // 4)
        else:
            # fallback if os.cpu_count() can not determine the number of cpus
            # play it save and not have more than 2?
            # TODO: think about a good number!
            num = 2
    if max_num is not None:
        num = min((num, max_num))
    _SEMAPHORES["MAX_PROCESS"] = asyncio.BoundedSemaphore(num)
62 |
63 |
64 | set_max_process()
65 |
66 |
def set_max_files_open(num: typing.Optional[int] = None, margin: int = 30):
    """
    Set the maximum number of concurrently opened files.

    By default use the systems soft resource limit.

    Parameters
    ----------
    num : int, optional
        Maximum number of open files, if None use systems (soft) resourcelimit,
        by default None
    margin : int, optional
        Safe margin to keep, i.e. we will only ever open `num - margin` files,
        by default 30

    Raises
    ------
    ValueError
        If ``num - margin < 3``, i.e. if we could not even open the three
        files (stdin, stdout, stderr) one subprocess needs.
    """
    # ensure that we do not open too many files
    # resource.getrlimit returns a tuple (soft, hard); we take the soft-limit
    # and to be sure 30 less (the reason being that we can not use the
    # semaphores from non-async code, but sometimes use the sync subprocess.run
    # and subprocess.check_call [which also need files/pipes to work])
    # also maybe we need other open files like a storage :)
    global _SEMAPHORES
    rlim_soft = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
    if num is None:
        num = rlim_soft
    elif num > rlim_soft:
        logger.warning("Passed a wanted number of open files that is larger "
                       "than the systems soft resource limit (%d > %d). "
                       "Will be using num=%d instead. To set a higher number "
                       "increase your systems limit on the number of open "
                       "files and call this function again.",
                       num, rlim_soft, rlim_soft,
                       )
        num = rlim_soft
    # NOTE: Each MAX_FILES_OPEN semaphore counts for 3 open files!
    #       The reason is that we open 3 files at the same time for each
    #       subprocess (stdin, stdout, stderr), but semaphores can only be
    #       decreased (awaited) once at a time. The problem with just awaiting
    #       it three times in a row is that we can get deadlocked by getting
    #       1-2 semaphores and waiting for the next (last) semaphore in all
    #       threads. The problem is that this semaphore will never be freed
    #       without any process getting a semaphore...
    semaval = (num - margin) // 3
    if semaval < 1:
        # previously only num - margin <= 0 raised, so num - margin in {1, 2}
        # silently created a BoundedSemaphore(0) that could never be acquired
        raise ValueError("num must be larger than margin + 3."
                         f" Was num={num}, margin={margin}."
                         )
    _SEMAPHORES["MAX_FILES_OPEN"] = asyncio.BoundedSemaphore(semaval)
120 |
121 |
122 | set_max_files_open()
123 |
124 |
125 | # SLURM semaphore stuff:
126 | # TODO: move this to slurm.py? and initialize only if slurm is available?
127 | # slurm max job semaphore, if the user sets it it will be used,
128 | # otherwise we can use an unlimited number of syncronous slurm-jobs
129 | # (if the simulation requires that much)
130 | # TODO: document that somewhere, bc usually clusters have a job number limit?!
def set_slurm_max_jobs(num: typing.Union[int, None]):
    """
    Set the maximum number of simultaneously submitted SLURM jobs.

    Parameters
    ----------
    num : int or None
        The maximum number of simultaneous SLURM jobs for this invocation of
        python/asyncmd. `None` means do not limit the maximum number of jobs.
    """
    global _SEMAPHORES
    # storing None (instead of a semaphore) signals "no limit" to the
    # SLURM-related code
    _SEMAPHORES["SLURM_MAX_JOB"] = (None if num is None
                                    else asyncio.BoundedSemaphore(num))
146 |
147 |
148 | set_slurm_max_jobs(num=None)
149 |
150 |
151 | # Trajectory function value config
def set_default_trajectory_cache_type(cache_type: str):
    """
    Set the default cache type for TrajectoryFunctionValues.

    Note that this can be overwritten on a per trajectory basis by passing
    ``cache_type`` to ``Trajectory.__init__``.

    Parameters
    ----------
    cache_type : str
        One of "h5py", "npz", "memory" (case-insensitive).

    Raises
    ------
    ValueError
        Raised if ``cache_type`` is not one of the allowed values.
    """
    global _GLOBALS
    allowed_values = ["h5py", "npz", "memory"]
    cache_type = cache_type.lower()
    if cache_type in allowed_values:
        _GLOBALS["TRAJECTORY_FUNCTION_CACHE_TYPE"] = cache_type
    else:
        raise ValueError(f"Given cache type must be one of {allowed_values}."
                         + f" Was: {cache_type}.")
176 |
177 |
def register_h5py_cache(h5py_group, make_default: bool = False):
    """
    Register a h5py file or group for CV value caching.

    A ``h5py.File`` is just a slightly special ``h5py.Group``, so either can
    be passed. :mod:`asyncmd` will use the given file or group as the root of
    its own stored values, i.e.
    ``h5py_group["asyncmd/TrajectoryFunctionValueCache"]`` always points to
    the cached trajectory values. If ``h5py_group`` is the top-level group
    (the file itself), ``file["/asyncmd/TrajectoryFunctionValueCache"]`` and
    ``h5py_group["asyncmd/TrajectoryFunctionValueCache"]`` are the same group.

    Parameters
    ----------
    h5py_group : h5py.Group or h5py.File
        The file or group to use for caching.
    make_default : bool
        Whether "h5py" should additionally be made the default trajectory
        function cache type (by calling
        :func:`set_default_trajectory_cache_type` with ``cache_type="h5py"``).
        By default False.
    """
    global _GLOBALS
    if make_default:
        set_default_trajectory_cache_type(cache_type="h5py")
    _GLOBALS["H5PY_CACHE"] = h5py_group
204 |
--------------------------------------------------------------------------------
/src/asyncmd/gromacs/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | from .mdconfig import MDP
16 | from .mdengine import GmxEngine, SlurmGmxEngine
17 |
--------------------------------------------------------------------------------
/src/asyncmd/gromacs/utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import os
16 | import logging
17 | import aiofiles.os
18 |
19 | from ..trajectory.trajectory import Trajectory
20 | from .mdconfig import MDP
21 |
22 |
23 | logger = logging.getLogger(__name__)
24 |
25 |
def nstout_from_mdp(mdp: MDP, traj_type: str = "TRR") -> int:
    """
    Get minimum number of steps between outputs for trajectories from MDP.

    Parameters
    ----------
    mdp : MDP
        Config object from which the output step should be read.
    traj_type : str, optional
        Trajectory format for which output step should be read, "XTC" or "TRR",
        by default "TRR".

    Returns
    -------
    int
        Minimum number of steps between two writes.

    Raises
    ------
    ValueError
        Raised when an unknown trajectory format `traj_type` is given.
    ValueError
        Raised when the given MDP would result in no output for the given
        trajectory format `traj_type`.
    """
    fmt = traj_type.upper()
    if fmt == "TRR":
        keys = ["nstxout", "nstvout", "nstfout"]
    elif fmt == "XTC":
        keys = ["nstxout-compressed", "nstxtcout"]
    else:
        raise ValueError("traj_type must be one of 'TRR' or 'XTC'.")
    # gromacs uses 0 to signal "no output" (and it is also the default, e.g.
    # when reading an mdout.mdp where gmx lists all defaults), so map 0 to
    # inf so it can never win the min() below; unset keys are skipped since
    # they default to 0 (no output) anyway
    vals = [float("inf") if mdp[key] == 0 else mdp[key]
            for key in keys if key in mdp]
    nstout = min(vals, default=None)
    if (nstout is None) or (nstout == float("inf")):
        raise ValueError(f"The MDP you passed results in no {traj_type} "
                         "trajectory output.")
    return nstout
76 |
77 |
async def get_all_traj_parts(folder: str, deffnm: str,
                             traj_type: str = "TRR") -> "list[Trajectory]":
    """
    Find and return a list of trajectory parts produced by a GmxEngine.

    NOTE: This returns only the parts that exist in ascending order.

    Parameters
    ----------
    folder : str
        path to a folder to search for trajectory parts
    deffnm : str
        deffnm (prefix of filenames) used in the simulation
    traj_type : str, optional
        Trajectory file ending("XTC", "TRR", "TNG", ...), by default "TRR"

    Returns
    -------
    list[Trajectory]
        Ordered list of all trajectory parts with given deffnm and type.
    """
    traj_files = await get_all_file_parts(folder=folder, deffnm=deffnm,
                                          file_ending=traj_type.lower())
    # all parts share the same tpr as structure file
    structure = os.path.join(folder, f"{deffnm}.tpr")
    return [Trajectory(trajectory_files=traj_file, structure_file=structure)
            for traj_file in traj_files]
107 |
108 |
async def get_all_file_parts(folder: str, deffnm: str, file_ending: str) -> "list[str]":
    """
    Find and return all files with given ending produced by GmxEngine.

    NOTE: This returns only the parts that exist in ascending order.

    Parameters
    ----------
    folder : str
        Path to a folder to search for trajectory parts.
    deffnm : str
        deffnm (prefix of filenames) used in the simulation.
    file_ending : str
        File ending of the requested filetype (with or without preceeding ".").

    Returns
    -------
    list[str]
        Ordered list of filepaths for files with given ending.
    """
    if not file_ending.startswith("."):
        file_ending = "." + file_ending
    prefix = f"{deffnm}.part"
    # gromacs part files are named "{deffnm}.partNNNN{file_ending}", i.e. the
    # part number is always exactly 4 digits
    expected_len = len(deffnm) + 9 + len(file_ending)
    content = await aiofiles.os.listdir(folder)
    partnums = sorted(int(fname[len(prefix):len(prefix) + 4])
                      for fname in content
                      if (fname.startswith(prefix)
                          and fname.endswith(file_ending)
                          and len(fname) == expected_len)
                      )
    return [os.path.join(folder, f"{deffnm}.part{num:04d}{file_ending}")
            for num in partnums]
149 |
150 |
def ensure_mdp_options(mdp: MDP, genvel: str = "no", continuation: str = "yes") -> MDP:
    """
    Ensure that some commonly used mdp options have the given values.

    NOTE: Modifies the `MDP` inplace and returns it.

    Parameters
    ----------
    mdp : MDP
        Config object for which values should be ensured.
    genvel : str, optional
        Value for genvel option ("yes" or "no"), by default "no".
    continuation : str, optional
        Value for continuation option ("yes" or "no"), by default "yes".

    Returns
    -------
    MDP
        Reference to input config object with values for options as given.
    """
    _unset = object()  # sentinel to detect options missing from the mdp
    # make sure we do not generate velocities with gromacs (unless asked to)
    genvel_current = mdp.get("gen-vel", _unset)
    if genvel_current is _unset:
        logger.info("Setting 'gen-vel = %s' in mdp.", genvel)
        mdp["gen-vel"] = genvel
    elif genvel_current != genvel:
        logger.warning("Setting 'gen-vel = %s' in mdp "
                       "(was '%s').", genvel, genvel_current)
        mdp["gen-vel"] = genvel
    # TODO/FIXME: this could also be 'unconstrained-start'!
    # however already the gmx v4.6.3 docs say
    # "continuation: formerly know as 'unconstrained-start'"
    # so I think we can ignore that for now?!
    continuation_current = mdp.get("continuation", _unset)
    if continuation_current is _unset:
        logger.info("Setting 'continuation = %s' in mdp.", continuation)
        mdp["continuation"] = continuation
    elif continuation_current != continuation:
        logger.warning("Setting 'continuation = %s' in mdp "
                       "(was '%s').", continuation, continuation_current)
        mdp["continuation"] = continuation

    return mdp
198 |
--------------------------------------------------------------------------------
/src/asyncmd/mdconfig.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import os
16 | import abc
17 | import typing
18 | import shutil
19 | import logging
20 | import collections
21 |
22 |
23 | logger = logging.getLogger(__name__)
24 |
25 |
class FlagChangeList(collections.abc.MutableSequence):
    """A list that knows if it has been changed after initializing."""

    def __init__(self, data: list) -> None:
        """
        Initialize a `FlagChangeList`.

        Parameters
        ----------
        data : list
            The data this `FlagChangeList` will hold.

        Raises
        ------
        TypeError
            Raised when data is not a :class:`list`.
        """
        if not isinstance(data, list):
            raise TypeError("FlagChangeList must be initialized with a list.")
        self._data = data
        # becomes True on the first mutating operation and stays True
        self._changed = False

    @property
    def changed(self) -> bool:
        """
        Whether this `FlagChangeList` has been modified since creation.

        Returns
        -------
        bool
        """
        return self._changed

    def __repr__(self) -> str:
        return repr(self._data)

    def __getitem__(self, index: int) -> typing.Any:
        return self._data[index]

    def __len__(self) -> int:
        return len(self._data)

    def __setitem__(self, index: int, value) -> None:
        self._data[index] = value
        self._changed = True

    def __delitem__(self, index: int) -> None:
        del self._data[index]
        self._changed = True

    def insert(self, index: int, value: typing.Any):
        """
        Insert `value` at position given by `index`.

        Parameters
        ----------
        index : int
            The index of the new value in the `FlagChangeList`.
        value : typing.Any
            The value to insert into this `FlagChangeList`.
        """
        self._data.insert(index, value)
        self._changed = True
89 |
90 |
class TypedFlagChangeList(FlagChangeList):
    """
    A :class:`FlagChangeList` with an ensured type for individual list items.
    """

    def __init__(self, data: typing.Iterable, dtype) -> None:
        """
        Initialize a `TypedFlagChangeList`.

        Parameters
        ----------
        data : Iterable
            (Initial) data for this `TypedFlagChangeList`.
        dtype : Callable datatype
            The datatype for all entries in this `TypedFlagChangeList`. Will be
            called on every value seperately and is expected to convert to the
            desired datatype.
        """
        # must be set before _convert_type is used below
        self._dtype = dtype
        # wrap strings and objects without a len() as a single item: strings
        # do have a length but must not be split into single letters, and
        # anything without __len__ is taken to be a singular option value
        if isinstance(data, str) or not hasattr(data, "__len__"):
            data = [data]
        super().__init__(data=[self._convert_type(value, index=idx)
                               for idx, value in enumerate(data)])

    def _convert_type(self, value, index=None):
        # index is currently ignored, but passing it should in principal make
        # it possible to use different dtypes for different indices
        return self._dtype(value)

    def __setitem__(self, index: int, value) -> None:
        self._data[index] = self._convert_type(value, index=index)
        self._changed = True

    def insert(self, index: int, value) -> None:
        """
        Insert `value` at position given by `index`.

        Parameters
        ----------
        index : int
            The index of the new value in the `TypedFlagChangeList`.
        value : typing.Any
            The value to insert into this `TypedFlagChangeList`.
        """
        self._data.insert(index, self._convert_type(value, index=index))
        self._changed = True
146 |
147 |
# NOTE: only to define the interface
class MDConfig(collections.abc.MutableMapping):
    """
    Abstract base class defining the interface for MD configuration objects.

    Subclasses must implement :meth:`parse` and :meth:`write` in addition to
    the :class:`collections.abc.MutableMapping` interface.
    """

    @abc.abstractmethod
    def parse(self):
        """Read the original file and populate self with key, value pairs."""
        # should read original file and populate self with key, value pairs
        raise NotImplementedError

    @abc.abstractmethod
    def write(self, outfile):
        """Write the current configuration stored in self to outfile."""
        # write out current config stored in self to outfile
        raise NotImplementedError
159 |
160 |
class LineBasedMDConfig(MDConfig):
    """
    Abstract base class for MD configuration file formats that are line-based.

    Subclasses must implement the :meth:`_parse_line` method and should set
    the appropriate separator characters (``_KEY_VALUE_SEPARATOR`` and
    ``_INTER_VALUE_CHAR``) for their line format.
    """
    # abstract base class for line based parsing and writing,
    # subclasses must implement `_parse_line()` method and should set the
    # appropriate separator characters for their line format
    # We assume that every line/option can be parsed and written on its own!
    # We assume the order of the options in the written file is not relevant!
    # We represent every line/option with a key (str), list of values pair
    # values can have a specific type (e.g. int or float) or default to str.
    # NOTE: Initially written for gmx, but we already had e.g. namd in mind and
    # tried to make this as general as possible

    # these are the gmx mdp options but should be fairly general
    # (i.e. work at least for namd?)
    _KEY_VALUE_SEPARATOR = " = "
    _INTER_VALUE_CHAR = " "
    # NOTE on typing
    # use these to specify config parameters that are of type int or float
    # parsed lines with dict key matching will then be converted
    # any lines not matching will be left in their default str type
    _FLOAT_PARAMS = []  # can have multiple values per config option
    _FLOAT_SINGLETON_PARAMS = []  # must have one value per config option
    _INT_PARAMS = []  # multiple int per option
    _INT_SINGLETON_PARAMS = []  # one int per option
    _STR_SINGLETON_PARAMS = []  # strings with only one value per option
    # NOTE on SPECIAL_PARAM_DISPATCH
    # can be used to set custom type convert functions on a per parameter basis
    # the key must match the key in the dict for in the parsed line,
    # the value must be a function taking the corresponding (parsed) line and
    # which must return a FlagChangeList or subclass thereof
    # this function will also be called with the new list of value(s) when the
    # option is changed, i.e. it must also be able to check and cast a list of
    # new values into the expected FlagChangeList format
    # [note that it is probably easiest to subclass TypedFlagChangeList and
    # overwrite only the '_check_type()' method]
    _SPECIAL_PARAM_DISPATCH = {}

    def __init__(self, original_file: str) -> None:
        """
        Initialize a :class:`LineBasedMDConfig`.

        Parameters
        ----------
        original_file : str
            Path to original config file (absolute or relative).
        """
        self._config = {}
        self._changed = False
        self._type_dispatch = self._construct_type_dispatch()
        # property to set/check file and parse to config dictionary all in one
        self.original_file = original_file

    def _construct_type_dispatch(self):
        # Build the {parameter: conversion_function} mapping used to convert
        # parsed values (lists of str) into their typed representation.
        # Unknown parameters default to a str-typed TypedFlagChangeList.
        def convert_len1_list_or_singleton(val, dtype):
            # helper func that accepts len1 lists
            # (as expected from `_parse_line`)
            # but that also accepts single values and converts them to given
            # dtype (which is what we expect can/will happen when the users set
            # singleton vals, i.e. "val" instead of ["val"]
            if isinstance(val, str) or getattr(val, '__len__', None) is None:
                return dtype(val)
            else:
                return dtype(val[0])

        # construct type conversion dispatch
        type_dispatch = collections.defaultdict(
                            # looks a bit strange, but the factory func
                            # is called to produce the default value, i.e.
                            # we need a func that returns our default func
                            lambda:
                            lambda l: TypedFlagChangeList(data=l,
                                                          dtype=str)
                                                )
        type_dispatch.update({param: lambda l: TypedFlagChangeList(
                                                    data=l,
                                                    dtype=float
                                                    )
                              for param in self._FLOAT_PARAMS})
        type_dispatch.update({param: lambda v: convert_len1_list_or_singleton(
                                                    val=v,
                                                    dtype=float,
                                                    )
                              for param in self._FLOAT_SINGLETON_PARAMS})
        type_dispatch.update({param: lambda l: TypedFlagChangeList(
                                                    data=l,
                                                    dtype=int,
                                                    )
                              for param in self._INT_PARAMS})
        type_dispatch.update({param: lambda v: convert_len1_list_or_singleton(
                                                    val=v,
                                                    dtype=int,
                                                    )
                              for param in self._INT_SINGLETON_PARAMS})
        type_dispatch.update({param: lambda v: convert_len1_list_or_singleton(
                                                    val=v,
                                                    dtype=str,
                                                    )
                              for param in self._STR_SINGLETON_PARAMS})
        type_dispatch.update(self._SPECIAL_PARAM_DISPATCH)
        return type_dispatch

    def __getstate__(self) -> dict:
        # the type dispatch contains lambdas, which can not be pickled; drop
        # it here and rebuild it in __setstate__ after unpickling
        state = self.__dict__.copy()
        state["_type_dispatch"] = None
        return state

    def __setstate__(self, state: dict) -> None:
        self.__dict__.update(state)
        self._type_dispatch = self._construct_type_dispatch()

    @abc.abstractmethod
    def _parse_line(self, line: str) -> dict:
        """
        Parse a line of the configuration file and return a :class:`dict`.

        Parameters
        ----------
        line : str
            A single line of the read-in configuration file

        Returns
        ------
        parsed : dict
            Dictionary with a single (key, list of value(s)) pair representing
            the parsed line.
        """
        # NOTE: this is the only function needed to complete the class,
        # the rest of this metaclass assumes the following for this func:
        # it must parse a single line and return the key, list of value(s) pair
        # as a dict with one item, e.g. {key: list of value(s)}
        # if the line is parsed as comment the dict must be empty, e.g. {}
        # if the option/key is present but without value the list must be empty
        # e.g. {key: []}
        raise NotImplementedError

    def __getitem__(self, key):
        return self._config[key]

    def __setitem__(self, key, value) -> None:
        # convert to the expected (typed) representation before storing and
        # remember that we now differ from the original file
        typed_value = self._type_dispatch[key](value)
        self._config[key] = typed_value
        self._changed = True

    def __delitem__(self, key) -> None:
        self._config.__delitem__(key)
        self._changed = True

    def __iter__(self):
        return self._config.__iter__()

    def __len__(self) -> int:
        return self._config.__len__()

    def __repr__(self) -> str:
        return str({"changed": self._changed,
                    "original_file": self.original_file,
                    "content": self._config.__repr__(),
                    }
                   )

    def __str__(self) -> str:
        repr_str = (f"{type(self)} has been changed since parsing: "
                    + f"{self._changed}\n"
                    )
        repr_str += "Current content:\n"
        repr_str += "----------------\n"
        for key, val in self.items():
            repr_str += f"{key} : {val}\n"
        return repr_str

    @property
    def original_file(self) -> str:
        """
        Return the original config file this :class:`LineBasedMDConfig` parsed.

        Returns
        -------
        str
            Path to the original file.
        """
        return self._original_file

    @original_file.setter
    def original_file(self, value: str) -> None:
        # NOTE: (re)setting the file also replaces the current config with
        # what we parse from that file
        value = os.path.relpath(value)
        if not os.path.isfile(value):
            raise ValueError(f"Can not access the file {value}")
        self._original_file = value
        self.parse()

    @property
    def changed(self) -> bool:
        """
        Indicate if the current configuration differs from original_file.

        Returns
        -------
        bool
            Whether we changed the configuration w.r.t. ``original_file``.
        """
        # NOTE: we default to False, i.e. we expect that anything that
        # does not have a self.changed attribute is not a container
        # and we (the dictionary) would know that it changed
        return self._changed or any([getattr(v, "changed", False)
                                     for v in self._config.values()]
                                    )

    def parse(self):
        """Parse the current ``self.original_file`` to update own state."""
        with open(self.original_file, "r") as f:
            # NOTE: we split at newlines on all platforms by iterating over the
            # file, i.e. python takes care of the differnt platforms and
            # newline chars for us :)
            parsed = {}
            for line in f:
                line_parsed = self._parse_line(line.rstrip("\n"))
                # check for duplicate options, we warn but take the last one
                for key in line_parsed:
                    try:
                        # check if we already have a value for that option
                        _ = parsed[key]
                    except KeyError:
                        # as it should be
                        pass
                    else:
                        # warn that we will only keep the last occurenc of key
                        logger.warning("Parsed duplicate configuration option "
                                       "(%s). Last values encountered take "
                                       "precedence.", key)
                parsed.update(line_parsed)
            # convert the known types
            self._config = {key: self._type_dispatch[key](value)
                            for key, value in parsed.items()}
            self._changed = False

    def write(self, outfile: str, overwrite: bool = False) -> None:
        """
        Write current configuration to outfile.

        Parameters
        ----------
        outfile : str
            Path to outfile (relative or absolute).
        overwrite : bool, optional
            If True overwrite existing files, by default False.

        Raises
        ------
        ValueError
            Raised when `overwrite=False` but `outfile` exists.
        """
        outfile = os.path.relpath(outfile)
        if os.path.exists(outfile) and not overwrite:
            raise ValueError(f"overwrite=False and file exists ({outfile}).")
        if not self.changed:
            # just copy the original
            shutil.copy2(src=self.original_file, dst=outfile)
        else:
            # construct content for new file
            lines = []
            for key, value in self._config.items():
                line = f"{key}{self._KEY_VALUE_SEPARATOR}"
                try:
                    # len() raising TypeError signals a singleton (non-list)
                    # value, handled in the except branch below
                    if len(value) >= 0:
                        if isinstance(value, str):
                            # it is a string singleton option
                            line += f"{value}"
                        else:
                            line += self._INTER_VALUE_CHAR.join(str(v)
                                                                for v in value
                                                                )
                except TypeError:
                    # not a Sequence/Iterable or string,
                    # i.e. (probably) one of the float/int singleton options
                    line += f"{value}"
                lines += [line]
            # concatenate the lines and write out at once
            with open(outfile, "w") as f:
                f.write("\n".join(lines))
441 |
--------------------------------------------------------------------------------
/src/asyncmd/mdengine.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import abc
16 | from .trajectory.trajectory import Trajectory
17 |
18 |
class EngineError(Exception):
    """Generic error raised when something goes wrong with the (MD)-Engine."""
22 |
23 |
class EngineCrashedError(EngineError):
    """Error raised when the (MD)-Engine crashes during a run."""
27 |
28 |
class MDEngine(abc.ABC):
    """
    Abstract base class to define a common interface for all :class:`MDEngine`.

    Subclasses wrap a concrete MD program and expose async methods to prepare
    and run simulations in (multiple) parts.
    """
    @abc.abstractmethod
    async def apply_constraints(self, conf_in: Trajectory,
                                conf_out_name: str) -> Trajectory:
        """Apply constraints to conf_in, write conf_out_name and return it."""
        raise NotImplementedError

    @abc.abstractmethod
    # TODO: think about the most general interface!
    # NOTE: We assume that we do not change the system for/in one engine,
    #       i.e. .top, .ndx, mdp-object, ...?! should go into __init__
    async def prepare(self, starting_configuration: Trajectory, workdir: str,
                      deffnm: str) -> None:
        """Prepare the engine to run in workdir with deffnm as file prefix."""
        raise NotImplementedError

    @abc.abstractmethod
    # TODO: should this be a classmethod?
    #@classmethod
    async def prepare_from_files(self, workdir: str, deffnm: str) -> None:
        """
        Prepare the engine to continue a previously stopped simulation,
        starting with the last trajectory part in workdir that is compatible
        with deffnm.
        """
        raise NotImplementedError

    @abc.abstractmethod
    async def run_walltime(self, walltime: float) -> Trajectory:
        """Run the simulation for the specified walltime."""
        # NOTE: must be possible to run this multiple times after preparing once!
        raise NotImplementedError

    @abc.abstractmethod
    async def run_steps(self, nsteps: int,
                        steps_per_part: bool = False) -> Trajectory:
        """Run the simulation for the specified number of steps."""
        # NOTE: not sure if we need it, but could be useful
        # NOTE: make sure we can run multiple times after preparing once!
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def current_trajectory(self) -> Trajectory | None:
        """
        The :class:`Trajectory` currently being worked on (or the one finished
        last), or None.
        """
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def output_traj_type(self) -> str:
        """File ending (without ".") of the trajectory type this engine uses."""
        # NOTE: this should not be implemented as a property in subclasses
        #       as it must be available at the classlevel too
        #       so cls.output_traj_type must also be the string
        #       If you want/need to check the values (i.e. you would like to
        #       execute code like in a property) have a look at the descriptor
        #       implementation in gromacs/mdengine.py which checks for allowed
        #       values (at least when set on an instance) but is accesible from
        #       the class level too, e.g. like a 'classproperty' (which is not
        #       a thing in python)
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def steps_done(self) -> int:
        """Number of integration steps this engine performed so far."""
        raise NotImplementedError
97 |
--------------------------------------------------------------------------------
/src/asyncmd/tools.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
import contextlib
import os
import shutil

import aiofiles
import aiofiles.os
18 |
19 |
def ensure_executable_available(executable: str) -> str:
    """
    Ensure the given executable is available and executable.

    Takes a relative or absolute path to an executable or the name of an
    executable available in $PATH. Returns the full path to the executable.

    Parameters
    ----------
    executable : str
        Name or path of an executable.

    Returns
    -------
    path_to_executable : str
        Full path to the given executable if it exists.

    Raises
    ------
    ValueError
        If the given name does not exist or can not be executed.
    """
    abs_path = os.path.abspath(executable)
    if os.path.isfile(abs_path):
        # it is a relative path starting from cwd
        # (or a full path starting with /)
        if not os.access(abs_path, os.X_OK):
            raise ValueError(f"{abs_path} must be executable.")
        return abs_path
    # otherwise see if we find it in $PATH
    # (look it up only once instead of calling shutil.which twice)
    found = shutil.which(executable)
    if found is None:
        raise ValueError(f"{executable} must be an existing path or accessible "
                         "via the $PATH environment variable.")
    return found
55 |
56 |
def remove_file_if_exist(f: str):
    """
    Remove a given file if it exists.

    Parameters
    ----------
    f : str
        Path to the file to remove.
    """
    # contextlib.suppress is the stdlib idiom for best-effort removal: a
    # missing file is exactly the state we want, so it is not an error here
    # TODO: should we info/warn if the file is not there?
    with contextlib.suppress(FileNotFoundError):
        os.remove(f)
71 |
72 |
async def remove_file_if_exist_async(f: str):
    """
    Remove a given file if it exists asynchronously.

    Parameters
    ----------
    f : str
        Path to the file to remove.
    """
    # NOTE(review): this uses ``aiofiles.os.remove`` although the top of the
    # file only does ``import aiofiles`` — confirm that ``aiofiles``
    # re-exports its ``os`` submodule, or add an explicit
    # ``import aiofiles.os`` (as done in gromacs/utils.py).
    try:
        await aiofiles.os.remove(f)
    except FileNotFoundError:
        # TODO: should we info/warn if the file is not there?
        pass
87 |
--------------------------------------------------------------------------------
/src/asyncmd/trajectory/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | from .functionwrapper import (PyTrajectoryFunctionWrapper,
16 | SlurmTrajectoryFunctionWrapper,
17 | )
18 | from .propagate import (ConditionalTrajectoryPropagator,
19 | TrajectoryPropagatorUntilAnyState,
20 | InPartsTrajectoryPropagator,
21 | construct_TP_from_plus_and_minus_traj_segments,
22 | )
23 | from .trajectory import (_forget_trajectory,
24 | _forget_all_trajectories,
25 | )
26 |
--------------------------------------------------------------------------------
/src/asyncmd/utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | from .mdengine import MDEngine
16 | from .mdconfig import MDConfig
17 | from .trajectory.trajectory import Trajectory
18 | from .gromacs import utils as gmx_utils
19 | from .gromacs import mdengine as gmx_engine
20 | from .gromacs import mdconfig as gmx_config
21 |
22 |
async def get_all_traj_parts(folder: str, deffnm: str, engine: MDEngine) -> "list[Trajectory]":
    """
    List all trajectories in folder by given engine class with given deffnm.

    Parameters
    ----------
    folder : str
        Absolute or relative path to a folder.
    deffnm : str
        deffnm used by the engines simulation run from which we want the trajs.
    engine : MDEngine
        The engine that produced the trajectories
        (or one from the same class and with similar init args).

    Returns
    -------
    list[Trajectory]
        All trajectory parts from folder that match deffnm and engine in order.

    Raises
    ------
    ValueError
        Raised when the engine class is unknown.
    """
    # dispatch on the concrete engine class; guard clause keeps the
    # happy path unindented
    if not isinstance(engine, (gmx_engine.GmxEngine, gmx_engine.SlurmGmxEngine)):
        raise ValueError(f"Engine {engine} is not a known MDEngine class."
                         + " Maybe someone just forgot to add the function?")
    return await gmx_utils.get_all_traj_parts(
        folder=folder,
        deffnm=deffnm,
        traj_type=engine.output_traj_type,
    )
54 |
55 |
async def get_all_file_parts(folder: str, deffnm: str, file_ending: str,
                             ) -> "list[str]":
    """
    Find and return all files with given ending produced by a `MDEngine`.

    NOTE: This returns only the parts that exist, in ascending order.

    Parameters
    ----------
    folder : str
        Path to a folder to search for trajectory parts.
    deffnm : str
        deffnm (prefix of filenames) used in the simulation.
    file_ending : str
        File ending of the requested filetype (with or without preceding ".").

    Returns
    -------
    list[str]
        Ordered list of filepaths for files with given ending.
    """
    # TODO: we just use the function from the gromacs engines for now, i.e.
    #       we assume the filename scheme is shared by other engines too
    file_parts = await gmx_utils.get_all_file_parts(folder=folder,
                                                    deffnm=deffnm,
                                                    file_ending=file_ending,
                                                    )
    return file_parts
81 |
82 |
def nstout_from_mdconfig(mdconfig: MDConfig, output_traj_type: str) -> int:
    """
    Return output step for given mdconfig and output_traj_type.

    Parameters
    ----------
    mdconfig : MDConfig
        An engine specific subclass of MDConfig.
    output_traj_type : str
        The output trajectory type for which we want the output frequency.

    Returns
    -------
    int
        (Smallest) output step in integration steps.

    Raises
    ------
    ValueError
        Raised when the MDConfig subclass is not known.
    """
    # dispatch on the concrete MDConfig subclass; guard clause keeps the
    # happy path unindented
    if not isinstance(mdconfig, gmx_config.MDP):
        raise ValueError(f"mdconfig {mdconfig} is not a known MDConfig class."
                         + " Maybe someone just forgot to add the function?")
    return gmx_utils.nstout_from_mdp(mdp=mdconfig, traj_type=output_traj_type)
111 |
112 |
def ensure_mdconfig_options(mdconfig: MDConfig, genvel: str = "no",
                            continuation: str = "yes") -> MDConfig:
    """
    Ensure that some commonly used mdconfig options have the given values.

    NOTE: Modifies the `MDConfig` inplace and returns it.

    Parameters
    ----------
    mdconfig : MDConfig
        Config object for which values should be ensured.
    genvel : str, optional
        Whether to generate velocities from a Maxwell-Boltzmann distribution
        ("yes" or "no"), by default "no".
    continuation : str, optional
        Whether to apply constraints to the initial configuration
        ("yes" or "no"), by default "yes".

    Returns
    -------
    MDConfig
        Reference to input config object with values for options as given.

    Raises
    ------
    ValueError
        If the MDConfig belongs to an unknown subclass not dispatcheable to
        any specific engine submodule.
    """
    # dispatch on the concrete MDConfig subclass; guard clause keeps the
    # happy path unindented
    if not isinstance(mdconfig, gmx_config.MDP):
        raise ValueError(f"mdconfig {mdconfig} is not a known MDConfig class."
                         + " Maybe someone just forgot to add the function?")
    return gmx_utils.ensure_mdp_options(mdp=mdconfig, genvel=genvel,
                                        continuation=continuation)
149 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import pytest
16 |
17 |
18 |
def pytest_addoption(parser):
    """Register the custom command line options used by this test suite."""
    # (option, help text) pairs; every option is a boolean flag that
    # defaults to False
    # NOTE: a "--runold" option for deprecated-code tests used to exist here
    #       (kept commented out in git history)
    option_specs = (
        ("--runslow", "run slow tests"),
        ("--runall", "run all tests"),
    )
    for option, help_text in option_specs:
        parser.addoption(option, action="store_true", default=False,
                         help=help_text)
30 |
31 |
def pytest_configure(config):
    """Register the custom 'slow' marker with pytest."""
    config.addinivalue_line("markers", "slow: mark test as slow to run")
    #config.addinivalue_line("markers", "old: mark test for deprecated code")
35 |
36 |
def pytest_collection_modifyitems(config, items):
    """Skip tests marked as slow unless --runslow or --runall was given."""
    # NOTE: commented-out handling for an "old" (deprecated code) marker
    #       used to live here; see git history
    if config.getoption("--runall"):
        # --runall given on the cli: do not skip any tests
        return
    if config.getoption("--runslow"):
        # slow tests were requested explicitly: nothing to skip
        return
    # neither option given: attach a skip marker to every slow test
    skip_slow = pytest.mark.skip(reason="need --runslow option to run")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)
54 |
--------------------------------------------------------------------------------
/tests/gromacs/test_mdengine.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import pytest
16 | import logging
17 | import shutil
18 |
19 | import numpy as np
20 |
21 | from asyncmd.gromacs import GmxEngine, MDP
22 | from asyncmd import Trajectory
23 |
24 |
# Decorator for tests that need gmx grompp and/or gmx mdrun
needs_gmx_install = pytest.mark.skipif(shutil.which("gmx") is None,
                                       reason="Need gromacs (gmx) executable.")
28 |
29 |
class Test_GmxEngine:
    """Tests for the local (non-slurm) gromacs engine class ``GmxEngine``."""

    def setup_method(self):
        """Set up paths to shared input files and parse the MDP configs."""
        # some useful files
        self.gro = "tests/test_data/gromacs/conf.gro"
        # FIX: directory name was misspelled as "gromcas" before
        self.ndx = "tests/test_data/gromacs/index.ndx"
        self.top = "tests/test_data/gromacs/topol_amber99sbildn.top"
        self.mdp_md_compressed_out = MDP("tests/test_data/gromacs/md_compressed_out.mdp")
        self.mdp_md_full_prec_out = MDP("tests/test_data/gromacs/md_full_prec_out.mdp")

    # FIX: renamed from test_mpd_check_minimize (typo "mpd" -> "mdp"),
    # matching the sibling test_mdp_check_* methods
    @pytest.mark.parametrize("integrator", ["steep", "cg", "l-bfgs"])
    def test_mdp_check_minimize(self, integrator, monkeypatch):
        """A minimization integrator must be rejected with xtc output but
        accepted with trr output."""
        # init an engine so we can use its mdconfig property (which does the checks)
        with monkeypatch.context() as m:
            # monkeypatch so we dont need to find a gromacs executable
            m.setattr("asyncmd.gromacs.mdengine.ensure_executable_available",
                      lambda _: "/usr/bin/true")
            engine = GmxEngine(mdconfig=self.mdp_md_compressed_out,
                               gro_file=self.gro,
                               top_file=self.top)
        self.mdp_md_compressed_out["integrator"] = integrator
        # the engine (default output_traj_type=xtc) must reject the minimizer
        with pytest.raises(ValueError):
            engine.mdp = self.mdp_md_compressed_out
        # this should work
        engine.output_traj_type = "trr"
        self.mdp_md_full_prec_out["integrator"] = integrator
        engine.mdp = self.mdp_md_full_prec_out

    def test_mdp_check_nsteps(self, caplog, monkeypatch):
        """nsteps must be forced to -1 (infinite) whether it was set or not."""
        # init an engine so we can use its mdconfig property (which does the checks)
        with monkeypatch.context() as m:
            # monkeypatch so we dont need to find a gromacs executable
            m.setattr("asyncmd.gromacs.mdengine.ensure_executable_available",
                      lambda _: "/usr/bin/true")
            engine = GmxEngine(mdconfig=self.mdp_md_compressed_out,
                               gro_file=self.gro,
                               top_file=self.top)
        # check that an nsteps value that is not -1 (but set) is changed
        self.mdp_md_compressed_out["nsteps"] = 100
        with caplog.at_level(logging.INFO):
            engine.mdp = self.mdp_md_compressed_out
        # make sure nsteps is now set to -1
        assert engine.mdp["nsteps"] == -1
        # and check the log
        # NOTE: "infinte" [sic] must match the engine's emitted log text
        #       verbatim — do not "fix" the spelling only here
        info_str = "Changing nsteps from 100 to -1 (infinte), the run "
        info_str += "length is controlled via arguments of the run method."
        assert info_str in caplog.text

        # check that we set nsteps if it is unset
        del self.mdp_md_compressed_out["nsteps"]
        with caplog.at_level(logging.INFO):
            engine.mdp = self.mdp_md_compressed_out
        # make sure nsteps is now set to -1
        assert engine.mdp["nsteps"] == -1
        # and check the log
        info_str = "Setting previously undefined nsteps to -1 (infinite)."
        assert info_str in caplog.text

    def test_mdp_check_no_mdp_class(self, monkeypatch):
        """Passing anything but an MDP as mdconfig must raise a TypeError."""
        # init should already fail
        with monkeypatch.context() as m:
            # monkeypatch so we dont need to find a gromacs executable
            m.setattr("asyncmd.gromacs.mdengine.ensure_executable_available",
                      lambda _: "/usr/bin/true")
            with pytest.raises(TypeError):
                engine = GmxEngine(mdconfig=None,
                                   gro_file=self.gro,
                                   top_file=self.top)

    @pytest.mark.parametrize("conversion_factor", [-1., 1.1, 0.])
    def test_check_invalid_mdrun_time_conversion_factor(self, monkeypatch,
                                                        conversion_factor):
        """mdrun_time_conversion_factor outside of (0, 1] must raise a
        ValueError at engine init."""
        # init should already fail
        with monkeypatch.context() as m:
            # monkeypatch so we dont need to find a gromacs executable
            m.setattr("asyncmd.gromacs.mdengine.ensure_executable_available",
                      lambda _: "/usr/bin/true")
            with pytest.raises(ValueError):
                engine = GmxEngine(mdconfig=self.mdp_md_compressed_out,
                                   gro_file=self.gro,
                                   top_file=self.top,
                                   mdrun_time_conversion_factor=conversion_factor,
                                   )

    @pytest.mark.slow
    @needs_gmx_install
    @pytest.mark.parametrize("starting_conf",
                             [None,
                              Trajectory("tests/test_data/trajectory/ala_traj.trr",
                                         "tests/test_data/trajectory/ala.tpr")
                              ]
                             )
    @pytest.mark.asyncio
    async def test_run_MD_compressed_out(self, tmpdir, starting_conf):
        """Run a short MD with compressed (xtc) output and do basic checks."""
        engine = GmxEngine(mdconfig=self.mdp_md_compressed_out,
                           gro_file=self.gro,
                           top_file=self.top)
        await engine.prepare(starting_configuration=starting_conf,
                             workdir=tmpdir,
                             deffnm="test")
        nsteps = 10
        traj = await engine.run(nsteps=nsteps)
        # some basic checks
        # expected frames: initial configuration + one frame every nstout steps
        # FIX: operands were swapped (engine.nstout / nsteps); it only passed
        #      because nsteps == nstout == 10 here
        assert len(traj) == nsteps / engine.nstout + 1
        assert engine.steps_done == nsteps
        assert np.isclose(engine.time_done, nsteps * engine.dt)

    @pytest.mark.slow
    @needs_gmx_install
    @pytest.mark.parametrize("starting_conf",
                             [None,
                              Trajectory("tests/test_data/trajectory/ala_traj.trr",
                                         "tests/test_data/trajectory/ala.tpr")
                              ]
                             )
    @pytest.mark.asyncio
    async def test_run_MD_full_prec_out(self, tmpdir, starting_conf):
        """Run a short MD with full precision (trr) output and do basic checks."""
        engine = GmxEngine(mdconfig=self.mdp_md_full_prec_out,
                           gro_file=self.gro,
                           top_file=self.top,
                           output_traj_type="trr")
        await engine.prepare(starting_configuration=starting_conf,
                             workdir=tmpdir,
                             deffnm="test")
        nsteps = 10
        traj = await engine.run(nsteps=nsteps)
        # some basic checks
        # expected frames: initial configuration + one frame every nstout steps
        # FIX: swapped operands, see test_run_MD_compressed_out
        assert len(traj) == nsteps / engine.nstout + 1
        assert engine.steps_done == nsteps
        assert np.isclose(engine.time_done, nsteps * engine.dt)
159 |
--------------------------------------------------------------------------------
/tests/gromacs/test_mdp.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import pytest
16 |
17 |
18 | from asyncmd.gromacs import MDP
19 |
20 |
class Test_MDP:
    """Tests for the gromacs MDP config parser class."""
    # NOTE: we are not testing any of the FLOAT,INT,STR params (yet)!
    def setup_method(self):
        """Create an MDP object from an empty mdp file for each test."""
        # just an empty file
        empty_mdp_file = "tests/test_data/gromacs/empty.mdp"
        self.empty_mdp = MDP(original_file=empty_mdp_file)

    @pytest.mark.parametrize(["line", "beauty"],
                             [ # comment lines
                              (";", {}),
                              ("; comment", {}),
                              ("; comment =", {}),
                              ("; comment = comment", {}),
                              (";comment=comment", {}),
                              # empty options, but with key
                              ("ich-bin-ein-key = ", {"ich-bin-ein-key": []}),
                              # CHARMM-GUI (still uses old gmx mdp format)
                              ("ich_bin_ein_key = ", {"ich-bin-ein-key": []}),
                              # options with values
                              ("key = option", {"key": ["option"]}),
                              ("key=option", {"key": ["option"]}),
                              ("key = tilded~option", {"key": ["tilded~option"]}),
                              ("key = slashed/option", {"key": ["slashed/option"]}),
                              ("key = dotted.option", {"key": ["dotted.option"]}),
                              ("key = minus-option", {"key": ["minus-option"]}),
                              ]
                             )
    def test_parse_line(self, line, beauty):
        """A single mdp line must parse into the expected key/values dict."""
        # here we test the parse_line and key_char replace funcs
        ret_dict = self.empty_mdp._parse_line(line=line)
        assert ret_dict == beauty
        for key, val in beauty.items():
            assert ret_dict[key] == val

    @pytest.mark.parametrize("line",
                             # these are all misformatted mdp lines
                             # (and should therefore raise ValueErrors)
                             ["not a valid mdp line",
                              "also not = a valid mdp line",
                              "still not ; a valid mdp line",
                              ]
                             )
    def test_parse_line_errs(self, line):
        """Misformatted mdp lines must raise a ValueError on parsing."""
        with pytest.raises(ValueError):
            _ = self.empty_mdp._parse_line(line=line)

    @pytest.mark.parametrize(["key", "value", "beauty"],
                             [
                              # ref-t is a float param (not singleton)
                              ("ref-t", ["303.2", "303.4"], [303.2, 303.4]),
                              ("ref-t", [303.2, 303.4], [303.2, 303.4]),
                              # dt is a float singleton key
                              ("dt", 0.002, 0.002),
                              ("dt", "0.002", 0.002),
                              # nsteps is an int singleton option
                              ("nsteps", "100", 100),
                              ("nsteps", 100, 100),
                              # annealing-npoints is an int param (not singleton)
                              ("annealing-npoints", ["1", "2", "3"], [1, 2, 3]),
                              ("annealing-npoints", [1, 2, 3], [1, 2, 3]),
                              # string singleton param
                              ("integrator", "test", "test"),
                              # charm gui char-replace (unknown key, so not singleton)
                              ("charmm_GUI_option", "needs_no_value", ["needs_no_value"]),
                              ("charmm_GUI_option", "needs-no-value", ["needs-no-value"]),
                              ])
    def test_setitem_getitem_delitem(self, key, value, beauty):
        """Values set via __setitem__ must round-trip through __getitem__
        (with type conversion applied) and vanish after __delitem__."""
        # set it
        self.empty_mdp[key] = value
        # get it
        val_we_got = self.empty_mdp[key]
        # check that it matches up
        if getattr(val_we_got, "__len__", None) is None:
            assert val_we_got == beauty
        else:
            for subval_we_got, subval_beauty in zip(val_we_got, beauty):
                assert subval_we_got == subval_beauty
        # delete it
        del self.empty_mdp[key]
        # should be gone now, so we should get a KeyError
        with pytest.raises(KeyError):
            _ = self.empty_mdp[key]
103 |
--------------------------------------------------------------------------------
/tests/gromacs/test_utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 |
16 | import pytest
17 | from asyncmd.gromacs import MDP
18 | from asyncmd.gromacs import utils
19 |
20 |
@pytest.mark.parametrize("file_name", ["empty.mdp", "gen-vel-continuation.mdp"])
def test_ensure_mdconfig_options(file_name: str):
    """ensure_mdp_options must enforce gen-vel=no and continuation=yes,
    both when the options are missing and when they have other values."""
    mdp = MDP(original_file=f"tests/test_data/gromacs/{file_name}")
    utils.ensure_mdp_options(mdp)
    # the defaults must be enforced regardless of the file's original content
    assert mdp["gen-vel"] == "no"
    assert mdp["continuation"] == "yes"
28 |
--------------------------------------------------------------------------------
/tests/test_data/gromacs/empty.mdp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bio-phys/asyncmd/831bd4f9c24d3b856cfa3e8f518241e55afce81e/tests/test_data/gromacs/empty.mdp
--------------------------------------------------------------------------------
/tests/test_data/gromacs/gen-vel-continuation.mdp:
--------------------------------------------------------------------------------
1 | ; Example MDP file to cover different cases for ensure_mdp_options function
2 |
3 | ; Case 1: gen-vel is set to "yes"
4 | gen-vel = yes
5 |
6 | ; Case 2: continuation is set to "no"
7 | continuation = no
8 |
--------------------------------------------------------------------------------
/tests/test_data/gromacs/md_compressed_out.mdp:
--------------------------------------------------------------------------------
1 | integrator = md
2 | dt = 0.002
3 | nsteps = -1 ; unlimited steps
4 | nstxout = 0
5 | nstvout = 0
6 | nstlog = 0
7 | nstenergy = 10
8 | nstxout-compressed = 10
9 | cutoff-scheme = Verlet
10 | verlet-buffer-tolerance = 0.001 ; kJ/mol/ps (default=0.005)
11 | rlist = 1.3
12 | coulombtype = PME
13 | rcoulomb = 1.1
14 | rvdw = 1.1
15 | Tcoupl = v-rescale
16 | tc-grps = Protein SOL
17 | tau-t = 0.5 0.5
18 | ref-t = 300 300
19 | Pcoupl = C-rescale ; requires gromacs 2021?
20 | ;Pcoupl = Berendsen
21 | tau-p = 1.0
22 | compressibility = 4.5e-5
23 | ref-p = 1.0
24 | gen-vel = no
25 | constraints = h-bonds
26 | ;lincs-iter = 1 ; 1 is the default
27 |
--------------------------------------------------------------------------------
/tests/test_data/gromacs/md_full_prec_out.mdp:
--------------------------------------------------------------------------------
1 | integrator = md
2 | dt = 0.002
3 | nsteps = -1 ; unlimited steps
4 | nstxout = 10
5 | nstvout = 10
6 | nstlog = 0
7 | nstenergy = 10
8 | nstxout-compressed = 0
9 | cutoff-scheme = Verlet
10 | verlet-buffer-tolerance = 0.001 ; kJ/mol/ps (default=0.005)
11 | rlist = 1.3
12 | coulombtype = PME
13 | rcoulomb = 1.1
14 | rvdw = 1.1
15 | Tcoupl = v-rescale
16 | tc-grps = Protein SOL
17 | tau-t = 0.5 0.5
18 | ref-t = 300 300
19 | Pcoupl = C-rescale ; requires gromacs 2021?
20 | ;Pcoupl = Berendsen
21 | tau-p = 1.0
22 | compressibility = 4.5e-5
23 | ref-p = 1.0
24 | gen-vel = no
25 | constraints = h-bonds
26 | ;lincs-iter = 1 ; 1 is the default
27 |
--------------------------------------------------------------------------------
/tests/test_data/gromacs/topol_amber99sbildn.top:
--------------------------------------------------------------------------------
1 | ;
2 | ; File 'topol_amber99sbildn.top' was generated
3 | ; By user: think (1000)
4 | ; On host: Kruemel
5 | ; At date: Sun Aug 29 15:06:35 2021
6 | ;
7 | ; This is a standalone topology file
8 | ;
9 | ; Created by:
10 | ; :-) GROMACS - gmx pdb2gmx, 2020.4 (-:
11 | ;
12 | ; Executable: /usr/local/gromacs-2020.4/bin/gmx
13 | ; Data prefix: /usr/local/gromacs-2020.4
14 | ; Working dir: /home/think/Documents/sources/OPS/aimmd/examples/distributed/gmx_infiles/new_from_scratch
15 | ; Command line:
16 | ; gmx pdb2gmx -ignh -f AD_initial_frame.pdb -p topol_amber99sbildn.top
17 | ; Force field was read from the standard GROMACS share directory.
18 | ;
19 |
20 | ; Include forcefield parameters
21 | #include "amber99sb-ildn.ff/forcefield.itp"
22 |
23 | [ moleculetype ]
24 | ; Name nrexcl
25 | Protein_chain_A 3
26 |
27 | [ atoms ]
28 | ; nr type resnr residue atom cgnr charge mass typeB chargeB massB
29 | ; residue 1 ACE rtp ACE q 0.0
30 | 1 CT 1 ACE CH3 1 -0.3662 12.01
31 | 2 HC 1 ACE HH31 2 0.1123 1.008
32 | 3 HC 1 ACE HH32 3 0.1123 1.008
33 | 4 HC 1 ACE HH33 4 0.1123 1.008
34 | 5 C 1 ACE C 5 0.5972 12.01
35 | 6 O 1 ACE O 6 -0.5679 16 ; qtot 0
36 | ; residue 2 ALA rtp ALA q 0.0
37 | 7 N 2 ALA N 7 -0.4157 14.01
38 | 8 H 2 ALA H 8 0.2719 1.008
39 | 9 CT 2 ALA CA 9 0.0337 12.01
40 | 10 H1 2 ALA HA 10 0.0823 1.008
41 | 11 CT 2 ALA CB 11 -0.1825 12.01
42 | 12 HC 2 ALA HB1 12 0.0603 1.008
43 | 13 HC 2 ALA HB2 13 0.0603 1.008
44 | 14 HC 2 ALA HB3 14 0.0603 1.008
45 | 15 C 2 ALA C 15 0.5973 12.01
46 | 16 O 2 ALA O 16 -0.5679 16 ; qtot 0
47 | ; residue 3 NME rtp NME q 0.0
48 | 17 N 3 NME N 17 -0.4157 14.01
49 | 18 H 3 NME H 18 0.2719 1.008
50 | 19 CT 3 NME CH3 19 -0.149 12.01
51 | 20 H1 3 NME HH31 20 0.0976 1.008
52 | 21 H1 3 NME HH32 21 0.0976 1.008
53 | 22 H1 3 NME HH33 22 0.0976 1.008 ; qtot 0
54 |
55 | [ bonds ]
56 | ; ai aj funct c0 c1 c2 c3
57 | 1 2 1
58 | 1 3 1
59 | 1 4 1
60 | 1 5 1
61 | 5 6 1
62 | 5 7 1
63 | 7 8 1
64 | 7 9 1
65 | 9 10 1
66 | 9 11 1
67 | 9 15 1
68 | 11 12 1
69 | 11 13 1
70 | 11 14 1
71 | 15 16 1
72 | 15 17 1
73 | 17 18 1
74 | 17 19 1
75 | 19 20 1
76 | 19 21 1
77 | 19 22 1
78 |
79 | [ pairs ]
80 | ; ai aj funct c0 c1 c2 c3
81 | 1 8 1
82 | 1 9 1
83 | 2 6 1
84 | 2 7 1
85 | 3 6 1
86 | 3 7 1
87 | 4 6 1
88 | 4 7 1
89 | 5 10 1
90 | 5 11 1
91 | 5 15 1
92 | 6 8 1
93 | 6 9 1
94 | 7 12 1
95 | 7 13 1
96 | 7 14 1
97 | 7 16 1
98 | 7 17 1
99 | 8 10 1
100 | 8 11 1
101 | 8 15 1
102 | 9 18 1
103 | 9 19 1
104 | 10 12 1
105 | 10 13 1
106 | 10 14 1
107 | 10 16 1
108 | 10 17 1
109 | 11 16 1
110 | 11 17 1
111 | 12 15 1
112 | 13 15 1
113 | 14 15 1
114 | 15 20 1
115 | 15 21 1
116 | 15 22 1
117 | 16 18 1
118 | 16 19 1
119 | 18 20 1
120 | 18 21 1
121 | 18 22 1
122 |
123 | [ angles ]
124 | ; ai aj ak funct c0 c1 c2 c3
125 | 2 1 3 1
126 | 2 1 4 1
127 | 2 1 5 1
128 | 3 1 4 1
129 | 3 1 5 1
130 | 4 1 5 1
131 | 1 5 6 1
132 | 1 5 7 1
133 | 6 5 7 1
134 | 5 7 8 1
135 | 5 7 9 1
136 | 8 7 9 1
137 | 7 9 10 1
138 | 7 9 11 1
139 | 7 9 15 1
140 | 10 9 11 1
141 | 10 9 15 1
142 | 11 9 15 1
143 | 9 11 12 1
144 | 9 11 13 1
145 | 9 11 14 1
146 | 12 11 13 1
147 | 12 11 14 1
148 | 13 11 14 1
149 | 9 15 16 1
150 | 9 15 17 1
151 | 16 15 17 1
152 | 15 17 18 1
153 | 15 17 19 1
154 | 18 17 19 1
155 | 17 19 20 1
156 | 17 19 21 1
157 | 17 19 22 1
158 | 20 19 21 1
159 | 20 19 22 1
160 | 21 19 22 1
161 |
162 | [ dihedrals ]
163 | ; ai aj ak al funct c0 c1 c2 c3 c4 c5
164 | 2 1 5 6 9
165 | 2 1 5 7 9
166 | 3 1 5 6 9
167 | 3 1 5 7 9
168 | 4 1 5 6 9
169 | 4 1 5 7 9
170 | 1 5 7 8 9
171 | 1 5 7 9 9
172 | 6 5 7 8 9
173 | 6 5 7 9 9
174 | 5 7 9 10 9
175 | 5 7 9 11 9
176 | 5 7 9 15 9
177 | 8 7 9 10 9
178 | 8 7 9 11 9
179 | 8 7 9 15 9
180 | 7 9 11 12 9
181 | 7 9 11 13 9
182 | 7 9 11 14 9
183 | 10 9 11 12 9
184 | 10 9 11 13 9
185 | 10 9 11 14 9
186 | 15 9 11 12 9
187 | 15 9 11 13 9
188 | 15 9 11 14 9
189 | 7 9 15 16 9
190 | 7 9 15 17 9
191 | 10 9 15 16 9
192 | 10 9 15 17 9
193 | 11 9 15 16 9
194 | 11 9 15 17 9
195 | 9 15 17 18 9
196 | 9 15 17 19 9
197 | 16 15 17 18 9
198 | 16 15 17 19 9
199 | 15 17 19 20 9
200 | 15 17 19 21 9
201 | 15 17 19 22 9
202 | 18 17 19 20 9
203 | 18 17 19 21 9
204 | 18 17 19 22 9
205 |
206 | [ dihedrals ]
207 | ; ai aj ak al funct c0 c1 c2 c3
208 | 1 7 5 6 4
209 | 5 9 7 8 4
210 | 9 17 15 16 4
211 | 15 19 17 18 4
212 |
213 | ; Include Position restraint file
214 | #ifdef POSRES
215 | #include "posre.itp"
216 | #endif
217 |
218 | ; Include water topology
219 | #include "amber99sb-ildn.ff/tip3p.itp"
220 |
221 | #ifdef POSRES_WATER
222 | ; Position restraint for each water oxygen
223 | [ position_restraints ]
224 | ; i funct fcx fcy fcz
225 | 1 1 1000 1000 1000
226 | #endif
227 |
228 | ; Include topology for ions
229 | #include "amber99sb-ildn.ff/ions.itp"
230 |
231 | [ system ]
232 | ; Name
233 | MDANALYSIS FRAMES FROM 0, STEP 1: Created by PDBWriter
234 |
235 | [ molecules ]
236 | ; Compound #mols
237 | Protein_chain_A 1
238 | SOL 543
239 |
--------------------------------------------------------------------------------
/tests/test_data/mdconfig/dummy_mdconfig.dat:
--------------------------------------------------------------------------------
1 | param_sans_dtype = test test123 12.3
2 | float_param = 1. 1.1 1.2 10.1
3 | float_singleton_param = 2.
4 | int_param = 1 2 3 4 5 6
5 | int_singleton_param = 6
6 | str_singleton_param = 1string
7 | empty_param =
8 |
--------------------------------------------------------------------------------
/tests/test_data/trajectory/ala.tpr:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:3c0e307293389ce3ac25601e20c808303d9092bdc80fbc15462778bfaa73fd10
3 | size 51908
4 |
--------------------------------------------------------------------------------
/tests/test_data/trajectory/ala_traj.trr:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:51a82fcab1899205abc35cb8d9b42ffda1efeaf2a6fd3c61637e086baa92f255
3 | size 715392
4 |
--------------------------------------------------------------------------------
/tests/test_data/trajectory/ala_traj.xtc:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:ab1c79fd1999f0b5b7b6886742a7aae7b5b0a97b7e918c51695f5a23d09455e0
3 | size 107004
4 |
--------------------------------------------------------------------------------
/tests/test_mdconfig.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with asyncmd. If not, see .
15 | import pytest
16 | import pickle
17 | import shlex
18 | import numpy as np
19 |
20 |
21 | from asyncmd.mdconfig import (FlagChangeList, TypedFlagChangeList,
22 | LineBasedMDConfig,
23 | )
24 |
25 |
class Test_FlagChangeList:
    """Tests for FlagChangeList, a list that flags whether it was mutated."""
    @pytest.mark.parametrize("no_list_data",
                             [(1, 2, 3),
                              "123",
                              1,
                              ]
                             )
    def test_init_errs(self, no_list_data):
        """Initialization from anything that is not a list must raise
        a TypeError."""
        with pytest.raises(TypeError):
            _ = FlagChangeList(data=no_list_data)

    @pytest.mark.parametrize(["test_data", "data_len"],
                             [(["test", "1", "2", "3"], 4),
                              (["test", 1, 2, 3], 4),
                              ([1., 2., 3., 4.], 4),
                              ([1, 2, 3, 4], 4),
                              ([1], 1),
                              ([], 0),
                              ]
                             )
    def test_len_getitem(self, test_data, data_len):
        """__len__ and __getitem__ must mirror the wrapped data."""
        flag_list = FlagChangeList(data=test_data)
        # check that length is correct
        assert len(flag_list) == data_len
        # and check that all items are what we expect
        for idx in range(len(flag_list)):
            assert flag_list[idx] == test_data[idx]

    @pytest.mark.parametrize("test_data",
                             [["test", "1", "2", "3"],
                              ["test", 1, 2, 3],
                              [1., 2., 3., 4.],
                              [1, 2, 3, 4],
                              [1],
                              ]
                             )
    def test_changed_setitem_delitem_insert(self, test_data):
        """Mutating operations (setitem/delitem/insert) must flip the
        `changed` flag, while read access must leave it False."""
        flag_list = FlagChangeList(data=test_data)
        assert not flag_list.changed
        # get an item and check that everything is still good
        _ = flag_list[0]
        assert not flag_list.changed
        # modify and see that we do set changed=True
        flag_list[0] = "1234"
        assert flag_list.changed
        # reinit to get a new list with changed=False
        flag_list = FlagChangeList(data=test_data)
        assert not flag_list.changed  # as it should be
        # now delete an item and check again
        del flag_list[0]
        assert flag_list.changed
        # again reinit, this time to test insert
        flag_list = FlagChangeList(data=test_data)
        assert not flag_list.changed
        obj_to_insert = object()
        # get a random index to insert at
        if len(flag_list) > 0:
            idx = np.random.randint(low=0, high=len(flag_list))
        else:
            idx = 0
        flag_list.insert(idx, obj_to_insert)
        assert flag_list[idx] is obj_to_insert
88 |
89 |
class Test_TypedFlagChangeList:
    """Tests for TypedFlagChangeList: FlagChangeList that casts items to dtype."""

    @pytest.mark.parametrize("no_list_data",
                             [(1, 2, 3),
                              "123",
                              1,
                              ]
                             )
    def test_init(self, no_list_data):
        # all data are castable to int!
        dtype = int
        flag_list = TypedFlagChangeList(data=no_list_data, dtype=dtype)
        if (getattr(no_list_data, "__len__", None) is None or isinstance(no_list_data, str)):
            # strings have a length but are considered 'singletons'
            # data has no length, so it must be the first idx
            assert flag_list[0] == dtype(no_list_data)
        else:
            # data must be an iterable: every item must have been cast to
            # dtype and stored in the original order
            # (BUGFIX: previously this only asserted the truthiness of
            #  dtype(no_list_data[idx]) and never compared it against val)
            for idx, val in enumerate(flag_list):
                assert val == dtype(no_list_data[idx])

    @pytest.mark.parametrize(["test_data", "data_len", "data_dtype"],
                             [(["test", "1", "2", "3"], 4, str),
                              (["0", 1, 2, 3], 4, int),
                              ([1., 2., 3., 4.], 4, float),
                              ([1, 2, 3, 4], 4, int),
                              ([1], 1, int),
                              ([], 0, int),  # here dtype should not matter
                              ([], 0, str),
                              ([], 0, float),
                              ]
                             )
    def test_len_getitem(self, test_data, data_len, data_dtype):
        flag_list = TypedFlagChangeList(data=test_data, dtype=data_dtype)
        # check that length is correct
        assert len(flag_list) == data_len
        # and check that all items are what we expect (cast to dtype)
        for idx in range(len(flag_list)):
            assert flag_list[idx] == data_dtype(test_data[idx])

    @pytest.mark.parametrize(["test_data", "data_dtype"],
                             [(["test", "1", "2", "3"], str),
                              # the "0" should become an int!
                              (["0", 1, 2, 3], int),
                              ([1., 2., 3., 4.], float),
                              ([1, 2, 3, 4], int),
                              # the ints should become floats!
                              ([1, 2., 3, 4], float),
                              ([1], int),
                              ]
                             )
    def test_changed_setitem_delitem_insert(self, test_data, data_dtype):
        flag_list = TypedFlagChangeList(data=test_data, dtype=data_dtype)
        assert not flag_list.changed
        # get an item and check that everything is still good
        _ = flag_list[0]
        assert not flag_list.changed
        # modify and see that we do set changed=True
        flag_list[0] = "1234"  # can be converted to int, float and str
        assert flag_list.changed
        # reinit to get a new list with changed=False
        flag_list = TypedFlagChangeList(data=test_data, dtype=data_dtype)
        assert not flag_list.changed  # as it should be
        # now delete an item and check again
        del flag_list[0]
        assert flag_list.changed
        # again reinit, this time to test insert
        flag_list = TypedFlagChangeList(data=test_data, dtype=data_dtype)
        assert not flag_list.changed
        obj_to_insert = "1234"  # again: castable to int, float and str
        # get a random index to insert at
        if len(flag_list) > 0:
            idx = np.random.randint(low=0, high=len(flag_list))
        else:
            idx = 0
        flag_list.insert(idx, obj_to_insert)
        # cast the object to the right type before comparing, since the
        # list stores the cast value
        obj_to_insert = data_dtype(obj_to_insert)
        assert flag_list[idx] == obj_to_insert
168 |
169 |
170 | # for the tests below we need to overwite LineBasedMDConfig._parse_line,
171 | # otherwise we can not initialize the ABC
class DummyLineBasedMDConfig(LineBasedMDConfig):
    """Minimal concrete LineBasedMDConfig so the ABC can be instantiated in tests."""

    _KEY_VALUE_SEPARATOR = " = "
    _INTER_VALUE_CHAR = " "
    # options that may carry multiple float values
    _FLOAT_PARAMS = ["float_param"]
    # options restricted to exactly one float value
    _FLOAT_SINGLETON_PARAMS = ["float_singleton_param"]
    # options that may carry multiple int values
    _INT_PARAMS = ["int_param"]
    # options restricted to exactly one int value
    _INT_SINGLETON_PARAMS = ["int_singleton_param"]
    # options restricted to exactly one string value
    _STR_SINGLETON_PARAMS = ["str_singleton_param"]

    def _parse_line(self, line):
        """Tokenize one 'key = val1 val2 ...' line into {key: [values]}."""
        parser = shlex.shlex(line, posix=True)
        parser.commenters = ""
        # TODO: what wordchars do we want for testing?!
        parser.wordchars += "-./~"
        tokens = list(parser)
        # a valid line has at least a key and the separator;
        # anything else (empty line, missing '=') is an error
        if len(tokens) < 2 or tokens[1] != "=":
            raise RuntimeError("Smth went horribly wrong?!")
        # zero values (empty option) naturally yields an empty list here
        return {tokens[0]: tokens[2:]}
200 |
201 |
class Test_LineBasedMDConfig:
    """Tests for parsing, mutation tracking, writing and pickling of
    LineBasedMDConfig (via the DummyLineBasedMDConfig subclass)."""

    def setup_method(self):
        def compare_mdconf_vals_to_beauty(mdconf, beauty):
            # make sure that we do not have any extra keys!
            assert len(mdconf) == len(beauty)
            # this also checks __getitem__
            for key, beauty_val in beauty.items():
                if "singleton" in key:
                    assert mdconf[key] == beauty_val
                else:
                    # check both ways of accessing:
                    # 1) get the full list and check all its items
                    val_for_key = mdconf[key]
                    assert len(val_for_key) == len(beauty_val)
                    for subval, beauty_subval in zip(val_for_key, beauty_val):
                        assert subval == beauty_subval
                    # 2) get single items for the key and check them
                    # (BUGFIX: previously this compared beauty[key][idx]
                    #  against beauty_subval, i.e. beauty with itself,
                    #  never touching mdconf)
                    for idx, beauty_subval in enumerate(beauty_val):
                        assert mdconf[key][idx] == beauty_subval

        # bind comparison function to self to use in all tests
        self.compare_mdconf_vals_to_beauty = compare_mdconf_vals_to_beauty

        # and bind the *uninitialized* class to self
        self.dummy_class = DummyLineBasedMDConfig

    @pytest.mark.parametrize(["infile_to_parse", "beauty"],
                             [("tests/test_data/mdconfig/dummy_mdconfig.dat",
                               {"param_sans_dtype": ["test", "test123", "12.3"],
                                "float_param": [1.0, 1.1, 1.2, 10.1],
                                "float_singleton_param": 2.0,
                                "int_param": [1, 2, 3, 4, 5, 6],
                                "int_singleton_param": 6,
                                "str_singleton_param": "1string",
                                "empty_param": [],
                                }
                               ),
                              ]
                             )
    def test_changed_getitem_setitem_delitem(self, infile_to_parse, beauty):
        mdconf = self.dummy_class(original_file=infile_to_parse)
        # first check that everything is parsed as we expect
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf, beauty=beauty)
        # now check that changed=False
        assert not mdconf.changed
        # now change stuff and assert again
        # this tests __setitem__
        mdconf["some_key"] = ["123", "456"]
        assert mdconf["some_key"][0] == "123"
        assert mdconf["some_key"][1] == "456"
        assert mdconf.changed
        # reload/reparse to get a 'fresh' mdconf
        mdconf.parse()
        # check again that everything is correct
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf, beauty=beauty)
        assert not mdconf.changed
        # now set single items in sublists
        mdconf["int_param"][0] = 1337
        assert mdconf.changed
        # reparse and set singleton item
        mdconf.parse()
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf, beauty=beauty)
        assert not mdconf.changed
        mdconf["float_singleton_param"] = 10.1
        assert mdconf.changed
        # reparse to get a fresh mdconf
        mdconf.parse()
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf, beauty=beauty)
        # delete stuff (full keys first)
        del mdconf["float_param"]
        assert mdconf.changed
        # reparse and delete single items from sublist
        mdconf.parse()
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf, beauty=beauty)
        assert not mdconf.changed
        del mdconf["float_param"][0]
        assert mdconf.changed

    @pytest.mark.parametrize(["infile_to_parse", "beauty"],
                             [("tests/test_data/mdconfig/dummy_mdconfig.dat",
                               {"param_sans_dtype": ["test", "test123", "12.3"],
                                "float_param": [1.0, 1.1, 1.2, 10.1],
                                "float_singleton_param": 2.0,
                                "int_param": [1, 2, 3, 4, 5, 6],
                                "int_singleton_param": 6,
                                "str_singleton_param": "1string",
                                "empty_param": [],
                                }
                               ),
                              ]
                             )
    def test_iter_len(self, infile_to_parse, beauty):
        mdconf = self.dummy_class(original_file=infile_to_parse)
        assert len(mdconf) == len(beauty)
        for key, val in mdconf.items():
            if "singleton" in key:
                # only one element
                assert mdconf[key] == beauty[key]
            else:
                # compare all elements separately
                for subval, subval_beauty in zip(val, beauty[key]):
                    assert subval == subval_beauty

    @pytest.mark.parametrize(["infile_to_parse", "beauty"],
                             [("tests/test_data/mdconfig/dummy_mdconfig.dat",
                               {"param_sans_dtype": ["test", "test123", "12.3"],
                                "float_param": [1.0, 1.1, 1.2, 10.1],
                                "float_singleton_param": 2.0,
                                "int_param": [1, 2, 3, 4, 5, 6],
                                "int_singleton_param": 6,
                                "str_singleton_param": "1string",
                                "empty_param": [],
                                }
                               ),
                              ]
                             )
    def test_parse_write_pickle(self, infile_to_parse, beauty, tmp_path):
        mdconf = self.dummy_class(original_file=infile_to_parse)
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf, beauty=beauty)
        # write the parsed config out to a new file
        outfile = tmp_path / "out_conf.dat"
        mdconf.write(outfile=outfile)
        # check that we raise an err if file exists
        with pytest.raises(ValueError):
            mdconf.write(outfile=outfile, overwrite=False)
        # and check that it works when we pass overwrite=True
        mdconf.write(outfile=outfile, overwrite=True)
        # test pickling
        outpickle = tmp_path / "pickle.pckl"
        with open(outpickle, "wb") as pfile:
            pickle.dump(mdconf, pfile)
        # now load the pickle and parse the written file,
        # check that everything matches
        with open(outpickle, "rb") as pfile:
            mdconf_from_pickle = pickle.load(pfile)
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf_from_pickle,
                                           beauty=beauty)
        # and from parsing
        mdconf_parsed_written_out = self.dummy_class(original_file=outfile)
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf_parsed_written_out,
                                           beauty=beauty)
        # write out a modified mdconf and read it back in
        mdconf["new_value"] = ["new_value"]
        # also add it to beauty to be able to use our compare func
        beauty["new_value"] = ["new_value"]
        outfile_for_modified = tmp_path / "out_mod_conf.dat"
        mdconf.write(outfile=outfile_for_modified)
        mdconf_parsed_modified = self.dummy_class(
            original_file=outfile_for_modified
        )
        self.compare_mdconf_vals_to_beauty(mdconf=mdconf_parsed_modified,
                                           beauty=beauty)
355 |
--------------------------------------------------------------------------------
/tests/test_slurm.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
# along with asyncmd. If not, see <https://www.gnu.org/licenses/>.
15 | import pytest
16 | import logging
17 | from unittest.mock import patch, PropertyMock
18 |
19 | import asyncmd
20 | from asyncmd.slurm import SlurmClusterMediator, SlurmProcess
21 |
22 |
23 | LOGGER = logging.getLogger(__name__)
24 |
25 |
class Test_SlurmProcess:
    """Tests for SlurmProcess option sanitizing and time-limit conversion."""

    @pytest.mark.parametrize("add_non_protected_sbatch_options_to_keep", [True, False])
    @pytest.mark.parametrize(["sbatch_options", "opt_name", "expected_opt_len"],
                             [({"job-name": "TO_REMOVE"}, "job-name", 0),
                              ({"chdir": "TO_REMOVE"}, "chdir", 0),
                              ({"output": "TO_REMOVE"}, "output", 0),
                              ({"error": "TO_REMOVE"}, "error", 0),
                              ({"input": "TO_REMOVE"}, "input", 0),
                              ({"exclude": "TO_REMOVE"}, "exclude", 0),
                              ({"parsable": "TO_REMOVE"}, "parsable", 0),
                              ({"keep_option": "TO_KEEP"}, "keep_option", 1),
                              ]
                             )
    def test__sanitize_sbatch_options_remove_protected(self,
                                                       add_non_protected_sbatch_options_to_keep,
                                                       sbatch_options, opt_name, expected_opt_len,
                                                       caplog, monkeypatch):
        """Options used internally by SlurmProcess must be stripped with a warning."""
        if add_non_protected_sbatch_options_to_keep:
            # Build a *new* dict (never mutate/update the parametrized one,
            # it would carry over to the next parameter combination).
            sbatch_options = {"other_keep_option": "TO_KEEP_TOO",
                              **sbatch_options}
            # rebind instead of modifying in place, for the same reason
            expected_opt_len = expected_opt_len + 1
        with monkeypatch.context() as mpatch:
            # patch so the tests run on machines without slurm installed
            # (SlurmProcess checks that sbatch and friends are executable at init)
            mpatch.setattr("asyncmd.slurm.ensure_executable_available",
                           lambda _: "/usr/bin/true")
            with caplog.at_level(logging.WARNING):
                proc = SlurmProcess(jobname="test",
                                    sbatch_script="/usr/bin/true",
                                    sbatch_options=sbatch_options)
            # the protected option must be gone, everything else must survive
            assert len(proc.sbatch_options) == expected_opt_len
            # and if something was removed, the warning must have been logged
            if len(sbatch_options) != expected_opt_len:
                expected_warn = (f"Removing sbatch option '{opt_name}' "
                                 "from 'sbatch_options'"
                                 " because it is used internaly by the `SlurmProcess`.")
                assert expected_warn in caplog.text

    @pytest.mark.parametrize(["sbatch_options", "time", "expect_warn"],
                             [({"time": "20:00"}, None, False),
                              ({"time": "15:00"}, 0.25, True),
                              ({}, 0.25, False),
                              ]
                             )
    def test__sanitize_sbatch_options_remove_time(self, sbatch_options, time,
                                                  expect_warn,
                                                  caplog, monkeypatch):
        """A 'time' key in sbatch_options must lose against the explicit time argument."""
        with monkeypatch.context() as mpatch:
            # patch so the tests run on machines without slurm installed
            # (SlurmProcess checks that sbatch and friends are executable at init)
            mpatch.setattr("asyncmd.slurm.ensure_executable_available",
                           lambda _: "/usr/bin/true")
            with caplog.at_level(logging.DEBUG):
                proc = SlurmProcess(jobname="test",
                                    sbatch_script="/usr/bin/true",
                                    time=time,
                                    sbatch_options=sbatch_options)
            # 'time' given separately must remove it from sbatch_options
            if time is not None:
                assert len(proc.sbatch_options) == 0
            # removal due to the doubly-given option must be warned about
            if expect_warn:
                expected_warn = ("Removing sbatch option 'time' from "
                                 "'sbatch_options'. "
                                 "Using the 'time' argument instead.")
                assert expected_warn in caplog.text
            # no explicit time argument: the one from sbatch_options is used
            # (checked via the debug log)
            if time is None and not expect_warn:
                expected_debug = ("Using 'time' from 'sbatch_options' "
                                  "because self.time is None.")
                assert expected_debug in caplog.text

    @pytest.mark.parametrize(["time_in_h", "beauty"],
                             [(0.25, "15:0"),
                              (1, "60:0"),
                              (10, "600:0"),
                              (1/3600, "0:1"),
                              (15/3600, "0:15"),
                              (1/60, "1:0"),
                              (5/60, "5:0"),
                              ]
                             )
    def test__slurm_timelimit_from_time_in_hours(self, time_in_h, beauty,
                                                 monkeypatch):
        """Hours (float) must be converted to slurm's 'minutes:seconds' format."""
        with monkeypatch.context() as mpatch:
            # patch so the tests run on machines without slurm installed
            # (SlurmProcess checks that sbatch and friends are executable at init)
            mpatch.setattr("asyncmd.slurm.ensure_executable_available",
                           lambda _: "/usr/bin/true")
            proc = SlurmProcess(jobname="test",
                                sbatch_script="/usr/bin/true",
                                time=time_in_h)
            converted = proc._slurm_timelimit_from_time_in_hours(
                time=time_in_h)
            assert converted == beauty
125 |
126 |
class MockSlurmExecCompleted:
    """Mimic an asyncio subprocess whose sacct output reports a COMPLETED job."""

    async def communicate(self):
        # (stdout, stderr) pair as sacct would print it for a finished job
        stdout = b"15283217||||COMPLETED||||0:0||||ravc4011||||\n"
        return (stdout, b"")
130 |
131 |
async def mock_slurm_call_completed(*args, **kwargs):
    """Replacement for create_subprocess_exec that yields a completed-job mock."""
    # all arguments are accepted but irrelevant for the canned reply
    mock_proc = MockSlurmExecCompleted()
    return mock_proc
134 |
135 |
@patch("asyncio.subprocess.create_subprocess_exec", new=mock_slurm_call_completed)
@patch("os.path.abspath", return_value="/usr/bin/true")
@patch("time.time", return_value=7)
@pytest.mark.asyncio
async def test_get_info_for_job_completed(mock_time, mock_abspath):
    """get_info_for_job must parse a COMPLETED sacct line into the job info."""
    mediator = SlurmClusterMediator()
    mock_abspath.assert_called()
    # pretend the job is already known (and believed to be RUNNING)
    mediator._jobids = ["15283217"]
    mediator._jobids_sacct = ["15283217"]
    mediator._jobinfo = {"15283217": {"state": "RUNNING"}}

    # Call the function to update the cache and get the new result;
    # this additionally calls _update_cached_jobinfo() under the hood
    job_info = await mediator.get_info_for_job("15283217")
    mock_time.assert_called()

    # the mocked sacct line must have been parsed into these fields
    assert job_info["nodelist"] == ["ravc4011"]
    assert job_info["exitcode"] == "0:0"
    assert job_info["state"] == "COMPLETED"
    assert job_info["parsed_exitcode"] == 0
156 |
157 |
class MockSlurmExecFailed:
    """Mimic an asyncio subprocess whose sacct output reports a FAILED job."""

    async def communicate(self):
        # (stdout, stderr) pair as sacct would print for a job failed with 1:15
        stdout = b"15283217||||FAILED||||1:15||||ravc4007||||\n"
        return (stdout, b"")
161 |
162 |
async def mock_slurm_call_failed(*args, **kwargs):
    """Replacement for create_subprocess_exec that yields a failed-job mock."""
    # all arguments are accepted but irrelevant for the canned reply
    mock_proc = MockSlurmExecFailed()
    return mock_proc
165 |
166 |
@patch("asyncio.subprocess.create_subprocess_exec", new=mock_slurm_call_failed)
@patch("os.path.abspath", return_value="/usr/bin/true")
@patch("time.time", return_value=7)
@pytest.mark.asyncio
async def test_get_info_for_job_failed(mock_time, mock_abspath):
    """get_info_for_job must parse a FAILED sacct line and report exitcode 1."""
    mediator = SlurmClusterMediator()
    # ten known nodes (ravc4001..ravc4010), the first six already excluded
    mediator._all_nodes = [f"ravc{4000 + i}" for i in range(1, 11)]
    mediator._exclude_nodes = [f"ravc{4000 + i}" for i in range(1, 7)]
    # pretend the job is already known (and believed to be RUNNING)
    mediator._jobids = ["15283217"]
    mediator._jobids_sacct = ["15283217"]
    mediator._jobinfo = {"15283217": {"state": "RUNNING"}}

    job_info = await mediator.get_info_for_job("15283217")

    # the mocked sacct line must have been parsed into these fields
    assert job_info["nodelist"] == ["ravc4007"]
    assert job_info["exitcode"] == "1:15"
    assert job_info["state"] == "FAILED"
    assert job_info["parsed_exitcode"] == 1
203 |
204 |
class MockSubprocess:
    """Minimal asyncio-subprocess stand-in: no output, successful return code."""

    async def communicate(self):
        # neither stdout nor stderr
        return b"", b""

    @property
    def returncode(self):
        # always report success
        return 0
212 |
213 |
@patch("asyncmd.slurm.SlurmProcess.slurm_cluster_mediator", new_callable=PropertyMock)
@patch("os.path.isfile", return_value=True)
@patch("os.path.abspath", return_value="/usr/bin/true")
@patch("asyncmd.slurm.logger")
@patch("subprocess.check_output", return_value="node1\nnode2\n")
@patch("asyncio.create_subprocess_exec", return_value=MockSubprocess())
def test_terminate(
    mock_create_subprocess_exec,
    mock_check_output,
    mock_logger,
    mock_isfile,
    mock_abspath,
    mock_slurm_cluster_mediator,
):
    """terminate() on a process with a job id must emit a debug log entry."""
    proc = SlurmProcess(jobname="test", sbatch_script="/usr/bin/true")
    # fake a submitted job
    # NOTE(review): _jobid is set to a *list* here while the mediator tests
    # use plain strings for job ids — confirm terminate() expects a list
    proc._jobid = ["15283217"]

    proc.terminate()

    mock_logger.debug.assert_called()
234 |
--------------------------------------------------------------------------------
/tests/trajectory/test_propagate.py:
--------------------------------------------------------------------------------
1 | # This file is part of asyncmd.
2 | #
3 | # asyncmd is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU General Public License as published by
5 | # the Free Software Foundation, either version 3 of the License, or
6 | # (at your option) any later version.
7 | #
8 | # asyncmd is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
# along with asyncmd. If not, see <https://www.gnu.org/licenses/>.
15 |
16 | import os
17 |
18 | import numpy as np
19 | import pytest
20 |
21 | from asyncmd import gromacs as asyncgmx
22 | from asyncmd import trajectory as asynctraj
23 |
24 |
def condition_function(traj):
    """Dummy condition: only frame index 3 fulfills it, regardless of traj."""
    fulfilled = np.zeros(5, dtype=bool)
    fulfilled[3] = True
    return fulfilled
27 |
28 |
@pytest.mark.asyncio
async def test_propagate():
    """Propagate from a starting configuration until the dummy condition hits."""
    mdp = asyncgmx.MDP("tests/test_data/gromacs/empty.mdp")
    # write compressed (xtc) output every step
    mdp["nstxout-compressed"] = 1
    mdp["nstxtcout"] = 1

    wrapped_condition = asynctraj.PyTrajectoryFunctionWrapper(
        condition_function
    )
    propagator = asynctraj.ConditionalTrajectoryPropagator(
        conditions=[wrapped_condition],
        engine_cls=asyncgmx.GmxEngine,
        engine_kwargs={
            "mdconfig": mdp,
        },
        walltime_per_part=0.01,
    )
    start_conf = asynctraj.trajectory.Trajectory(
        trajectory_files="tests/test_data/trajectory/ala_traj.xtc",
        structure_file="tests/test_data/trajectory/ala.gro",
    )
    # NOTE(review): this writes engine output into the repo's tests/
    # directory rather than a tmp_path — consider cleaning up/confirming
    workdir = "tests/trajectory"
    deffnm = "test_deffnm"

    trajectories, cond_fulfilled = await propagator.propagate(
        starting_configuration=start_conf, workdir=workdir, deffnm=deffnm
    )

    # at least one trajectory part and a fulfilled condition must come back
    assert len(trajectories) > 0
    assert cond_fulfilled is not None
59 |
--------------------------------------------------------------------------------