├── .git_archival.txt ├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── mirror_gitee.yml │ ├── pub-docker.yml │ ├── pub-pypi.yml │ ├── pyright.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── Dockerfile ├── LICENSE ├── README.md ├── codecov.yml ├── docs ├── .gitignore ├── Makefile ├── cli.rst ├── conf.py ├── developer.md ├── dpgen2_configs.rst ├── exploration.md ├── figs │ ├── dpgen-flowchart.jpg │ └── exploration-strategy.jpg ├── index.rst ├── input.md ├── operator.md ├── quickcli.md ├── requirements.txt └── submit_args.rst ├── dpgen2 ├── __about__.py ├── __init__.py ├── __main__.py ├── conf │ ├── __init__.py │ ├── alloy_conf.py │ ├── conf_generator.py │ ├── file_conf.py │ └── unit_cells.py ├── constants.py ├── entrypoint │ ├── __init__.py │ ├── args.py │ ├── common.py │ ├── download.py │ ├── gui.py │ ├── main.py │ ├── showkey.py │ ├── status.py │ ├── submit.py │ ├── watch.py │ └── workflow.py ├── exploration │ ├── __init__.py │ ├── deviation │ │ ├── __init__.py │ │ ├── deviation_manager.py │ │ └── deviation_std.py │ ├── render │ │ ├── __init__.py │ │ ├── traj_render.py │ │ └── traj_render_lammps.py │ ├── report │ │ ├── __init__.py │ │ ├── report.py │ │ ├── report_adaptive_lower.py │ │ ├── report_trust_levels_base.py │ │ ├── report_trust_levels_max.py │ │ └── report_trust_levels_random.py │ ├── scheduler │ │ ├── __init__.py │ │ ├── convergence_check_stage_scheduler.py │ │ ├── scheduler.py │ │ └── stage_scheduler.py │ ├── selector │ │ ├── __init__.py │ │ ├── conf_filter.py │ │ ├── conf_selector.py │ │ ├── conf_selector_frame.py │ │ └── distance_conf_filter.py │ └── task │ │ ├── __init__.py │ │ ├── caly_task_group.py │ │ ├── calypso │ │ ├── __init__.py │ │ └── caly_input.py │ │ ├── conf_sampling_task_group.py │ │ ├── customized_lmp_template_task_group.py │ │ ├── diffcsp_task_group.py │ │ ├── lmp │ │ ├── __init__.py │ │ └── lmp_input.py │ │ ├── lmp_template_task_group.py │ │ ├── make_task_group_from_config.py 
│ │ ├── npt_task_group.py │ │ ├── stage.py │ │ ├── task.py │ │ └── task_group.py ├── flow │ ├── __init__.py │ └── dpgen_loop.py ├── fp │ ├── __init__.py │ ├── abacus.py │ ├── cp2k.py │ ├── deepmd.py │ ├── gaussian.py │ ├── prep_fp.py │ ├── run_fp.py │ ├── vasp.py │ └── vasp_input.py ├── op │ ├── __init__.py │ ├── caly_evo_step_merge.py │ ├── collect_data.py │ ├── collect_run_caly.py │ ├── diffcsp_gen.py │ ├── md_settings.py │ ├── prep_caly_dp_optim.py │ ├── prep_caly_input.py │ ├── prep_caly_model_devi.py │ ├── prep_dp_train.py │ ├── prep_lmp.py │ ├── prep_relax.py │ ├── run_caly_dp_optim.py │ ├── run_caly_model_devi.py │ ├── run_dp_train.py │ ├── run_lmp.py │ ├── run_relax.py │ └── select_confs.py ├── superop │ ├── __init__.py │ ├── block.py │ ├── caly_evo_step.py │ ├── prep_run_calypso.py │ ├── prep_run_diffcsp.py │ ├── prep_run_dp_train.py │ ├── prep_run_fp.py │ └── prep_run_lmp.py └── utils │ ├── __init__.py │ ├── artifact_uri.py │ ├── binary_file_input.py │ ├── bohrium_config.py │ ├── chdir.py │ ├── dflow_config.py │ ├── dflow_query.py │ ├── download_dpgen2_artifacts.py │ ├── obj_artifact.py │ ├── run_command.py │ ├── setup_ele_temp.py │ └── step_config.py ├── examples ├── almg │ ├── dp_template.json │ ├── input-v005.json │ └── input.json ├── calypso │ ├── dp_dpa1_train.json │ ├── dpa2_train.json │ └── input.test.json ├── ch4 │ └── input_dist.json ├── chno │ ├── dpa_manyi.json │ ├── input.json │ └── template.lammps ├── diffcsp │ └── dpgen.json └── water │ ├── input_distill.json │ ├── input_dpgen.json │ ├── input_dpgen_abacus.json │ ├── input_dpgen_cp2k.json │ ├── input_dpgen_slurm.json │ └── input_multitask.json ├── pyproject.toml └── tests ├── __init__.py ├── conf ├── __init__.py ├── context.py ├── test_alloy_conf.py ├── test_file_conf.py └── test_unit_cell.py ├── context.py ├── entrypoint ├── __init__.py ├── context.py ├── test_argparse.py ├── test_submit.py ├── test_submit_args.py └── test_workflow.py ├── exploration ├── __init__.py ├── context.py ├── 
test_conf_filter.py ├── test_conf_selector_frame.py ├── test_customized_lmp_templ_task_group.py ├── test_devi_manager.py ├── test_distance_conf_filter.py ├── test_exploration_group.py ├── test_exploration_scheduler.py ├── test_lmp_templ_task_group.py ├── test_make_task_group_from_config.py ├── test_report_adaptive_lower.py ├── test_report_trust_levels.py └── test_traj_render_lammps.py ├── fake_data_set.py ├── fp ├── __init__.py ├── context.py ├── data.abacus │ ├── INPUT │ ├── Na_ONCV_PBE-1.0.upf │ ├── OUT.ABACUS │ │ ├── INPUT │ │ ├── STRU_READIN_ADJUST.cif │ │ ├── istate.info │ │ ├── kpoints │ │ ├── running_scf.log │ │ └── warning.log │ ├── log │ └── sys-2 │ │ ├── set.000 │ │ ├── box.npy │ │ └── coord.npy │ │ ├── type.raw │ │ └── type_map.raw ├── data.cp2k │ ├── CELL_PARAMETER │ ├── coord.xyz │ ├── input.inp │ ├── output.log │ └── sys-3 │ │ ├── set.000 │ │ ├── box.npy │ │ └── coord.npy │ │ ├── type.raw │ │ └── type_map.raw ├── data.vasp.kp.gf │ ├── INCAR │ ├── POSCAR │ ├── make_kp_test.py │ ├── test.000 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.001 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.002 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.003 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.004 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.005 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.006 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.007 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.008 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.009 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.010 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.011 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.012 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.013 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.014 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.015 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.016 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.017 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.018 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.019 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.020 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.021 │ │ ├── POSCAR │ │ └── kp.ref │ ├── 
test.022 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.023 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.024 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.025 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.026 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.027 │ │ ├── POSCAR │ │ └── kp.ref │ ├── test.028 │ │ ├── POSCAR │ │ └── kp.ref │ └── test.029 │ │ ├── POSCAR │ │ └── kp.ref ├── test_abacus.py ├── test_cp2k.py ├── test_prep_vasp.py ├── test_run_vasp.py └── test_vasp.py ├── mocked_ops.py ├── op ├── __init__.py ├── context.py ├── test_collect_data.py ├── test_collect_run_caly.py ├── test_diffcsp_gen.py ├── test_prep_caly_dp_optim.py ├── test_prep_caly_input.py ├── test_prep_caly_model_devi.py ├── test_prep_dp_train.py ├── test_prep_relax.py ├── test_run_caly_dp_optim.py ├── test_run_caly_model_devi.py ├── test_run_dp_train.py ├── test_run_lmp.py └── test_run_relax.py ├── test_block_cl.py ├── test_caly_evo_step.py ├── test_check_examples.py ├── test_collect_data.py ├── test_dpgen_loop.py ├── test_gui.py ├── test_merge_caly_evo_step.py ├── test_prep_run_caly.py ├── test_prep_run_diffcsp.py ├── test_prep_run_dp_labeling.py ├── test_prep_run_dp_train.py ├── test_prep_run_gaussian.py ├── test_prep_run_lmp.py ├── test_prep_run_vasp.py ├── test_select_confs.py └── utils ├── __init__.py ├── context.py ├── test_binary_file_input.py ├── test_bohrium_config.py ├── test_dflow_config.py ├── test_dflow_query.py ├── test_dl_dpgen2_arti.py ├── test_dl_dpgen2_arti_by_def.py ├── test_ele_temp.py ├── test_run_command.py └── test_step_config.py /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 0a89b7274d9ab85e81a92daad3ed7eb9c1d45046 2 | node-date: 2025-04-29T16:52:57+08:00 3 | describe-name: v0.0.7-146-g0a89b727 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | .git_archival.txt export-subst 2 | 
-------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/mirror_gitee.yml: -------------------------------------------------------------------------------- 1 | name: Mirror to Gitee Repository 2 | 3 | on: [ push, delete, create ] 4 | 5 | # Ensures that only one mirror task will run at a time. 6 | concurrency: 7 | group: git-mirror 8 | 9 | jobs: 10 | git-mirror: 11 | uses: deepmodeling/workflows/.github/workflows/mirror_gitee.yml@main 12 | secrets: 13 | SYNC_GITEE_PRIVATE_KEY: ${{ secrets.SYNC_GITEE_PRIVATE_KEY }} 14 | -------------------------------------------------------------------------------- /.github/workflows/pub-docker.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 
5 | 6 | name: Publish Docker image 7 | 8 | on: 9 | release: 10 | types: [published] 11 | push: 12 | branches: 13 | - 'master' 14 | 15 | jobs: 16 | push_to_registries: 17 | if: github.repository_owner == 'deepmodeling' 18 | name: Push Docker image to Docker Hub and ghcr 19 | runs-on: ubuntu-latest 20 | permissions: 21 | packages: write 22 | contents: read 23 | steps: 24 | - name: Check out the repo 25 | uses: actions/checkout@v4 26 | 27 | - name: Log in to Docker Hub 28 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 29 | with: 30 | username: ${{ secrets.DOCKER_USERNAME }} 31 | password: ${{ secrets.DOCKER_PASSWORD }} 32 | 33 | - name: Log in to the Container registry 34 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 35 | with: 36 | registry: ghcr.io 37 | username: ${{ github.actor }} 38 | password: ${{ secrets.GITHUB_TOKEN }} 39 | 40 | - name: Extract metadata (tags, labels) for Docker 41 | id: meta 42 | uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96 43 | with: 44 | images: | 45 | dptechnology/dpgen2 46 | ghcr.io/deepmodeling/dpgen2 47 | 48 | - name: Build and push Docker images 49 | uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 50 | with: 51 | context: . 
52 | push: true 53 | tags: ${{ steps.meta.outputs.tags }} 54 | labels: ${{ steps.meta.outputs.labels }} 55 | -------------------------------------------------------------------------------- /.github/workflows/pub-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python distributions to PyPI 2 | 3 | on: push 4 | 5 | jobs: 6 | build-n-publish: 7 | if: github.repository_owner == 'deepmodeling' 8 | name: Build and publish Python distributions to PyPI 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@master 12 | 13 | - name: Set up Python 3.9 14 | uses: actions/setup-python@master 15 | with: 16 | python-version: 3.9 17 | 18 | - name: Install pypa/build 19 | run: >- 20 | python -m 21 | pip install 22 | build 23 | --user 24 | 25 | - name: Build a binary wheel and a source tarball 26 | run: >- 27 | python -m 28 | build 29 | --sdist 30 | --wheel 31 | --outdir dist/ 32 | . 33 | 34 | - name: Publish distribution to PyPI 35 | if: startsWith(github.ref, 'refs/tags') 36 | uses: pypa/gh-action-pypi-publish@master 37 | with: 38 | password: ${{ secrets.PYPI_API_TOKEN }} 39 | -------------------------------------------------------------------------------- /.github/workflows/pyright.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | pull_request: 4 | name: Type checker 5 | jobs: 6 | test: 7 | name: pyright 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@master 11 | - uses: actions/setup-python@v5 12 | with: 13 | python-version: '3.10' 14 | - run: pip install -e .[gui] 15 | - uses: jakebailey/pyright-action@v2 16 | with: 17 | version: 1.1.318 18 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Python unit-tests 2 | 3 | on: 4 | - push 5 | - pull_request 6 | 7 | jobs: 8 | build: 9 | 
runs-on: ubuntu-latest 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | python-version: ["3.8", "3.9", "3.10"] 14 | 15 | steps: 16 | - uses: actions/checkout@master 17 | - name: Set up Python ${{ matrix.python-version }} 18 | uses: actions/setup-python@master 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | - name: Install dependencies 22 | run: | 23 | pip install -e .[test] 24 | pip install mock coverage pytest 25 | - name: Test 26 | run: SKIP_UT_WITH_DFLOW=0 DFLOW_DEBUG=1 coverage run --source=./dpgen2 -m unittest -v -f && coverage report 27 | - uses: codecov/codecov-action@v5 28 | env: 29 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | POTCAR 2 | *.pb 3 | *.bk 4 | *~ 5 | *.d 6 | *.o 7 | *.aux 8 | *.dvi 9 | *.pdf 10 | *.so 11 | *.bin 12 | *.intbin 13 | *.meta 14 | *.log 15 | *.bz2 16 | *.pyc 17 | \#* 18 | iter.* 19 | topol.tpr 20 | mdout.mdp 21 | traj*xtc 22 | traj.trr 23 | ener.edr 24 | state*cpt 25 | CMakeCache.txt 26 | CMakeFiles 27 | *.pb 28 | log.lammps 29 | restart.* 30 | dump.* 31 | *.out 32 | build 33 | dist 34 | dpgen2.egg-info 35 | */*.pyc 36 | */__pycache__ 37 | *.swp 38 | .eggs 39 | .coverage 40 | dbconfig.json 41 | .vscode/* 42 | .idea/* 43 | _build 44 | dpgen2/_version.py 45 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.4.0 6 | hooks: 7 | - id: trailing-whitespace 8 | exclude: "^tests/.*$" 9 | - id: end-of-file-fixer 10 | exclude: "^tests/fp/.*$" 11 | - id: check-yaml 12 | #- id: check-json 13 | - id: 
check-added-large-files 14 | - id: check-merge-conflict 15 | - id: check-symlinks 16 | - id: check-toml 17 | # Python 18 | - repo: https://github.com/astral-sh/ruff-pre-commit 19 | # Ruff version. 20 | rev: v0.1.3 21 | hooks: 22 | - id: ruff-format 23 | - repo: https://github.com/PyCQA/isort 24 | rev: 5.12.0 25 | hooks: 26 | - id: isort 27 | files: \.py$ 28 | # numpydoc 29 | - repo: https://github.com/Carreau/velin 30 | rev: 0.0.12 31 | hooks: 32 | - id: velin 33 | args: ["--write"] 34 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | 14 | # Build documentation in the docs/ directory with Sphinx 15 | sphinx: 16 | configuration: docs/conf.py 17 | 18 | # If using Sphinx, optionally build your docs in additional formats such as PDF 19 | formats: all 20 | 21 | # Optionally declare the Python requirements required to build your docs 22 | python: 23 | install: 24 | - requirements: docs/requirements.txt 25 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM dptechnology/dflow:latest 2 | 3 | WORKDIR /data/dpgen2 4 | COPY ./ ./ 5 | RUN pip install --no-cache-dir . 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DPGEN2 is the 2nd generation of the Deep Potential GENerator. 
2 | 3 | The document is available at [this page](https://docs.deepmodeling.com/projects/dpgen2) 4 | 5 | For developers please read the [developers guide](docs/developer.md) 6 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - "tests" 3 | coverage: 4 | status: 5 | project: 6 | default: 7 | threshold: 100% 8 | patch: 9 | default: 10 | threshold: 100% 11 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | api/ 2 | _build/ 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/cli.rst: -------------------------------------------------------------------------------- 1 | .. _cli: 2 | 3 | Command line interface 4 | ====================== 5 | 6 | .. _fullcli: 7 | .. 
argparse:: 8 | :module: dpgen2.entrypoint.main 9 | :func: main_parser 10 | :prog: dpgen2 11 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | from datetime import ( 16 | date, 17 | ) 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "DPGEN2" 22 | copyright = "2022-%d, DeepModeling" % date.today().year 23 | author = "DeepModeling" 24 | 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | "deepmodeling_sphinx", 33 | "dargs.sphinx", 34 | "myst_parser", 35 | "sphinx_book_theme", 36 | "sphinx.ext.viewcode", 37 | "sphinx.ext.intersphinx", 38 | "numpydoc", 39 | "sphinx.ext.autosummary", 40 | "sphinxarg.ext", 41 | ] 42 | 43 | # Add any paths that contain templates here, relative to this directory. 44 | templates_path = ["_templates"] 45 | 46 | # List of patterns, relative to source directory, that match files and 47 | # directories to ignore when looking for source files. 
48 | # This pattern also affects html_static_path and html_extra_path. 49 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 50 | 51 | 52 | # -- Options for HTML output ------------------------------------------------- 53 | 54 | # The theme to use for HTML and HTML Help pages. See the documentation for 55 | # a list of builtin themes. 56 | # 57 | html_theme = "sphinx_book_theme" 58 | 59 | # Add any paths that contain custom static files (such as style sheets) here, 60 | # relative to this directory. They are copied after the builtin static files, 61 | # so a file named "default.css" will overwrite the builtin "default.css". 62 | html_static_path = ["_static"] 63 | html_css_files = [] 64 | 65 | autodoc_default_flags = ["members"] 66 | autosummary_generate = True 67 | master_doc = "index" 68 | 69 | 70 | def run_apidoc(_): 71 | from sphinx.ext.apidoc import ( 72 | main, 73 | ) 74 | 75 | sys.path.append(os.path.join(os.path.dirname(__file__), "..")) 76 | cur_dir = os.path.abspath(os.path.dirname(__file__)) 77 | module = os.path.join(cur_dir, "..", "dpgen2") 78 | main( 79 | [ 80 | "-M", 81 | "--tocfile", 82 | "api", 83 | "-H", 84 | "DPGEN2 API", 85 | "-o", 86 | os.path.join(cur_dir, "api"), 87 | module, 88 | "--force", 89 | ] 90 | ) 91 | 92 | 93 | def setup(app): 94 | app.connect("builder-inited", run_apidoc) 95 | 96 | 97 | intersphinx_mapping = { 98 | "python": ("https://docs.python.org/", None), 99 | "numpy": ("https://docs.scipy.org/doc/numpy/", None), 100 | "dargs": ("https://docs.deepmodeling.com/projects/dargs/en/latest/", None), 101 | "dflow": ("https://deepmodeling.com/dflow/", None), 102 | "dpdata": ("https://docs.deepmodeling.com/projects/dpdata/en/latest/", None), 103 | } 104 | -------------------------------------------------------------------------------- /docs/dpgen2_configs.rst: -------------------------------------------------------------------------------- 1 | DPGEN2 configurations 2 | ===================== 3 | 4 | Op configs 5 | ---------- 6 | 7 
| RunDPTrain 8 | ^^^^^^^^^^ 9 | .. _rundptrainargs: 10 | .. dargs:: 11 | :module: dpgen2.op.run_dp_train 12 | :func: config_args 13 | 14 | RunLmp 15 | ^^^^^^ 16 | .. _runlmpargs: 17 | .. dargs:: 18 | :module: dpgen2.op.run_lmp 19 | :func: config_args 20 | 21 | RunVasp 22 | ^^^^^^^ 23 | .. _runvaspargs: 24 | .. dargs:: 25 | :module: dpgen2.op.run_vasp 26 | :func: config_args 27 | 28 | 29 | Alloy configs 30 | ------------- 31 | .. _alloy configs: 32 | .. dargs:: 33 | :module: dpgen2.utils.alloy_conf 34 | :func: generate_alloy_conf_args 35 | 36 | 37 | Task group configs 38 | ------------------ 39 | .. _task_group_configs: 40 | .. dargs:: 41 | :module: dpgen2.exploration.task 42 | :func: task_group_args 43 | 44 | 45 | Step configs 46 | ------------ 47 | .. _stepconfigargs: 48 | .. dargs:: 49 | :module: dpgen2.utils.step_config 50 | :func: step_conf_args 51 | -------------------------------------------------------------------------------- /docs/figs/dpgen-flowchart.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/docs/figs/dpgen-flowchart.jpg -------------------------------------------------------------------------------- /docs/figs/exploration-strategy.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/docs/figs/exploration-strategy.jpg -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ====================== 2 | DPGEN2's documentation 3 | ====================== 4 | 5 | DPGEN2 is the 2nd generation of the Deep Potential GENerator. 6 | 7 | .. Important:: The project DeePMD-kit is licensed under `GNU LGPLv3.0 `_. 8 | 9 | .. _user-guide: 10 | 11 | .. 
toctree:: 12 | :maxdepth: 3 13 | :caption: User Guide 14 | 15 | quickcli 16 | cli 17 | input.md 18 | submit_args 19 | .. dpgen2_configs 20 | 21 | .. _developer-guide: 22 | 23 | .. toctree:: 24 | :maxdepth: 5 25 | :caption: Developer Guide 26 | 27 | developer.md 28 | operator.md 29 | exploration.md 30 | api/api 31 | 32 | * :ref:`genindex` 33 | * :ref:`modindex` 34 | * :ref:`search` 35 | 36 | .. _feedback: 37 | .. _affiliated packages: 38 | -------------------------------------------------------------------------------- /docs/quickcli.md: -------------------------------------------------------------------------------- 1 | # Guide on dpgen2 commands 2 | 3 | One may use dpgen2 through command line interface. A full documentation of the cli is found [here](fullcli) 4 | 5 | ## Submit a workflow 6 | The dpgen2 workflow can be submitted via the `submit` command 7 | ```bash 8 | dpgen2 submit input.json 9 | ``` 10 | where `input.json` is the input script. A guide of writing the script is found [here](inputscript). 11 | When a workflow is submitted, a ID (WFID) of the workflow will be printed for later reference. 12 | 13 | ## Check the convergence of a workflow 14 | The convergence of stages of the workflow can be checked by the `status` command. It prints the indexes of the finished stages, iterations, and the accurate, candidate and failed ratio of explored configurations of each iteration. 15 | ```bash 16 | $ dpgen2 status input.json WFID 17 | # stage id_stg. iter. accu. cand. fail. 
18 | # Stage 0 -------------------- 19 | 0 0 0 0.8333 0.1667 0.0000 20 | 0 1 1 0.7593 0.2407 0.0000 21 | 0 2 2 0.7778 0.2222 0.0000 22 | 0 3 3 1.0000 0.0000 0.0000 23 | # Stage 0 converged YES reached max numb iterations NO 24 | # All stages converged 25 | ``` 26 | 27 | ## Watch the progress of a workflow 28 | The progress of a workflow can be watched on-the-fly 29 | ```bash 30 | $ dpgen2 watch input.json WFID 31 | INFO:root:steps iter-000000--prep-run-train----------------------- finished 32 | INFO:root:steps iter-000000--prep-run-explore--------------------- finished 33 | INFO:root:steps iter-000000--prep-run-fp-------------------------- finished 34 | INFO:root:steps iter-000000--collect-data------------------------- finished 35 | INFO:root:steps iter-000001--prep-run-train----------------------- finished 36 | INFO:root:steps iter-000001--prep-run-explore--------------------- finished 37 | ... 38 | ``` 39 | The artifacts can be downloaded on-the-fly with `-d` flag. Note that the existing files are automatically skipped if one sets `dflow_config["archive_mode"] = None`. 40 | 41 | 42 | ## Show the keys of steps 43 | 44 | Each dpgen2 step is assigned a unique key. 
The keys of the finished steps can be checked with `showkey` command 45 | ```bash $ dpgen2 showkey input.json WFID 46 | 0 : iter-000000--prep-train 47 | 1 -> 4 : iter-000000--run-train-0000 -> iter-000000--run-train-0003 48 | 5 : iter-000000--prep-lmp 49 | 6 -> 14 : iter-000000--run-lmp-000000 -> iter-000000--run-lmp-000008 50 | 15 : iter-000000--select-confs 51 | 16 : iter-000000--prep-fp 52 | 17 -> 20 : iter-000000--run-fp-000000 -> iter-000000--run-fp-000003 53 | 21 : iter-000000--collect-data 54 | 22 : iter-000000--scheduler 55 | 23 : iter-000000--id 56 | 24 : iter-000001--prep-train 57 | 25 -> 28 : iter-000001--run-train-0000 -> iter-000001--run-train-0003 58 | 29 : iter-000001--prep-lmp 59 | 30 -> 38 : iter-000001--run-lmp-000000 -> iter-000001--run-lmp-000008 60 | 39 : iter-000001--select-confs 61 | 40 : iter-000001--prep-fp 62 | 41 -> 44 : iter-000001--run-fp-000000 -> iter-000001--run-fp-000003 63 | 45 : iter-000001--collect-data 64 | 46 : iter-000001--scheduler 65 | 47 : iter-000001--id 66 | ``` 67 | 68 | 69 | ## Resubmit a workflow 70 | 71 | If a workflow stopped abnormally, one may submit a new workflow with some steps of the old workflow reused. 72 | ```bash 73 | dpgen2 resubmit input.json WFID --reuse 0-41 74 | ``` 75 | The steps of workflow WDID 0-41 (0<=id<41, note that 41 is not included) will be reused in the new workflow. The indexes of the steps are printed by `dpgen2 showkey`. In the example, all the steps before the `iter-000001--run-fp-000000` will be used in the new workflow. 76 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | .[docs] 2 | -------------------------------------------------------------------------------- /docs/submit_args.rst: -------------------------------------------------------------------------------- 1 | .. 
_submitargs: 2 | 3 | Arguments of the submit script 4 | ============================== 5 | .. note:: 6 | One can load, modify, and export the input file by using our effective web-based tool `DP-GUI `_ online or hosted using the :ref:`command line interface ` :code:`dpgen2 gui`. All parameters below can be set in DP-GUI. By clicking "SAVE JSON", one can download the input file. 7 | 8 | .. dargs:: 9 | :module: dpgen2.entrypoint.args 10 | :func: submit_args 11 | 12 | 13 | .. _task_group_sec: 14 | 15 | Task group definition 16 | --------------------- 17 | 18 | LAMMPS task group 19 | ^^^^^^^^^^^^^^^^^ 20 | 21 | .. dargs:: 22 | :module: dpgen2.exploration.task 23 | :func: lmp_task_group_args 24 | 25 | CALYPSO task group 26 | ^^^^^^^^^^^^^^^^^^ 27 | 28 | .. dargs:: 29 | :module: dpgen2.exploration.task 30 | :func: caly_task_group_args 31 | -------------------------------------------------------------------------------- /dpgen2/__about__.py: -------------------------------------------------------------------------------- 1 | __version__ = "unknown" 2 | -------------------------------------------------------------------------------- /dpgen2/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | from ._version import version as __version__ 3 | except ImportError: 4 | from .__about__ import ( 5 | __version__, 6 | ) 7 | -------------------------------------------------------------------------------- /dpgen2/__main__.py: -------------------------------------------------------------------------------- 1 | from .entrypoint.main import ( 2 | main, 3 | ) 4 | 5 | if __name__ == "__main__": 6 | main() 7 | -------------------------------------------------------------------------------- /dpgen2/conf/__init__.py: -------------------------------------------------------------------------------- 1 | from .alloy_conf import ( 2 | AlloyConfGenerator, 3 | ) 4 | from .conf_generator import ( 5 | ConfGenerator, 6 | ) 7 | from .file_conf 
import ( 8 | FileConfGenerator, 9 | ) 10 | 11 | conf_styles = { 12 | "alloy": AlloyConfGenerator, 13 | "file": FileConfGenerator, 14 | } 15 | -------------------------------------------------------------------------------- /dpgen2/conf/conf_generator.py: -------------------------------------------------------------------------------- 1 | import tempfile 2 | from abc import ( 3 | ABC, 4 | abstractmethod, 5 | ) 6 | from pathlib import ( 7 | Path, 8 | ) 9 | from typing import ( 10 | Dict, 11 | List, 12 | ) 13 | 14 | import dargs 15 | import dpdata 16 | 17 | 18 | class ConfGenerator(ABC): 19 | @abstractmethod 20 | def generate( 21 | self, 22 | type_map, 23 | ) -> dpdata.MultiSystems: 24 | r"""Method of generating configurations. 25 | 26 | Parameters 27 | ---------- 28 | type_map : List[str] 29 | The type map. 30 | 31 | Returns 32 | ------- 33 | confs: dpdata.MultiSystems 34 | The returned configurations in `dpdata.MultiSystems` format 35 | 36 | """ 37 | pass 38 | 39 | def get_file_content( 40 | self, 41 | type_map, 42 | fmt="lammps/lmp", 43 | ) -> List[str]: 44 | r"""Get the file content of configurations 45 | 46 | Parameters 47 | ---------- 48 | type_map : List[str] 49 | The type map. 50 | 51 | Returns 52 | ------- 53 | conf_list: List[str] 54 | A list of file content of configurations. 55 | 56 | """ 57 | ret = [] 58 | ms = self.generate(type_map) 59 | for ii in range(len(ms)): 60 | ss = ms[ii] 61 | for jj in range(ss.get_nframes()): 62 | with tempfile.NamedTemporaryFile() as ft: 63 | tf = Path(ft.name) 64 | ss[jj].to(fmt, tf) 65 | ret.append(tf.read_text()) 66 | return ret 67 | 68 | @staticmethod 69 | @abstractmethod 70 | def args() -> List[dargs.Argument]: 71 | pass 72 | 73 | @classmethod 74 | def normalize_config( 75 | cls, 76 | data: Dict = {}, 77 | strict: bool = True, 78 | ) -> Dict: 79 | r"""Normalized the argument. 80 | 81 | Parameters 82 | ---------- 83 | data : Dict 84 | The input dict of arguments. 85 | strict : bool 86 | Strictly check the arguments. 
class FileConfGenerator(ConfGenerator):
    def __init__(
        self,
        files: Union[str, List[str]],
        fmt: str = "auto",
        prefix: Optional[str] = None,
        remove_pbc: Optional[bool] = False,
    ):
        r"""Configuration generator backed by user-provided file(s).

        Parameters
        ----------
        files : Union[str, List[str]]
            One path or a list of paths; glob wildcards are supported.
        fmt : str
            The dpdata format of the files.
        prefix : Optional[str]
            Optional prefix prepended to every path in ``files``.
        remove_pbc : Optional[bool]
            If True, remove the pbc of the loaded systems.
        """
        if not isinstance(files, list):
            assert isinstance(files, str)
            files = [files]
        if prefix is not None:
            pfiles = [Path(prefix) / Path(ii) for ii in files]
        else:
            pfiles = [Path(ii) for ii in files]
        self.files = []
        for ii in pfiles:
            # expand wildcards; sort for a deterministic file order
            ff = glob.glob(str(ii.absolute()))
            ff.sort()
            self.files += ff
        self.fmt = fmt
        self.remove_pbc = remove_pbc

    def generate(
        self,
        type_map,
    ) -> dpdata.MultiSystems:
        r"""Load the configurations as a ``dpdata.MultiSystems``."""
        if self.fmt in ["deepmd/npy/mixed"]:
            return self.generate_mixed(type_map)
        else:
            return self.generate_std(type_map)

    def generate_std(
        self,
        type_map,
    ) -> dpdata.MultiSystems:
        r"""Load standard (single-system) formats, one file per system."""
        ms = dpdata.MultiSystems(type_map=type_map)
        for ff in self.files:
            ss = dpdata.System(ff, fmt=self.fmt, type_map=type_map)
            if self.remove_pbc:
                ss.remove_pbc()
            ms.append(ss)
        return ms

    def generate_mixed(
        self,
        type_map,
    ) -> dpdata.MultiSystems:
        r"""Load a single deepmd/npy/mixed directory as a MultiSystems."""
        if len(self.files) > 1:
            # fixed typo in user-facing message: "invalide" -> "invalid."
            raise ValueError(
                'the file format "deepmd/npy/mixed" is specified, '
                "but more than one file is given, which is invalid. "
                "please provide one path that can be interpreted as "
                "the dpdata.MultiSystems. "
            )
        assert "deepmd/npy/mixed" == self.fmt
        ms = dpdata.MultiSystems(type_map=type_map)
        ms.from_deepmd_npy_mixed(self.files[0], fmt="deepmd/npy/mixed", labeled=False)  # type: ignore
        return ms

    @staticmethod
    def doc() -> str:
        # "alloys" was a copy-paste from the alloy generator; this class
        # loads arbitrary configurations. Grammar fixed as well.
        return "Generate configurations from user provided file(s). The file(s) are assumed to be loaded by `dpdata`."

    @staticmethod
    def args() -> List[Argument]:
        doc_files = "The paths to the configuration files. Wildcards are supported."
        doc_prefix = "The prefix of file paths."
        doc_fmt = "The format (dpdata accepted formats) of the files."
        doc_remove_pbc = "Remove the pbc of the data. Shift the coords to the center of box so it can be used with lammps."

        return [
            Argument("files", [str, list], optional=False, doc=doc_files),
            Argument("prefix", str, optional=True, default=None, doc=doc_prefix),
            Argument("fmt", str, optional=True, default="auto", doc=doc_fmt),
            Argument(
                "remove_pbc", bool, optional=True, default=False, doc=doc_remove_pbc
            ),
        ]
+ lmp_index_pattern 10 | lmp_conf_name = "conf.lmp" 11 | lmp_input_name = "in.lammps" 12 | plm_input_name = "input.plumed" 13 | plm_output_name = "output.plumed" 14 | lmp_traj_name = "traj.dump" 15 | lmp_pimd_traj_name = "traj.%s.dump" 16 | lmp_log_name = "log.lammps" 17 | lmp_model_devi_name = "model_devi.out" 18 | lmp_pimd_model_devi_name = "model_devi.%s.out" 19 | fp_index_pattern = "%06d" 20 | fp_task_pattern = "task." + fp_index_pattern 21 | fp_default_log_name = "fp.log" 22 | fp_default_out_data_name = "data" 23 | calypso_log_name = "caly.log" 24 | calypso_input_file = "input.dat" 25 | calypso_index_pattern = "%06d" 26 | calypso_task_pattern = "caly_task." + calypso_index_pattern 27 | calypso_opt_dir_name = "caly_pop." + calypso_index_pattern 28 | calypso_run_opt_file = "calypso_run_opt.py" 29 | calypso_check_opt_file = "calypso_check_opt.py" 30 | calypso_opt_log_name = "opt.log" 31 | calypso_traj_log_name = "traj.traj" 32 | 33 | default_image = "dptechnology/dpgen2:latest" 34 | default_host = "127.0.0.1:2746" 35 | -------------------------------------------------------------------------------- /dpgen2/entrypoint/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/dpgen2/entrypoint/__init__.py -------------------------------------------------------------------------------- /dpgen2/entrypoint/common.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import ( 3 | Path, 4 | ) 5 | from typing import ( 6 | Dict, 7 | List, 8 | Optional, 9 | Union, 10 | ) 11 | 12 | import dflow 13 | 14 | from dpgen2.utils import ( 15 | bohrium_config_from_dict, 16 | dump_object_to_file, 17 | load_object_from_file, 18 | matched_step_key, 19 | print_keys_in_nice_format, 20 | sort_slice_ops, 21 | workflow_config_from_dict, 22 | ) 23 | from dpgen2.utils.step_config import normalize as 
def global_config_workflow(
    wf_config,
):
    """Apply the global workflow configuration (dflow / s3 / bohrium)."""
    # dflow_config, dflow_s3_config
    workflow_config_from_dict(wf_config)

    if os.getenv("DFLOW_DEBUG"):
        dflow.config["mode"] = "debug"
        # NOTE(review): debug mode returns early, so the bohrium
        # configuration below is skipped -- presumably intentional; confirm.
        return None

    # bohrium configuration
    if wf_config.get("bohrium_config") is not None:
        bohrium_config_from_dict(wf_config["bohrium_config"])


def expand_sys_str(root_dir: Union[str, Path]) -> List[str]:
    """Recursively collect deepmd system dirs (those containing type.raw)."""
    root = Path(root_dir)
    found = [str(p) for p in root.rglob("*") if (p / "type.raw").is_file()]
    if (root / "type.raw").is_file():
        found.append(str(root))
    return found


def expand_idx(in_list) -> List[int]:
    """Expand ints and ``"a-b[:step]"`` strings into a sorted, deduplicated
    list of indices.  Items that are neither int nor str are ignored, and the
    end of a range is exclusive.
    """
    indices: List[int] = []
    for item in in_list:
        if isinstance(item, int):
            indices.append(item)
        elif isinstance(item, str):
            parts = item.split(":")
            step = int(parts[1]) if len(parts) > 1 else 1
            bounds = parts[0].split("-")
            if len(bounds) == 2:
                indices.extend(range(int(bounds[0]), int(bounds[1]), step))
            elif len(bounds) == 1:
                indices.append(int(bounds[0]))
            else:
                raise RuntimeError("not expected range string", parts[0])
    return sorted(set(indices))
def download_by_def(
    workflow_id,
    wf_config: Dict = {},
    iterations: Optional[List[int]] = None,
    step_defs: Optional[List[str]] = None,
    prefix: Optional[str] = None,
    chk_pnt: bool = False,
):
    """Download artifacts selected by iteration numbers / step definitions.

    NOTE(review): mutable default for wf_config -- safe only if
    normalize_args does not mutate its argument; confirm.
    """
    wf_config = normalize_args(wf_config)

    global_config_workflow(wf_config)

    workflow = Workflow(id=workflow_id)

    download_dpgen2_artifacts_by_def(workflow, iterations, step_defs, prefix, chk_pnt)


def download(
    workflow_id,
    wf_config: Optional[Dict] = {},
    wf_keys: Optional[List] = None,
    prefix: Optional[str] = None,
    chk_pnt: bool = False,
):
    """Download the artifacts of the given step keys (all steps if None)."""
    wf_config = normalize_args(wf_config)

    global_config_workflow(wf_config)

    workflow = Workflow(id=workflow_id)

    keys = wf_keys if wf_keys is not None else workflow.query_keys_of_steps()

    assert keys is not None
    for key in keys:
        download_dpgen2_artifacts(workflow, key, prefix=prefix, chk_pnt=chk_pnt)
        logging.info(f"step {key} downloaded")
def showkey(
    wf_id,
    wf_config,
):
    """Print the step keys of a workflow in a compact, folded format."""
    wf_config = normalize_args(wf_config)

    global_config_workflow(wf_config)

    workflow = Workflow(id=wf_id)
    folded_keys = get_resubmit_keys(workflow)
    # flatten the per-group key lists, preserving their order
    step_keys = [key for keys in folded_keys.values() for key in keys]
    formatted = print_keys_in_nice_format(
        step_keys,
        ["run-train", "run-lmp", "run-fp", "diffcsp-gen", "run-relax"],
    )
    print(formatted)
def status(
    workflow_id,
    wf_config: Optional[Dict] = {},
):
    """Print the convergence report of the last finished scheduler step.

    Parameters
    ----------
    workflow_id
        The dflow workflow ID.
    wf_config : Optional[Dict]
        The global workflow configuration, normalized via ``normalize_args``.
        NOTE(review): mutable default argument -- safe only if
        normalize_args does not mutate its input; confirm.
    """
    wf_config = normalize_args(wf_config)

    global_config_workflow(wf_config)

    wf = Workflow(id=workflow_id)

    wf_keys = wf.query_keys_of_steps()

    scheduler = get_last_scheduler(wf, wf_keys)

    if scheduler is not None:
        ptr_str = scheduler.print_convergence()
        print(ptr_str)
    else:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning("no scheduler is finished")
def watch(
    workflow_id,
    wf_config: Optional[Dict] = {},
    watching_keys: Optional[List] = default_watching_keys,
    frequency: float = 600.0,
    download: bool = False,
    prefix: Optional[str] = None,
    chk_pnt: bool = False,
):
    """Poll a workflow every ``frequency`` seconds, logging (and optionally
    downloading) steps as they finish, until it leaves the active states."""
    wf_config = normalize_args(wf_config)

    global_config_workflow(wf_config)

    wf = Workflow(id=workflow_id)

    finished_keys = None

    while wf.query_status() in ["Pending", "Running", "Failed", "Error"]:
        finished_keys = update_finished_steps(
            wf,
            finished_keys,
            download=download,
            watching_keys=watching_keys,
            prefix=prefix,
            chk_pnt=chk_pnt,
        )
        # failed/error workflows are reported below rather than polled forever
        if wf.query_status() in ["Failed", "Error"]:
            break
        time.sleep(frequency)

    final_status = wf.query_status()
    if final_status == "Succeeded":
        # one last sweep to pick up steps that finished after the last poll
        finished_keys = update_finished_steps(
            wf,
            finished_keys,
            download=download,
            watching_keys=watching_keys,
            prefix=prefix,
            chk_pnt=chk_pnt,
        )
        logging.info("well done")
    elif final_status in ["Failed", "Error"]:
        logging.error("failed or error workflow")
def add_subparser_workflow_subcommand(subparsers, command: str):
    """Register one workflow-manipulation subcommand taking CONFIG and ID."""
    cmd_parser = subparsers.add_parser(
        command,
        help=f"{command.capitalize()} a DPGEN2 workflow.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    cmd_parser.add_argument("CONFIG", help="the config file in json format.")
    cmd_parser.add_argument("ID", help="the ID of the workflow.")


def execute_workflow_subcommand(
    command: str,
    wfid: str,
    wf_config: Optional[dict] = {},
):
    """Look up the workflow by ID and invoke the method named ``command``."""
    normalized = normalize_args(wf_config)
    global_config_workflow(normalized)
    workflow = Workflow(id=wfid)
    getattr(workflow, command)()
super().__init__() 25 | self.ntraj = 0 26 | 27 | def _check_name(self, name: str): 28 | assert name in ( 29 | DeviManager.MAX_DEVI_V, 30 | DeviManager.MIN_DEVI_V, 31 | DeviManager.AVG_DEVI_V, 32 | DeviManager.MAX_DEVI_F, 33 | DeviManager.MIN_DEVI_F, 34 | DeviManager.AVG_DEVI_F, 35 | ), f"Error: unknown deviation name {name}" 36 | 37 | def add(self, name: str, deviation: np.ndarray) -> None: 38 | r"""Add a model deviation into this manager. 39 | 40 | Parameters 41 | ---------- 42 | name : str 43 | The name of the deviation. The name is restricted to 44 | (DeviManager.MAX_DEVI_V, DeviManager.MIN_DEVI_V, 45 | DeviManager.AVG_DEVI_V, DeviManager.MAX_DEVI_F, 46 | DeviManager.MIN_DEVI_F, DeviManager.AVG_DEVI_F) 47 | deviation : np.ndarray 48 | The model deviation is a one-dimensional array extracted 49 | from a trajectory file. 50 | """ 51 | self._check_name(name) 52 | return self._add(name, deviation) 53 | 54 | @abstractmethod 55 | def _add(self, name: str, deviation: np.ndarray) -> None: 56 | pass 57 | 58 | def get(self, name: str) -> List[Optional[np.ndarray]]: 59 | r"""Gat a model deviation from this manager. 60 | 61 | Parameters 62 | ---------- 63 | name : str 64 | The name of the deviation. 
class DeviManagerStd(DeviManager):
    r"""Standard model-deviation manager.

    Every deviation kind (e.g. max_devi_f, max_devi_v in file
    `model_devi.out`) is kept as a List[Optional[np.ndarray]] of
    one-dimensional arrays: data[name][ii][jj] is the deviation of the
    jj-th frame of the ii-th trajectory.  A kind that was never recorded
    is reported as a List[None] whose length is the number of trajectory
    files.

    """

    def __init__(self):
        super().__init__()
        self._data = defaultdict(list)

    def _add(self, name: str, deviation: np.ndarray) -> None:
        # stored deviations must be 1-d numpy arrays
        assert isinstance(
            deviation, np.ndarray
        ), f"Error: deviation(type: {type(deviation)}) is not a np.ndarray"
        assert len(deviation.shape) == 1, (
            f"Error: deviation(shape: {deviation.shape}) is not a "
            + f"one-dimensional array"
        )

        self._data[name].append(deviation)
        # ntraj tracks the longest per-kind list seen so far
        self.ntraj = max(self.ntraj, len(self._data[name]))

    def _get(self, name: str) -> List[Optional[np.ndarray]]:
        # no trajectories at all -> empty; missing kind -> list of None
        if self.ntraj == 0:
            return []
        if len(self._data[name]) == 0:
            return [None] * self.ntraj
        return self._data[name]

    def clear(self) -> None:
        # re-running __init__ resets ntraj (base class) and drops all data
        self.__init__()
        return None

    def _check_data(self) -> None:
        r"""Assert that the stored deviations are mutually consistent."""
        model_devi_names = (
            DeviManager.MAX_DEVI_V,
            DeviManager.MIN_DEVI_V,
            DeviManager.AVG_DEVI_V,
            DeviManager.MAX_DEVI_F,
            DeviManager.MIN_DEVI_F,
            DeviManager.AVG_DEVI_F,
        )
        # every recorded kind must have one array per trajectory
        frames = {}
        for name in model_devi_names:
            if len(self._data[name]) > 0:
                assert len(self._data[name]) == self.ntraj, (
                    f"Error: the number of model deviation {name} "
                    + f"({len(self._data[name])}) and trajectory files ({self.ntraj}) "
                    + f"are not equal."
                )
                for idx, ndarray in enumerate(self._data[name]):
                    assert isinstance(ndarray, np.ndarray), (
                        f"Error: model deviation in {name} is not ndarray, "
                        + f"index: {idx}, type: {type(ndarray)}"
                    )

                frames[name] = [arr.shape[0] for arr in self._data[name]]
                # NOTE(review): self._data[name] is non-empty here, so this
                # pop branch can never fire; kept for parity with original.
                if len(frames[name]) == 0:
                    frames.pop(name)

        # "max_devi_f" must always be present
        assert (
            len(self._data[DeviManager.MAX_DEVI_F]) == self.ntraj
        ), f"Error: cannot find model deviation {DeviManager.MAX_DEVI_F}"

        # all recorded kinds must agree on per-trajectory frame counts
        non_empty_deviations = list(frames.keys())
        for name in non_empty_deviations[1:]:
            assert frames[name] == frames[non_empty_deviations[0]], (
                f"Error: the number of frames in {name} is different "
                + f"with that in {non_empty_deviations[0]}.\n"
                + f"{name}: {frames[name]}\n"
                + f"{non_empty_deviations[0]}: {frames[non_empty_deviations[0]]}\n"
            )
dpgen2.exploration.selector import ( 28 | ConfFilters, 29 | ) 30 | 31 | 32 | class TrajRender(ABC): 33 | @abstractmethod 34 | def get_model_devi( 35 | self, 36 | files: Union[List[Path], List[HDF5Dataset]], 37 | ) -> DeviManager: 38 | r"""Get model deviations from recording files. 39 | 40 | Parameters 41 | ---------- 42 | files : List[Path] 43 | The paths to the model deviation recording files 44 | 45 | Returns 46 | ------- 47 | DeviManager: The class which is responsible for model deviation management. 48 | """ 49 | pass 50 | 51 | @abstractmethod 52 | def get_confs( 53 | self, 54 | traj: Union[List[Path], List[HDF5Dataset]], 55 | id_selected: List[List[int]], 56 | type_map: Optional[List[str]] = None, 57 | conf_filters: Optional["ConfFilters"] = None, 58 | optional_outputs: Optional[List[Path]] = None, 59 | ) -> dpdata.MultiSystems: 60 | r"""Get configurations from trajectory by selection. 61 | 62 | Parameters 63 | ---------- 64 | traj : List[Path] 65 | Trajectory files 66 | id_selected : List[List[int]] 67 | The selected frames. id_selected[ii][jj] is the jj-th selected frame 68 | from the ii-th trajectory. id_selected[ii] may be an empty list. 69 | type_map : List[str] 70 | The type map. 
class ExplorationReport(ABC):
    r"""Interface of a report summarizing one exploration iteration."""

    @abstractmethod
    def clear(self):
        r"""Reset the report to an empty state."""
        pass

    @abstractmethod
    def record(
        self,
        model_devi: DeviManager,
    ):
        r"""Record the model deviations of the trajectories.

        Parameters
        ----------
        model_devi : DeviManager
            The model deviation manager.  Each deviation is stored as a
            List[Optional[np.ndarray]] of one-dimensional arrays, where
            element [ii][jj] is the deviation of frame jj of trajectory ii;
            a missing deviation is a List[None] with one entry per
            trajectory file.
        """
        pass

    @abstractmethod
    def converged(
        self,
        reports,
    ) -> bool:
        r"""Check if the exploration is converged.

        Parameters
        ----------
        reports
            Historical reports.

        Returns
        -------
        converged : bool
            Whether the exploration is converged.
        """
        pass

    @abstractmethod
    def no_candidate(self) -> bool:
        r"""Return True if no candidate configuration was found."""
        pass

    @abstractmethod
    def get_candidate_ids(
        self,
        max_nframes: Optional[int] = None,
    ) -> List[List[int]]:
        r"""Get indexes of candidate configurations.

        Parameters
        ----------
        max_nframes
            The maximal number of candidate frames.

        Returns
        -------
        idx : List[List[int]]
            idx[ii][jj] is the frame index of the jj-th candidate of the
            ii-th trajectory.
        """
        pass

    @abstractmethod
    def print_header(self) -> str:
        r"""Render the header line of the report."""
        pass

    @abstractmethod
    def print(
        self,
        stage_idx: int,
        idx_in_stage: int,
        iter_idx: int,
    ) -> str:
        r"""Render the report for the given stage/iteration."""
        pass
class ExplorationReportTrustLevelsRandom(ExplorationReportTrustLevels):
    """Fixed-trust-level exploration report that randomly samples candidates.

    Frames with model deviation between the lower and higher trust levels
    are candidates; when there are more candidates than requested, a random
    subset is drawn.
    """

    def converged(
        self,
        reports: Optional[List[ExplorationReport]] = None,
    ) -> bool:
        r"""Check if the exploration is converged.

        Parameters
        ----------
        reports
            Historical reports (not used by this convergence criterion).

        Returns
        -------
        converged bool
            If the exploration is converged.
        """
        accurate_ratio = self.accurate_ratio()
        assert isinstance(accurate_ratio, float)
        # converged once the fraction of accurate frames reaches the threshold
        return accurate_ratio >= self.conv_accuracy

    def get_candidate_ids(
        self,
        max_nframes: Optional[int] = None,
        clear: bool = True,
    ) -> List[List[int]]:
        r"""Return candidate frame indices grouped by trajectory.

        Parameters
        ----------
        max_nframes
            The maximal number of frames of candidates.
        clear
            If True, clear the report after picking; this method is then
            intended to be called only once.

        Returns
        -------
        idx: List[List[int]]
            idx[ii] lists the candidate frame indices of the ii-th trajectory.
        """
        ntraj = len(self.traj_nframes)
        id_cand = self._get_candidates(max_nframes)
        id_cand_list = [[] for ii in range(ntraj)]
        for ii in id_cand:
            id_cand_list[ii[0]].append(ii[1])
        # free the memory, this method should only be called once
        if clear:
            self.clear()
        return id_cand_list

    def _get_candidates(
        self,
        max_nframes: Optional[int] = None,
    ) -> List[Tuple[int, int]]:
        """
        Get candidates. If number of candidates is larger than `max_nframes`,
        then randomly pick `max_nframes` frames from the candidates.

        Parameters
        ----------
        max_nframes
            The maximal number of frames of candidates.

        Returns
        -------
        cand_frames List[Tuple[int,int]]
            Candidate frames. A list of tuples: [(traj_idx, frame_idx), ...]
        """
        self.traj_cand_picked = []
        for tidx, tt in enumerate(self.traj_cand):
            for ff in tt:
                self.traj_cand_picked.append((tidx, ff))
        if max_nframes is not None and max_nframes < len(self.traj_cand_picked):
            # random selection, then sort to restore (traj, frame) order
            random.shuffle(self.traj_cand_picked)
            ret = sorted(self.traj_cand_picked[:max_nframes])
        else:
            ret = self.traj_cand_picked
        return ret

    @staticmethod
    def doc() -> str:
        """Return the user-facing documentation string of this style."""

        def make_class_doc_link(key):
            from dpgen2.entrypoint.args import (
                make_link,
            )

            return make_link(key, f"explore[lmp]/convergence[fixed-levels]/{key}")

        level_f_hi_link = make_class_doc_link("level_f_hi")
        level_v_hi_link = make_class_doc_link("level_v_hi")
        level_f_lo_link = make_class_doc_link("level_f_lo")
        level_v_lo_link = make_class_doc_link("level_v_lo")
        conv_accuracy_link = make_class_doc_link("conv_accuracy")
        # grammar fix: "higher then" -> "higher than"
        return f"The configurations with force model deviation between {level_f_lo_link}, {level_f_hi_link} or virial model deviation between {level_v_lo_link} and {level_v_hi_link} are treated as candidates (The virial model deviation check is optional). The configurations will be randomly sampled from candidates for FP calculations. If the ratio of accurate (below {level_f_lo_link} and {level_v_lo_link}) is higher than {conv_accuracy_link}, the stage is treated as converged."
class ConvergenceCheckStageScheduler(StageScheduler):
    """Stage scheduler that stops when the exploration report converges.

    Parameters
    ----------
    stage : ExplorationStage
        The exploration stage scheduled by this object.
    selector : ConfSelector
        The configuration selector used for each iteration.
    max_numb_iter : Optional[int]
        Maximal number of iterations. None means unlimited.
    fatal_at_max : bool
        If True, raise FatalError when max_numb_iter is reached;
        otherwise the stage is marked complete instead.
    """

    def __init__(
        self,
        stage: ExplorationStage,
        selector: ConfSelector,
        max_numb_iter: Optional[int] = None,
        fatal_at_max: bool = True,
    ):
        self.stage = stage
        self.selector = selector
        self.max_numb_iter = max_numb_iter
        self.fatal_at_max = fatal_at_max
        self.nxt_iter = 0
        self.conv = False
        self.reached_max_iter = False
        self.complete_ = False
        self.reports = []

    def get_reports(self):
        """Return all recorded exploration reports."""
        return self.reports

    def complete(self):
        """Tell if the stage is complete."""
        return self.complete_

    def force_complete(self):
        """Force the stage to be complete."""
        self.complete_ = True

    def next_iteration(self):
        """Return the index of the next iteration."""
        return self.nxt_iter

    def converged(self):
        """Tell if the stage has converged."""
        return self.conv

    def reached_max_iteration(self):
        """Tell if the maximal number of iterations was reached."""
        return self.reached_max_iter

    def plan_next_iteration(
        self,
        report: Optional[ExplorationReport] = None,
        trajs: Optional[Union[List[Path], List[HDF5Dataset]]] = None,
    ) -> Tuple[bool, Optional[BaseExplorationTaskGroup], Optional[ConfSelector]]:
        """Plan the next iteration of the stage.

        Parameters
        ----------
        report
            The exploration report of the current iteration; None on the
            first call (no iteration has run yet).
        trajs
            Trajectories of the current iteration (not used here).

        Returns
        -------
        stg_complete : bool
            Whether the stage is complete.
        lmp_task_grp : Optional[BaseExplorationTaskGroup]
            Task group for the next iteration; None when complete.
        ret_selector : Optional[ConfSelector]
            Selector for the next iteration; None when complete.

        Raises
        ------
        FatalError
            If the stage already completed, if no candidate was selected
            while unconverged, or if fatal_at_max is set and max_numb_iter
            is reached.
        """
        if self.complete():
            raise FatalError("Cannot plan because the stage has completed.")
        if report is None:
            # first iteration: nothing to check yet, always make tasks
            stg_complete = False
            self.conv = stg_complete
            lmp_task_grp = self.stage.make_task()
            ret_selector = self.selector
        else:
            stg_complete = report.converged(self.reports)
            self.conv = stg_complete
            if not stg_complete:
                # check if we have any candidate to improve the quality of the model
                if report.no_candidate():
                    # message grammar fixed ("not converted", "does not
                    # selected", "iteraction" in the original)
                    raise FatalError(
                        "The iteration is not converged, but no candidate "
                        "configuration was selected. "
                        "This means the quality of the model would not be "
                        "improved and the iteration would not end. "
                        "Please try to increase the higher trust levels. "
                    )
                # if not stg_complete, check max iter
                if (
                    self.max_numb_iter is not None
                    and self.nxt_iter == self.max_numb_iter
                ):
                    self.reached_max_iter = True
                    if self.fatal_at_max:
                        raise FatalError("reached maximal number of iterations")
                    else:
                        stg_complete = True
            # make lmp tasks
            if stg_complete:
                # if stg_complete, no more lmp task
                lmp_task_grp = None
                ret_selector = None
            else:
                lmp_task_grp = self.stage.make_task()
                ret_selector = self.selector
            self.reports.append(report)
        self.nxt_iter += 1
        self.complete_ = stg_complete
        return stg_complete, lmp_task_grp, ret_selector
33 | """ 34 | 35 | @abstractmethod 36 | def converged(self) -> bool: 37 | """ 38 | Tell if the stage is converged 39 | 40 | Returns 41 | ------- 42 | converged bool 43 | the convergence 44 | """ 45 | pass 46 | 47 | @abstractmethod 48 | def complete(self) -> bool: 49 | """ 50 | Tell if the stage is complete 51 | 52 | Returns 53 | ------- 54 | converged bool 55 | if the stage is complete 56 | """ 57 | pass 58 | 59 | @abstractmethod 60 | def force_complete(self): 61 | """ 62 | For complete the stage 63 | 64 | """ 65 | pass 66 | 67 | @abstractmethod 68 | def next_iteration(self) -> int: 69 | """ 70 | Return the index of the next iteration 71 | 72 | Returns 73 | ------- 74 | index int 75 | the index of the next iteration 76 | """ 77 | pass 78 | 79 | @abstractmethod 80 | def get_reports(self) -> List[ExplorationReport]: 81 | """ 82 | Return all exploration reports 83 | 84 | Returns 85 | ------- 86 | reports List[ExplorationReport] 87 | the reports 88 | """ 89 | pass 90 | 91 | @abstractmethod 92 | def plan_next_iteration( 93 | self, 94 | report: ExplorationReport, 95 | trajs: Union[List[Path], List[HDF5Dataset]], 96 | ) -> Tuple[bool, ExplorationTaskGroup, ConfSelector]: 97 | """ 98 | Make the plan for the next iteration of the stage. 99 | 100 | It checks the report of the current and all historical iterations of the stage, and tells if the iterations are converged. If not converged, it will plan the next ieration for the stage. 101 | 102 | Parameters 103 | ---------- 104 | report : ExplorationReport 105 | The exploration report of this iteration. 106 | trajs : Union[List[Path], List[HDF5Dataset]] 107 | A list of configurations generated during the exploration. May be used to generate new configurations for the next iteration. 108 | 109 | Returns 110 | ------- 111 | stg_complete: bool 112 | If the stage completed. Two cases may happen: 113 | 1. converged. 114 | 2. when not fatal_at_max, not converged but reached max number of iterations. 
115 | task: ExplorationTaskGroup 116 | A `ExplorationTaskGroup` defining the exploration of the next iteration. Should be `None` if the stage is converged. 117 | conf_selector: ConfSelector 118 | The configuration selector for the next iteration. Should be `None` if the stage is converged. 119 | 120 | """ 121 | pass 122 | -------------------------------------------------------------------------------- /dpgen2/exploration/selector/__init__.py: -------------------------------------------------------------------------------- 1 | from .conf_filter import ( 2 | ConfFilter, 3 | ConfFilters, 4 | ) 5 | from .conf_selector import ( 6 | ConfSelector, 7 | ) 8 | from .conf_selector_frame import ( 9 | ConfSelectorFrames, 10 | ) 11 | from .distance_conf_filter import ( 12 | BoxLengthFilter, 13 | BoxSkewnessConfFilter, 14 | DistanceConfFilter, 15 | ) 16 | 17 | conf_filter_styles = { 18 | "distance": DistanceConfFilter, 19 | "box_skewness": BoxSkewnessConfFilter, 20 | "box_length": BoxLengthFilter, 21 | } 22 | -------------------------------------------------------------------------------- /dpgen2/exploration/selector/conf_filter.py: -------------------------------------------------------------------------------- 1 | from __future__ import ( 2 | annotations, 3 | ) 4 | 5 | from abc import ( 6 | ABC, 7 | abstractmethod, 8 | ) 9 | from typing import ( 10 | List, 11 | ) 12 | 13 | import dpdata 14 | import numpy as np 15 | 16 | 17 | class ConfFilter(ABC): 18 | @abstractmethod 19 | def check( 20 | self, 21 | frame: dpdata.System, 22 | ) -> bool: 23 | """Check if the configuration is valid. 24 | 25 | Parameters 26 | ---------- 27 | frame : dpdata.System 28 | A dpdata.System containing a single frame 29 | 30 | Returns 31 | ------- 32 | valid : bool 33 | `True` if the configuration is a valid configuration, else `False`. 
class ConfFilters:
    """A composable collection of `ConfFilter` objects.

    Filters are applied in insertion order; a frame is kept only if every
    filter accepts it.
    """

    def __init__(
        self,
    ):
        # filters are applied in the order they were added
        self._filters = []

    def add(
        self,
        conf_filter: ConfFilter,
    ) -> ConfFilters:
        """Append a filter and return self to allow chaining."""
        self._filters.append(conf_filter)
        return self

    def check(
        self,
        ms: dpdata.MultiSystems,
    ) -> dpdata.MultiSystems:
        """Return a new MultiSystems containing only frames accepted by all filters."""
        # start from every (system, frame) pair in ms
        candidates = [
            (sys_idx, frm_idx)
            for sys_idx in range(len(ms))
            for frm_idx in range(ms[sys_idx].get_nframes())
        ]
        # each filter prunes the surviving candidates
        for flt in self._filters:
            verdicts = flt.batched_check([ms[i][j] for i, j in candidates])
            candidates = [pair for pair, ok in zip(candidates, verdicts) if ok]
        # group the surviving frame indices by their system
        per_system = [[] for _ in range(len(ms))]
        for sys_idx, frm_idx in candidates:
            per_system[sys_idx].append(frm_idx)
        filtered = dpdata.MultiSystems(type_map=ms.atom_names)
        for sys_idx, frames in enumerate(per_system):
            if len(frames) > 0:
                filtered.append(ms[sys_idx].sub_system(frames))
        return filtered
class ConfSelector(ABC):
    """Select configurations from trajectory and model deviation files."""

    @abstractmethod
    def select(
        self,
        trajs: Union[List[Path], List[HDF5Dataset]],
        model_devis: Union[List[Path], List[HDF5Dataset]],
        type_map: Optional[List[str]] = None,
        optional_outputs: Optional[List[Path]] = None,
    ) -> Tuple[List[Path], ExplorationReport]:
        """Select configurations.

        Parameters
        ----------
        trajs
            Trajectory files produced by the exploration.
        model_devis
            Model deviation files matching `trajs`.
        type_map
            The `type_map` of the systems.
        optional_outputs
            Optional extra outputs of the exploration.

        Returns
        -------
        confs : List[Path]
            Paths to the selected configurations.
        report : ExplorationReport
            The report describing the exploration status.
        """
        pass
class ConfSelectorFrames(ConfSelector):
    """Select frames from trajectories as confs.

    Parameters
    ----------
    traj_render : TrajRender
        The trajectory render that parses trajectories and model deviations.
    report : ExplorationReport
        The report used to record model deviations and pick candidate frames.
    max_numb_sel : Optional[int]
        The maximal number of selected frames.
    conf_filters : Optional[ConfFilters]
        The configuration filters applied to the selected frames.

    """

    def __init__(
        self,
        traj_render: TrajRender,
        report: ExplorationReport,
        max_numb_sel: Optional[int] = None,
        conf_filters: Optional[ConfFilters] = None,
    ):
        self.max_numb_sel = max_numb_sel
        self.conf_filters = conf_filters
        self.traj_render = traj_render
        self.report = report

    def select(
        self,
        trajs: Union[List[Path], List[HDF5Dataset]],
        model_devis: Union[List[Path], List[HDF5Dataset]],
        type_map: Optional[List[str]] = None,
        optional_outputs: Optional[List[Path]] = None,
    ) -> Tuple[List[Path], ExplorationReport]:
        """Select configurations

        Parameters
        ----------
        trajs : List[Path]
            A `list` of `Path` to trajectory files generated by LAMMPS
        model_devis : List[Path]
            A `list` of `Path` to model deviation files generated by LAMMPS.
            Format: each line has 7 numbers they are used as
            # frame_id md_v_max md_v_min md_v_mean md_f_max md_f_min md_f_mean
            where `md` stands for model deviation, v for virial and f for force
        type_map : List[str]
            The `type_map` of the systems
        optional_outputs : List[Path]
            Optional outputs of the exploration

        Returns
        -------
        confs : List[Path]
            The selected configurations, stored in a folder in deepmd/npy
            format, can be parsed as dpdata.MultiSystems. The `list` only
            has one item.
        report : ExplorationReport
            The exploration report recording the status of the exploration.

        """
        ntraj = len(trajs)
        assert ntraj == len(model_devis)

        md_model_devi = self.traj_render.get_model_devi(model_devis)

        # record deviations, then pick candidate frame indices per trajectory
        self.report.clear()
        self.report.record(md_model_devi)
        id_cand_list = self.report.get_candidate_ids(self.max_numb_sel)

        ms = self.traj_render.get_confs(
            trajs,
            id_cand_list,
            type_map,
            self.conf_filters,
            optional_outputs,
        )

        out_path = Path("confs")
        out_path.mkdir(exist_ok=True)
        ms.to_deepmd_npy(out_path)  # type: ignore

        # deep-copy the report so later clear()/record() calls on this
        # selector do not mutate the returned snapshot
        return [out_path], copy.deepcopy(self.report)
class ConfSamplingTaskGroup(ExplorationTaskGroup):
    """Task group that draws configurations from a user-provided conf list."""

    def __init__(
        self,
    ):
        super().__init__()
        # set_conf() must be called before confs can be sampled
        self.conf_set = False

    def set_conf(
        self,
        conf_list: List[str],
        n_sample: Optional[int] = None,
        random_sample: bool = False,
    ):
        """
        Set the configurations of exploration

        Parameters
        ----------
        conf_list : List[str]
            A list of file contents
        n_sample : Optional[int]
            Number of samples drawn from the conf list each time
            `make_task` is called. If set to `None`,
            `n_sample` is set to length of the conf_list.
        random_sample : bool
            If true the confs are randomly sampled, otherwise are
            consecutively sampled from the conf_list
        """
        self.conf_list = conf_list
        self.n_sample = len(conf_list) if n_sample is None else n_sample
        self.random_sample = random_sample
        # the queue is refilled from conf_list whenever it runs empty
        self.conf_queue = []
        self.conf_set = True

    def _sample_confs(
        self,
    ) -> list:
        """Draw `n_sample` confs, refilling (and optionally shuffling) the queue."""
        picked = []
        for _ in range(self.n_sample):
            if not self.conf_queue:
                refill = self.conf_list.copy()
                if self.random_sample:
                    random.shuffle(refill)
                self.conf_queue += refill
            picked.append(self.conf_queue.pop(0))
        return picked
35 | """ 36 | # clear all existing tasks 37 | self.clear() 38 | self.add_task(self._make_diffcsp_task()) 39 | return self 40 | 41 | def _make_diffcsp_task(self) -> ExplorationTask: 42 | task = ExplorationTask() 43 | task.trj_freq = self.trj_freq # type: ignore 44 | task.fmax = self.fmax # type: ignore 45 | task.steps = self.steps # type: ignore 46 | task.timeout = self.timeout # type: ignore 47 | return task 48 | -------------------------------------------------------------------------------- /dpgen2/exploration/task/lmp/__init__.py: -------------------------------------------------------------------------------- 1 | from .lmp_input import ( 2 | make_lmp_input, 3 | ) 4 | -------------------------------------------------------------------------------- /dpgen2/exploration/task/stage.py: -------------------------------------------------------------------------------- 1 | from abc import ( 2 | ABC, 3 | abstractmethod, 4 | ) 5 | from typing import ( 6 | List, 7 | ) 8 | 9 | from dpgen2.constants import ( 10 | lmp_conf_name, 11 | lmp_input_name, 12 | model_name_pattern, 13 | ) 14 | 15 | from .task import ( 16 | ExplorationTask, 17 | ) 18 | from .task_group import ( 19 | BaseExplorationTaskGroup, 20 | ExplorationTaskGroup, 21 | ) 22 | 23 | 24 | class ExplorationStage: 25 | """ 26 | The exploration stage. 27 | 28 | """ 29 | 30 | def __init__(self): 31 | self.clear() 32 | 33 | def clear(self): 34 | """ 35 | Clear all exploration group. 36 | 37 | """ 38 | self.explor_groups = [] 39 | 40 | def add_task_group( 41 | self, 42 | grp: ExplorationTaskGroup, 43 | ): 44 | """ 45 | Add an exploration group 46 | 47 | Parameters 48 | ---------- 49 | grp : ExplorationTaskGroup 50 | The added exploration task group 51 | 52 | """ 53 | self.explor_groups.append(grp) 54 | return self 55 | 56 | def make_task( 57 | self, 58 | ) -> BaseExplorationTaskGroup: 59 | """ 60 | Make the LAMMPS task group. 
class ExplorationTask:
    """Define the files needed by an exploration task.

    Examples
    --------
    >>> # this example dumps all files needed by the task.
    >>> files = exploration_task.files()
    ... for file_name, file_content in files.items():
    ...     with open(file_name, 'w') as fp:
    ...         fp.write(file_content)

    """

    def __init__(
        self,
    ):
        # mapping: file name -> file content
        self._files = {}

    def add_file(
        self,
        fname: str,
        fcont: str,
    ):
        """Register a file for this task; returns self for chaining.

        Parameters
        ----------
        fname : str
            The name of the file
        fcont : str
            The content of the file.

        """
        self._files[fname] = fcont
        return self

    def files(self) -> Dict:
        """Return the dict mapping each file name to its content."""
        return self._files
55 | """ 56 | return self._files 57 | -------------------------------------------------------------------------------- /dpgen2/exploration/task/task_group.py: -------------------------------------------------------------------------------- 1 | from abc import ( 2 | ABC, 3 | abstractmethod, 4 | ) 5 | from collections.abc import ( 6 | Sequence, 7 | ) 8 | from typing import ( 9 | Dict, 10 | List, 11 | Tuple, 12 | ) 13 | 14 | from .task import ( 15 | ExplorationTask, 16 | ) 17 | 18 | 19 | class BaseExplorationTaskGroup(Sequence): 20 | """A group of exploration tasks. Implemented as a `list` of `ExplorationTask`.""" 21 | 22 | def __init__(self): 23 | super().__init__() 24 | self.clear() 25 | 26 | def __getitem__(self, ii: int) -> ExplorationTask: 27 | """Get the `ii`th task""" 28 | return self.task_list[ii] 29 | 30 | def __len__(self) -> int: 31 | """Get the number of tasks in the group""" 32 | return len(self.task_list) 33 | 34 | def clear(self) -> None: 35 | self._task_list = [] 36 | 37 | @property 38 | def task_list(self) -> List[ExplorationTask]: 39 | """Get the `list` of `ExplorationTask`""" 40 | return self._task_list 41 | 42 | def add_task(self, task: ExplorationTask): 43 | """Add one task to the group.""" 44 | self.task_list.append(task) 45 | return self 46 | 47 | def add_group( 48 | self, 49 | group: "ExplorationTaskGroup", 50 | ): 51 | """Add another group to the group.""" 52 | # see https://www.python.org/dev/peps/pep-0484/#forward-references for forward references 53 | self._task_list = self._task_list + group._task_list 54 | return self 55 | 56 | def __add__( 57 | self, 58 | group: "ExplorationTaskGroup", 59 | ): 60 | """Add another group to the group.""" 61 | return self.add_group(group) 62 | 63 | 64 | class ExplorationTaskGroup(ABC, BaseExplorationTaskGroup): 65 | def __init__(self): 66 | super().__init__() 67 | 68 | @abstractmethod 69 | def make_task(self) -> "ExplorationTaskGroup": 70 | """Make the task group.""" 71 | pass 72 | 73 | 74 | class 
class FooTask(ExplorationTask):
    """A trivial task carrying one conf file and one input file (test helper)."""

    def __init__(
        self,
        conf_name="conf.lmp",
        conf_cont="",
        inpu_name="in.lammps",
        inpu_cont="",
    ):
        super().__init__()
        self._files = {
            conf_name: conf_cont,
            inpu_name: inpu_cont,
        }


class FooTaskGroup(BaseExplorationTaskGroup):
    """A group of `numb_task` FooTasks (test helper)."""

    def __init__(self, numb_task):
        super().__init__()
        # NOTE(review): tasks live in a nested group instead of the plain
        # list used by the base class — confirm this indirection is intended.
        self.tlist = BaseExplorationTaskGroup()
        for idx in range(numb_task):
            self.tlist.add_task(
                FooTask(
                    f"conf.{idx}",
                    f"this is conf.{idx}",
                    f"input.{idx}",
                    f"this is input.{idx}",
                )
            )

    @property
    def task_list(self):
        """Expose the nested group as the task list."""
        return self.tlist


if __name__ == "__main__":
    grp = FooTaskGroup(3)
    for task in grp:
        fcs = task.files()
        print(fcs)
"inputs": DeepmdInputs, 40 | "prep": PrepDeepmd, 41 | "run": RunDeepmd, 42 | }, 43 | "fpop_abacus": { 44 | "inputs": FpOpAbacusInputs, 45 | "prep": PrepFpOpAbacus, 46 | "run": RunFpOpAbacus, 47 | }, 48 | "fpop_cp2k": { 49 | "inputs": FpOpCp2kInputs, 50 | "prep": PrepFpOpCp2k, 51 | "run": RunFpOpCp2k, 52 | }, 53 | } 54 | -------------------------------------------------------------------------------- /dpgen2/op/__init__.py: -------------------------------------------------------------------------------- 1 | from .collect_data import ( 2 | CollectData, 3 | ) 4 | from .collect_run_caly import ( 5 | CollRunCaly, 6 | ) 7 | from .diffcsp_gen import ( 8 | DiffCSPGen, 9 | ) 10 | from .prep_caly_dp_optim import ( 11 | PrepCalyDPOptim, 12 | ) 13 | from .prep_caly_input import ( 14 | PrepCalyInput, 15 | ) 16 | from .prep_caly_model_devi import ( 17 | PrepCalyModelDevi, 18 | ) 19 | from .prep_dp_train import ( 20 | PrepDPTrain, 21 | ) 22 | from .prep_lmp import ( 23 | PrepLmp, 24 | ) 25 | from .prep_relax import ( 26 | PrepRelax, 27 | ) 28 | from .run_caly_dp_optim import ( 29 | RunCalyDPOptim, 30 | ) 31 | from .run_caly_model_devi import ( 32 | RunCalyModelDevi, 33 | ) 34 | from .run_dp_train import ( 35 | RunDPTrain, 36 | ) 37 | from .run_lmp import ( 38 | RunLmp, 39 | RunLmpHDF5, 40 | ) 41 | from .run_relax import ( 42 | RunRelax, 43 | RunRelaxHDF5, 44 | ) 45 | from .select_confs import ( 46 | SelectConfs, 47 | ) 48 | -------------------------------------------------------------------------------- /dpgen2/op/caly_evo_step_merge.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import pickle 4 | import shutil 5 | from pathlib import ( 6 | Path, 7 | ) 8 | from typing import ( 9 | List, 10 | Tuple, 11 | ) 12 | 13 | from dflow import ( 14 | Step, 15 | Workflow, 16 | download_artifact, 17 | upload_artifact, 18 | ) 19 | from dflow.python import ( 20 | OP, 21 | OPIO, 22 | Artifact, 23 | BigParameter, 24 | 
class CalyEvoStepMerge(OP):
    """Run a full `CalyEvoStep` super-OP as a nested dflow workflow.

    The CALYPSO evolution loop is wrapped into an inner workflow which is
    submitted and waited upon; its output artifacts are then downloaded and
    re-emitted as this OP's outputs.
    """

    def __init__(self, mode="debug", *args, **kwargs):
        # NOTE(review): OP.__init__ is not invoked here — confirm the dflow
        # OP base class tolerates this.
        self.mode = mode
        # positional/keyword args are forwarded verbatim to CalyEvoStep
        self.args = args
        self.kwargs = kwargs

    @classmethod
    def get_input_sign(cls):
        # The inner CalyEvoStep inputs re-exposed on this OP.
        return OPIOSign(
            {
                "iter_num": int,
                "cnt_num": Parameter(int, default=0),
                "block_id": Parameter(str, default=""),
                "task_name": BigParameter(str),
                "expl_config": BigParameter(dict),
                "models": Artifact(Path),
                "input_file": Artifact(Path),
                "caly_run_opt_file": Artifact(Path),
                "caly_check_opt_file": Artifact(Path),
                "results": Artifact(Path, optional=True),
                "step": Artifact(Path, optional=True),
                "opt_results_dir": Artifact(List[Path], optional=True),
                "qhull_input": Artifact(Path, optional=True),
            }
        )

    @classmethod
    def get_output_sign(cls):
        # Only the trajectory results of the evolution are surfaced.
        return OPIOSign(
            {
                "traj_results": Artifact(List[Path]),
            }
        )

    @OP.exec_sign_check
    def execute(
        self,
        ip: OPIO,
    ) -> OPIO:
        """Submit the nested CalyEvoStep workflow and collect its outputs.

        Parameters
        ----------
        ip : OPIO
            Input dict matching `get_input_sign`.

        Returns
        -------
        OPIO
            Output dict matching `get_output_sign` (`traj_results`).
        """
        from dflow import (
            config,
        )

        # run the nested workflow in the configured mode ("debug" by default)
        config["mode"] = self.mode
        wf = Workflow("caly-evo-workflow")
        steps = CalyEvoStep(*self.args, **self.kwargs)
        step = Step(
            "caly-evo-step",
            template=steps,
            slices=Slices(output_artifact=["traj_results"]),
            # forward every inner input from this OP's inputs
            parameters={k: ip[k] for k in steps.inputs.parameters},
            artifacts={
                k: upload_artifact(ip[k]) if ip[k] is not None else None
                for k in steps.inputs.artifacts
            },
            # a single slice; traj_results is gathered as a sliced artifact
            with_param=[0],
        )
        wf.add(step)
        wf.submit()
        wf.wait()
        # NOTE(review): assert is stripped under `python -O`; consider raising
        # an explicit error when the nested workflow fails.
        assert wf.query_status() == "Succeeded"
        out = OPIO()
        step = wf.query_step("caly-evo-step")[0]
        for k in step.outputs.parameters:
            out[k] = step.outputs.parameters[k].value
        output_sign = self.get_output_sign()
        for k in step.outputs.artifacts:
            path_list = download_artifact(step.outputs.artifacts[k])
            if output_sign[k].type == List[Path]:
                # sliced outputs may come back nested; flatten to a flat list
                if not isinstance(path_list, list) or any(
                    [p is not None and not isinstance(p, str) for p in path_list]
                ):
                    path_list = list(flatten(path_list).values())
                out[k] = [Path(p) for p in path_list]
            elif output_sign[k].type == Path:
                assert len(path_list) == 1
                out[k] = Path(path_list[0])
        return out
class CollectData(OP):
    """Collect labeled data and add to the iteration dataset.

    After running FP tasks, the labeled data are scattered in task
    directories. This OP collect the labeled data in one data
    directory and add it to the iteration data. The data generated by
    this iteration will be place in `ip["name"]` subdirectory of the
    iteration data directory.

    """

    # Default for the "optional_parameter" input; mixed_type selects the
    # deepmd/npy "mixed" on-disk format on output.
    default_optional_parameter = {
        "mixed_type": False,
    }

    @classmethod
    def get_input_sign(cls):
        return OPIOSign(
            {
                "name": str,
                "type_map": List[str],
                "optional_parameter": Parameter(
                    dict,
                    default=CollectData.default_optional_parameter,
                ),
                "labeled_data": Artifact(List[Path]),
                "iter_data": Artifact(List[Path]),
            }
        )

    @classmethod
    def get_output_sign(cls):
        return OPIOSign(
            {
                "iter_data": Artifact(List[Path]),
            }
        )

    @OP.exec_sign_check
    def execute(
        self,
        ip: OPIO,
    ) -> OPIO:
        r"""Execute the OP. This OP collect data scattered in directories given by `ip['labeled_data']`
        in to one `dpdata.Multisystems` and store it in a directory named `name`. This directory is appended
        to the list `iter_data`.

        Parameters
        ----------
        ip : dict
            Input dict with components:

            - `name`: (`str`) The name of this iteration. The data generated by this iteration will be place in a sub-directory of `name`.
            - `labeled_data`: (`Artifact(List[Path])`) The paths of labeled data generated by FP tasks of the current iteration.
            - `iter_data`: (`Artifact(List[Path])`) The data paths previous iterations.

        Returns
        -------
        Any
            Output dict with components:
            - `iter_data`: (`Artifact(List[Path])`) The data paths of previous and the current iteration data.

        """
        name = ip["name"]
        type_map = ip["type_map"]
        mixed_type = ip["optional_parameter"]["mixed_type"]
        labeled_data = ip["labeled_data"]
        iter_data = ip["iter_data"]

        ms = dpdata.MultiSystems(type_map=type_map)
        for ii in labeled_data:
            # rglob: fparam/aparam files may sit in nested per-system
            # subdirectories. If present, register the electronic-temperature
            # data type with dpdata before loading (frame-level fparam vs
            # atomic-level aparam), otherwise LabeledSystem would ignore them.
            if ii and len(list(ii.rglob("fparam.npy"))) > 0:
                setup_ele_temp(False)
            if ii and len(list(ii.rglob("aparam.npy"))) > 0:
                setup_ele_temp(True)
            ss = dpdata.LabeledSystem(ii, fmt="deepmd/npy")
            ms.append(ss)

        # NOTICE:
        # if ms.get_nframes() == 0, ms.to_deepmd_npy would not make the dir Path(name)
        Path(name).mkdir()
        if mixed_type:
            ms.to_deepmd_npy_mixed(name)  # type: ignore
        else:
            ms.to_deepmd_npy(name)  # type: ignore
        # The freshly written directory joins the accumulated iteration data.
        iter_data.append(Path(name))

        return OPIO(
            {
                "iter_data": iter_data,
            }
        )
class DiffCSPGen(OP):
    """Run a DiffCSP generative model and collect the sampled structures.

    The generation command comes from ``config["gen_command"]``. After the
    command finishes, the ``eval_gen.pt`` file written under the model path
    is converted to CIF files in a ``diffcsp.<task_id>`` directory.
    """

    @classmethod
    def get_input_sign(cls):
        return OPIOSign(
            {
                "config": dict,
                "task_id": str,
            }
        )

    @classmethod
    def get_output_sign(cls):
        return OPIOSign(
            {
                "cifs": Artifact(List[Path]),
            }
        )

    @OP.exec_sign_check
    def execute(
        self,
        ip: OPIO,
    ) -> OPIO:
        """Execute the OP.

        Parameters
        ----------
        ip : dict
            Input dict with components:

            - `config`: (`dict`) Must contain `gen_command`, the shell command
              that runs DiffCSP generation; it must include `--model_path <dir>`.
            - `task_id`: (`str`) Identifier used to name the output directory.

        Returns
        -------
        op : dict
            Output dict with components:

            - `cifs`: (`Artifact(List[Path])`) The generated CIF files.

        Raises
        ------
        RuntimeError
            If `--model_path` (or its value) is missing from `gen_command`.
        """
        cmd = ip["config"]["gen_command"]
        args = cmd.split()
        try:
            i = args.index("--model_path")
        except ValueError:
            raise RuntimeError("Path of DiffCSP model not provided.") from None
        # Fix: previously a trailing "--model_path" with no value raised an
        # opaque IndexError; report the same actionable error instead.
        if i + 1 >= len(args):
            raise RuntimeError("Path of DiffCSP model not provided.")
        model_path = args[i + 1]
        # NOTE(review): gen_command is user-supplied config executed through
        # the shell; it is assumed to be trusted operator input.
        subprocess.run(cmd, shell=True, check=True)
        result_file = os.path.join(model_path, "eval_gen.pt")
        task_dir = "diffcsp.%s" % ip["task_id"]
        convert_pt_to_cif(result_file, task_dir)
        return OPIO(
            {
                "cifs": list(Path(task_dir).glob("*.cif")),
            }
        )
class MDSettings:
    """Plain-data container for molecular-dynamics run settings.

    Stores ensemble, timestep, step counts, thermostat/barostat couplings
    and optional extras (PKA energy, electronic temperatures, ...) as
    attributes, and can serialize itself to JSON via `to_str`.
    """

    def __init__(
        self,
        ens: str,
        dt: float,
        nsteps: int,
        trj_freq: int,
        temps: Optional[List[float]] = None,
        press: Optional[List[float]] = None,
        tau_t: float = 0.1,
        tau_p: float = 0.5,
        pka_e: Optional[float] = None,
        neidelay: Optional[int] = None,
        no_pbc: bool = False,
        use_clusters: bool = False,
        relative_epsilon: Optional[float] = None,
        relative_v_epsilon: Optional[float] = None,
        ele_temp_f: Optional[float] = None,
        ele_temp_a: Optional[float] = None,
    ) -> None:
        self.ens = ens
        self.temps = temps
        self.press = press
        self.dt = dt
        self.nsteps = nsteps
        # Fix: a stray trailing comma (`= (trj_freq,)`) used to wrap the
        # trajectory dump frequency in a 1-tuple instead of storing the int.
        self.trj_freq = trj_freq
        self.pka_e = pka_e
        self.neidelay = neidelay
        self.no_pbc = no_pbc
        self.tau_t = tau_t
        self.tau_p = tau_p
        self.use_clusters = use_clusters
        self.relative_epsilon = relative_epsilon
        self.relative_v_epsilon = relative_v_epsilon
        self.ele_temp_f = ele_temp_f
        self.ele_temp_a = ele_temp_a

    def to_str(
        self,
    ) -> str:
        """Return a sorted, indented JSON dump of all settings."""
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
class PrepCalyModelDevi(OP):
    """Prepare the working directories and input file according to slices information
    for making model deviation.
    """

    @classmethod
    def get_input_sign(cls):
        return OPIOSign(
            {
                "task_name": Parameter(str),
                "config": BigParameter(dict),
                "traj_results": Artifact(List[Path]),
            }
        )

    @classmethod
    def get_output_sign(cls):
        return OPIOSign(
            {
                "task_name_list": Parameter(List[str]),
                "grouped_traj_list": Artifact(List[Path]),
            }
        )

    @OP.exec_sign_check
    def execute(
        self,
        ip: OPIO,
    ) -> OPIO:
        """Execute the OP.

        Collects all ``*.traj`` files found (recursively) under the
        non-None ``traj_results`` directories, splits them into groups of
        at most ``config["model_devi_group_size"]`` (default: all in one
        group), and symlinks each group into its own
        ``<task_name>/trajs_part_<idx>`` directory.

        Parameters
        ----------
        ip : dict
            Input dict with components:
            - `task_name` : (`str`) Name of the working directory to create.
            - `config` : (`BigParameter(dict)`) May contain `model_devi_group_size`.
            - `traj_results` : (`Artifact(List[Path])`) Directories holding `*.traj` files.

        Returns
        -------
        op : dict
            Output dict with components:

            - `task_name_list`: (`List[str]`) One name per trajectory group directory.
            - `grouped_traj_list`: (`Artifact(List[Path])`) The group directories themselves.

        """
        work_dir = Path(ip["task_name"])
        # Resolve to absolute paths first: the cwd changes below, which would
        # break relative paths when creating the symlinks.
        traj_results_dir = [
            Path(dir_name).resolve()
            for dir_name in ip["traj_results"]
            if dir_name is not None
        ]
        trajs = [
            traj.resolve()
            for traj_dir in traj_results_dir
            for traj in Path(traj_dir).rglob("*.traj")
        ]
        expl_config = ip["config"]
        # Default group size = all trajectories -> a single group.
        group_size = expl_config.get("model_devi_group_size", len(trajs))

        with set_directory(work_dir):
            # Chunk the flat trajectory list into consecutive groups.
            grouped_trajs_list = [
                trajs[i : i + group_size] for i in range(0, len(trajs), group_size)
            ]

            traj_cnt = 0
            task_dirs = []
            for idx, grouped_trajs in enumerate(grouped_trajs_list):
                trajs_path = Path(f"trajs_part_{idx}")
                # Record the path relative to the original cwd (work_dir/...),
                # since we are currently inside work_dir.
                task_dirs.append(work_dir / trajs_path)
                with set_directory(trajs_path):
                    for traj in grouped_trajs:
                        # Prefix with a global counter so files coming from
                        # different source directories cannot collide by name.
                        Path(f"{traj_cnt}.{traj.name}").symlink_to(traj)
                        traj_cnt += 1

        task_names = [str(task_dir) for task_dir in task_dirs]

        return OPIO(
            {
                "task_name_list": task_names,
                "grouped_traj_list": task_dirs,
            }
        )
class PrepRelax(OP):
    """Split generated CIF structures into relaxation task directories.

    The CIFs in `ip["cifs"]` are grouped `relax_group_size` at a time; each
    group gets a `task.%06d` directory containing symlinks to its CIFs.
    """

    @classmethod
    def get_input_sign(cls):
        return OPIOSign(
            {
                "expl_config": dict,
                "cifs": Artifact(List[Path]),
            }
        )

    @classmethod
    def get_output_sign(cls):
        return OPIOSign(
            {
                "ntasks": int,
                "task_paths": Artifact(List[Path]),
            }
        )

    @OP.exec_sign_check
    def execute(
        self,
        ip: OPIO,
    ) -> OPIO:
        """Execute the OP.

        Parameters
        ----------
        ip : dict
            Input dict with components:

            - `expl_config`: (`dict`) Must contain `relax_group_size`.
            - `cifs`: (`Artifact(List[Path])`) The structures to relax.

        Returns
        -------
        op : dict
            Output dict with components:

            - `ntasks`: (`int`) The number of task directories created.
            - `task_paths`: (`Artifact(List[Path])`) The task directories.
        """
        ncifs = len(ip["cifs"])
        config = ip["expl_config"]
        group_size = config["relax_group_size"]
        # Fix: int(ncifs / group_size) floored the task count, silently
        # dropping the trailing ncifs % group_size structures. Use ceiling
        # division so the last (possibly short) group gets its own task; the
        # min() bound below already handles the short final group.
        ntasks = (ncifs + group_size - 1) // group_size
        task_paths = []
        for i in range(ntasks):
            task_dir = Path("task.%06d" % i)
            task_dir.mkdir(exist_ok=True)
            for j in range(group_size * i, min(group_size * (i + 1), ncifs)):
                os.symlink(ip["cifs"][j], task_dir / ("%s.cif" % j))
            task_paths.append(task_dir)
        return OPIO(
            {
                "ntasks": ntasks,
                "task_paths": task_paths,
            }
        )
class BinaryFileInput:
    """Hold a file's raw bytes in memory so it can travel as a plain object.

    On construction the file at `path` is read into memory (optionally
    checking its extension); `save_as_file` writes those bytes back out.
    """

    def __init__(self, path: Union[str, Path], ext: Optional[str] = None) -> None:
        """Read `path` into memory.

        Parameters
        ----------
        path : str or Path
            The file to load; must exist.
        ext : str, optional
            Expected extension, with or without the leading dot
            (``"txt"`` and ``".txt"`` are equivalent).
        """
        path = str(path)
        assert os.path.exists(path), f"No such file: {str(path)}"
        # Normalize to a dotted extension so it compares against splitext().
        if ext and not ext.startswith("."):
            ext = "." + ext
        self.ext = ext

        if self.ext:
            assert (
                os.path.splitext(path)[-1] == self.ext
            ), f'File extension mismatch, require "{ext}", current "{os.path.splitext(path)[-1]}", file path: {str(path)}'

        self.file_name = os.path.basename(path)
        with open(path, "rb") as f:
            self._data = f.read()

    def save_as_file(self, path: Union[str, Path]) -> None:
        """Write the stored bytes to `path`, warning on extension mismatch."""
        if self.ext and os.path.splitext(path)[-1] != self.ext:
            # Fix: report the extension via os.path.splitext (same as the
            # comparison above). The old message used str(path).split('.')[-1],
            # which omitted the leading dot and printed the whole path when
            # the target had no dot at all.
            warnings.warn(
                f'warning: file extension mismatch! Extension of input file is "{self.ext}",'
                + f' current extension is "{os.path.splitext(path)[-1]}"'
            )

        with open(path, "wb") as file:
            file.write(self._data)
@contextmanager
def set_directory(path: Path):
    """Temporarily switch the process working directory to *path*.

    The directory (and any missing parents) is created if necessary; on
    exit the previous working directory is always restored.

    Parameters
    ----------
    path : Path
        The directory to enter.

    Yields
    ------
    None

    Examples
    --------
    >>> with set_directory("some_path"):
    ...     do_something()
    """
    previous = Path().absolute()
    path.mkdir(exist_ok=True, parents=True)
    try:
        os.chdir(path)
        yield
    finally:
        # Restore no matter how the body exits.
        os.chdir(previous)


def chdir(path_key: str):
    """Build a decorator that runs an OP's ``execute`` inside ``ip[path_key]``.

    Parameters
    ----------
    path_key : str
        Key into the OPIO whose value is the directory to enter.

    Examples
    --------
    >>> class SomeOP(OP):
    ...     @chdir("path")
    ...     def execute(self, ip: OPIO):
    ...         do_something()
    """

    def decorator(func: Callable):
        # Wrap the method so its body runs with ip[path_key] as cwd.

        @wraps(func)
        def wrapper(self, ip: OPIO):
            target = Path(ip[path_key])
            with set_directory(target):
                return func(self, ip)

        return wrapper

    return decorator
"s3_": 25 | dflow_s3_config[kk[3:]] = dflow_config.pop(kk) 26 | for kk in dflow_config.keys(): 27 | config[kk] = dflow_config[kk] 28 | for kk in dflow_s3_config.keys(): 29 | s3_config[kk] = dflow_s3_config[kk] 30 | 31 | 32 | def dflow_s3_config_lower( 33 | dflow_s3_config_data, 34 | ): 35 | for kk in dflow_s3_config_data.keys(): 36 | s3_config[kk] = dflow_s3_config_data[kk] 37 | 38 | 39 | def dflow_config( 40 | config_data, 41 | ): 42 | """ 43 | set the dflow config by `config_data` 44 | 45 | the keys starting with "s3_" will be treated as s3_config keys, 46 | other keys are treated as config keys. 47 | 48 | """ 49 | if config_data is not None: 50 | dflow_config_lower(config_data) 51 | 52 | 53 | def dflow_s3_config( 54 | config_data, 55 | ): 56 | """ 57 | set the s3 config by `config_data` 58 | 59 | """ 60 | if config_data is not None: 61 | dflow_s3_config_lower(config_data) 62 | -------------------------------------------------------------------------------- /dpgen2/utils/obj_artifact.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from pathlib import ( 3 | Path, 4 | ) 5 | 6 | 7 | def dump_object_to_file( 8 | obj, 9 | fname, 10 | ): 11 | """ 12 | pickle dump object to a file 13 | 14 | """ 15 | with open(fname, "wb") as fp: 16 | pickle.dump(obj, fp) 17 | return Path(fname) 18 | 19 | 20 | def load_object_from_file( 21 | fname, 22 | ): 23 | """ 24 | pickle load object from a file 25 | 26 | """ 27 | with open(fname, "rb") as fp: 28 | obj = pickle.load(fp) 29 | return obj 30 | -------------------------------------------------------------------------------- /dpgen2/utils/run_command.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import ( 3 | List, 4 | Tuple, 5 | Union, 6 | ) 7 | 8 | from dflow.config import ( 9 | config, 10 | ) 11 | from dflow.utils import run_command as dflow_run_command 12 | 13 | 14 | def run_command( 15 | cmd: Union[str, 
def setup_ele_temp(atomic: bool):
    """Set electronic temperature as required input data.

    Registers an electronic-temperature field with dpdata so that
    `System`/`LabeledSystem` will load it from deepmd/npy data: an
    `aparam` per-atom array of shape (nframes, natoms, 1) when `atomic`
    is True, or an `fparam` per-frame array of shape (nframes, 1)
    otherwise. Registration is marked non-required, so systems without
    the file still load.

    Parameters
    ----------
    atomic : bool
        Whether to use atomic temperature or frame temperature
    """
    if atomic:
        ele_temp_data_type = DataType(
            "aparam",
            np.ndarray,
            shape=(Axis.NFRAMES, Axis.NATOMS, 1),
            required=False,
        )
    else:
        ele_temp_data_type = DataType(
            "fparam",
            np.ndarray,
            shape=(Axis.NFRAMES, 1),
            required=False,
        )

    # Register on both classes so labeled and unlabeled data pick it up.
    dpdata.System.register_data_type(ele_temp_data_type)
    dpdata.LabeledSystem.register_data_type(ele_temp_data_type)
1e-8, 31 | "decay_steps": 100 32 | }, 33 | "training" : { 34 | "training_data": { 35 | "systems": [], 36 | "batch_size":"auto" 37 | }, 38 | "numb_steps":1000, 39 | "seed":10, 40 | "disp_file":"lcurve.out", 41 | "disp_freq":100, 42 | "save_freq":1000 43 | }, 44 | "_comment" : "all" 45 | } 46 | -------------------------------------------------------------------------------- /examples/calypso/dp_dpa1_train.json: -------------------------------------------------------------------------------- 1 | { 2 | "model": { 3 | "type_map": ["Mg", "Al"] , 4 | "descriptor": { 5 | "type": "se_atten", 6 | "sel": 20, 7 | "rcut_smth": 0.5, 8 | "rcut": 6.0, 9 | "neuron": [ 10 | 40, 11 | 80, 12 | 160 13 | ], 14 | "resnet_dt": false, 15 | "axis_neuron": 16, 16 | "attn": 128, 17 | "attn_layer": 3, 18 | "attn_dotr": true, 19 | "attn_mask": false, 20 | "seed": 30289, 21 | "_activation_function": "tanh" 22 | }, 23 | "fitting_net": { 24 | "neuron": [ 25 | 240, 26 | 240, 27 | 240 28 | ], 29 | "resnet_dt": true, 30 | "_coord_norm": true, 31 | "_type_fitting_net": false, 32 | "seed": 11273, 33 | "_activation_function": "tanh" 34 | } 35 | }, 36 | "learning_rate": { 37 | "type": "exp", 38 | "start_lr": 0.001, 39 | "decay_steps": 5, 40 | "stop_lr": 5e-08, 41 | "_decay_rate": 0.95 42 | }, 43 | "loss": { 44 | "start_pref_e": 0.02, 45 | "limit_pref_e": 2, 46 | "start_pref_f": 1000, 47 | "limit_pref_f": 1, 48 | "start_pref_v": 0, 49 | "limit_pref_v": 0 50 | }, 51 | "training": { 52 | "training_data": { 53 | "systems": [ 54 | "/personal/workplace/DP/dpgen2/train/deepmd" 55 | ], 56 | "batch_size": 16, 57 | "_comment": "that's all" 58 | }, 59 | "numb_steps": 1000, 60 | "seed": 922, 61 | "disp_file": "lcurve.out", 62 | "disp_freq": 1000, 63 | "numb_test": 5, 64 | "save_freq": 10000, 65 | "save_ckpt": "model.ckpt", 66 | "disp_training": true, 67 | "time_training": true, 68 | "profiling": false, 69 | "profiling_file": "timeline.json" 70 | } 71 | } 72 | 
-------------------------------------------------------------------------------- /examples/calypso/dpa2_train.json: -------------------------------------------------------------------------------- 1 | { 2 | "_comment": "that's all", 3 | "model": { 4 | "type_map": [ 5 | "Mg", 6 | "Al" 7 | ], 8 | "descriptor": { 9 | "type": "dpa2", 10 | "tebd_dim": 8, 11 | "repinit_rcut": 9.0, 12 | "repinit_rcut_smth": 8.0, 13 | "repinit_nsel": 120, 14 | "repformer_rcut": 4.0, 15 | "repformer_rcut_smth": 3.5, 16 | "repformer_nsel": 40, 17 | "repinit_neuron": [ 18 | 25, 19 | 50, 20 | 100 21 | ], 22 | "repinit_axis_neuron": 12, 23 | "repinit_activation": "tanh", 24 | "repformer_nlayers": 12, 25 | "repformer_g1_dim": 128, 26 | "repformer_g2_dim": 32, 27 | "repformer_attn2_hidden": 32, 28 | "repformer_attn2_nhead": 4, 29 | "repformer_attn1_hidden": 128, 30 | "repformer_attn1_nhead": 4, 31 | "repformer_axis_dim": 4, 32 | "repformer_update_h2": false, 33 | "repformer_update_g1_has_conv": true, 34 | "repformer_update_g1_has_grrg": true, 35 | "repformer_update_g1_has_drrd": true, 36 | "repformer_update_g1_has_attn": true, 37 | "repformer_update_g2_has_g1g1": true, 38 | "repformer_update_g2_has_attn": true, 39 | "repformer_attn2_has_gate": true, 40 | "repformer_add_type_ebd_to_seq": false 41 | }, 42 | "fitting_net": { 43 | "neuron": [ 44 | 240, 45 | 240, 46 | 240 47 | ], 48 | "resnet_dt": true, 49 | "seed": 1, 50 | "_comment": " that's all" 51 | }, 52 | "_comment": " that's all" 53 | }, 54 | "learning_rate": { 55 | "type": "exp", 56 | "decay_steps": 5000, 57 | "start_lr": 0.0002, 58 | "stop_lr": 3.51e-08, 59 | "_comment": "that's all" 60 | }, 61 | "loss": { 62 | "type": "ener", 63 | "start_pref_e": 0.02, 64 | "limit_pref_e": 1, 65 | "start_pref_f": 1000, 66 | "limit_pref_f": 1, 67 | "start_pref_v": 0, 68 | "limit_pref_v": 0, 69 | "_comment": " that's all" 70 | }, 71 | "training": { 72 | "stat_file": "./dpa2", 73 | "training_data": { 74 | "systems": [ 75 | 
"/personal/workplace/DP/dpgen2/Mg12Al8/deepmd" 76 | ], 77 | "batch_size": 1, 78 | "_comment": "that's all" 79 | }, 80 | "_validation_data": { 81 | "systems": [ 82 | "/personal/workplace/DP/dpgen2/Mg12Al8/deepmd" 83 | ], 84 | "batch_size": 1, 85 | "_comment": "that's all" 86 | }, 87 | "numb_steps": 20, 88 | "warmup_steps": 0, 89 | "gradient_max_norm": 5.0, 90 | "seed": 10, 91 | "disp_file": "lcurve.out", 92 | "disp_freq": 100, 93 | "save_freq": 200, 94 | "_comment": "that's all" 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /examples/chno/dpa_manyi.json: -------------------------------------------------------------------------------- 1 | { 2 | "model": { 3 | "type_map": ["H", "C", "N", "O"] , 4 | "descriptor": { 5 | "type": "se_atten", 6 | "sel": 20, 7 | "rcut_smth": 0.5, 8 | "rcut": 6.0, 9 | "neuron": [ 10 | 40, 11 | 80, 12 | 160 13 | ], 14 | "resnet_dt": false, 15 | "axis_neuron": 16, 16 | "attn": 128, 17 | "attn_layer": 3, 18 | "attn_dotr": true, 19 | "attn_mask": false, 20 | "seed": 30289, 21 | "_activation_function": "tanh" 22 | }, 23 | "fitting_net": { 24 | "neuron": [ 25 | 240, 26 | 240, 27 | 240 28 | ], 29 | "resnet_dt": true, 30 | "_coord_norm": true, 31 | "_type_fitting_net": false, 32 | "seed": 11273, 33 | "_activation_function": "tanh" 34 | } 35 | }, 36 | "learning_rate": { 37 | "type": "exp", 38 | "start_lr": 0.001, 39 | "decay_steps": 5000, 40 | "stop_lr": 5e-08, 41 | "_decay_rate": 0.95 42 | }, 43 | "loss": { 44 | "start_pref_e": 0.02, 45 | "limit_pref_e": 2, 46 | "start_pref_f": 1000, 47 | "limit_pref_f": 1, 48 | "start_pref_v": 0, 49 | "limit_pref_v": 0 50 | }, 51 | "training": { 52 | "training_data": { 53 | "systems": [ 54 | ], 55 | "batch_size": 16, 56 | "_comment": "that's all" 57 | }, 58 | "numb_steps": 10000000, 59 | "seed": 922, 60 | "disp_file": "lcurve.out", 61 | "disp_freq": 2000, 62 | "numb_test": 5, 63 | "save_freq": 10000, 64 | "save_ckpt": "model.ckpt", 65 | "disp_training": true, 66 
| "time_training": true, 67 | "profiling": false, 68 | "profiling_file": "timeline.json" 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /examples/chno/template.lammps: -------------------------------------------------------------------------------- 1 | variable NSTEPS equal V_NSTEPS 2 | variable TEMP equal V_TEMP 3 | variable PRES equal 0 4 | variable THERMO_FREQ equal V_DUMPFREQ 5 | variable DUMP equal 10 6 | variable TAU_T equal 0.100000 7 | variable TAU_P equal 0.500000 8 | 9 | 10 | #Initialization 11 | units metal 12 | dimension 3 13 | atom_style atomic 14 | boundary f f f 15 | 16 | read_data conf.lmp 17 | mass 1 4.0 18 | mass 2 12.0 19 | mass 3 14.0 20 | mass 4 16.0 21 | 22 | #Interatomic potentials - DeepMD 23 | pair_style deepmd 24 | pair_coeff * * 25 | 26 | 27 | #MD parameters 28 | timestep 0.001 #ps 29 | velocity all create ${TEMP} 1815191 mom yes rot yes dist gaussian 30 | 31 | #Run MD - equil at 300K 32 | run_style verlet #Velocity verlet 33 | fix 1 all nve 34 | fix 2 all temp/csvr ${TEMP} ${TEMP} ${TAU_T} 1305191 35 | #fix 1 all nvt temp ${TEMP} ${TEMP} 0.1 #NH thermostat - 300K with 100 fs frequency 36 | fix 3 all momentum 1 linear 0 0 0 #Remove total linear momentum of the system at each step 37 | fix 4 all recenter INIT INIT INIT 38 | thermo_style custom step temp pe etotal press #Setting printing 39 | thermo ${THERMO_FREQ} #Ouputing thermodynamic properties 40 | dump dpgen_dump 41 | #dump 2 all custom 100 vel.xyz id type vx vy vz 42 | run ${NSTEPS} #25 ps 43 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61", "setuptools_scm[toml]>=7"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "dpgen2" 7 | dynamic = ["version"] 8 | description = "DPGEN2: concurrent learning workflow generating the machine 
learning potential energy models." 9 | authors = [ 10 | {name = "DeepModeling"}, 11 | ] 12 | license = {file = "LICENSE"} 13 | classifiers = [ 14 | "Programming Language :: Python :: 3.7", 15 | "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", 16 | ] 17 | dependencies = [ 18 | 'numpy', 19 | 'dpdata>=0.2.20', 20 | 'pydflow>=1.8.97', 21 | 'dargs>=0.3.1', 22 | 'scipy', 23 | 'lbg', 24 | 'packaging', 25 | 'fpop', 26 | 'dpgui', 27 | 'cp2kdata', 28 | ] 29 | requires-python = ">=3.7" 30 | readme = "README.md" 31 | keywords = ["deep potential", "concurrent learning", "work flow"] 32 | 33 | [project.urls] 34 | Homepage = "https://github.com/deepmodeling/dpgen2" 35 | documentation = "https://docs.deepmodeling.com/projects/dpgen2" 36 | repository = "https://github.com/deepmodeling/dpgen2" 37 | 38 | [project.scripts] 39 | dpgen2 = "dpgen2.entrypoint.main:main" 40 | 41 | [project.entry-points."dpgui"] 42 | "DP-GEN2 Submit" = "dpgen2.entrypoint.args:submit_args" 43 | 44 | [project.optional-dependencies] 45 | docs = [ 46 | 'sphinx', 47 | 'recommonmark', 48 | 'sphinx-book-theme', 49 | 'numpydoc', 50 | 'myst_parser', 51 | 'deepmodeling-sphinx>=0.3.0', 52 | 'sphinx-argparse<0.5.0', 53 | "dargs>=0.4.1", 54 | ] 55 | test = [ 56 | 'fakegaussian>=0.0.3', 57 | 'dpgui', 58 | ] 59 | gui = [ 60 | 'dpgui', 61 | ] 62 | 63 | [tool.setuptools.packages.find] 64 | include = ["dpgen2*"] 65 | 66 | [tool.setuptools.package-data] 67 | dpgen2 = ['*.json'] 68 | 69 | [tool.setuptools_scm] 70 | write_to = "dpgen2/_version.py" 71 | 72 | [tool.pyright] 73 | include = [ 74 | "dpgen2", 75 | ] 76 | exclude = [ 77 | "dpgen2/_version.py", 78 | ] 79 | 80 | [tool.isort] 81 | profile = "black" 82 | force_grid_wrap = 1 83 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/__init__.py -------------------------------------------------------------------------------- /tests/conf/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/conf/__init__.py -------------------------------------------------------------------------------- /tests/conf/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))) 5 | import dpgen2 6 | -------------------------------------------------------------------------------- /tests/conf/test_unit_cell.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import shutil 4 | import unittest 5 | from pathlib import ( 6 | Path, 7 | ) 8 | 9 | import numpy as np 10 | 11 | # isort: off 12 | from .context import ( 13 | dpgen2, 14 | ) 15 | from dpgen2.conf.unit_cells import ( 16 | generate_unit_cell, 17 | ) 18 | 19 | # isort: on 20 | 21 | 22 | class TestGenerateUnitCell(unittest.TestCase): 23 | def test_bcc(self): 24 | sys = generate_unit_cell("bcc", 2.0) 25 | self.assertAlmostEqual(sys["cells"][0][0][0], 2.0) 26 | self.assertEqual(sys["atom_numbs"], [2]) 27 | np.testing.assert_array_almost_equal( 28 | sys["atom_types"], [0] * sum(sys["atom_numbs"]) 29 | ) 30 | 31 | def test_fcc(self): 32 | sys = generate_unit_cell("fcc", 2.0) 33 | self.assertAlmostEqual(sys["cells"][0][0][0], 2.0) 34 | self.assertEqual(sys["atom_numbs"], [4]) 35 | np.testing.assert_array_almost_equal( 36 | sys["atom_types"], [0] * sum(sys["atom_numbs"]) 37 | ) 38 | 39 | def test_hcp(self): 40 | sys = generate_unit_cell("hcp", 2.0) 41 | self.assertAlmostEqual(sys["cells"][0][0][0], 2.0) 42 | 
self.assertEqual(sys["atom_numbs"], [2]) 43 | np.testing.assert_array_almost_equal( 44 | sys["atom_types"], [0] * sum(sys["atom_numbs"]) 45 | ) 46 | 47 | def test_sc(self): 48 | sys = generate_unit_cell("sc", 2.0) 49 | self.assertAlmostEqual(sys["cells"][0][0][0], 2.0) 50 | self.assertEqual(sys["atom_numbs"], [1]) 51 | np.testing.assert_array_almost_equal( 52 | sys["atom_types"], [0] * sum(sys["atom_numbs"]) 53 | ) 54 | 55 | def test_diamond(self): 56 | sys = generate_unit_cell("diamond", 2.0) 57 | self.assertAlmostEqual(sys["cells"][0][0][0], 2.0 * np.sqrt(2.0)) 58 | self.assertEqual(sys["atom_numbs"], [2]) 59 | np.testing.assert_array_almost_equal( 60 | sys["atom_types"], [0] * sum(sys["atom_numbs"]) 61 | ) 62 | -------------------------------------------------------------------------------- /tests/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | dpgen_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) 5 | sys.path.insert(0, dpgen_path) 6 | import dpgen2 7 | from dpgen2.utils import ( 8 | dflow_config, 9 | ) 10 | 11 | if os.getenv("SKIP_UT_WITH_DFLOW"): 12 | skip_ut_with_dflow = int(os.getenv("SKIP_UT_WITH_DFLOW")) != 0 13 | skip_ut_with_dflow_reason = ( 14 | "skip because environment variable SKIP_UT_WITH_DFLOW is set to non-zero" 15 | ) 16 | else: 17 | skip_ut_with_dflow = False 18 | skip_ut_with_dflow_reason = "" 19 | upload_python_packages = [os.path.join(dpgen_path, "dpgen2")] 20 | # one needs to set proper values for the following variable. 
21 | default_image = "dptechnology/dpgen2:latest" 22 | default_host = None 23 | dflow_config({}) 24 | if os.getenv("DFLOW_DEBUG"): 25 | from dflow.config import ( 26 | config, 27 | ) 28 | 29 | config["mode"] = "debug" 30 | -------------------------------------------------------------------------------- /tests/entrypoint/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/entrypoint/__init__.py -------------------------------------------------------------------------------- /tests/entrypoint/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 5 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))) 6 | import dpgen2 7 | -------------------------------------------------------------------------------- /tests/entrypoint/test_argparse.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import shutil 4 | import unittest 5 | 6 | # isort: off 7 | from .context import ( 8 | dpgen2, 9 | ) 10 | from dpgen2.entrypoint.main import ( 11 | main_parser, 12 | parse_args, 13 | workflow_subcommands, 14 | ) 15 | 16 | # isort: off 17 | 18 | 19 | class ParserTest(unittest.TestCase): 20 | def setUp(self): 21 | self.parser = main_parser() 22 | 23 | def test_commands(self): 24 | tested_commands = ["resubmit", "status", "download", "watch"] 25 | tested_commands += workflow_subcommands 26 | 27 | for cmd in tested_commands: 28 | parsed = self.parser.parse_args([cmd, "foo", "bar"]) 29 | self.assertEqual(parsed.command, cmd) 30 | self.assertEqual(parsed.CONFIG, "foo") 31 | self.assertEqual(parsed.ID, "bar") 32 | 33 | tested_commands = ["submit"] 34 | for cmd in tested_commands: 35 | parsed = 
self.parser.parse_args([cmd, "foo"]) 36 | self.assertEqual(parsed.command, cmd) 37 | self.assertEqual(parsed.CONFIG, "foo") 38 | 39 | def test_watch(self): 40 | parsed = self.parser.parse_args( 41 | [ 42 | "watch", 43 | "foo", 44 | "bar", 45 | "-k", 46 | "foo", 47 | "bar", 48 | "tar", 49 | "-f", 50 | "10", 51 | "-d", 52 | "-p", 53 | "myprefix", 54 | ] 55 | ) 56 | self.assertEqual(parsed.keys, ["foo", "bar", "tar"]) 57 | self.assertEqual(parsed.download, True) 58 | self.assertEqual(parsed.frequency, 10) 59 | self.assertEqual(parsed.prefix, "myprefix") 60 | 61 | def test_dld(self): 62 | parsed = self.parser.parse_args( 63 | [ 64 | "download", 65 | "foo", 66 | "bar", 67 | "-k", 68 | "foo", 69 | "bar", 70 | "tar", 71 | "-p", 72 | "myprefix", 73 | ] 74 | ) 75 | self.assertEqual(parsed.keys, ["foo", "bar", "tar"]) 76 | self.assertEqual(parsed.prefix, "myprefix") 77 | 78 | def test_resubmit(self): 79 | parsed = self.parser.parse_args( 80 | [ 81 | "resubmit", 82 | "foo", 83 | "bar", 84 | "-l", 85 | "--reuse", 86 | "0", 87 | "10-20", 88 | ] 89 | ) 90 | self.assertEqual(parsed.list, True) 91 | self.assertEqual(parsed.reuse, ["0", "10-20"]) 92 | -------------------------------------------------------------------------------- /tests/entrypoint/test_workflow.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import shutil 4 | import textwrap 5 | import unittest 6 | 7 | import dflow 8 | import mock 9 | from dflow import ( 10 | Workflow, 11 | ) 12 | 13 | # isort: off 14 | from .context import ( 15 | dpgen2, 16 | ) 17 | from dpgen2.entrypoint.workflow import ( 18 | execute_workflow_subcommand, 19 | ) 20 | 21 | 22 | class ParserTest(unittest.TestCase): 23 | @mock.patch("dflow.Workflow.terminate") 24 | def test_terminate(self, mocked_f): 25 | config = json.loads(foo_str) 26 | execute_workflow_subcommand("terminate", "foo", config) 27 | mocked_f.assert_called_with() 28 | 29 | @mock.patch("dflow.Workflow.stop") 30 | 
def test_stop(self, mocked_f): 31 | config = json.loads(foo_str) 32 | execute_workflow_subcommand("stop", "foo", config) 33 | mocked_f.assert_called_with() 34 | 35 | @mock.patch("dflow.Workflow.suspend") 36 | def test_suspend(self, mocked_f): 37 | config = json.loads(foo_str) 38 | execute_workflow_subcommand("suspend", "foo", config) 39 | mocked_f.assert_called_with() 40 | 41 | @mock.patch("dflow.Workflow.delete") 42 | def test_delete(self, mocked_f): 43 | config = json.loads(foo_str) 44 | execute_workflow_subcommand("delete", "foo", config) 45 | mocked_f.assert_called_with() 46 | 47 | @mock.patch("dflow.Workflow.retry") 48 | def test_retry(self, mocked_f): 49 | config = json.loads(foo_str) 50 | execute_workflow_subcommand("retry", "foo", config) 51 | mocked_f.assert_called_with() 52 | 53 | @mock.patch("dflow.Workflow.resume") 54 | def test_resume(self, mocked_f): 55 | config = json.loads(foo_str) 56 | execute_workflow_subcommand("resume", "foo", config) 57 | mocked_f.assert_called_with() 58 | 59 | 60 | foo_str = textwrap.dedent( 61 | """ 62 | { 63 | "default_step_config" : { 64 | "template_config" : { 65 | "image" : "dflow:1.1.4", 66 | "_comment" : "all" 67 | }, 68 | "_comment" : "all" 69 | }, 70 | 71 | "step_configs":{ 72 | "_comment" : "all" 73 | }, 74 | 75 | "upload_python_packages" : "/path/to/dpgen2", 76 | 77 | "inputs": { 78 | "type_map": ["Al", "Mg"], 79 | "mass_map": [27, 24], 80 | "init_data_prefix": "", 81 | "init_data_sys": [ 82 | "init/al.fcc.01x01x01/02.md/sys-0004/deepmd", 83 | "init/mg.fcc.01x01x01/02.md/sys-0004/deepmd" 84 | ], 85 | "_comment" : "all" 86 | }, 87 | "train":{ 88 | "type" : "dp", 89 | "numb_models" : 4, 90 | "config" : {}, 91 | "template_script" : "dp_input_template", 92 | "_comment" : "all" 93 | }, 94 | 95 | "explore" : { 96 | "type" : "lmp", 97 | "config" : { 98 | "command": "lmp -var restart 0" 99 | }, 100 | "max_numb_iter" : 5, 101 | "fatal_at_max" : false, 102 | "convergence":{ 103 | "type": "fixed-levels", 104 | "level_f_lo": 
0.05, 105 | "level_f_hi": 0.50, 106 | "conv_accuracy" : 0.9 107 | }, 108 | "configuration_prefix": null, 109 | "configuration": [ 110 | ], 111 | "stages": [ 112 | ], 113 | "_comment" : "all" 114 | }, 115 | "fp" : { 116 | "type" : "vasp", 117 | "run_config" : { 118 | "command": "source /opt/intel/oneapi/setvars.sh && mpirun -n 16 vasp_std" 119 | }, 120 | "task_max": 2, 121 | "inputs_config" : { 122 | "pp_files": {"Al" : "vasp/POTCAR.Al", "Mg" : "vasp/POTCAR.Mg"}, 123 | "incar": "vasp/INCAR", 124 | "kspacing": 0.32, 125 | "kgamma": true 126 | }, 127 | "_comment" : "all" 128 | } 129 | } 130 | """ 131 | ) 132 | -------------------------------------------------------------------------------- /tests/exploration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/exploration/__init__.py -------------------------------------------------------------------------------- /tests/exploration/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 5 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))) 6 | import dpgen2 7 | -------------------------------------------------------------------------------- /tests/exploration/test_conf_filter.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | import dpdata 5 | import numpy as np 6 | from fake_data_set import ( 7 | fake_system, 8 | ) 9 | from mock import ( 10 | patch, 11 | ) 12 | 13 | # isort: off 14 | from .context import ( 15 | dpgen2, 16 | ) 17 | from dpgen2.exploration.selector import ( 18 | ConfFilter, 19 | ConfFilters, 20 | ) 21 | 22 | # isort: on 23 | 24 | 25 | class FooFilter(ConfFilter): 26 | def check( 27 | self, 28 | 
frame: dpdata.System, 29 | ) -> bool: 30 | return frame["coords"][0][0][0] > 0.0 31 | 32 | 33 | class BarFilter(ConfFilter): 34 | def check( 35 | self, 36 | frame: dpdata.System, 37 | ) -> bool: 38 | return frame["coords"][0][0][1] > 0.0 39 | 40 | 41 | class BazFilter(ConfFilter): 42 | def check( 43 | self, 44 | frame: dpdata.System, 45 | ) -> bool: 46 | return frame["coords"][0][0][2] > 0.0 47 | 48 | 49 | class TestConfFilter(unittest.TestCase): 50 | def test_filter_0(self): 51 | faked_sys = fake_system(4, 3) 52 | # expected only frame 1 is preseved. 53 | faked_sys["coords"][1][0] = 1.0 54 | faked_sys["coords"][0][0][0] = 2.0 55 | faked_sys["coords"][2][0][1] = 3.0 56 | faked_sys["coords"][3][0][2] = 4.0 57 | filters = ConfFilters() 58 | filters.add(FooFilter()).add(BarFilter()).add(BazFilter()) 59 | ms = dpdata.MultiSystems() 60 | ms.append(faked_sys) 61 | sel_sys = filters.check(ms)[0] 62 | self.assertEqual(sel_sys.get_nframes(), 1) 63 | self.assertAlmostEqual(sel_sys["coords"][0][0][0], 1) 64 | 65 | def test_filter_1(self): 66 | faked_sys = fake_system(4, 3) 67 | # expected frame 1 and 3 are preseved. 68 | faked_sys["coords"][1][0] = 1.0 69 | faked_sys["coords"][3][0] = 3.0 70 | filters = ConfFilters() 71 | filters.add(FooFilter()).add(BarFilter()).add(BazFilter()) 72 | ms = dpdata.MultiSystems() 73 | ms.append(faked_sys) 74 | sel_sys = filters.check(ms)[0] 75 | self.assertEqual(sel_sys.get_nframes(), 2) 76 | self.assertAlmostEqual(sel_sys["coords"][0][0][0], 1) 77 | self.assertAlmostEqual(sel_sys["coords"][1][0][0], 3) 78 | 79 | def test_filter_all(self): 80 | faked_sys = fake_system(4, 3) 81 | # expected all frames are preseved. 
82 | faked_sys["coords"][0][0] = 0.5 83 | faked_sys["coords"][1][0] = 1.0 84 | faked_sys["coords"][2][0] = 2.0 85 | faked_sys["coords"][3][0] = 3.0 86 | filters = ConfFilters() 87 | filters.add(FooFilter()).add(BarFilter()).add(BazFilter()) 88 | ms = dpdata.MultiSystems() 89 | ms.append(faked_sys) 90 | sel_sys = filters.check(ms)[0] 91 | self.assertEqual(sel_sys.get_nframes(), 4) 92 | self.assertAlmostEqual(sel_sys["coords"][0][0][0], 0.5) 93 | self.assertAlmostEqual(sel_sys["coords"][1][0][0], 1) 94 | self.assertAlmostEqual(sel_sys["coords"][2][0][0], 2) 95 | self.assertAlmostEqual(sel_sys["coords"][3][0][0], 3) 96 | 97 | def test_filter_none(self): 98 | faked_sys = fake_system(4, 3) 99 | filters = ConfFilters() 100 | filters.add(FooFilter()).add(BarFilter()).add(BazFilter()) 101 | ms = dpdata.MultiSystems() 102 | ms.append(faked_sys) 103 | sel_ms = filters.check(ms) 104 | self.assertEqual(sel_ms.get_nframes(), 0) 105 | -------------------------------------------------------------------------------- /tests/exploration/test_devi_manager.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from pathlib import ( 4 | Path, 5 | ) 6 | 7 | import numpy as np 8 | 9 | # isort: off 10 | from .context import ( 11 | dpgen2, 12 | ) 13 | from dpgen2.exploration.deviation import ( 14 | DeviManager, 15 | DeviManagerStd, 16 | ) 17 | 18 | # isort: on 19 | 20 | 21 | class TestDeviManagerStd(unittest.TestCase): 22 | def test_success(self): 23 | model_devi = DeviManagerStd() 24 | model_devi.add(DeviManager.MAX_DEVI_F, np.array([1, 2, 3])) 25 | model_devi.add(DeviManager.MAX_DEVI_F, np.array([4, 5, 6])) 26 | 27 | self.assertEqual(model_devi.ntraj, 2) 28 | self.assertTrue( 29 | np.allclose( 30 | model_devi.get(DeviManager.MAX_DEVI_F), np.array([[1, 2, 3], [4, 5, 6]]) 31 | ) 32 | ) 33 | self.assertEqual(model_devi.get(DeviManager.MAX_DEVI_V), [None, None]) 34 | 35 | model_devi.clear() 36 | 
self.assertEqual(model_devi.ntraj, 0) 37 | self.assertEqual(model_devi.get(DeviManager.MAX_DEVI_F), []) 38 | self.assertEqual(model_devi.get(DeviManager.MAX_DEVI_V), []) 39 | 40 | def test_add_invalid_name(self): 41 | model_devi = DeviManagerStd() 42 | 43 | self.assertRaisesRegex( 44 | AssertionError, 45 | "Error: unknown deviation name foo", 46 | model_devi.add, 47 | "foo", 48 | np.array([1, 2, 3]), 49 | ) 50 | 51 | def test_add_invalid_deviation(self): 52 | model_devi = DeviManagerStd() 53 | 54 | self.assertRaisesRegex( 55 | AssertionError, 56 | "Error: deviation\(shape: ", 57 | model_devi.add, 58 | DeviManager.MAX_DEVI_F, 59 | np.array([[1], [2], [3]]), 60 | ) 61 | 62 | self.assertRaisesRegex( 63 | AssertionError, 64 | "Error: deviation\(type: ", 65 | model_devi.add, 66 | DeviManager.MAX_DEVI_F, 67 | "foo", 68 | ) 69 | 70 | def test_devi_manager_std_check_data(self): 71 | model_devi = DeviManagerStd() 72 | model_devi.add(DeviManager.MAX_DEVI_F, np.array([1, 2, 3])) 73 | model_devi.add(DeviManager.MAX_DEVI_F, np.array([4, 5, 6])) 74 | model_devi.add(DeviManager.MAX_DEVI_V, np.array([4, 5, 6])) 75 | 76 | self.assertEqual(model_devi.ntraj, 2) 77 | 78 | self.assertRaisesRegex( 79 | AssertionError, 80 | "Error: the number of model deviation", 81 | model_devi.get, 82 | DeviManager.MAX_DEVI_V, 83 | ) 84 | 85 | model_devi = DeviManagerStd() 86 | model_devi.add(DeviManager.MAX_DEVI_V, np.array([1, 2, 3])) 87 | 88 | self.assertRaisesRegex( 89 | AssertionError, 90 | f"Error: cannot find model deviation {DeviManager.MAX_DEVI_F}", 91 | model_devi.get, 92 | DeviManager.MAX_DEVI_V, 93 | ) 94 | 95 | model_devi = DeviManagerStd() 96 | model_devi.add(DeviManager.MAX_DEVI_F, np.array([1, 2, 3])) 97 | model_devi.add(DeviManager.MAX_DEVI_F, np.array([4, 5, 6])) 98 | model_devi.add(DeviManager.MAX_DEVI_V, np.array([1, 2, 3])) 99 | model_devi.add(DeviManager.MAX_DEVI_V, np.array([4, 5])) 100 | self.assertRaisesRegex( 101 | AssertionError, 102 | f"Error: the number of frames in", 103 | 
model_devi.get, 104 | DeviManager.MAX_DEVI_F, 105 | ) 106 | self.assertRaisesRegex( 107 | AssertionError, 108 | f"Error: the number of frames in", 109 | model_devi.get, 110 | DeviManager.MAX_DEVI_V, 111 | ) 112 | -------------------------------------------------------------------------------- /tests/exploration/test_make_task_group_from_config.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import os 3 | import textwrap 4 | import unittest 5 | from pathlib import ( 6 | Path, 7 | ) 8 | from typing import ( 9 | List, 10 | Set, 11 | ) 12 | 13 | import numpy as np 14 | 15 | try: 16 | from exploration.context import ( 17 | dpgen2, 18 | ) 19 | except ModuleNotFoundError: 20 | # case of upload everything to argo, no context needed 21 | pass 22 | from dpgen2.exploration.task import ( 23 | CalyTaskGroup, 24 | LmpTemplateTaskGroup, 25 | NPTTaskGroup, 26 | make_calypso_task_group_from_config, 27 | make_lmp_task_group_from_config, 28 | ) 29 | from dpgen2.exploration.task.calypso import ( 30 | make_calypso_input, 31 | ) 32 | 33 | 34 | class TestMakeLmpTaskGroupFromConfig(unittest.TestCase): 35 | def setUp(self): 36 | self.config_npt = { 37 | "type": "lmp-md", 38 | "Ts": [100], 39 | } 40 | self.config_template = { 41 | "type": "lmp-template", 42 | "lmp_template_fname": "foo", 43 | } 44 | from .test_lmp_templ_task_group import ( 45 | in_lmp_template, 46 | ) 47 | 48 | Path(self.config_template["lmp_template_fname"]).write_text(in_lmp_template) 49 | self.mass_map = [1.0, 2.0] 50 | self.numb_models = 4 51 | 52 | def tearDown(self): 53 | os.remove(self.config_template["lmp_template_fname"]) 54 | 55 | def test_npt(self): 56 | tgroup = make_lmp_task_group_from_config( 57 | self.numb_models, self.mass_map, self.config_npt 58 | ) 59 | self.assertTrue(isinstance(tgroup, NPTTaskGroup)) 60 | 61 | def test_template(self): 62 | tgroup = make_lmp_task_group_from_config( 63 | self.numb_models, self.mass_map, self.config_template 64 | 
) 65 | self.assertTrue(isinstance(tgroup, LmpTemplateTaskGroup)) 66 | 67 | 68 | class TestMakeCalyTaskGroupFromConfig(unittest.TestCase): 69 | def setUp(self): 70 | self.config = { 71 | "name_of_atoms": ["Li", "La"], 72 | "numb_of_atoms": [10, 10], 73 | "numb_of_species": 2, 74 | "atomic_number": [3, 4], 75 | "distance_of_ions": [[1.0, 1.0], [1.0, 1.0]], 76 | } 77 | self.config_err = { 78 | "name_of_atoms": ["Li", "La"], 79 | "numb_of_atoms": [10, 10], 80 | "numb_of_species": 4, 81 | "atomic_number": [3, 4], 82 | "distance_of_ions": [[1.0, 1.0], [1.0, 1.0]], 83 | } 84 | self.ref_input = """NumberOfSpecies = 2 85 | NameOfAtoms = Li La 86 | AtomicNumber = 3 4 87 | NumberOfAtoms = 10 10 88 | PopSize = 30 89 | MaxStep = 5 90 | SystemName = CALYPSO 91 | NumberOfFormula = 1 1 92 | Volume = 0 93 | Ialgo = 2 94 | PsoRatio = 0.6 95 | ICode = 15 96 | NumberOfLbest = 4 97 | NumberOfLocalOptim = 4 98 | Command = sh submit.sh 99 | MaxTime = 9000 100 | GenType = 1 101 | PickUp = False 102 | PickStep = 1 103 | Parallel = F 104 | Split = T 105 | SpeSpaceGroup = 2 230 106 | VSC = F 107 | MaxNumAtom = 100 108 | @DistanceOfIon 109 | 1.0 1.0 110 | 1.0 1.0 111 | @End 112 | @CtrlRange 113 | 1 10 114 | @End 115 | """ 116 | 117 | def tearDown(self): 118 | # os.remove(self.config_template["lmp_template_fname"]) 119 | pass 120 | 121 | def test_make_caly_input(self): 122 | input_file_str, run_opt_str, check_opt_str = make_calypso_input(**self.config) 123 | self.assertEqual(input_file_str, self.ref_input) 124 | self.assertRaises(AssertionError, make_calypso_input, **self.config_err) 125 | 126 | def test_caly_task_group(self): 127 | tgroup = make_calypso_task_group_from_config(self.config) 128 | self.assertTrue(isinstance(tgroup, CalyTaskGroup)) 129 | -------------------------------------------------------------------------------- /tests/exploration/test_traj_render_lammps.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 
import unittest 4 | 5 | import dpdata 6 | import numpy as np 7 | 8 | # isort: off 9 | from .context import ( 10 | dpgen2, 11 | ) 12 | from dpgen2.exploration.render import TrajRenderLammps 13 | 14 | # isort: on 15 | 16 | 17 | class TestTrajRenderLammps(unittest.TestCase): 18 | def test_use_ele_temp_1(self): 19 | with open("job.json", "w") as f: 20 | json.dump({"ele_temp": 6.6}, f) 21 | traj_render = TrajRenderLammps(use_ele_temp=1) 22 | ele_temp = traj_render.get_ele_temp(["job.json"]) 23 | self.assertEqual(ele_temp, [6.6]) 24 | 25 | system = dpdata.System( 26 | data={ 27 | "atom_names": ["H"], 28 | "atom_numbs": [1], 29 | "atom_types": np.zeros(1, dtype=int), 30 | "cells": np.eye(3).reshape(1, 3, 3), 31 | "coords": np.zeros((1, 1, 3)), 32 | "orig": np.zeros(3), 33 | "nopbc": True, 34 | } 35 | ) 36 | traj_render.set_ele_temp(system, ele_temp[0]) 37 | np.testing.assert_array_almost_equal(system.data["fparam"], np.array([[6.6]])) 38 | 39 | def test_use_ele_temp_2(self): 40 | with open("job.json", "w") as f: 41 | json.dump({"ele_temp": 6.6}, f) 42 | traj_render = TrajRenderLammps(use_ele_temp=2) 43 | ele_temp = traj_render.get_ele_temp(["job.json"]) 44 | self.assertEqual(ele_temp, [6.6]) 45 | 46 | system = dpdata.System( 47 | data={ 48 | "atom_names": ["H"], 49 | "atom_numbs": [1], 50 | "atom_types": np.zeros(1, dtype=int), 51 | "cells": np.eye(3).reshape(1, 3, 3), 52 | "coords": np.zeros((1, 1, 3)), 53 | "orig": np.zeros(3), 54 | "nopbc": True, 55 | } 56 | ) 57 | traj_render.set_ele_temp(system, ele_temp[0]) 58 | np.testing.assert_array_almost_equal(system.data["aparam"], np.array([[[6.6]]])) 59 | 60 | def tearDown(self): 61 | if os.path.exists("job.json"): 62 | os.remove("job.json") 63 | -------------------------------------------------------------------------------- /tests/fake_data_set.py: -------------------------------------------------------------------------------- 1 | import dpdata 2 | import numpy as np 3 | 4 | 5 | def fake_system( 6 | nframes, 7 | natoms, 8 
| atom_name="foo", 9 | ): 10 | ss = dpdata.LabeledSystem() 11 | ss.data["atom_names"] = [atom_name] 12 | ss.data["atom_numbs"] = [natoms] 13 | ss.data["atom_types"] = np.array([0 for ii in range(natoms)]).astype(int) 14 | # ss.data['cells'] = np.zeros([nframes, 3, 3]) 15 | ss.data["cells"] = np.tile(np.eye(3), [nframes, 1, 1]) 16 | ss.data["coords"] = np.zeros([nframes, natoms, 3]) 17 | ss.data["forces"] = np.zeros([nframes, natoms, 3]) 18 | ss.data["energies"] = np.zeros([nframes]) 19 | return ss 20 | 21 | 22 | def fake_multi_sys( 23 | nframs, 24 | natoms, 25 | natom_name="foo", 26 | ): 27 | nsys = len(nframs) 28 | ms = dpdata.MultiSystems() 29 | for ii in range(nsys): 30 | ss = fake_system(nframs[ii], natoms[ii], natom_name) 31 | ms.append(ss) 32 | return ms 33 | -------------------------------------------------------------------------------- /tests/fp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/fp/__init__.py -------------------------------------------------------------------------------- /tests/fp/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))) 5 | import dpgen2 6 | -------------------------------------------------------------------------------- /tests/fp/data.abacus/INPUT: -------------------------------------------------------------------------------- 1 | INPUT_PARAMETERS 2 | calculation scf 3 | suffix ABACUS 4 | ntype 1 5 | symmetry 0 6 | ecutwfc 50.000000 7 | scf_thr 1.000000e-08 8 | scf_nmax 100 9 | cal_force 1 10 | cal_stress 1 11 | basis_type pw 12 | smearing_method mp 13 | smearing_sigma 0.014600 14 | mixing_type pulay 15 | mixing_beta 0.700000 16 | ks_solver dav 17 | kspacing 0.20000 18 | 
-------------------------------------------------------------------------------- /tests/fp/data.abacus/OUT.ABACUS/STRU_READIN_ADJUST.cif: -------------------------------------------------------------------------------- 1 | data_none 2 | 3 | _audit_creation_method generated by ABACUS 4 | 5 | _cell_length_a 2.72368 6 | _cell_length_b 2.72368 7 | _cell_length_c 2.72368 8 | _cell_angle_alpha 90 9 | _cell_angle_beta 90 10 | _cell_angle_gamma 90 11 | 12 | loop_ 13 | _atom_site_label 14 | _atom_site_fract_x 15 | _atom_site_fract_y 16 | _atom_site_fract_z 17 | Na 0 0 0 18 | Na 0.5 0.5 0.5 19 | -------------------------------------------------------------------------------- /tests/fp/data.abacus/OUT.ABACUS/warning.log: -------------------------------------------------------------------------------- 1 | In SCAN_BEGIN, can't find: LATTICE_PARAMETERS block. 2 | Generate k-points file according to KSPACING: KPT 3 | AUTO_SET NBANDS to 19 4 | startmag_type = 2 5 | charge from rho_at = 8.86329 6 | charge should be = 9 7 | 8 | SETUP ATOMIC RHO FOR SPIN 1 9 | Electron number from rho = 18 10 | total electron number from rho = 18 11 | should be = 18 12 | charge before normalized = 18 13 | charge after normalized = 18 14 | scf warning : Threshold on eigenvalues was too large. 
15 | -------------------------------------------------------------------------------- /tests/fp/data.abacus/sys-2/set.000/box.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/fp/data.abacus/sys-2/set.000/box.npy -------------------------------------------------------------------------------- /tests/fp/data.abacus/sys-2/set.000/coord.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/fp/data.abacus/sys-2/set.000/coord.npy -------------------------------------------------------------------------------- /tests/fp/data.abacus/sys-2/type.raw: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | -------------------------------------------------------------------------------- /tests/fp/data.abacus/sys-2/type_map.raw: -------------------------------------------------------------------------------- 1 | Na 2 | -------------------------------------------------------------------------------- /tests/fp/data.cp2k/CELL_PARAMETER: -------------------------------------------------------------------------------- 1 | A 2.7523088455 0.0000000000 0.0000000000 2 | B 0.0000000000 2.7523088455 0.0000000000 3 | C 0.0000000000 0.0000000000 2.7523088455 -------------------------------------------------------------------------------- /tests/fp/data.cp2k/coord.xyz: -------------------------------------------------------------------------------- 1 | Na 0.000000000 0.000000000 0.000000000 2 | Na 1.376154423 1.376154423 1.376154423 -------------------------------------------------------------------------------- /tests/fp/data.cp2k/input.inp: -------------------------------------------------------------------------------- 1 | #Generated by Multiwfn 2 | &GLOBAL 3 | PROJECT POSCAR-1 4 | 
PRINT_LEVEL MEDIUM 5 | RUN_TYPE ENERGY_FORCE 6 | &END GLOBAL 7 | 8 | &FORCE_EVAL 9 | METHOD Quickstep 10 | &SUBSYS 11 | &CELL 12 | @INCLUDE CELL_PARAMETER 13 | PERIODIC XYZ #Direction(s) of applied PBC (geometry aspect) 14 | &END CELL 15 | &COORD 16 | @INCLUDE coord.xyz 17 | &END COORD 18 | &KIND Na 19 | ELEMENT Na 20 | BASIS_SET DZVP-MOLOPT-SR-GTH-q9 21 | POTENTIAL GTH-PBE 22 | &END KIND 23 | &END SUBSYS 24 | 25 | &DFT 26 | BASIS_SET_FILE_NAME BASIS_MOLOPT 27 | POTENTIAL_FILE_NAME POTENTIAL 28 | # WFN_RESTART_FILE_NAME POSCAR-1-RESTART.wfn 29 | CHARGE 0 #Net charge 30 | MULTIPLICITY 1 #Spin multiplicity 31 | &QS 32 | EPS_DEFAULT 1.0E-12 #Set all EPS_xxx to values such that the energy will be correct up to this value 33 | &END QS 34 | &POISSON 35 | PERIODIC XYZ #Direction(s) of PBC for calculating electrostatics 36 | PSOLVER PERIODIC #The way to solve Poisson equation 37 | &END POISSON 38 | &XC 39 | &XC_FUNCTIONAL PBE 40 | &END XC_FUNCTIONAL 41 | &END XC 42 | &MGRID 43 | CUTOFF 400 44 | REL_CUTOFF 55 45 | &END MGRID 46 | &SCF 47 | MAX_SCF 25 #Maximum number of steps of inner SCF 48 | EPS_SCF 1.0E-06 #Convergence threshold of density matrix of inner SCF 49 | # SCF_GUESS RESTART #Use wavefunction from WFN_RESTART_FILE_NAME file as initial guess 50 | &OT 51 | PRECONDITIONER FULL_ALL #Usually best but expensive for large system. Cheaper: FULL_SINGLE_INVERSE and FULL_KINETIC 52 | MINIMIZER DIIS #CG is worth to consider in difficult cases 53 | LINESEARCH 2PNT #1D line search algorithm for CG. 2PNT is default, 3PNT is better but more costly. GOLD is best but very expensive 54 | ALGORITHM STRICT #Algorithm of OT. Can be STRICT (default) or IRAC 55 | &END OT 56 | &OUTER_SCF 57 | MAX_SCF 20 #Maximum number of steps of outer SCF 58 | EPS_SCF 1.0E-06 #Convergence threshold of outer SCF 59 | &END OUTER_SCF 60 | &PRINT 61 | &RESTART #Note: Use "&RESTART OFF" can prevent generating .wfn file 62 | BACKUP_COPIES 0 #Maximum number of backup copies of wfn file. 
0 means never 63 | &END RESTART 64 | &END PRINT 65 | &END SCF 66 | &END DFT 67 | &PRINT 68 | &FORCES ON #Print atomic forces 69 | &END FORCES 70 | &END PRINT 71 | &END FORCE_EVAL 72 | -------------------------------------------------------------------------------- /tests/fp/data.cp2k/sys-3/set.000/box.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/fp/data.cp2k/sys-3/set.000/box.npy -------------------------------------------------------------------------------- /tests/fp/data.cp2k/sys-3/set.000/coord.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/fp/data.cp2k/sys-3/set.000/coord.npy -------------------------------------------------------------------------------- /tests/fp/data.cp2k/sys-3/type.raw: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | -------------------------------------------------------------------------------- /tests/fp/data.cp2k/sys-3/type_map.raw: -------------------------------------------------------------------------------- 1 | Na 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/INCAR: -------------------------------------------------------------------------------- 1 | PREC=A 2 | ENCUT=600 3 | ISYM=0 4 | ALGO=fast 5 | EDIFF=1.000000e-06 6 | LREAL=A 7 | NPAR=1 8 | KPAR=1 9 | 10 | NELMIN=4 11 | ISIF=2 12 | ISMEAR=1 13 | SIGMA=1.000000 14 | IBRION=-1 15 | 16 | NSW=0 17 | 18 | LWAVE=F 19 | LCHARG=F 20 | PSTRESS=0 21 | 22 | KSPACING=0.160000 23 | KGAMMA=.FALSE. 
24 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/POSCAR: -------------------------------------------------------------------------------- 1 | POSCAR file written by OVITO 2 | 1 3 | 6.6326952 0.0 0.0 4 | 0.1301009 6.5259342 0.0 5 | 0.0170968 -0.0156295 6.4869027 6 | Al 7 | 1 8 | Cartesian 9 | 0 0 0 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/make_kp_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | 5 | import ase 6 | import dpdata 7 | import numpy as np 8 | 9 | 10 | def make_one(out_dir): 11 | # [0.5, 1) 12 | [aa, bb, cc] = np.random.random(3) * 0.5 + 0.5 13 | # [1, 179) 14 | [alpha, beta, gamma] = np.random.random(3) * (178 / 180) + 1 15 | # make cell 16 | cell = ase.geometry.cellpar_to_cell([aa, bb, cc, alpha, beta, gamma]) 17 | sys = dpdata.System("POSCAR") 18 | sys["cells"][0] = cell 19 | os.makedirs(out_dir, exist_ok=True) 20 | sys.to_vasp_poscar(os.path.join(out_dir, "POSCAR")) 21 | 22 | 23 | ntest = 30 24 | for ii in range(ntest): 25 | out_dir = "test.%03d" % ii 26 | make_one(out_dir) 27 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.000/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 5.4127259713610210e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 7.8618816929019986e-01 2.4081028936890111e-02 0.0000000000000000e+00 5 | 6.7715811612933341e-01 8.3424435233163518e-03 1.5808741859753134e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.000/kp.ref: -------------------------------------------------------------------------------- 1 | 3012 1844 2485 2 | 
-------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.001/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 5.6880700105188797e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 9.3755473569041059e-01 2.7862428092373298e-02 0.0000000000000000e+00 5 | 7.5094919110404246e-01 1.3658846496970233e-02 1.3537583670211236e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.001/kp.ref: -------------------------------------------------------------------------------- 1 | 2759 2003 2901 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.002/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 7.1318152793059453e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 9.0090250362408364e-01 2.2057413311066729e-02 0.0000000000000000e+00 5 | 5.9568260849887056e-01 1.8603698586820437e-04 1.4346759150138591e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.002/kp.ref: -------------------------------------------------------------------------------- 1 | 3187 1781 2738 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.003/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.9290805389638193e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 8.7190986412990223e-01 1.6171729506936849e-02 0.0000000000000000e+00 5 | 9.3585048431850237e-01 7.6384960535879976e-03 2.3018089381209059e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 
| -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.003/kp.ref: -------------------------------------------------------------------------------- 1 | 2316 2559 1707 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.004/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.2510904478129614e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 8.7599597707006160e-01 2.2319600269443796e-02 0.0000000000000000e+00 5 | 8.2278296275134177e-01 3.8799483550304068e-03 1.8371270941025968e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.004/kp.ref: -------------------------------------------------------------------------------- 1 | 3368 1799 2138 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.005/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.9053877966779680e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 5.3940028709160581e-01 1.8670361241875811e-02 0.0000000000000000e+00 5 | 7.7381050613360713e-01 1.2800166920594798e-02 1.9826939886769979e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.005/kp.ref: -------------------------------------------------------------------------------- 1 | 1403 2504 1981 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.006/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 5.1962337600558017e-01 0.0000000000000000e+00 
0.0000000000000000e+00 4 | 7.7015283686821623e-01 2.3582102183916275e-02 0.0000000000000000e+00 5 | 8.8645171326257188e-01 1.3693290237640118e-02 9.0982527300307661e-03 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.006/kp.ref: -------------------------------------------------------------------------------- 1 | 4406 3010 4317 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.007/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.0778928724851427e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 5.0174477170218057e-01 8.7638477359290260e-03 0.0000000000000000e+00 5 | 5.7691634780896750e-01 6.9486384276072335e-03 1.1319556047435765e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.007/kp.ref: -------------------------------------------------------------------------------- 1 | 2570 5258 3470 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.008/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 7.9430397140351772e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.7703587724520653e-01 2.0197502612753639e-02 0.0000000000000000e+00 5 | 8.9944450316391611e-01 1.2190061829841534e-02 1.8455625620951665e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.008/kp.ref: -------------------------------------------------------------------------------- 1 | 2117 2331 2128 2 | 
-------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.009/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.4107824073770014e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 5.9514813102041653e-01 1.4122096000048717e-02 0.0000000000000000e+00 5 | 7.9753239756842043e-01 -3.1662388773523020e-03 1.4910307577850239e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.009/kp.ref: -------------------------------------------------------------------------------- 1 | 4615 2843 2634 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.010/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 5.5550486411663158e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.8070960410020875e-01 1.4350480968449014e-02 0.0000000000000000e+00 5 | 9.6801337243813057e-01 1.7342606777491703e-02 2.0266814112522992e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.010/kp.ref: -------------------------------------------------------------------------------- 1 | 3393 3602 1938 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.011/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 8.5870696695197124e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 5.1893704956323827e-01 1.1776894361646813e-02 0.0000000000000000e+00 5 | 6.3519056323670364e-01 5.0285215989483009e-03 1.5158501840668978e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 
| -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.011/kp.ref: -------------------------------------------------------------------------------- 1 | 2371 3514 2591 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.012/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.4351735339550946e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.0999656860152052e-01 1.7148923876854596e-02 0.0000000000000000e+00 5 | 7.1637100648807928e-01 1.1870854711635171e-02 1.1964593611508943e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.012/kp.ref: -------------------------------------------------------------------------------- 1 | 1801 3226 3283 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.013/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 5.9990177759298668e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 9.2886495622654708e-01 2.5044515831327989e-02 0.0000000000000000e+00 5 | 6.1638694777449798e-01 3.5946492305453936e-03 1.3705944562333917e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.013/kp.ref: -------------------------------------------------------------------------------- 1 | 3350 1622 2866 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.014/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 8.4054798625146632e-01 0.0000000000000000e+00 
0.0000000000000000e+00 4 | 6.6292027037099721e-01 2.0213000030988901e-02 0.0000000000000000e+00 5 | 6.5074072860649845e-01 7.0429905975569664e-03 1.0835310572355412e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.014/kp.ref: -------------------------------------------------------------------------------- 1 | 2372 2318 3625 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.015/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.0592670804143993e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 7.0164106324818487e-01 2.1732588605303776e-02 0.0000000000000000e+00 5 | 8.8666934784642748e-01 1.8228661223474250e-02 2.0519776281709610e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.015/kp.ref: -------------------------------------------------------------------------------- 1 | 1536 2417 1914 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.016/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.2808818210241246e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.2949227090630400e-01 1.5699128478506564e-02 0.0000000000000000e+00 5 | 5.9031653449750210e-01 9.0790581254778426e-03 1.8332430081517040e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.016/kp.ref: -------------------------------------------------------------------------------- 1 | 2624 2792 2143 2 | 
-------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.017/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.7183926821441775e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.4140141985920385e-01 1.4352733214998581e-02 0.0000000000000000e+00 5 | 9.8021519702240911e-01 3.8684916997537011e-03 1.7031593077790490e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.017/kp.ref: -------------------------------------------------------------------------------- 1 | 3809 2806 2306 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.018/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.0904675499270871e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 7.1357506069253507e-01 2.0653299473695933e-02 0.0000000000000000e+00 5 | 5.6191429011053162e-01 6.8530669078996310e-03 8.2949112079975656e-03 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.018/kp.ref: -------------------------------------------------------------------------------- 1 | 2258 2467 4735 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.019/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.4026766094815746e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 8.9305845794356431e-01 2.4670136019966725e-02 0.0000000000000000e+00 5 | 8.1428436058792186e-01 6.0057917170911659e-03 1.4588637440111405e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 
| -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.019/kp.ref: -------------------------------------------------------------------------------- 1 | 3352 1722 2692 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.020/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.8594934473558355e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 9.6687449166074457e-01 2.3715030862177477e-02 0.0000000000000000e+00 5 | 6.8762327046143379e-01 1.0364949378730682e-02 1.5799497242638412e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.020/kp.ref: -------------------------------------------------------------------------------- 1 | 2525 1981 2486 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.021/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 7.3554024077214453e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 8.1638656898051709e-01 2.5391675987756768e-02 0.0000000000000000e+00 5 | 6.2524529611437596e-01 9.3128812085326602e-03 1.3441851906291361e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.021/kp.ref: -------------------------------------------------------------------------------- 1 | 2151 1882 2922 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.022/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 8.8048895544046824e-01 0.0000000000000000e+00 
0.0000000000000000e+00 4 | 9.6154868911418356e-01 3.2033436833257448e-02 0.0000000000000000e+00 5 | 9.7271788298409301e-01 8.3132749231043680e-03 1.6867644942364032e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.022/kp.ref: -------------------------------------------------------------------------------- 1 | 2335 1367 2329 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.023/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 9.9242002811213836e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 7.3678428169130761e-01 1.3088540016532589e-02 0.0000000000000000e+00 5 | 6.7762054769321078e-01 1.4681922343327957e-03 1.2965836382839194e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.023/kp.ref: -------------------------------------------------------------------------------- 1 | 2875 3020 3029 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.024/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 7.2216474674443032e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.6935422004335077e-01 1.6694693700306504e-02 0.0000000000000000e+00 5 | 8.9534541552420710e-01 1.7436949895310000e-02 2.3785962099237896e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.024/kp.ref: -------------------------------------------------------------------------------- 1 | 2227 2917 1651 2 | 
-------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.025/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 8.9117435342322038e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.3409939955121208e-01 1.9968611983052557e-02 0.0000000000000000e+00 5 | 5.0895483976207867e-01 6.3684100723567965e-03 8.3841132142836797e-03 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.025/kp.ref: -------------------------------------------------------------------------------- 1 | 2136 2470 4684 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.026/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.6146943697405836e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 7.5311939800441674e-01 1.4380427233774489e-02 0.0000000000000000e+00 5 | 6.3888769875715334e-01 -1.6310546934199758e-03 1.1825304039599110e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.026/kp.ref: -------------------------------------------------------------------------------- 1 | 4785 2757 3321 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.027/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 6.8488504358517499e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 6.4733602786911981e-01 1.8986412365836220e-02 0.0000000000000000e+00 5 | 5.4847407769833134e-01 6.7603520231260773e-03 1.2632337835217923e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 
| -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.027/kp.ref: -------------------------------------------------------------------------------- 1 | 2431 2346 3109 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.028/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 7.7714368902548037e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 5.1744302821750987e-01 9.2339899572028723e-03 0.0000000000000000e+00 5 | 8.0249076087440185e-01 1.8781333828875989e-02 1.9301537101012855e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.028/kp.ref: -------------------------------------------------------------------------------- 1 | 2907 5934 2035 2 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.029/POSCAR: -------------------------------------------------------------------------------- 1 | Al1 2 | 1.0 3 | 8.1827757864115269e-01 0.0000000000000000e+00 0.0000000000000000e+00 4 | 7.0976104277800611e-01 1.2594842675049416e-02 0.0000000000000000e+00 5 | 6.0244177763787687e-01 1.3692427728208972e-02 1.1282872913318998e-02 6 | Al 7 | 1 8 | cartesian 9 | 0.0000000000 0.0000000000 0.0000000000 10 | -------------------------------------------------------------------------------- /tests/fp/data.vasp.kp.gf/test.029/kp.ref: -------------------------------------------------------------------------------- 1 | 2799 4903 3481 2 | -------------------------------------------------------------------------------- /tests/fp/test_abacus.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import unittest 4 | from pathlib import ( 5 | Path, 6 | ) 7 
| 8 | import dflow 9 | import dpdata 10 | from dflow import ( 11 | Step, 12 | Workflow, 13 | download_artifact, 14 | upload_artifact, 15 | ) 16 | 17 | from dpgen2.fp import ( 18 | FpOpAbacusInputs, 19 | PrepFpOpAbacus, 20 | RunFpOpAbacus, 21 | ) 22 | from dpgen2.superop import ( 23 | PrepRunFp, 24 | ) 25 | 26 | 27 | class TestFpOpAbacus(unittest.TestCase): 28 | def tearDown(self): 29 | if Path("upload").is_dir(): 30 | shutil.rmtree("upload") 31 | if Path("output").is_dir(): 32 | shutil.rmtree("output") 33 | for p in Path(".").glob("abacus-dpgen-*"): 34 | shutil.rmtree(p) 35 | 36 | def test_abacus(self): 37 | data_path = Path(__file__).parent / "data.abacus" 38 | fp_config = { 39 | "inputs": FpOpAbacusInputs( 40 | data_path / "INPUT", {"Na": data_path / "Na_ONCV_PBE-1.0.upf"} 41 | ), 42 | "run": { 43 | "command": "cp -r %s OUT.ABACUS && cat %s" 44 | % (data_path / "OUT.ABACUS", data_path / "log"), 45 | }, 46 | "extra_output_files": [], 47 | } 48 | confs = [data_path / "sys-2"] 49 | type_map = ["Na"] 50 | 51 | dflow.config["mode"] = "debug" 52 | prep_run_fp_op = PrepRunFp( 53 | "prep-run-fp", 54 | PrepFpOpAbacus, 55 | RunFpOpAbacus, 56 | ) 57 | prep_run_fp = Step( 58 | name="prep-run-fp", 59 | template=prep_run_fp_op, 60 | parameters={ 61 | "block_id": "iter-000000", 62 | "fp_config": fp_config, 63 | "type_map": type_map, 64 | }, 65 | artifacts={ 66 | "confs": upload_artifact(confs), 67 | }, 68 | key="iter-000000--prep-run-fp", 69 | ) 70 | wf = Workflow(name="abacus-dpgen") 71 | wf.add(prep_run_fp) 72 | wf.submit() 73 | self.assertEqual(wf.query_status(), "Succeeded") 74 | 75 | step = wf.query_step(key="iter-000000--run-fp-000000")[0] 76 | log = download_artifact(step.outputs.artifacts["log"], path="output")[0] 77 | self.assertTrue(os.path.isfile(log)) 78 | labeled_data = download_artifact( 79 | step.outputs.artifacts["labeled_data"], path="output" 80 | )[0] 81 | s = dpdata.LabeledSystem(labeled_data, fmt="deepmd/npy") 82 | self.assertEqual(len(s), 1) 83 | 
-------------------------------------------------------------------------------- /tests/fp/test_cp2k.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import sys 4 | import unittest 5 | from pathlib import ( 6 | Path, 7 | ) 8 | 9 | import dflow 10 | import dpdata 11 | from dflow import ( 12 | Step, 13 | Workflow, 14 | download_artifact, 15 | upload_artifact, 16 | ) 17 | 18 | from dpgen2.fp import ( 19 | FpOpCp2kInputs, 20 | PrepFpOpCp2k, 21 | RunFpOpCp2k, 22 | ) 23 | from dpgen2.superop import ( 24 | PrepRunFp, 25 | ) 26 | 27 | 28 | class TestFpOpCp2k(unittest.TestCase): 29 | def setUp(self): 30 | self.python_version = sys.version_info 31 | 32 | def tearDown(self): 33 | if Path("upload").is_dir(): 34 | shutil.rmtree("upload") 35 | if Path("output").is_dir(): 36 | shutil.rmtree("output") 37 | for p in Path(".").glob("cp2k-dpgen-*"): 38 | shutil.rmtree(p) 39 | 40 | def test_cp2k(self): 41 | # skip Python 3.7 version, which is unsuitable for cp2kdata 42 | if self.python_version < (3, 8): 43 | self.skipTest("Python version is below 3.8, skipping test.") 44 | data_path = Path(__file__).parent / "data.cp2k" 45 | fp_config = { 46 | "inputs": FpOpCp2kInputs(data_path / "input.inp"), 47 | "run": { 48 | "command": "cp -r %s output.log && cat %s" 49 | % (data_path / "output.log", data_path / "output.log"), 50 | }, 51 | "extra_output_files": [], 52 | } 53 | confs = [data_path / "sys-3"] 54 | type_map = ["Na"] 55 | 56 | dflow.config["mode"] = "debug" 57 | prep_run_fp_op = PrepRunFp( 58 | "prep-run-fp", 59 | PrepFpOpCp2k, 60 | RunFpOpCp2k, 61 | ) 62 | prep_run_fp = Step( 63 | name="prep-run-fp", 64 | template=prep_run_fp_op, 65 | parameters={ 66 | "block_id": "iter-000000", 67 | "fp_config": fp_config, 68 | "type_map": type_map, 69 | }, 70 | artifacts={ 71 | "confs": upload_artifact(confs), 72 | }, 73 | key="iter-000000--prep-run-fp", 74 | ) 75 | wf = Workflow(name="cp2k-dpgen") 76 | wf.add(prep_run_fp) 77 | 
wf.submit() 78 | self.assertEqual(wf.query_status(), "Succeeded") 79 | 80 | step = wf.query_step(key="iter-000000--run-fp-000000")[0] 81 | log = download_artifact(step.outputs.artifacts["log"], path="output")[0] 82 | self.assertTrue(os.path.isfile(log)) 83 | labeled_data = download_artifact( 84 | step.outputs.artifacts["labeled_data"], path="output" 85 | )[0] 86 | s = dpdata.LabeledSystem(labeled_data, fmt="deepmd/npy") 87 | self.assertEqual(len(s), 1) 88 | -------------------------------------------------------------------------------- /tests/op/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deepmodeling/dpgen2/0a89b7274d9ab85e81a92daad3ed7eb9c1d45046/tests/op/__init__.py -------------------------------------------------------------------------------- /tests/op/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))) 5 | import dpgen2 6 | -------------------------------------------------------------------------------- /tests/op/test_prep_caly_input.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import unittest 4 | from pathlib import ( 5 | Path, 6 | ) 7 | 8 | import numpy as np 9 | from dflow.python import ( 10 | OP, 11 | OPIO, 12 | Artifact, 13 | OPIOSign, 14 | TransientError, 15 | ) 16 | from mock import ( 17 | call, 18 | mock, 19 | patch, 20 | ) 21 | 22 | # isort: off 23 | from dpgen2.constants import ( 24 | calypso_task_pattern, 25 | calypso_input_file, 26 | calypso_run_opt_file, 27 | calypso_check_opt_file, 28 | ) 29 | from dpgen2.op.prep_caly_input import PrepCalyInput 30 | from dpgen2.utils import ( 31 | BinaryFileInput, 32 | ) 33 | 34 | from dpgen2.exploration.task import ( 35 | BaseExplorationTaskGroup, 36 | ExplorationTask, 37 | ) 38 | from 
def make_task_group_list(njobs):
    """Build a BaseExplorationTaskGroup holding ``njobs`` mock CALYPSO tasks.

    Each task carries an input.dat, a run-opt script and a check-opt script,
    mirroring what PrepCalyInput is expected to consume.
    """
    group = BaseExplorationTaskGroup()
    for job_idx in range(njobs):
        task = ExplorationTask()
        task.add_file(calypso_input_file, f"input.dat_{job_idx}")
        task.add_file(
            calypso_run_opt_file,
            calypso_run_opt_str + calypso_run_opt_str_end % (0.01, 0.01, 100),
        )
        task.add_file(calypso_check_opt_file, calypso_check_opt_str)
        group.add_task(task)
    return group


class TestPrepCalyInput(unittest.TestCase):
    """Check that PrepCalyInput writes the per-task CALYPSO input files."""

    def setUp(self):
        njobs = 2
        self.caly_task_grp = make_task_group_list(njobs)
        self.task_name_path = [calypso_task_pattern % i for i in range(njobs)]
        # Expected per-task file locations produced by the OP.
        self.input_dat_list = [
            Path(name) / calypso_input_file for name in self.task_name_path
        ]
        self.caly_run_opt_list = [
            Path(name) / calypso_run_opt_file for name in self.task_name_path
        ]
        self.caly_check_opt_list = [
            Path(name) / calypso_check_opt_file for name in self.task_name_path
        ]

    def tearDown(self):
        for work_dir in self.task_name_path:
            shutil.rmtree(work_dir)

    def test_success(self):
        out = PrepCalyInput().execute(
            OPIO(
                {
                    "caly_task_grp": self.caly_task_grp,
                }
            )
        )
        # output parameters
        self.assertEqual(out["task_names"], self.task_name_path)
        self.assertEqual(out["input_dat_files"], self.input_dat_list)
        self.assertEqual(out["caly_run_opt_files"], self.caly_run_opt_list)
        self.assertEqual(out["caly_check_opt_files"], self.caly_check_opt_list)
        self.assertEqual(out["ntasks"], 2)
        # file contents actually written to disk
        self.assertEqual(self.input_dat_list[0].read_text().strip("\n"), "input.dat_0")
        # self.assertEqual(self.caly_run_opt_list[1].read_text().strip("\n"), "run_1")
class TestPrepCalyModelDevi(unittest.TestCase):
    """Test grouping of CALYPSO trajectory files by PrepCalyModelDevi."""

    def setUp(self):
        self.run_dir_name = "run_dir"

        self.ref_dir = Path("ref_dir")
        self.ref_dir.mkdir(parents=True, exist_ok=True)

        self.ref_traj_results = []

        # 5 trajectory dirs x 2 traj files each = 10 trajectories in total.
        ntrajs_dir = 5
        ntrajs_per_dir = 2
        for dir_index in range(ntrajs_dir):
            dir_name = self.ref_dir.joinpath(f"traj_dir_{dir_index}")
            dir_name.mkdir(parents=True, exist_ok=True)
            self.ref_traj_results.append(dir_name)
            for traj_index in range(ntrajs_per_dir):
                dir_name.joinpath(f"{dir_index}.{traj_index}.traj").write_text(
                    f"trajectory.{dir_index}.{traj_index}"
                )

        self.group_size = 5
        # FIX: expected group count as integer ceiling division. The previous
        # float "/" produced 2.0 (comparing float to len()) and would be wrong
        # whenever the trajectory count is not a multiple of group_size.
        ntrajs = ntrajs_dir * ntrajs_per_dir
        self.ngroup = -(-ntrajs // self.group_size)
        # (removed unused attribute ``model_devi_group_size_2``)

    def tearDown(self):
        shutil.rmtree(self.ref_dir)
        shutil.rmtree(self.run_dir_name)

    def test_00_success(self):
        # With an explicit group size, trajs are split into ngroup parts.
        explore_config = {"model_devi_group_size": self.group_size}
        op = PrepCalyModelDevi()
        out = op.execute(
            OPIO(
                {
                    "task_name": self.run_dir_name,
                    "config": explore_config,
                    "traj_results": self.ref_traj_results,
                }
            )
        )
        # check output length
        self.assertEqual(len(out["task_name_list"]), self.ngroup)
        self.assertEqual(len(out["grouped_traj_list"]), self.ngroup)
        # check filename
        self.assertEqual(out["task_name_list"][0], "run_dir/trajs_part_0")
        self.assertEqual(out["task_name_list"][1], "run_dir/trajs_part_1")
        # check file exists
        self.assertTrue(Path(out["grouped_traj_list"][0]).exists())
        self.assertTrue(Path(out["grouped_traj_list"][1]).exists())

        traj_list = list(Path(out["grouped_traj_list"][0]).rglob("*traj"))
        # check traj number
        self.assertEqual(len(traj_list), 5)
        # check traj file name
        # self.assertTrue(Path("run_dir/trajs_part_0/0.0.0.traj") in traj_list)

    def test_01_success(self):
        # Without a group size, all trajectories land in a single group.
        explore_config = {}
        op = PrepCalyModelDevi()
        out = op.execute(
            OPIO(
                {
                    "task_name": self.run_dir_name,
                    "config": explore_config,
                    "traj_results": self.ref_traj_results,
                }
            )
        )
        # check output length
        self.assertEqual(len(out["task_name_list"]), 1)
        self.assertEqual(len(out["grouped_traj_list"]), 1)
        # check filename
        self.assertEqual(out["task_name_list"][0], "run_dir/trajs_part_0")
        # check file exists
        self.assertTrue(Path(out["grouped_traj_list"][0]).exists())

        traj_list = list(Path(out["grouped_traj_list"][0]).rglob("*traj"))
        # check traj number
        self.assertEqual(len(traj_list), 10)
        # check traj file name
        # self.assertTrue(Path("run_dir/trajs_part_0/0.0.0.traj") in traj_list)
class TestPrepRelax(unittest.TestCase):
    """Four cif files with group size 2 should yield two task dirs of two each."""

    def testPrepRelax(self):
        cifs = []
        for idx in range(4):
            cif_path = Path("%i.cif" % idx)
            cif_path.write_text("Mocked cif.")
            cifs.append(cif_path)
        op_in = OPIO(
            {
                "expl_config": {
                    "relax_group_size": 2,
                },
                "cifs": cifs,
            }
        )
        op_out = PrepRelax().execute(op_in)
        self.assertEqual(op_out["ntasks"], 2)
        self.assertEqual(len(op_out["task_paths"]), 2)
        for idx, task_path in enumerate(op_out["task_paths"]):
            self.assertEqual(str(task_path), "task.%06d" % idx)
            # each task directory holds its share of the cif files
            self.assertEqual(len(list(task_path.iterdir())), 2)

    def tearDown(self):
        for idx in range(2):
            task_dir = "task.%06d" % idx
            if os.path.isdir(task_dir):
                shutil.rmtree(task_dir)
        for idx in range(4):
            cif_name = "%s.cif" % idx
            if os.path.isfile(cif_name):
                os.remove(cif_name)
class MockedDiffCSPGen(DiffCSPGen):
    """DiffCSPGen stand-in that writes two dummy cif files per task."""

    @OP.exec_sign_check
    def execute(
        self,
        ip: OPIO,
    ) -> OPIO:
        task_dir = Path("diffcsp.%s" % ip["task_id"])
        task_dir.mkdir(exist_ok=True)
        for cif_idx in range(2):
            # fake generated structures; content is never parsed in this test
            (task_dir / ("%s.cif" % cif_idx)).write_text("Mocked cif.")
        return OPIO(
            {
                "cifs": list(task_dir.glob("*.cif")),
            }
        )
class TestPrepRunDiffCSP(unittest.TestCase):
    """Run the DiffCSP prep/run superop end to end with mocked gen/relax OPs."""

    def testPrepRunDiffCSP(self):
        task_group = DiffCSPTaskGroup()
        task_group.make_task()

        wf = Workflow("test-prep-run-diffcsp")
        # Upload this test module so the mocked OPs are importable by workers.
        upload_packages = []
        if "__file__" in globals():
            upload_packages += [__file__, os.path.dirname(__file__)]
        superop = PrepRunDiffCSP(
            "prep-run-diffcsp",
            MockedDiffCSPGen,
            PrepRelax,
            MockedRunRelax,
            upload_python_packages=upload_packages,
        )
        main_step = Step(
            "main",
            template=superop,
            parameters={
                "block_id": "iter-000000",
                "expl_task_grp": task_group,
                "explore_config": {
                    "gen_tasks": 2,
                    "gen_command": "echo 'mocked generation' --model_path .",
                    "relax_group_size": 2,
                },
                "type_map": [],
            },
            artifacts={
                "models": upload_artifact([]),
            },
        )
        wf.add(main_step)
        wf.submit()
        wf.wait()
        self.assertEqual(wf.query_status(), "Succeeded")

        # 2 gen tasks x 2 cifs each -> 4 trajs and 4 model_devi files.
        queried = wf.query_step("main")[0]
        trajs = download_artifact(queried.outputs.artifacts["trajs"])
        self.assertEqual(len(trajs), 4)
        model_devis = download_artifact(queried.outputs.artifacts["model_devis"])
        self.assertEqual(len(model_devis), 4)

    def tearDown(self):
        leftovers = glob.glob("test-prep-run-diffcsp-*") + [
            "task.000000",
            "task.000001",
        ]
        for leftover in leftovers:
            if os.path.isdir(leftover):
                shutil.rmtree(leftover)
class TestPrepGaussian(unittest.TestCase):
    """prep_task should write a Gaussian gjf input for a one-atom H system."""

    def test_prep_gaussian(self):
        inputs = GaussianInputs(
            keywords="force b3lyp/6-31g*",
            multiplicity=1,
        )
        # Validate the inputs against the declared argument schema.
        schema = Argument("base", dict, GaussianInputs.args())
        normalized = schema.normalize_value(inputs.data, trim_pattern="_*")
        schema.check_value(normalized, strict=True)
        # Minimal single-frame labeled system: one H atom, all zeros.
        frame_data = {
            "atom_names": ["H"],
            "atom_numbs": [1],
            "atom_types": np.zeros(1, dtype=int),
            "cells": np.eye(3).reshape(1, 3, 3),
            "coords": np.zeros((1, 1, 3)),
            "energies": np.zeros(1),
            "forces": np.zeros((1, 1, 3)),
            "orig": np.zeros(3),
            "nopbc": True,
        }
        system = dpdata.LabeledSystem(data=frame_data)
        PrepGaussian().prep_task(
            conf_frame=system,
            inputs=inputs,
        )
        assert Path(gaussian_input_name).exists()
        # clean up generated task files
        for fname in ["task.log", "task.gjf"]:
            if Path(fname).exists():
                os.remove(fname)
@pytest.mark.server(
    url="/account/login", response={"code": 0, "data": {"token": "abc"}}, method="POST"
)
@pytest.mark.server(
    url="/brm/v1/storage/token",
    response={
        "code": 0,
        "data": {"token": "abc", "path": "/", "sharePath": "/", "userSharePath": "/"},
    },
    method="GET",
)
def test_handler_responses():
    """bohrium_config_from_dict should populate dflow, bohrium and s3 configs.

    The two ``pytest.mark.server`` fixtures mock the Bohrium login and
    storage-token endpoints so a storage client can be instantiated without
    a real account.
    """
    bohrium_config = {
        "host": "666",
        "k8s_api_server": "777",
        "username": "foo",
        "password": "bar",
        "project_id": 10086,
        "repo_key": "tar",
        "storage_client": "dflow.plugins.bohrium.TiefblueClient",
    }
    # Point the bohrium plugin at the mocked local server.
    bohrium.config["bohrium_url"] = "http://localhost:5000"
    bohrium_config_from_dict(bohrium_config)
    for key, expected in (("host", "666"), ("k8s_api_server", "777")):
        assert config[key] == expected
    for key, expected in (
        ("username", "foo"),
        ("password", "bar"),
        ("project_id", "10086"),  # note: normalized from int to string
    ):
        assert bohrium.config[key] == expected
    assert s3_config["repo_key"] == "tar"
    assert isinstance(s3_config["storage_client"], dflow.plugins.bohrium.TiefblueClient)
class TestDflowConfig(unittest.TestCase):
    """Tests for dict-driven dflow_config / dflow_s3_config helpers."""

    def test_config(self):
        # All keys set: every value is forwarded to config/s3_config.
        config_data = {
            "host": "foo",
            "s3_endpoint": "bar",
            "k8s_api_server": "tar",
            "token": "bula",
        }
        dflow_config(config_data)
        self.assertEqual(config["host"], "foo")
        self.assertEqual(s3_config["endpoint"], "bar")
        self.assertEqual(config["k8s_api_server"], "tar")
        self.assertEqual(config["token"], "bula")

    def test_none(self):
        # None values must be forwarded as-is, not dropped.
        config_data = {
            "host": "foo",
            "s3_endpoint": None,
            "k8s_api_server": None,
            "token": "bula",
        }
        dflow_config(config_data)
        self.assertEqual(config["host"], "foo")
        self.assertEqual(s3_config["endpoint"], None)
        self.assertEqual(config["k8s_api_server"], None)
        self.assertEqual(config["token"], "bula")

    def test_empty(self):
        # All-None input leaves every field explicitly None.
        config_data = {
            "host": None,
            "s3_endpoint": None,
            "k8s_api_server": None,
            "token": None,
        }
        dflow_config(config_data)
        self.assertEqual(config["host"], None)
        self.assertEqual(s3_config["endpoint"], None)
        self.assertEqual(config["k8s_api_server"], None)
        self.assertEqual(config["token"], None)

    def test_s3_config(self):
        config_data = {
            "endpoint": "bar",
        }
        dflow_s3_config(config_data)
        self.assertEqual(s3_config["endpoint"], "bar")

    def test_s3_none(self):
        # BUG FIX: this method was previously also named ``test_none``,
        # which shadowed the dflow_config None-handling test above so that
        # test never ran. Renamed so both are collected by unittest.
        config_data = {
            "endpoint": None,
        }
        dflow_s3_config(config_data)
        self.assertEqual(s3_config["endpoint"], None)
class TestSetupEleTemp(unittest.TestCase):
    """Check that setup_ele_temp makes dpdata round-trip fparam/aparam."""

    @staticmethod
    def _make_h_system():
        # Minimal single-frame, single-H-atom unlabeled system.
        return dpdata.System(
            data={
                "atom_names": ["H"],
                "atom_numbs": [1],
                "atom_types": np.zeros(1, dtype=int),
                "cells": np.eye(3).reshape(1, 3, 3),
                "coords": np.zeros((1, 1, 3)),
                "orig": np.zeros(3),
                "nopbc": True,
            }
        )

    def test_setup_ele_temp_unlabeled(self):
        system = self._make_h_system()
        setup_ele_temp(False)
        system.data["fparam"] = np.array([[1.0]])
        system.to_deepmd_npy("ele_temp_data")
        # fparam must be dumped alongside the frames...
        self.assertEqual(len(glob.glob("ele_temp_data/*/fparam.npy")), 1)
        # ...and survive a reload.
        reloaded = dpdata.System("ele_temp_data", fmt="deepmd/npy")
        self.assertTrue("fparam" in reloaded.data)

    def test_setup_ele_temp_mixed(self):
        system = self._make_h_system()
        setup_ele_temp(True)
        system.data["aparam"] = np.array([[[1.0]]])
        system.to_deepmd_npy_mixed("ele_temp_mixed_data")
        self.assertEqual(len(glob.glob("ele_temp_mixed_data/*/aparam.npy")), 1)
        ms = dpdata.MultiSystems()
        ms.load_systems_from_file(
            "ele_temp_mixed_data", fmt="deepmd/npy/mixed", labeled=False
        )
        self.assertTrue("aparam" in ms[0].data)

    def tearDown(self):
        for out_dir in ("ele_temp_data", "ele_temp_mixed_data"):
            if os.path.exists(out_dir):
                shutil.rmtree(out_dir)
class TestRunCommand(unittest.TestCase):
    """Tests for dpgen2.utils.run_command against a scratch directory."""

    def setUp(self):
        # FIX: remember the starting directory so tearDown can always restore
        # it. Previously each test chdir'd into work_path and only chdir'd
        # back on success; a failing assertion left cwd inside work_path,
        # making tearDown's relative rmtree (and later tests) misbehave.
        self.orig_cwd = os.getcwd()
        self.work_path = Path("work_path")
        self.work_path.mkdir(exist_ok=True)
        (self.work_path / "foo").write_text("foo")
        (self.work_path / "bar").write_text("foo")

    def tearDown(self):
        # Restore cwd unconditionally before removing the scratch directory.
        os.chdir(self.orig_cwd)
        if self.work_path.is_dir():
            shutil.rmtree(self.work_path)

    def test_success_shell(self):
        os.chdir(self.work_path)
        ret, out, err = run_command(["ls | sort"], shell=True)
        self.assertEqual(ret, 0)
        self.assertEqual(out, "bar\nfoo\n")
        # stderr may carry environment warnings; do not assert on it

    def test_success(self):
        os.chdir(self.work_path)
        ret, out, err = run_command(["ls"])
        self.assertEqual(ret, 0)
        self.assertEqual(out, "bar\nfoo\n")
        self.assertEqual(err, "")

    def test_success_foo(self):
        os.chdir(self.work_path)
        ret, out, err = run_command(["ls", "foo"])
        self.assertEqual(ret, 0)
        self.assertEqual(out, "foo\n")
        self.assertEqual(err, "")

    def test_failed(self):
        os.chdir(self.work_path)
        ret, out, err = run_command(["ls", "tar"])
        self.assertNotEqual(ret, 0)
        self.assertEqual(out, "")
        # exact message is locale/OS dependent; only require it is non-empty
        self.assertNotEqual(err, "")