├── .gitignore ├── .readthedocs.yaml ├── LICENSE ├── README.md ├── assets ├── logo.png └── workflow.png ├── bin └── gdp ├── conda.recipe └── meta.yaml ├── docs ├── Makefile ├── images │ ├── CutAndSplice.png │ ├── dscribe.png │ ├── expedition.svg │ ├── ga.png │ ├── ga.svg │ ├── gdpflow.png │ ├── graph-exp.png │ ├── region-cube.png │ ├── region-cylinder.png │ ├── region-lattice.png │ └── region-sphere.png ├── make.bat ├── requirements.txt └── source │ ├── about.rst │ ├── applications │ └── index.rst │ ├── builders │ ├── dimer.rst │ ├── graph.rst │ ├── index.rst │ ├── random.rst │ └── region.rst │ ├── computations │ └── index.rst │ ├── conf.py │ ├── expeditions │ ├── ga.rst │ ├── index.rst │ └── mc.rst │ ├── extensions │ └── index.rst │ ├── index.rst │ ├── installation.rst │ ├── potentials │ └── index.rst │ ├── references.rst │ ├── selections │ ├── descriptor.rst │ └── index.rst │ ├── sessions │ ├── index.rst │ └── operations.rst │ ├── start.rst │ ├── trainers │ ├── deepmd.rst │ ├── index.rst │ └── mace.rst │ ├── tutorials │ ├── copper.rst │ ├── index.rst │ └── water.rst │ └── workflows │ ├── compute_select.rst │ ├── correct.rst │ ├── explore_GA.rst │ ├── index.rst │ ├── react.rst │ ├── train.rst │ └── validate.rst ├── pyproject.toml ├── pytest.ini ├── scripts ├── estimate_chemical_potential.py └── plot_mctraj.py ├── src └── gdpx │ ├── bias │ ├── __init__.py │ ├── afir │ │ ├── __init__.py │ │ └── afir.py │ ├── bias.py │ ├── bondboost.py │ ├── gaussian │ │ ├── __init__.py │ │ ├── bond.py │ │ ├── com.py │ │ ├── distance.py │ │ ├── gaussian.py │ │ └── rmsd.py │ ├── harmonic │ │ ├── __init__.py │ │ ├── distance.py │ │ ├── harmonic.py │ │ └── plane.py │ ├── nuclei.py │ ├── timeio.py │ └── utils │ │ ├── __init__.py │ │ └── bondpair.py │ ├── builder │ ├── __init__.py │ ├── builder.py │ ├── cleave_group.py │ ├── cleave_surface.py │ ├── constraints.py │ ├── crossover.py │ ├── dimer.py │ ├── direct.py │ ├── graph │ │ ├── __init__.py │ │ ├── exchange.py │ │ ├── insert.py │ │ ├── modifier.py │ │ └── remove.py │ ├── group.py │ ├── insert.py │ ├── interface.py │ ├── mutation │ │ └── __init__.py │ ├── packer.py │ ├── perturbator.py │ ├── randomBuilder.py │ ├── region.py │ ├── repeat.py │ ├── scan │ │ ├── angle.py │ │ ├── hypercube.py │ │ └── intercoord.py │ ├── species.py │ ├── utils.py │ ├── wulff.py │ └── zoom.py │ ├── cli │ ├── build.py │ ├── compute.py │ ├── explore.py │ └── select.py │ ├── colvar │ ├── __init__.py │ ├── coordination.py │ ├── distance.py │ ├── fingerprint.py │ ├── position.py │ └── rmsd.py │ ├── comparator │ ├── __init__.py │ ├── cartesian.py │ ├── comparator.py │ ├── coordination.py │ ├── graph.py │ ├── interface.py │ ├── reaction.py │ └── singlepoint.py │ ├── computation │ ├── __init__.py │ ├── abacus.py │ ├── asedriver.py │ ├── cp2k.py │ ├── driver.py │ ├── espresso.py │ ├── interface.py │ ├── lammps.py │ ├── lasp.py │ ├── mc │ │ └── tfmc.py │ ├── md │ │ ├── md_utils.py │ │ └── nosehoover.py │ ├── observer.py │ ├── utils.py │ └── vasp.py │ ├── config.py │ ├── core │ ├── __init__.py │ ├── node.py │ ├── operation.py │ ├── placeholder.py │ ├── register.py │ ├── session │ │ ├── __init__.py │ │ ├── active.py │ │ ├── basic.py │ │ ├── interface.py │ │ ├── session.py │ │ └── utils.py │ └── variable.py │ ├── data │ ├── ClusterAndCUR.py │ ├── __init__.py │ ├── analyser.py │ ├── array.py │ ├── cleave_deviation.py │ ├── convert.py │ ├── correction.py │ ├── database.py │ ├── dataset.py │ ├── extatoms.py │ ├── extract_evolution.py │ ├── interface.py │ ├── operators.py │ ├── system.py │ └── 
utils.py │ ├── describer │ ├── __init__.py │ ├── describer.py │ ├── interface.py │ ├── soap.py │ └── spc.py │ ├── expedition │ ├── __init__.py │ ├── accelerated_dynamics │ │ └── prev_example.py │ ├── artificial_force │ │ └── afir.py │ ├── expedition.py │ ├── ga │ │ ├── engine.py │ │ └── population.py │ ├── interface.py │ ├── monte_carlo │ │ ├── __init__.py │ │ ├── basin_hopping.py │ │ ├── hybrid_monte_carlo.py │ │ ├── monte_carlo.py │ │ └── operators │ │ │ ├── __init__.py │ │ │ ├── exchange.py │ │ │ ├── move.py │ │ │ ├── operator.py │ │ │ ├── react.py │ │ │ └── swap.py │ └── simulated_annealing │ │ └── simulated_annealing.py │ ├── graph │ ├── __init__.py │ ├── comparison.py │ ├── creator.py │ ├── graph_main.py │ ├── molecule.py │ ├── sites.py │ ├── surface.py │ └── utils.py │ ├── main.py │ ├── potential │ ├── __init__.py │ ├── calculators │ │ ├── dummy.py │ │ └── mixer.py │ ├── interface.py │ ├── manager.py │ ├── managers │ │ ├── __init__.py │ │ ├── abacus.py │ │ ├── asepot.py │ │ ├── bias.py │ │ ├── cp2k.py │ │ ├── deepmd │ │ │ ├── __init__.py │ │ │ ├── calculator.py │ │ │ ├── convert.py │ │ │ └── deepmd.py │ │ ├── dftd3.py │ │ ├── eam.py │ │ ├── emt.py │ │ ├── espresso.py │ │ ├── gp │ │ │ ├── __init__.py │ │ │ ├── bench.py │ │ │ ├── fgp.py │ │ │ ├── gptools.py │ │ │ ├── representation.py │ │ │ └── sgp.py │ │ ├── grid.py │ │ ├── lasp.py │ │ ├── mace.py │ │ ├── mixer.py │ │ ├── nequip.py │ │ ├── plumed │ │ │ ├── calculators │ │ │ │ ├── plumed.py │ │ │ │ └── plumed2.py │ │ │ └── plumed.py │ │ ├── reann │ │ │ ├── beann.py │ │ │ ├── calculators │ │ │ │ └── reann.py │ │ │ └── reann.py │ │ ├── reax.py │ │ ├── schnet.py │ │ ├── vasp.py │ │ └── xtb.py │ ├── trainer.py │ └── utils.py │ ├── reactor │ ├── __init__.py │ ├── future │ │ ├── AccCons.py │ │ ├── AccNEB.py │ │ ├── cmp_mep.py │ │ ├── constrain.py │ │ ├── crs.py │ │ ├── diffusion3.py │ │ ├── find_adsorption.py │ │ ├── find_inter.py │ │ ├── muller-brown.py │ │ └── test_mh.py │ ├── interface.py │ ├── reactor.py │ ├── string │ │ ├── __init__.py │ │ ├── cp2k.py │ │ ├── grid.py │ │ ├── pathway.py │ │ ├── string.py │ │ └── vasp.py │ └── utils.py │ ├── scheduler │ ├── __init__.py │ ├── interface.py │ ├── local.py │ ├── lsf.py │ ├── pbs.py │ ├── remote.py │ ├── scheduler.py │ └── slurm.py │ ├── selector │ ├── __init__.py │ ├── basin.py │ ├── compare.py │ ├── composition.py │ ├── cur.py │ ├── descriptor.py │ ├── graph.py │ ├── interface.py │ ├── interval.py │ ├── invariant.py │ ├── locate.py │ ├── property.py │ ├── random.py │ ├── scf.py │ └── selector.py │ ├── trainer │ ├── __init__.py │ └── interface.py │ ├── utils │ ├── __init__.py │ ├── atomUtils.py │ ├── cleave_cluster.py │ ├── cmdrun.py │ ├── cmp_refdat.py │ ├── command.py │ ├── comparision.py │ ├── dputils │ │ ├── DeepPot.py │ │ └── acquire_dmat.py │ ├── geometry.py │ ├── plot_dimer.py │ ├── reduce_dataset.py │ ├── second_reduce.py │ ├── split-dataset.py │ ├── strconv.py │ └── strucopy.py │ ├── validator │ ├── __init__.py │ ├── diffusion_coefficient.py │ ├── dimer.py │ ├── eos.py │ ├── interface.py │ ├── mdf.py │ ├── melting_point.py │ ├── minima.py │ ├── rank.py │ ├── rdf.py │ ├── rxn.py │ ├── spc.py │ ├── surface_energy.py │ ├── trimer.py │ ├── utils.py │ └── validator.py │ └── worker │ ├── __init__.py │ ├── drive.py │ ├── explore.py │ ├── grid.py │ ├── interface.py │ ├── react.py │ ├── single.py │ ├── train.py │ ├── utils.py │ └── worker.py └── tests ├── assets ├── Cu-fcc-s111p22.xyz ├── ZnO.xyz ├── dpmd-AlCuO-m0.pb └── dpmd-AlCuO-m1.pb ├── bias ├── afir │ ├── test_afir.py │ └── 
test_afir_jax.py ├── bondboost │ ├── test_bondboost.py │ └── test_bondboost_jax.py ├── gaussian │ ├── test_distance_gaussian.py │ ├── test_fcom_gaussian.py │ └── test_fcom_gaussian_jax.py └── harmonic │ └── test_distance_harmonic.py ├── builder ├── assets │ └── Pd38.xyz ├── constraints │ └── test_constraints.py ├── region │ └── test_region.py ├── test_dimer.py ├── test_graph.py ├── test_hypercube.py ├── test_interface.py ├── test_molecule.py ├── test_packer.py ├── test_perturbator.py └── test_random.py ├── computation ├── asedriver │ ├── assets │ │ ├── Pd38_oct.xyz │ │ ├── emtmin.yaml │ │ └── emtnvt.yaml │ └── test_asedriver.py ├── assets │ ├── broken_ase_spc │ │ ├── dyn.log │ │ └── dyn.traj │ └── finished_ase_spc │ │ ├── dyn.log │ │ └── dyn.traj ├── bias │ └── test_harmonic.py ├── cp2k │ └── test_cp2k.py ├── espresso │ └── test_espresso.py ├── lammps │ ├── assets │ │ ├── ffield.PdO │ │ ├── reaxmin.yaml │ │ ├── reaxnvt.yaml │ │ └── reaxspc.yaml │ └── test_lammps.py └── vasp │ ├── assets │ ├── H2.xyz │ ├── INCAR │ ├── potpaw_PBE │ │ ├── Cu │ │ │ ├── POTCAR │ │ │ └── PSCTR │ │ ├── H │ │ │ ├── POTCAR │ │ │ └── PSCTR │ │ └── O │ │ │ ├── POTCAR │ │ │ └── PSCTR │ ├── vaspmd.yaml │ └── vaspspc.yaml │ └── test_vasp.py ├── conftest.py ├── data ├── bands.xyz └── test_atomsarray.py ├── potential ├── deepmd │ └── test_deepmd.py ├── lasp │ └── test_lasp.py ├── nequip │ └── test_nequip.py ├── plumed │ └── test_plumed.py └── reax │ ├── assets │ ├── ffield.PdO │ ├── reaxmd.yaml │ ├── reaxmin.yaml │ └── reaxspc.yaml │ └── test_reax.py ├── reactor ├── asedriver │ ├── assets │ │ └── aseneb.yaml │ └── test_ase_neb.py └── vasp │ ├── assets │ ├── CO+O_mep.xyz │ ├── INCAR │ ├── potpaw_PBE │ │ ├── C │ │ │ ├── POTCAR │ │ │ └── PSCTR │ │ ├── Cu │ │ │ ├── POTCAR │ │ │ └── PSCTR │ │ ├── H │ │ │ ├── POTCAR │ │ │ └── PSCTR │ │ ├── O │ │ │ ├── POTCAR │ │ │ └── PSCTR │ │ └── Pt │ │ │ ├── POTCAR │ │ │ └── PSCTR │ └── vaspneb.yaml │ └── test_vaspneb.py ├── selector ├── r2.xyz ├── test_cache.py ├── test_desc.py ├── test_interval.py └── test_property.py └── session ├── test_omegaconf.py └── test_session.py /.gitignore: -------------------------------------------------------------------------------- 1 | # byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | .vscode 7 | .DS_store 8 | 9 | # built files 10 | dist/ 11 | *egg-info/ 12 | 13 | build 14 | _build 15 | 16 | # simulation results 17 | cand* 18 | 19 | # output files in examples 20 | *.xyz 21 | *.pbs 22 | *.slurm 23 | slurm.* 24 | *.data 25 | *.lammps 26 | *.png 27 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.10" 12 | # You can also specify other tool versions: 13 | # nodejs: "20" 14 | # rust: "1.70" 15 | # golang: "1.20" 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: docs/source/conf.py 20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs 21 | # builder: "dirhtml" 22 | # Fail on all warnings to avoid broken references 23 | # fail_on_warning: true 24 | 25 | # Optionally build your docs in 
additional formats such as PDF and ePub 26 | formats: 27 | - pdf 28 | 29 | # Optional but recommended, declare the Python requirements required 30 | # to build your documentation 31 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 32 | python: 33 | install: 34 | - requirements: docs/requirements.txt 35 | -------------------------------------------------------------------------------- /assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/assets/logo.png -------------------------------------------------------------------------------- /assets/workflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/assets/workflow.png -------------------------------------------------------------------------------- /bin/gdp: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | '''exec' python3 "$0" "$@" 3 | ' ''' 4 | # -*- coding: utf-8 -*- 5 | import re 6 | import sys 7 | from gdpx.main import main 8 | if __name__ == '__main__': 9 | sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) 10 | sys.exit(main()) 11 | -------------------------------------------------------------------------------- /conda.recipe/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set name = "gdpx" %} 2 | {% set version = "0.0.10" %} 3 | 4 | package: 5 | name: {{ name|lower }} 6 | version: {{ version }} 7 | 8 | source: 9 | url: https://pypi.python.org/packages/source/g/gdpx/gdpx-0.0.10.tar.gz 10 | sha256: ffd485f15bd65553f42ae365c5e70b428d38ed75dad633a7429d270a26de3051 11 | 12 | build: 13 | noarch: python 14 | number: 0 15 | script: {{ PYTHON }} -m pip install . -vv 16 | entry_points: 17 | - gdp = gdpx.main:main 18 | 19 | requirements: 20 | host: 21 | - python >=3.9 22 | - pip 23 | run: 24 | - python >=3.9 25 | - ase >=3.23 26 | - h5py >=3.7.0 27 | - joblib >=1.1.0 28 | - networkx >=2.6.3 29 | - omegaconf >=2.3.0 30 | - pyyaml >=6.0 31 | - tinydb >=4.7.0 32 | 33 | test: 34 | imports: 35 | - gdpx 36 | requires: 37 | - pip 38 | commands: 39 | - pip check 40 | - gdp -h # [not win] 41 | 42 | about: 43 | home: https://github.com/hsulab/GDPy 44 | summary: "Automate computational chemistry/materials sciance and machine learning interatomic potential training workflow." 45 | license: GPL-3.0-only 46 | license_family: GPL3 47 | license_file: LICENSE 48 | dev_url: https://github.com/hsulab/GDPy 49 | 50 | extra: 51 | recipe-maintainers: 52 | - hsulab 53 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/images/CutAndSplice.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/CutAndSplice.png -------------------------------------------------------------------------------- /docs/images/dscribe.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/dscribe.png -------------------------------------------------------------------------------- /docs/images/ga.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/ga.png -------------------------------------------------------------------------------- /docs/images/gdpflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/gdpflow.png -------------------------------------------------------------------------------- /docs/images/graph-exp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/graph-exp.png -------------------------------------------------------------------------------- /docs/images/region-cube.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/region-cube.png -------------------------------------------------------------------------------- /docs/images/region-cylinder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/region-cylinder.png -------------------------------------------------------------------------------- /docs/images/region-lattice.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/region-lattice.png -------------------------------------------------------------------------------- /docs/images/region-sphere.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/docs/images/region-sphere.png -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. 
Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | alabaster==0.7.12 2 | Babel==2.11.0 3 | Brotli==1.0.9 4 | certifi==2023.11.17 5 | cffi==1.15.1 6 | charset-normalizer==2.0.4 7 | colorama==0.4.6 8 | cryptography==41.0.3 9 | docutils==0.17.1 10 | idna==3.4 11 | imagesize==1.4.1 12 | Jinja2==3.1.2 13 | MarkupSafe==2.1.1 14 | packaging==23.1 15 | pip==23.3.1 16 | pycparser==2.21 17 | Pygments==2.15.1 18 | pyOpenSSL==23.2.0 19 | PySocks==1.7.1 20 | pytz==2023.3.post1 21 | requests==2.31.0 22 | setuptools==68.0.0 23 | snowballstemmer==2.2.0 24 | Sphinx==5.0.2 25 | sphinx-rtd-theme==1.1.1 26 | sphinxcontrib-applehelp==1.0.2 27 | sphinxcontrib-devhelp==1.0.2 28 | sphinxcontrib-htmlhelp==2.0.0 29 | sphinxcontrib-jsmath==1.0.1 30 | sphinxcontrib-qthelp==1.0.3 31 | sphinxcontrib-serializinghtml==1.1.5 32 | tqdm==4.64.0 33 | urllib3==1.26.18 34 | wheel==0.41.2 35 | -------------------------------------------------------------------------------- /docs/source/about.rst: -------------------------------------------------------------------------------- 1 | About 2 | ===== 3 | 4 | GDPy stands for **Generating Deep Potential with Python**, including 5 | a set of tools and Python modules to automate the structure exploration 6 | and the model training for **machine learning interatomic potentials** (MLIPs). 7 | It is developed and maintained by `Jiayan Xu`_ under supervision of Prof. `P. Hu`_ 8 | at Queen's University Belfast. 9 | 10 | .. _Jiayan Xu: https://scholar.google.com/citations?user=ue5SBQMAAAAJ&hl=en 11 | .. _P. Hu: https://scholar.google.com/citations?user=GNuXfeQAAAAJ&hl=en 12 | 13 | Features 14 | -------- 15 | 16 | - A unified interface to various MLIPs. 17 | - Versatile exploration algorithms to construct a general dataset. 18 | - Automation workflows for dataset construction and MLIP training. 19 | 20 | Overview 21 | -------- 22 | 23 | .. |workflow| image:: ../../assets/workflow.png 24 | :width: 800 25 | 26 | The modules are: 27 | 28 | |workflow| 29 | -------------------------------------------------------------------------------- /docs/source/applications/index.rst: -------------------------------------------------------------------------------- 1 | Applications 2 | ============ 3 | 4 | .. include:: ../references.rst 5 | 6 | #. |JPCC2022Xu| 7 | 8 | #. |ACSCatal2022Xu| 9 | 10 | #. |ACSCatal2023Han| -------------------------------------------------------------------------------- /docs/source/builders/dimer.rst: -------------------------------------------------------------------------------- 1 | DimerBuilder 2 | ============ 3 | 4 | -------------------------------------------------------------------------------- /docs/source/builders/graph.rst: -------------------------------------------------------------------------------- 1 | .. _graph builders: 2 | 3 | Graph 4 | ===== 5 | 6 | 7 | insert 8 | ------ 9 | 10 | .. 
code-block:: yaml 11 | 12 | # config.yaml 13 | method: graph_insert 14 | species: CO 15 | spectators: [C, O] 16 | sites: 17 | - cn: 1 18 | group: 19 | - "symbol Cu" 20 | - "region cube 0. 0. 0. -100. -100. 6. 100. 100. 8." 21 | radius: 3 22 | ads: 23 | mode: "atop" 24 | distance: 2.0 25 | - cn: 2 26 | group: 27 | - "symbol Cu" 28 | - "region cube 0. 0. 0. -100. -100. 6. 100. 100. 8." 29 | radius: 3 30 | ads: 31 | mode: "atop" 32 | distance: 2.0 33 | - cn: 3 34 | group: 35 | - "symbol Cu" 36 | - "region cube 0. 0. 0. -100. -100. 6. 100. 100. 8." 37 | radius: 3 38 | ads: 39 | mode: "atop" 40 | distance: 2.0 41 | graph: 42 | pbc_grid: [2, 2, 0] 43 | graph_radius: 2 44 | neigh_params: 45 | covalent_ratio: 1.1 46 | skin: 0.25 47 | 48 | remove 49 | ------ 50 | 51 | .. code-block:: yaml 52 | 53 | # config.yaml 54 | method: graph_remove 55 | species: O 56 | graph: 57 | pbc_grid: [2, 2, 0] 58 | graph_radius: 2 59 | neigh_params: 60 | covalent_ratio: 1.1 61 | skin: 0.25 62 | spectators: [O] 63 | target_group: 64 | - "symbol O" 65 | - "region surface_lattice 0.0 0.0 8.0 9.8431 0.0 0.0 0.0 10.5534 0.0 0.0 0.0 8.0" 66 | 67 | exchange 68 | -------- 69 | 70 | .. code-block:: yaml 71 | 72 | # config.yaml 73 | method: graph_exchange 74 | species: Zn 75 | target: Cr 76 | graph: 77 | pbc_grid: [2, 2, 0] 78 | graph_radius: 2 79 | neigh_params: 80 | # AssertionError: Single atoms group into one adsorbate. 81 | # Try reducing the covalent radii. if it sets 1.1. 82 | covalent_ratio: 1.0 83 | skin: 0.25 84 | spectators: [Zn, Cr] 85 | target_group: 86 | - "symbol Zn Cr" 87 | - "region surface_lattice 0.0 0.0 8.0 9.8431 0.0 0.0 0.0 10.5534 0.0 0.0 0.0 8.0" 88 | -------------------------------------------------------------------------------- /docs/source/builders/index.rst: -------------------------------------------------------------------------------- 1 | .. _Builders: 2 | 3 | Builders 4 | ======== 5 | 6 | Builders are several classes that generate structures. They can be defined in two 7 | categories as Builder and Modifier. 8 | 9 | Related Commands 10 | ---------------- 11 | 12 | .. code-block:: shell 13 | 14 | # - build structures based on `config.yaml` 15 | # results would be written to the `results` directory 16 | $ gdp -d ./results build ./config.yaml 17 | 18 | # - build structures based on `config.yaml` 19 | # some builders (modifiers) require substrates as input 20 | # it can be set in `config.yaml` directly or as a command argument 21 | $ gdp -d ./results build ./config.yaml --substrates ./sub.xyz 22 | 23 | # - build 10 structures based on `config.yaml` 24 | # `number` can be used for some random-based builders (modifiers) 25 | # otherwise, only **1** structure is randomly built. 26 | $ gdp -d ./results build ./config.yaml --substrates ./sub.xyz --number 10 27 | 28 | .. FixedNumberBuilders that returns a fixed number of structures based on input parameters. 29 | .. 30 | .. direct, molecule, graph 31 | .. 32 | .. UserDefinedNumberBuilders 33 | .. 34 | .. random, perturbator 35 | .. 36 | .. Modifiers that must have substrates as input 37 | .. 38 | .. repeat, cleave, perturb 39 | 40 | List of Builders 41 | ---------------- 42 | 43 | .. toctree:: 44 | :maxdepth: 2 45 | 46 | dimer.rst 47 | random.rst 48 | graph.rst 49 | 50 | Related Components 51 | ------------------ 52 | 53 | .. 
toctree:: 54 | :maxdepth: 2 55 | 56 | region.rst 57 | 58 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | sys.path.insert(0, os.path.abspath("../../src/gdpx")) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = "GDPy (gdpx)" 21 | copyright = '2020-2023, Jiayan Xu' 22 | author = 'Jiayan Xu' 23 | 24 | # The full version, including alpha/beta/rc tags 25 | release = '0.0.2' 26 | 27 | 28 | # -- General configuration --------------------------------------------------- 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [ 34 | "sphinx.ext.autodoc", 35 | "sphinx.ext.napoleon", 36 | "sphinx.ext.doctest", 37 | "sphinx.ext.intersphinx", 38 | "sphinx.ext.todo", 39 | "sphinx.ext.coverage", 40 | "sphinx.ext.mathjax", 41 | ] 42 | 43 | # Add any paths that contain templates here, relative to this directory. 44 | templates_path = ['_templates'] 45 | 46 | # List of patterns, relative to source directory, that match files and 47 | # directories to ignore when looking for source files. 48 | # This pattern also affects html_static_path and html_extra_path. 49 | exclude_patterns = [] 50 | 51 | 52 | # -- Options for HTML output ------------------------------------------------- 53 | 54 | # The theme to use for HTML and HTML Help pages. See the documentation for 55 | # a list of builtin themes. 56 | # 57 | html_theme = "sphinx_rtd_theme" 58 | 59 | # Add any paths that contain custom static files (such as style sheets) here, 60 | # relative to this directory. They are copied after the builtin static files, 61 | # so a file named "default.css" will overwrite the builtin "default.css". 62 | # html_static_path = ['_static'] 63 | -------------------------------------------------------------------------------- /docs/source/extensions/index.rst: -------------------------------------------------------------------------------- 1 | Extensions 2 | ========== 3 | 4 | This section is about how to extend GDPy with custom python files. 5 | 6 | Custom Potential 7 | ---------------- 8 | 9 | First we define a class named ``EmtManager`` that is a subclass of ``AbstractPotentialManager`` 10 | in ``emt.py``. We need to implement two attributes (``implemented_backends`` and ``valid_combinations``) 11 | and one method (``register_calculator``). Here, we only implement one backend that uses built-in EMT calculator 12 | in **ase**. 13 | 14 | .. 
code-block:: python3 15 | 16 | #!/usr/bin/env python3 17 | # -*- coding: utf-8 -* 18 | 19 | from ase.calculators.emt import EMT 20 | 21 | from GDPy.potential.manager import AbstractPotentialManager 22 | 23 | class EmtManager(AbstractPotentialManager): 24 | 25 | name = "emt" 26 | implemented_backends = ["ase"] 27 | 28 | valid_combinations = [ 29 | ["ase", "ase"] 30 | ] 31 | 32 | 33 | def register_calculator(self, calc_params, *args, **kwargs): 34 | super().register_calculator(calc_params) 35 | 36 | if self.calc_backend == "ase": 37 | calc = EMT() 38 | 39 | self.calc = calc 40 | 41 | return 42 | 43 | if __name__ == "__main__": 44 | pass 45 | 46 | Then we can use EMT through ``pot.yaml``. 47 | 48 | .. code-block:: yaml 49 | 50 | potential: 51 | name: ./emt.py # lowercase 52 | params: 53 | backend: ase 54 | driver: 55 | backend: external 56 | task: min 57 | run: 58 | fmax: 0.05 59 | steps: 10 60 | 61 | At last, we optimise a **H2O** molecule with **EMT**. The results are stored in the directory **cand0**. 62 | 63 | .. code-block:: shell 64 | 65 | $ gdp driver ./pot.yaml -s H2O 66 | nframes: 1 67 | potter: emt 68 | *** run-driver time: 0.1517 *** 69 | [1.8792752663147125] 70 | 71 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. GDPy documentation master file, created by 2 | sphinx-quickstart on Mon Aug 22 14:06:51 2022. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | GDPy Documentation 7 | ============================================ 8 | 9 | GDPy stands for **Generating Deep Potential with Python**, including 10 | a set of tools and Python modules to automate the structure exploration 11 | and the model training for **machine learning interatomic potentials** (MLIPs). 12 | It is developed and maintained by `Jiayan Xu`_ under supervision of Prof. `P. Hu`_ 13 | at Queen's University Belfast. 14 | 15 | .. _Jiayan Xu: https://scholar.google.com/citations?user=ue5SBQMAAAAJ&hl=en 16 | .. _P. Hu: https://scholar.google.com/citations?user=GNuXfeQAAAAJ&hl=en 17 | 18 | .. figure:: ../../assets/logo.png 19 | :alt: GPDy LOGO 20 | :width: 400 21 | :align: center 22 | 23 | 24 | Supported **Potentials** 25 | ------------------------ 26 | 27 | ``eann``, ``deepmd``, ``lasp``, ``nequip`` / ``allegro`` 28 | 29 | Supported **Expeditions** 30 | ------------------------- 31 | 32 | ``molecular dynamics``, ``genetic algorithm``, ``grand canonical monte carlo``, 33 | ``graph-theory adsorbate configuration``, ``artificial force induced reaction`` 34 | 35 | .. toctree:: 36 | :maxdepth: 2 37 | :caption: Introduction: 38 | 39 | about.rst 40 | installation.rst 41 | 42 | .. toctree:: 43 | :maxdepth: 2 44 | :caption: Basic Guides: 45 | 46 | start 47 | potentials/index 48 | trainers/index 49 | computations/index 50 | builders/index 51 | selections/index 52 | routines/index 53 | expeditions/index 54 | tutorials/index 55 | 56 | .. toctree:: 57 | :maxdepth: 2 58 | :caption: Advanced Guides: 59 | 60 | sessions/index 61 | workflows/index 62 | 63 | .. toctree:: 64 | :maxdepth: 2 65 | :caption: Developer Guides: 66 | 67 | extensions/index 68 | .. modules/modules 69 | 70 | .. toctree:: 71 | :maxdepth: 2 72 | :caption: Gallery: 73 | 74 | applications/index 75 | 76 | 77 | .. Indices and tables 78 | .. ================== 79 | .. 80 | .. * :ref:`genindex` 81 | .. * :ref:`modindex` 82 | .. 
* :ref:`search` 83 | -------------------------------------------------------------------------------- /docs/source/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | Requirements 5 | ------------ 6 | 7 | Must: 8 | 9 | - Python 3.9 10 | - matplotlib 3.5.0 11 | - numpy 1.21.2 12 | - scipy 1.7.3 13 | - scikit-learn 1.0.1 14 | - ase_ 3.22.1 15 | - dscribe 1.2.1 16 | - joblib 1.1.0 17 | - tinydb_ 4.7.0 18 | - pyyaml 6.0 19 | - networkx 2.6.3 20 | - omegaconf_ 2.3.0 21 | - h5py 3.7.0 22 | 23 | .. - e3nn 0.5.0 24 | 25 | .. _ase: https://wiki.fysik.dtu.dk/ase 26 | .. _tinydb: https://tinydb.readthedocs.io 27 | .. _omegaconf: https://omegaconf.readthedocs.io 28 | 29 | Optional: 30 | 31 | - jax 0.2.27 32 | - pytorch 1.10.1 33 | - sella 2.0.2 34 | - plumed 2.7.3 35 | 36 | From Source, Conda or Pip 37 | ------------------------- 38 | 39 | .. code-block:: shell 40 | 41 | # Create a python environment 42 | 43 | # Install the latest RELEASED version from anaconda 44 | $ conda install gdpx -c conda-forge 45 | 46 | # or from pypi 47 | $ pip install gdpx 48 | 49 | # Install the latest development version 50 | # 1. download the MAIN branch 51 | $ git clone https://github.com/hsulab/GDPy.git 52 | # or the DEV branch 53 | $ git clone -b dev https://github.com/hsulab/GDPy.git 54 | 55 | # 2. Use pip to install an editable version to 56 | # the current environment 57 | $ cd GDPy 58 | $ pip install -e ./ 59 | 60 | # 3. Update the source code 61 | $ cd GDPy 62 | $ git fetch 63 | $ git pull 64 | -------------------------------------------------------------------------------- /docs/source/references.rst: -------------------------------------------------------------------------------- 1 | 2 | .. |JPCC2022Xu| replace:: Lee, M.-H.; **Xu, J.**; Xie, W. Exploring the Stability of Single-Atom Catalysts Using the Density Functional Theory-Based Global Optimization Method: H2 Formation on VOx/γ-Al2O3(100). **J. Phys. Chem. C** 2022, 126, 6973-6981. 3 | 4 | .. |ACSCatal2022Xu| replace:: **Xu, J.**; Xie, W.; Han, Y.; Hu, P. Atomistic Insights into the Oxidation of Flat and Stepped Platinum Surfaces Using Large-Scale Machine Learning Potential-Based Grand-Canonical Monte Carlo. **ACS Catal.** 2022, 12, 14812-14824. 5 | 6 | .. |ACSCatal2023Han| replace:: Han, Y.; **Xu, J.**; Xie, W.; Wang, Z.; Hu, P. Comprehensive Study of Oxygen Vacancies on the Catalytic Performance of ZnO for CO/H2 Activation Using Machine Learning-Accelerated First-Principles Simulations. **ACS Catal.** 2023, 13, 5104-5113. -------------------------------------------------------------------------------- /docs/source/selections/descriptor.rst: -------------------------------------------------------------------------------- 1 | Descriptor 2 | ========== 3 | 4 | `Select structures based on descriptors.` 5 | 6 | Two sparsification methods are supported. 7 | 8 | - cur: 9 | 10 | Run CUR decomposition to select the most representative structures. This method 11 | computes a CUR score for every structure, and `strategy` defines how the selection is made: 12 | either deterministic (`descent`), which keeps the structures with the `number` largest scores, 13 | or random (`stochastic`), where structures with higher scores are selected with higher probability. 14 | If `zeta` is larger than 0., the input descriptors will be transformed as 15 | `MATMUL(descriptors.T, descriptors)^zeta`. 16 | 17 | - fps: 18 | 19 | The farthest point sampling strategy. 
`min_distance` can be set to adjust the 20 | sparsity of selected structures in the feature (descriptor) space. 21 | 22 | .. code-block:: yaml 23 | 24 | selection: 25 | - method: descriptor 26 | descriptor: 27 | name: soap 28 | species: ["H", "O", "Pt"] 29 | rcut : 6.0 30 | nmax : 12 31 | lmax : 8 32 | sigma : 0.3 33 | average : inner 34 | periodic : true 35 | sparsify: 36 | method: cur # fps 37 | zeta: -1 38 | strategy: descent 39 | number: [16, 1.0] 40 | 41 | .. |dscribe| image:: ../../images/dscribe.png 42 | :width: 400 43 | 44 | This selection will produce a picture to visualise the distribution of structures. 45 | 46 | |dscribe| 47 | 48 | .. note:: 49 | 50 | This requires the python package `dscribe` to be installed. Use `pip install dscribe` or 51 | `conda install dscribe -c conda-forge`. 52 | -------------------------------------------------------------------------------- /docs/source/sessions/operations.rst: -------------------------------------------------------------------------------- 1 | .. _operations: 2 | 3 | Operations 4 | ========== 5 | 6 | 7 | extract_cache 8 | ------------- 9 | 10 | .. code-block:: yaml 11 | 12 | extract_cache: 13 | type: extract_cache 14 | compute: ${vx:computer} 15 | cache_wdirs: 16 | - ./cand0 -------------------------------------------------------------------------------- /docs/source/trainers/deepmd.rst: -------------------------------------------------------------------------------- 1 | deepmd 2 | ====== 3 | 4 | .. warning:: 5 | 6 | This trainer requires an extra package `dpdata`. Use `conda install dpdata -c deepmodeling` to 7 | install it. 8 | 9 | **gdp** converts structures into the deepmd format stored in two folders `train` 10 | and `valid` based on `dataset` and writes a training configuration `deepmd.json`. 11 | The training will be performed by `dp train deepmd.json`. 12 | 13 | Some parameters in the `deepmd.json` will be filled automatically by **gdp**. 14 | `training.training_data` and `training.validation_data` will be the folder paths generated 15 | by **gdp**. Moreover, deepmd uses `numb_steps` instead of epochs. **gdp** will compute 16 | the number of batches based on the input dataset and multiply it by `train_epochs` 17 | to give the value of `numb_steps`. 18 | 19 | See DEEPMD_ doc for more info about configuration parameters. Example Configuration: 20 | 21 | .. _DEEPMD: https://docs.deepmodeling.com/projects/deepmd/en/master/index.html 22 | 23 | .. code-block:: yaml 24 | 25 | dataset: 26 | name: xyz 27 | dataset_path: ./dataset 28 | train_ratio: 0.9 29 | batchsize: 16 30 | random_seed: 1112 31 | trainer: 32 | name: deepmd 33 | config: ./dpconfig.json 34 | type_list: ["H", "O"] 35 | train_epochs: 10 36 | random_seed: 1112 37 | init_model: ../model.ckpt 38 | 39 | .. note:: 40 | 41 | The deepmd Trainer in **gdp** supports an `init_model` keyword that allows one to 42 | initialise model parameters from a previous checkpoint. This is useful when 43 | training models iteratively in an active learning loop. 44 | -------------------------------------------------------------------------------- /docs/source/trainers/mace.rst: -------------------------------------------------------------------------------- 1 | mace 2 | ==== 3 | 4 | **gdp** writes `./_train.xyz` and `./_test.xyz` into the training directory based on 5 | `dataset` and generates a command line based on `trainer`. 6 | 7 | Note that some parameters are overridden by **gdp** based on the `dataset` and the `trainer` 8 | parameters. 
The `trainer.config` section will be converted to a command line as 9 | `python ./run_train.py --name='MACE_model' ...`, which is the current training command 10 | supported by MACE. 11 | 12 | - seed: Overridden by `trainer.seed`. 13 | - max_num_epochs: Overridden by `trainer.train_epochs`. 14 | - batch_size: Overridden by `dataset`. 15 | - train_file: Overridden as `./_train.xyz`. 16 | - valid_file: Overridden as `./_test.xyz`. 17 | - valid_fraction: Always 0. 18 | - device: Automatically detected (either cpu or cuda). No Apple Silicon! 19 | - config_type_weights: Must be a string instead of a dictionary. 20 | 21 | .. note:: 22 | 23 | The train set is the data used to optimise model parameters. The validation set is the data 24 | that helps us monitor the training progress and decide at which epoch to save the 25 | model. The test set is the data that is neither trained on nor affects our decision on the 26 | model. Some training workflows simplify these concepts and just use one `test` set 27 | for both the validation and the test purposes. 28 | 29 | See MACE_ doc for more info about configuration parameters. Example Configuration: 30 | 31 | .. _MACE: https://github.com/ACEsuit/mace 32 | 33 | .. code-block:: yaml 34 | 35 | dataset: 36 | name: xyz 37 | dataset_path: ./dataset 38 | train_ratio: 0.9 39 | batchsize: 16 40 | random_seed: 1112 41 | trainer: 42 | name: mace 43 | command: python ./run_train.py 44 | config: # This section can be put into a separate file e.g. `./config.yaml` 45 | name: MACE_model 46 | valid_fraction: 0.05 47 | config_type_weights: '{"Default": 1.0}' 48 | E0s: {1: -12.6261, 8: -428.5812} 49 | model: MACE 50 | default_dtype: float32 51 | hidden_irreps: "128x0e + 128x1o" 52 | r_max: 4.0 53 | swa: true 54 | start_swa: 10 55 | ema: true 56 | ema_decay: 0.99 57 | amsgrad: true 58 | restart_latest: true 59 | type_list: ["H", "O"] 60 | train_epochs: 10 61 | random_seed: 1112 62 | 63 | .. warning:: 64 | 65 | If one uses `swa`, **gdp** will not check if `start_swa` is smaller than 66 | `max_num_epochs`. If `start_swa` is larger than `max_num_epochs`, there will 67 | be an error when saving the model. -------------------------------------------------------------------------------- /docs/source/tutorials/copper.rst: -------------------------------------------------------------------------------- 1 | Build a Potential for Cu Bulk with Global Search 2 | ------------------------------------------------ 3 | 4 | Here, we use EMT-GA to explore structures of Cu bulks. -------------------------------------------------------------------------------- /docs/source/tutorials/index.rst: -------------------------------------------------------------------------------- 1 | Tutorials 2 | ========= 3 | 4 | We have listed several tutorials to demonstrate how to build a potential for a 5 | very specific chemical system. 6 | 7 | 8 | List of Tutorials 9 | ----------------- 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | copper.rst 15 | water.rst 16 | -------------------------------------------------------------------------------- /docs/source/tutorials/water.rst: -------------------------------------------------------------------------------- 1 | Build a Potential for Pt/H2O with On-the-Fly Molecular Dynamics 2 | --------------------------------------------------------------- 3 | 4 | Here, we use deepmd to explore structures of the platinum-water interface. 
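A potential definition for such an exploration could look like the block below. This is only a minimal sketch that mirrors the `potter` variable shown in the Train workflow; the backend, the LAMMPS command, and the type list are placeholder values to be adapted to the actual system. 5 | 6 | .. code-block:: yaml 7 | 8 | potter: 9 | type: potter 10 | name: deepmd 11 | params: 12 | backend: lammps 13 | command: "lmp -in in.lammps 2>&1 > lmp.out" 14 | type_list: ["H", "O", "Pt"] 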
-------------------------------------------------------------------------------- /docs/source/workflows/correct.rst: -------------------------------------------------------------------------------- 1 | Add Correction to Computed Structures 2 | ===================================== 3 | 4 | Use `gdp session` to run a workflow that adds an energy/forces correction to computed 5 | structures. The `correct` operation needs two input variables and forwards a 6 | `Tempdata` variable (see the Dataset section for more details). 7 | 8 | For the input variables, 9 | 10 | - structures: A `Tempdata` variable. 11 | 12 | - computer: A `computer` variable. 13 | 14 | The example below adds the `DFT-D3` correction to a dataset of an H2O molecule. The output 15 | cache is saved to `./_corr/0002.corr_dftd3/merged.xyz`. The structures have energy/forces 16 | equal to `origin+dftd3`. 17 | 18 | Example Configuration 19 | --------------------- 20 | 21 | .. code-block:: yaml 22 | 23 | variables: 24 | dataset: 25 | type: tempdata 26 | system_dirs: 27 | - ./min-H2O-molecule 28 | # --- 29 | spc_dftd3: 30 | type: computer 31 | potter: 32 | name: dftd3 33 | params: 34 | backend: ase 35 | method: PBE # xc 36 | damping: d3bj 37 | operations: 38 | corr_dftd3: 39 | type: correct 40 | structures: ${vx:dataset} 41 | computer: ${vx:spc_dftd3} 42 | sessions: 43 | _corr: corr_dftd3 44 | 45 | .. note:: 46 | 47 | The `DFT-D3` computation requires the python package `dftd3-python`. 48 | Use `conda install dftd3-python -c conda-forge` if one does not have it. 49 | -------------------------------------------------------------------------------- /docs/source/workflows/index.rst: -------------------------------------------------------------------------------- 1 | Workflows 2 | ========= 3 | 4 | This section includes several oft-used `sessions` (workflows). 5 | 6 | List of Workflows 7 | ----------------- 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | compute_select.rst 13 | react.rst 14 | explore_GA.rst 15 | train.rst 16 | validate.rst 17 | correct.rst 18 | -------------------------------------------------------------------------------- /docs/source/workflows/train.rst: -------------------------------------------------------------------------------- 1 | Train 2 | ===== 3 | 4 | We can access the training by a `train` operation. This operation accepts four input 5 | variables and forwards a `potter` (AbstractPotentialManager) object. 6 | 7 | For the input variables, 8 | 9 | - potter: 10 | 11 | The potential manager. See :ref:`Potential Examples` for more details. 12 | 13 | - dataset: 14 | 15 | The dataset. See :ref:`Trainers` for more details. 16 | 17 | - trainer: 18 | 19 | The trainer configuration that defines the commands and the model configuration. 20 | 21 | - scheduler: 22 | 23 | Any scheduler. In general, the training needs a GPU scheduler. 24 | 25 | .. note:: 26 | 27 | The name in `potter` and `trainer` should be the same. 28 | 29 | Extra parameters, 30 | 31 | - size: 32 | 33 | Number of models trained at the same time. This is useful when a committee is needed 34 | later for uncertainty estimation. 35 | 36 | - init_models: 37 | 38 | A list of model checkpoints to initialise model parameters. 39 | The number should be the same as `size`. 40 | 41 | Session Configuration 42 | --------------------- 43 | 44 | .. 
code-block:: yaml 45 | 46 | variables: 47 | dataset: 48 | type: dataset 49 | name: xyz 50 | dataset_path: ./dataset 51 | train_ratio: 0.9 52 | batchsize: 16 53 | # random_seed: 1112 # Set this if one wants to reproduce results 54 | potter: 55 | type: potter 56 | name: deepmd 57 | params: 58 | backend: lammps 59 | command: "lmp -in in.lammps 2>&1 > lmp.out" 60 | type_list: ["H", "O"] 61 | trainer: 62 | type: trainer 63 | name: deepmd 64 | command: dp 65 | config: ${json:./config.json} 66 | train_epochs: 500 67 | # random_seed: 1112 # Set this if one wants to reproduce results 68 | scheduler_gpu: 69 | type: scheduler 70 | backend: slurm 71 | partition: k2-gpu 72 | time: "6:00:00" 73 | ntasks: 1 74 | cpus-per-task: 4 75 | mem-per-cpu: 4G 76 | gres: gpu:1 77 | environs: "conda activate deepmd\n" 78 | operations: 79 | train: 80 | type: train 81 | potter: ${vx:potter} 82 | dataset: ${vx:dataset} 83 | trainer: ${vx:trainer} 84 | scheduler: ${vx:scheduler_gpu} 85 | size: 4 86 | init_models: 87 | - ./model.ckpt 88 | sessions: 89 | _train: train 90 | -------------------------------------------------------------------------------- /docs/source/workflows/validate.rst: -------------------------------------------------------------------------------- 1 | Validate 2 | ======== 3 | 4 | The **validate** operation requires `validator`, `structures` (dataset), and `worker` (optional) as 5 | inputs. 6 | 7 | .. code-block:: yaml 8 | 9 | variables: 10 | ... 11 | operations: 12 | ... 13 | sessions: 14 | ... 15 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "gdpx" 7 | version = "0.0.10" 8 | dependencies = [ 9 | "ase>=3.23", 10 | "h5py>=3.7.0", 11 | "joblib>=1.1.0", 12 | "networkx>=2.6.3", 13 | "omegaconf>=2.3.0", 14 | "pyyaml>=6.0", 15 | "tinydb>=4.7.0", 16 | ] 17 | requires-python = ">=3.9" 18 | authors = [ 19 | { name="Jiayan Xu", email="ahcigar@foxmail.com" }, 20 | ] 21 | description = "Automate computational chemistry/materials science and machine learning interatomic potential training workflow." 22 | readme = "README.md" 23 | license = {file = "LICENSE"} 24 | classifiers = [ 25 | "Programming Language :: Python :: 3", 26 | "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", 27 | "Operating System :: OS Independent", 28 | ] 29 | 30 | [project.urls] 31 | Homepage = "https://github.com/hsulab/GDPy" 32 | Issues = "https://github.com/hsulab/GDPy/issues" 33 | 34 | [project.scripts] 35 | gdp = "gdpx.main:main" 36 | 37 | [tool.pyright] 38 | include = [ 39 | "src" 40 | ] 41 | exclude = [ 42 | "**/tests", 43 | ] 44 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | basic: mark tests as basic 4 | vasp: mark tests as vasp (mpirun needed!!) 5 | vasp_rxn: mark tests as vasp (mpirun needed!!) 6 | lammps: mark tests as lammps (lammps needed!!) 
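# The markers above can be selected on the pytest command line, e.g.
#   pytest -m basic          # run only the basic tests
#   pytest -m "not vasp"     # skip tests that need an external VASP/mpirun setup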
7 | -------------------------------------------------------------------------------- /scripts/estimate_chemical_potential.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import numpy as np 6 | 7 | from ase import units 8 | 9 | 10 | def estimate_chemical_potential( 11 | temperature: float, 12 | pressure: float, # pressure, 1 bar 13 | total_energy: float, 14 | zpe: float, 15 | dU: float, 16 | dS: float, # entropy 17 | coef: float = 1.0 18 | ) -> float: 19 | """Estimate Chemical Potential 20 | 21 | Examples: 22 | >>> O2 by ReaxFF 23 | >>> molecular energy -5.588 atomic energy -0.109 24 | >>> O2 by vdW-DF spin-polarised 25 | >>> molecular energy -9.196 atomic energy -1.491 26 | >>> ZPE 0.09714 27 | >>> dU 8.683 kJ/mol (exp) 28 | >>> entropy@298.15K 205.147 J/mol (exp) 29 | >>> For two reservoirs, O and Pt 30 | >>> Pt + O2 -> aPtO2 31 | >>> mu_Pt = E_aPtO2 - G_O2 32 | >>> FreeEnergy = E_DFT + ZPE + U(T) + TS + pV 33 | 34 | References: 35 | Thermodynamic Data https://janaf.nist.gov 36 | """ 37 | kJm2eV = units.kJ / units.mol # from kJ/mol to eV 38 | # 300K, PBE-ZPE, experimental data https://janaf.nist.gov 39 | temp_correction = zpe + (dU*kJm2eV) - temperature*(dS/1000*kJm2eV) 40 | pres_correction = units.kB*temperature*np.log(pressure/1.0) # eV 41 | chemical_potential = coef*( 42 | total_energy + temp_correction + pres_correction 43 | ) 44 | 45 | return chemical_potential 46 | 47 | 48 | if __name__ == "__main__": 49 | ... -------------------------------------------------------------------------------- /scripts/plot_mctraj.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import argparse 6 | 7 | import numpy as np 8 | 9 | import matplotlib 10 | import matplotlib.pyplot as plt 11 | plt.style.use("presentation") 12 | 13 | from ase.io import read, write 14 | 15 | parser = argparse.ArgumentParser() 16 | parser.add_argument("TRAJECTORY") 17 | args = parser.parse_args() 18 | 19 | 20 | frames = read(args.TRAJECTORY, ":") 21 | nsteps = len(frames) 22 | energies = np.array([a.get_potential_energy() for a in frames]) 23 | energies -= energies[0] 24 | steps = range(nsteps) 25 | 26 | fig, ax = plt.subplots(1, 1, figsize=(12, 8)) 27 | ax.set_title("Monte Carlo") 28 | ax.set_ylabel("Potential Energy [eV]") 29 | ax.set_xlabel("MC Step") 30 | 31 | ax.plot(steps, energies, alpha=0.5, marker="o") 32 | 33 | 34 | plt.tight_layout() 35 | plt.savefig("./mctraj.png") 36 | 37 | 38 | if __name__ == "__main__": 39 | ... 40 | -------------------------------------------------------------------------------- /src/gdpx/bias/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from ..core.register import registers 6 | from ..utils.strconv import str2array 7 | 8 | """Add bias on potential energy surface. 9 | 10 | Some of bias forces are based on JAX. In the future, we need replace those oft-used 11 | ones to pure python codes as jax need accelerate them a lot. 
12 | 13 | """ 14 | 15 | from .afir import AFIRCalculator 16 | 17 | registers.bias.register("afir")(AFIRCalculator) 18 | 19 | from .bondboost import BondBoostCalculator 20 | 21 | registers.bias.register("bondboost")(BondBoostCalculator) 22 | 23 | from .nuclei import NucleiRepulsionCalculator 24 | 25 | registers.bias.register("nuclei_repulsion")(NucleiRepulsionCalculator) 26 | 27 | from .harmonic import DistanceHarmonicCalculator, PlaneHarmonicCalculator 28 | 29 | registers.bias.register("distance_harmonic")(DistanceHarmonicCalculator) 30 | registers.bias.register("plane_harmonic")(PlaneHarmonicCalculator) 31 | 32 | from .gaussian import (BondGaussianCalculator, CenterOfMassGaussianCalculator, 33 | DistanceGaussianCalculator, RMSDGaussian) 34 | 35 | registers.bias.register("bond_gaussian")(BondGaussianCalculator) 36 | registers.bias.register("center_of_mass_gaussian")(CenterOfMassGaussianCalculator) 37 | registers.bias.register("distance_gaussian")(DistanceGaussianCalculator) 38 | registers.bias.register("rmsd_gaussian")(RMSDGaussian) 39 | 40 | 41 | if __name__ == "__main__": 42 | ... 43 | -------------------------------------------------------------------------------- /src/gdpx/bias/afir/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .afir import AFIRCalculator 6 | 7 | 8 | if __name__ == "__main__": 9 | ... 10 | -------------------------------------------------------------------------------- /src/gdpx/bias/bias.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from typing import NoReturn, List 5 | 6 | import numpy as np 7 | import jax.numpy as jnp 8 | 9 | from ase import Atoms 10 | from ase.calculators.calculator import Calculator 11 | 12 | 13 | class AbstractBias(Calculator): 14 | 15 | implemented_properties = ["energy", "free_energy", "forces"] 16 | 17 | default_parameters = dict() 18 | 19 | def __init__(self, colvars: List[dict]=None, restart=None, label=None, atoms=None, directory=".", **kwargs): 20 | """""" 21 | super().__init__(restart=restart, label=label, atoms=atoms, directory=directory, **kwargs) 22 | 23 | # - check colvar 24 | colvars_ = [] 25 | if isinstance(colvars, list): 26 | for colvar in colvars: 27 | colvars_.append(initiate_colvar(colvar)) 28 | elif isinstance(colvars, dict): 29 | colvars_.append(initiate_colvar(colvars)) 30 | else: 31 | ... 32 | self.colvars = colvars_ 33 | 34 | # - NOTE: set bias function and parameters in subclass! 35 | ... 36 | 37 | return 38 | 39 | def calculate(self, atoms=None, properties=["energy"], system_changes=["positions"]): 40 | """""" 41 | super().calculate(atoms, properties, system_changes) 42 | 43 | positions = jnp.array(atoms.get_positions()) 44 | bias_energy, bias_forces = 0., np.zeros(positions.shape) 45 | for colvar, bias_params in zip(self.colvars, self.bias_params): 46 | ret = self._compute_bias( 47 | positions, colvar, **bias_params 48 | ) 49 | bias_energy += np.asarray(ret[0]) 50 | bias_forces += -np.array(ret[1]) 51 | self.results["energy"] = bias_energy 52 | self.results["forces"] = bias_forces 53 | 54 | return 55 | 56 | 57 | if __name__ == "__main__": 58 | ... 
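# --- Illustrative sketch (not part of gdpx): one way a concrete bias could
# subclass AbstractBias above. It assumes a simple two-atom harmonic restraint;
# the atom indices and spring parameters are hypothetical, and JAX supplies the
# gradient so that `calculate` receives the (energy, gradient) pair it expects
# from `_compute_bias`. `jnp` is the `jax.numpy` alias imported at the top of
# this module.
import jax

class HarmonicDistanceBiasSketch(AbstractBias):

    def __init__(self, i: int = 0, j: int = 1, kspring: float = 2.0, r0: float = 1.5, **kwargs):
        super().__init__(colvars=None, **kwargs)
        # One parameter set per colvar entry; a plain distance needs no colvar object here.
        self.colvars = [None]
        self.bias_params = [dict(i=i, j=j, kspring=kspring, r0=r0)]

    @staticmethod
    def _harmonic_energy(positions, i, j, kspring, r0):
        distance = jnp.linalg.norm(positions[i] - positions[j])
        return 0.5 * kspring * (distance - r0) ** 2

    def _compute_bias(self, positions, colvar, **params):
        # Return (energy, dE/dR); the caller negates the gradient to obtain forces.
        energy_fn = lambda x: self._harmonic_energy(x, **params)
        return jax.value_and_grad(energy_fn)(positions)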
-------------------------------------------------------------------------------- /src/gdpx/bias/gaussian/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .bond import BondGaussianCalculator 6 | from .com import CenterOfMassGaussianCalculator 7 | from .distance import DistanceGaussianCalculator 8 | from .rmsd import RMSDGaussian 9 | 10 | __all__ = [ 11 | "BondGaussianCalculator", 12 | "CenterOfMassGaussianCalculator", 13 | "DistanceGaussianCalculator", 14 | "RMSDGaussian", 15 | ] 16 | 17 | 18 | if __name__ == "__main__": 19 | ... 20 | -------------------------------------------------------------------------------- /src/gdpx/bias/harmonic/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .distance import DistanceHarmonicCalculator 6 | from .plane import PlaneHarmonicCalculator 7 | 8 | 9 | __all__ = ["DistanceHarmonicCalculator", "PlaneHarmonicCalculator"] 10 | 11 | 12 | if __name__ == "__main__": 13 | ... 14 | -------------------------------------------------------------------------------- /src/gdpx/bias/harmonic/plane.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | from typing import Optional, List 7 | 8 | import numpy as np 9 | 10 | from ase import Atoms 11 | from ase.calculators.calculator import Calculator, all_changes 12 | from ase.geometry import find_mic 13 | 14 | 15 | def compute_harmonic_energy_and_forces( 16 | cell, positions, harmonic_position, kspring: float, selected_indices: List[int], 17 | pbc: bool=True 18 | ): 19 | """""" 20 | # - compute forces 21 | distances, vectors = [], [] 22 | for i in selected_indices: 23 | pos_i = copy.deepcopy(positions[i]) 24 | pos_i[2] = 0. 25 | vec, dis = find_mic(harmonic_position - pos_i, cell, pbc=pbc) 26 | vectors.append(vec) 27 | distances.append(dis) 28 | distances = np.array(distances) 29 | 30 | # - compute energy 31 | energy = np.sum(0.5*kspring*distances**2) 32 | 33 | # - compute forces 34 | forces = np.zeros(positions.shape) 35 | for i, vec in zip(selected_indices, vectors): 36 | frc_i = kspring*vec 37 | forces[i] += frc_i 38 | 39 | return energy, forces 40 | 41 | 42 | class PlaneHarmonicCalculator(Calculator): 43 | 44 | implemented_properties = ["energy", "free_energy", "forces"] 45 | 46 | def __init__(self, harmonic_position, kspring: float = 0.1, *args, **kwargs): 47 | """""" 48 | super().__init__(*args, **kwargs) 49 | 50 | #: Spring constant, eV/Ang^2. 51 | self.kspring = kspring 52 | 53 | #: Harmonic position on the plane. 54 | self.harmonic_position = np.array(harmonic_position) 55 | assert self.harmonic_position[2] == 0. 
56 | 57 | return 58 | 59 | def calculate( 60 | self, 61 | atoms: Optional[Atoms] = None, 62 | properties=["energy"], 63 | system_changes=["positions"], 64 | ): 65 | """""" 66 | super().calculate(atoms, properties, system_changes) 67 | 68 | target_indices = [45, 46, 47] 69 | 70 | energy, forces = compute_harmonic_energy_and_forces( 71 | atoms.cell, atoms.positions, self.harmonic_position, self.kspring, 72 | selected_indices=target_indices, pbc=True 73 | ) 74 | 75 | print(f"{energy =}") 76 | print(f"{forces[target_indices, :] =}") 77 | 78 | self.results["energy"] = energy 79 | self.results["free_energy"] = energy 80 | self.results["forces"] = forces 81 | 82 | return 83 | 84 | 85 | if __name__ == "__main__": 86 | ... 87 | -------------------------------------------------------------------------------- /src/gdpx/bias/timeio.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | 7 | from ase.calculators.calculator import Calculator 8 | 9 | 10 | class TimeIOCalculator(Calculator): 11 | 12 | def __init__(self, pace: int = 1, delay: int = 0, *args, **kwargs): 13 | """""" 14 | super().__init__(*args, **kwargs) 15 | 16 | self.pace = pace 17 | 18 | self.delay = delay 19 | 20 | self._num_steps = 0 21 | 22 | return 23 | 24 | @property 25 | def num_steps(self) -> int: 26 | """Finished steps that match the host driver e.g. MD.""" 27 | 28 | return self._num_steps 29 | 30 | @property 31 | def log_fpath(self): 32 | """""" 33 | 34 | return pathlib.Path(self.directory) / "calc.log" 35 | 36 | def calculate( 37 | self, 38 | atoms=None, 39 | properties=["energy"], 40 | system_changes=["positions", "numbers", "cell"], 41 | ): 42 | super().calculate(atoms, properties, system_changes) 43 | 44 | if self.num_steps == 0: 45 | self._write_first_step() 46 | 47 | self.results, self.step_info = self._icalculate( 48 | atoms, properties, system_changes 49 | ) 50 | 51 | if self.num_steps % self.pace == 0: 52 | self._write_step() 53 | 54 | self._num_steps += 1 55 | 56 | return 57 | 58 | def _icalculate(self, atoms, properties, system_changes): 59 | """""" 60 | 61 | raise NotImplementedError() 62 | 63 | def _write_first_step(self): 64 | """""" 65 | 66 | raise NotImplementedError() 67 | 68 | def _write_step(self): 69 | """""" 70 | 71 | raise NotImplementedError() 72 | 73 | 74 | if __name__ == "__main__": 75 | ... 76 | -------------------------------------------------------------------------------- /src/gdpx/bias/utils/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .bondpair import compute_distance_and_shift, get_bond_information, get_equidis_dict 6 | 7 | 8 | __all__ = ["compute_distance_and_shift", "get_bond_information", "get_equidis_dict"] 9 | 10 | 11 | if __name__ == "__main__": 12 | ... 
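A note on PlaneHarmonicCalculator (harmonic/plane.py above): the restrained indices (45, 46, 47) and the two print calls inside `calculate` are hard-coded and read like leftover debugging; a more general version would take the indices as a constructor argument. The module-level helper is reusable on its own; a hedged usage sketch with a toy structure and illustrative values:

    import numpy as np
    from ase import Atoms

    from gdpx.bias.harmonic.plane import compute_harmonic_energy_and_forces

    atoms = Atoms(
        "Cu2",
        positions=[[0.0, 0.0, 5.0], [2.0, 2.0, 5.0]],
        cell=np.eye(3) * 10.0,
        pbc=True,
    )
    # pull atom 1 toward the point (0, 0) in the xy-plane with k = 0.1 eV/Ang^2
    energy, forces = compute_harmonic_energy_and_forces(
        atoms.cell, atoms.positions, harmonic_position=np.array([0.0, 0.0, 0.0]),
        kspring=0.1, selected_indices=[1], pbc=True,
    )
    # forces[1] is the restoring force on atom 1; all other rows stay zero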
13 | -------------------------------------------------------------------------------- /src/gdpx/builder/cleave_group.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from typing import List 6 | 7 | from ase import Atoms 8 | 9 | from .builder import StructureModifier 10 | from .group import create_a_group 11 | 12 | """""" 13 | 14 | 15 | class CleaveGroupModifier(StructureModifier): 16 | 17 | name: str = "cleave_group" 18 | 19 | def __init__(self, group, substrates=None, *args, **kwargs): 20 | """""" 21 | super().__init__(substrates, *args, **kwargs) 22 | 23 | self.group = group 24 | 25 | return 26 | 27 | def run(self, substrates=None, *args, **kwargs) -> List[Atoms]: 28 | """""" 29 | super().run(substrates=substrates, *args, **kwargs) 30 | 31 | frames = [] 32 | for atoms in substrates: 33 | ainds = create_a_group(atoms, self.group) 34 | frames.append(atoms[ainds]) 35 | 36 | return frames 37 | 38 | 39 | if __name__ == "__main__": 40 | ... 41 | -------------------------------------------------------------------------------- /src/gdpx/builder/crossover.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from ase.ga.particle_crossovers import CutSpliceCrossover 5 | from ase.ga.cutandsplicepairing import CutAndSplicePairing 6 | 7 | from gdpx.core.register import registers 8 | 9 | registers.builder.register("cut_and_splice")(CutAndSplicePairing) 10 | registers.builder.register("cut_and_splice_cluster")(CutSpliceCrossover) 11 | 12 | 13 | 14 | if __name__ == "__main__": 15 | ... -------------------------------------------------------------------------------- /src/gdpx/builder/dimer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from typing import List 5 | from pathlib import Path 6 | 7 | import numpy as np 8 | 9 | from ase import Atoms 10 | from ase.constraints import FixAtoms 11 | 12 | from .builder import StructureBuilder 13 | 14 | 15 | class DimerBuilder(StructureBuilder): 16 | 17 | def __init__( 18 | self, 19 | elements: List[str], 20 | distances: List[float] = [0.8, 2.5, 0.05], 21 | directory=Path.cwd(), 22 | *args, 23 | **kwargs, 24 | ): 25 | """""" 26 | super().__init__(directory, *args, **kwargs) 27 | 28 | self.elements = elements 29 | assert ( 30 | len(self.elements) == 2 31 | ), "DimerBuilder needs two chemical symbols as elements." 32 | 33 | self.distances = distances 34 | assert ( 35 | len(self.distances) == 3 36 | ), "DimerBuilder needs min, max and intv for the distance." 37 | 38 | return 39 | 40 | def run(self, *args, **kwargs) -> List[Atoms]: 41 | """""" 42 | super().run(*args, **kwargs) 43 | 44 | dmin, dmax, intv = self.distances 45 | distances = np.arange(dmin, dmax + intv, intv) 46 | self._print(f"{distances}") 47 | 48 | frames = [] 49 | for dis in distances: 50 | atoms = Atoms( 51 | symbols=self.elements, 52 | positions=[[10.0, 10.0, 10.0], [10.0, 10.0, 10.0+dis]], 53 | # cell = 20.*np.eye(3), 54 | cell=[[19.0, 0.0, 0.0], [0.0, 20.0, 0.0], [0.0, 0.0, 21.0]], 55 | pbc=[True, True, True], 56 | ) 57 | atoms.set_constraint(FixAtoms(indices=[0])) 58 | frames.append(atoms) 59 | 60 | return frames 61 | 62 | 63 | if __name__ == "__main__": 64 | ... 
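DimerBuilder above produces one frame per scanned distance, with the first atom fixed by a FixAtoms constraint so later relaxations only move the second atom. A short usage sketch (the element choice is illustrative):

    from gdpx.builder.dimer import DimerBuilder

    builder = DimerBuilder(elements=["Cu", "O"], distances=[0.8, 2.5, 0.05])
    frames = builder.run()                              # list of two-atom Atoms, one per distance
    print(len(frames), frames[0].get_distance(0, 1))    # first frame is at the minimum distance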
65 | -------------------------------------------------------------------------------- /src/gdpx/builder/graph/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .insert import GraphInsertModifier 6 | from .remove import GraphRemoveModifier 7 | from .exchange import GraphExchangeModifier 8 | 9 | 10 | __all__ = [GraphInsertModifier, GraphRemoveModifier, GraphExchangeModifier] 11 | 12 | 13 | if __name__ == "__main__": 14 | ... -------------------------------------------------------------------------------- /src/gdpx/builder/mutation/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from ase.ga.standardmutations import RattleMutation, PermutationMutation, MirrorMutation 5 | from ase.ga.particle_mutations import ( 6 | RandomMutation, RandomPermutation, COM2surfPermutation, Poor2richPermutation, 7 | Rich2poorPermutation, SymmetricSubstitute, RandomSubstitute 8 | ) 9 | from ase.ga.standardmutations import StrainMutation 10 | from ase.ga.soft_mutation import SoftMutation, BondElectroNegativityModel 11 | 12 | from gdpx.core.register import registers 13 | 14 | # - standard 15 | registers.builder.register("rattle")(RattleMutation) 16 | registers.builder.register("permutation")(PermutationMutation) 17 | registers.builder.register("mirror")(MirrorMutation) 18 | 19 | # - bulk 20 | registers.builder.register("strain")(StrainMutation) 21 | registers.builder.register("soft")(SoftMutation) 22 | 23 | # - cluster 24 | #registers.builder.register("random")(RandomMutation) 25 | 26 | 27 | if __name__ == "__main__": 28 | ... -------------------------------------------------------------------------------- /src/gdpx/builder/repeat.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from typing import List, Union 5 | 6 | from ase import Atoms 7 | 8 | from .builder import StructureModifier 9 | 10 | 11 | class RepeatModifier(StructureModifier): 12 | 13 | name = "repeat" 14 | 15 | def __init__(self, repeat: Union[int,List[int]]=1, substrates=None, *args, **kwargs): 16 | """""" 17 | super().__init__(substrates, *args, **kwargs) 18 | 19 | self.repeat = repeat 20 | 21 | return 22 | 23 | def run(self, substrates: List[Atoms], size: int=1, *args, **kwargs): 24 | """""" 25 | super().run(substrates=substrates, *args, **kwargs) 26 | 27 | frames = [] 28 | for substrate in self.substrates: 29 | frames.append(substrate.repeat(self.repeat)) 30 | 31 | return frames 32 | 33 | 34 | if __name__ == "__main__": 35 | ... -------------------------------------------------------------------------------- /src/gdpx/builder/scan/intercoord.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import numpy as np 6 | 7 | import jax 8 | import jax.numpy as jnp 9 | 10 | 11 | @jax.jit 12 | def compute_bond_angles(positions, trimer_indices): 13 | """Compute angles. 14 | 15 | For very small/acute angles, results by arccos are inaccurate. arctan may be 16 | more effective. 
17 | 18 | """ 19 | trimer_positions = jnp.take(positions, trimer_indices.T, axis=0) 20 | # TODO: shifts 21 | dvecs1 = trimer_positions[1] - trimer_positions[0] 22 | dnorms1 = jnp.linalg.norm(dvecs1, axis=1) 23 | dvecs2 = trimer_positions[2] - trimer_positions[0] 24 | dnorms2 = jnp.linalg.norm(dvecs2, axis=1) 25 | 26 | angles = jnp.arccos(jnp.sum(dvecs1 * dvecs2, axis=1) / dnorms1 / dnorms2) 27 | 28 | return angles 29 | 30 | 31 | compute_angle_jacobian = jax.jacrev(compute_bond_angles, argnums=0) 32 | 33 | 34 | @jax.jit 35 | def pseudo_inverse_of_jacobian(jac, eps=0.0001): 36 | """""" 37 | dim = jac.shape[0] 38 | jac_inv = jnp.transpose(jac) @ jnp.linalg.inv( 39 | jac @ jnp.transpose(jac) + eps * jnp.eye(dim) 40 | ) 41 | 42 | return jac_inv 43 | 44 | 45 | @jax.jit 46 | def optimisation_step(jac, disp, eps=0.0001): 47 | """""" 48 | jac_inv = pseudo_inverse_of_jacobian(jac, eps) 49 | 50 | return jnp.reshape(jac_inv @ disp, (-1, 3)) 51 | 52 | 53 | if __name__ == "__main__": 54 | ... 55 | -------------------------------------------------------------------------------- /src/gdpx/builder/wulff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from typing import List 6 | 7 | import numpy as np 8 | 9 | from ase import Atoms 10 | from ase.cluster import wulff_construction 11 | 12 | from .builder import StructureBuilder 13 | 14 | 15 | class WulffConstructionBuilder(StructureBuilder): 16 | 17 | name = "wulff_construction" 18 | 19 | def __init__(self, vacuum_size: float = -1, *args, **kwargs) -> None: 20 | """""" 21 | super().__init__(*args, **kwargs) 22 | 23 | self.vacuum_size = vacuum_size 24 | 25 | self.parameters = dict(rounding="closest", latticeconstant=None) 26 | self.parameters.update(**kwargs) 27 | 28 | return 29 | 30 | def run(self, size: int = 1, *args, **kwargs) -> List[Atoms]: 31 | """""" 32 | super().run(*args, **kwargs) 33 | 34 | frames = self._irun() 35 | 36 | return frames 37 | 38 | def _irun(self) -> List[Atoms]: 39 | """""" 40 | atoms = wulff_construction( 41 | symbol=self.parameters["symbol"], 42 | surfaces=self.parameters["surfaces"], 43 | energies=self.parameters["energies"], 44 | size=self.parameters["num_atoms"], 45 | structure=self.parameters["crystal_structure"], 46 | rounding=self.parameters["rounding"], 47 | latticeconstant=self.parameters["latticeconstant"], 48 | ) 49 | 50 | if self.vacuum_size > 0.0: 51 | lengths = np.max(atoms.positions, axis=0) - np.min(atoms.positions, axis=0) 52 | lengths += self.vacuum_size 53 | cell = np.zeros((3, 3)) 54 | np.fill_diagonal(cell, lengths) 55 | atoms.pbc = True 56 | atoms.cell = cell 57 | atoms.positions += np.sum(cell, axis=0)/2. - atoms.get_center_of_mass() 58 | 59 | return [atoms] 60 | 61 | 62 | if __name__ == "__main__": 63 | ... 64 | -------------------------------------------------------------------------------- /src/gdpx/builder/zoom.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | from typing import List 6 | 7 | import numpy as np 8 | 9 | from ase import Atoms 10 | from ase.io import read, write 11 | 12 | from .builder import StructureModifier 13 | 14 | 15 | class ZoomModifier(StructureModifier): 16 | 17 | """Extend or compress bulk. 
18 | """ 19 | 20 | def __init__(self, coefs: List[float]=None, substrates=None, *args, **kwargs): 21 | """""" 22 | super().__init__(substrates=substrates, *args, **kwargs) 23 | if coefs is None: 24 | coefs = np.arange(0.6, 1.8, 0.05) 25 | self.coefs = coefs 26 | 27 | return 28 | 29 | def run(self, substrates=None, size: int=1, *args, **kwargs) -> List[Atoms]: 30 | """""" 31 | super().run(substrates=substrates, *args, **kwargs) 32 | 33 | frames = [] 34 | for substrate in self.substrates: 35 | curr_frames = self._irun(substrate=substrate, size=size, *args, **kwargs) 36 | frames.extend(curr_frames) 37 | 38 | return frames 39 | 40 | def _irun(self, substrate: Atoms, size: int, *args, **kwargs) -> List[Atoms]: 41 | """""" 42 | volume = substrate.get_volume() 43 | cell = copy.deepcopy(substrate.get_cell(complete=True)) 44 | 45 | frames = [] 46 | for i in self.coefs: 47 | atoms = copy.deepcopy(substrate) 48 | atoms.set_cell(cell*(i)**(1/3.), scale_atoms=True) 49 | frames.append(atoms) 50 | 51 | return frames 52 | 53 | 54 | if __name__ == "__main__": 55 | ... -------------------------------------------------------------------------------- /src/gdpx/cli/build.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | from typing import Union 7 | 8 | from ase.io import read, write 9 | 10 | from ..builder.builder import StructureBuilder 11 | from ..builder.interface import BuilderVariable 12 | 13 | 14 | def create_builder(config: Union[str, dict]) -> StructureBuilder: 15 | """""" 16 | supported_configtypes = ["json", "yaml"] 17 | if isinstance(config, (str, pathlib.Path)): 18 | params = str(config) 19 | suffix = params[-4:] 20 | if suffix in supported_configtypes: 21 | from gdpx.utils.command import parse_input_file 22 | 23 | params = parse_input_file(config) 24 | else: # assume it is an ASE readable structure file 25 | # FIXME: separate reading structures from a file or a direct python object 26 | params = dict(method="direct", frames=params) 27 | 28 | builder: StructureBuilder = BuilderVariable(**params).value 29 | 30 | return builder 31 | 32 | 33 | def build_structures( 34 | config: dict, substrates=None, size: int = 1, directory: str = "./" 35 | ): 36 | """""" 37 | directory = pathlib.Path(directory) 38 | 39 | builder: StructureBuilder = BuilderVariable(directory=directory, **config).value 40 | builder.directory = directory 41 | 42 | # assume substrates is a file path 43 | if substrates is not None: 44 | substrates = read(substrates, ":") 45 | 46 | frames = builder.run(substrates=substrates, size=size) 47 | 48 | write(directory / "structures.xyz", frames) 49 | 50 | return 51 | 52 | 53 | if __name__ == "__main__": 54 | ... 55 | -------------------------------------------------------------------------------- /src/gdpx/cli/explore.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | import pathlib 6 | import time 7 | 8 | from typing import Optional 9 | 10 | 11 | from .. 
import config 12 | from ..scheduler.interface import SchedulerVariable 13 | from ..expedition.interface import ExpeditionVariable 14 | from ..worker.explore import ExpeditionBasedWorker 15 | 16 | 17 | def run_expedition( 18 | exp_params: dict, wait: Optional[float] = None, directory: str = "./", potter=None 19 | ): 20 | """""" 21 | directory = pathlib.Path(directory) 22 | 23 | if potter is not None: 24 | exp_params["worker"] = potter 25 | else: 26 | if "worker" not in exp_params: 27 | raise RuntimeError("Expedition must have a worker.") 28 | 29 | scheduler_params = exp_params.pop("scheduler", {}) 30 | scheduler = SchedulerVariable(**scheduler_params).value 31 | 32 | expedition = ExpeditionVariable(directory=directory, **exp_params).value 33 | expedition.directory = directory 34 | if hasattr(expedition, "register_worker"): 35 | expedition.register_worker(exp_params["worker"]) 36 | 37 | if scheduler.name == "local": 38 | if wait is not None: 39 | for i in range(1000): 40 | expedition.run() 41 | if expedition.read_convergence(): 42 | break 43 | time.sleep(wait) 44 | config._print(f"wait {wait} seconds...") 45 | else: 46 | ... 47 | else: 48 | expedition.run() 49 | else: # submit to queue 50 | worker = ExpeditionBasedWorker( 51 | expedition=expedition, scheduler=scheduler, directory=directory 52 | ) 53 | worker.run() 54 | worker.inspect(resubmit=True) 55 | if worker.get_number_of_running_jobs() == 0: 56 | config._print("Expedition finished...") 57 | else: 58 | ... 59 | 60 | return 61 | 62 | 63 | if __name__ == "__main__": 64 | ... 65 | -------------------------------------------------------------------------------- /src/gdpx/cli/select.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | from typing import Union 7 | 8 | from ..data.array import AtomsNDArray 9 | from ..selector.interface import SelectorVariable 10 | from ..selector.selector import AbstractSelector 11 | from .build import create_builder 12 | 13 | 14 | def run_selection( 15 | param_file: Union[str, pathlib.Path], 16 | structure: Union[str, dict], 17 | directory: Union[str, pathlib.Path] = "./", 18 | ) -> None: 19 | """Run selection with input selector and input structures. 20 | 21 | This no more accepts a worker as all data used in the selection should be 22 | computed in advance. 23 | 24 | """ 25 | directory = pathlib.Path(directory) 26 | if not directory.exists(): 27 | directory.mkdir(parents=True, exist_ok=False) 28 | 29 | from gdpx.utils.command import parse_input_file 30 | 31 | params = parse_input_file(param_file) 32 | 33 | selector: AbstractSelector = SelectorVariable(directory=directory, **params).value 34 | selector.directory = directory 35 | 36 | # - read structures 37 | builder = create_builder(structure) 38 | frames = builder.run() # -> List[Atoms] 39 | 40 | # TODO: convert to a bundle of atoms? 41 | data = AtomsNDArray(frames) 42 | 43 | # - 44 | selected_frames = selector.select(data) 45 | 46 | from ase.io import read, write 47 | 48 | write(directory / "selected_frames.xyz", selected_frames) 49 | 50 | return 51 | 52 | 53 | if __name__ == "__main__": 54 | ... 55 | -------------------------------------------------------------------------------- /src/gdpx/colvar/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | try: 6 | import jax 7 | except Exception as e: 8 | ... 
9 | 10 | from ..core.register import registers 11 | 12 | from .distance import DistanceColvar 13 | registers.colvar.register("DistanceColvar")(DistanceColvar) 14 | 15 | from .rmsd import RmsdColvar 16 | registers.colvar.register("RmsdColvar")(RmsdColvar) 17 | 18 | from .fingerprint import FingerprintColvar 19 | registers.colvar.register("FingerprintColvar")(FingerprintColvar) 20 | 21 | from .position import position 22 | registers.colvar.register("position")(position) 23 | 24 | 25 | if __name__ == "__main__": 26 | ... -------------------------------------------------------------------------------- /src/gdpx/colvar/coordination.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | import numpy as np 6 | from scipy.spatial import distance_matrix 7 | 8 | 9 | from ase.io import read, write 10 | 11 | 12 | def switch_function(distances, r_cut, r_shift=0., nn=6, mm: int=None): 13 | """""" 14 | if mm is None: 15 | mm = nn*2 16 | 17 | scaled_distances = (distances - r_shift) / r_cut 18 | 19 | return (1 - scaled_distances**nn) / (1 - scaled_distances**mm) 20 | 21 | 22 | def compute_coordination_number(): 23 | """""" 24 | frames = read( 25 | "/scratch/gpfs/jx1279/copper+alumina/dptrain/r8/_explore/_mrxn/Cu13+s001p32/_sinter/3xCu13/_200ps/cand0_400K_200ps/traj.dump", 26 | ":1" 27 | ) 28 | nframes = len(frames) 29 | print(f"nframes: {nframes}") 30 | 31 | atoms = frames[0] 32 | 33 | positions = atoms.positions[-13:, :] 34 | print(positions) 35 | clusters = [ 36 | atoms.positions[-13:, :], 37 | atoms.positions[-26:-13, :], 38 | atoms.positions[-39:-26, :], 39 | ] 40 | 41 | com_clusters = [np.mean(p, axis=0) for p in clusters] 42 | print(com_clusters) 43 | print(np.linalg.norm(com_clusters[2] - [0., 25.239, 0.] - com_clusters[1])) 44 | print(np.linalg.norm(com_clusters[2] - com_clusters[0])) 45 | 46 | def xxx(positions): 47 | dmat = distance_matrix(positions, positions) 48 | #print(dmat) 49 | 50 | sf = switch_function(dmat, r_cut=3.8, nn=8, mm=14) 51 | np.fill_diagonal(sf, 0.) 52 | coordination = np.sum(sf, axis=1) 53 | print(coordination) 54 | 55 | return coordination 56 | 57 | for positions in clusters: 58 | print(np.sum(xxx(positions))) 59 | 60 | return 61 | 62 | 63 | if __name__ == "__main__": 64 | compute_coordination_number() 65 | ... -------------------------------------------------------------------------------- /src/gdpx/colvar/distance.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import numpy as np 6 | 7 | import jax 8 | import jax.numpy as jnp 9 | 10 | 11 | class DistanceColvar(): 12 | 13 | def __init__(self, pairs, *args, **kwargs) -> None: 14 | """""" 15 | self.params = np.array(pairs).T 16 | 17 | return 18 | 19 | @property 20 | def dim(self): 21 | """""" 22 | 23 | return 1 24 | 25 | @staticmethod 26 | @jax.jit 27 | def cvfunc(positions, params): 28 | """""" 29 | pair_indices = params 30 | 31 | pair_positions = jnp.take(positions, pair_indices, axis=0) 32 | dvecs = pair_positions[0] - pair_positions[1] 33 | distances = jnp.linalg.norm(dvecs, axis=1) 34 | 35 | return distances[jnp.newaxis, :] # (1, num_dim) 36 | 37 | 38 | if __name__ == "__main__": 39 | ... 
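DistanceColvar above stores the atom-index pairs as a (2, n_pairs) array in `params` and evaluates all pair distances in one jitted call; because `cvfunc` is a pure jax function, its derivative with respect to the positions is available directly. A small sketch of both:

    import jax
    import jax.numpy as jnp

    from gdpx.colvar.distance import DistanceColvar

    cv = DistanceColvar(pairs=[[0, 1], [0, 2]])
    positions = jnp.array([[0.0, 0.0, 0.0], [1.5, 0.0, 0.0], [0.0, 2.0, 0.0]])
    values = cv.cvfunc(positions, cv.params)            # shape (1, 2): the two distances
    jac = jax.jacrev(cv.cvfunc)(positions, cv.params)   # d(distance)/d(positions), shape (1, 2, 3, 3)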
-------------------------------------------------------------------------------- /src/gdpx/colvar/position.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | def position(positions, axis=slice(None, None, 1)): 5 | """""" 6 | 7 | return positions[:, axis] 8 | 9 | 10 | if __name__ == "__main__": 11 | ... -------------------------------------------------------------------------------- /src/gdpx/colvar/rmsd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import numpy as np 6 | 7 | from ase.io import read, write 8 | 9 | from dscribe.descriptors import ValleOganov 10 | 11 | 12 | class RmsdColvar(): 13 | 14 | def __init__(self, data) -> None: 15 | """""" 16 | 17 | self.params = {} 18 | 19 | # - landmarks 20 | frames_ = read(data, ":") 21 | frames = [a[240:] for a in frames_] 22 | 23 | _, self.features = self.calculate_features(frames) 24 | 25 | return 26 | 27 | @property 28 | def dim(self): 29 | """""" 30 | 31 | return 1 32 | 33 | def calculate_features(self, frames): 34 | """""" 35 | vo2 = ValleOganov( 36 | species = ["Cu"], 37 | function = "distance", 38 | sigma = 10**(-5), 39 | n = 161, 40 | #n = 17, 41 | r_cut = 8., 42 | ) 43 | 44 | vo3 = ValleOganov( 45 | species = ["Cu"], 46 | function = "angle", 47 | sigma = 10**(-5), 48 | n = 181, 49 | r_cut = 4., 50 | ) 51 | 52 | #vo2_features = vo2.create(frames) 53 | vo2_gradients, vo2_features = vo2.derivatives(frames) 54 | #print(vo2_features.shape) 55 | #print(vo2_gradients.shape) 56 | 57 | return vo2_gradients, vo2_features 58 | 59 | def cvfunc(self, atoms, params): 60 | """""" 61 | gradients, features = self.calculate_features([atoms[240:]]) 62 | features = features[np.newaxis, :] # (1, feature_dimension) 63 | gradients = gradients[np.newaxis, :, :, :] 64 | 65 | #cv = 66 | 67 | return 68 | 69 | 70 | 71 | if __name__ == "__main__": 72 | ... -------------------------------------------------------------------------------- /src/gdpx/comparator/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from ase.ga.ofp_comparator import OFPComparator 5 | from ase.ga.particle_comparator import NNMatComparator 6 | from ase.ga.standard_comparators import InteratomicDistanceComparator 7 | 8 | from ..core.register import registers 9 | 10 | registers.comparator.register(OFPComparator) 11 | registers.comparator.register(NNMatComparator) 12 | registers.comparator.register(InteratomicDistanceComparator) 13 | 14 | from .cartesian import CartesianComparator 15 | registers.comparator.register(CartesianComparator) 16 | 17 | from .coordination import CoordinationComparator 18 | registers.comparator.register(CoordinationComparator) 19 | 20 | from .graph import GraphComparator 21 | registers.comparator.register(GraphComparator) 22 | 23 | from .singlepoint import SinglePointComparator 24 | registers.comparator.register("single_point")(SinglePointComparator) 25 | 26 | from .reaction import ReactionComparator 27 | registers.comparator.register("reaction")(ReactionComparator) 28 | 29 | 30 | 31 | if __name__ == "__main__": 32 | ... 
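The block above fills the comparator register with ASE comparators and the local ones; "single_point" and "reaction" get explicit keys, while the rest are registered by class. A hypothetical user-defined comparator following the same registration pattern (the class, its name, and its tolerance are illustrative; `looks_like`/`compare_composition` follow the AbstractComparator interface in comparator.py below):

    from gdpx.core.register import registers
    from gdpx.comparator.comparator import AbstractComparator


    class EnergyWindowComparator(AbstractComparator):
        """Hypothetical comparator: structures match if compositions agree and energies are close."""

        def __init__(self, etol: float = 0.01, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.etol = etol

        def looks_like(self, a1, a2) -> bool:
            if not self.compare_composition(a1, a2):
                return False
            return abs(a1.get_potential_energy() - a2.get_potential_energy()) <= self.etol


    registers.comparator.register("energy_window")(EnergyWindowComparator)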
-------------------------------------------------------------------------------- /src/gdpx/comparator/comparator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import abc 6 | 7 | import numpy as np 8 | 9 | from ..core.node import AbstractNode 10 | 11 | 12 | class AbstractComparator(AbstractNode): 13 | 14 | ... 15 | 16 | def compare_composition(self, a1, a2): 17 | """""" 18 | # TODO: compare PBC? 19 | is_similar = False 20 | na1, na2 = len(a1), len(a2) 21 | if na1 == na2: 22 | c1, c2 = a1.get_cell(complete=True), a2.get_cell(complete=True) 23 | if np.allclose(c1, c2): 24 | s1, s2 = a1.get_chemical_formula(), a2.get_chemical_formula() 25 | if s1 == s2: 26 | is_similar = True 27 | else: 28 | ... 29 | else: 30 | ... 31 | 32 | return is_similar 33 | 34 | def __call__(self, a1, a2) -> bool: 35 | """""" 36 | 37 | return self.looks_like(a1, a2) 38 | 39 | 40 | if __name__ == "__main__": 41 | ... -------------------------------------------------------------------------------- /src/gdpx/comparator/interface.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from ..core.register import registers 5 | from ..core.variable import Variable 6 | from ..core.operation import Operation 7 | from ..core.variable import DummyVariable 8 | 9 | 10 | @registers.variable.register 11 | class ComparatorVariable(Variable): 12 | 13 | def __init__(self, directory="./", *args, **kwargs): 14 | """""" 15 | method = kwargs.pop("method", None) 16 | comparator = registers.create("comparator", method, convert_name=False, **kwargs) 17 | super().__init__(initial_value=comparator, directory=directory) 18 | 19 | return 20 | 21 | 22 | @registers.operation.register 23 | class compare(Operation): 24 | 25 | status = "finished" # Always finished since it is not time-consuming 26 | 27 | def __init__(self, reference, prediction = DummyVariable(), comparator = DummyVariable(), directory="./") -> None: 28 | """""" 29 | super().__init__(input_nodes=[reference, prediction, comparator], directory=directory) 30 | 31 | return 32 | 33 | def forward(self, reference, prediction, comparator): 34 | """""" 35 | super().forward() 36 | 37 | comparator.directory = self.directory 38 | comparator.run(prediction, reference) 39 | 40 | return 41 | 42 | 43 | if __name__ == "__main__": 44 | ... -------------------------------------------------------------------------------- /src/gdpx/computation/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | from .. import config 5 | from ..core.register import Register 6 | 7 | # - driver (dynamics) backends... 
8 | register_drivers = Register("driver") 9 | 10 | from gdpx.computation.asedriver import AseDriver 11 | register_drivers.register("ase")(AseDriver) 12 | 13 | try: 14 | from .jarex import JarexDriver 15 | register_drivers.register("jax")(JarexDriver) 16 | except ImportError: 17 | config._print(f"Driver Backend `jax` is not imported.") 18 | 19 | from gdpx.computation.lammps import LmpDriver 20 | register_drivers.register("lammps")(LmpDriver) 21 | 22 | from gdpx.computation.lasp import LaspDriver 23 | register_drivers.register("lasp")(LaspDriver) 24 | 25 | from .abacus import AbacusDriver 26 | register_drivers.register("abacus")(AbacusDriver) 27 | 28 | from gdpx.computation.vasp import VaspDriver 29 | register_drivers.register("vasp")(VaspDriver) 30 | 31 | from gdpx.computation.cp2k import Cp2kDriver 32 | register_drivers.register("cp2k")(Cp2kDriver) 33 | 34 | 35 | if __name__ == "__main__": 36 | ... 37 | -------------------------------------------------------------------------------- /src/gdpx/computation/md/md_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | """ 4 | 5 | import numpy as np 6 | 7 | from ase.md.md import MolecularDynamics 8 | from ase.md.velocitydistribution import Stationary 9 | from ase import units 10 | 11 | def force_temperature(atoms, temperature, unit="K"): 12 | """ force (nucl.) temperature to have a precise value 13 | 14 | Parameters: 15 | atoms: ase.Atoms 16 | the structure 17 | temperature: float 18 | nuclear temperature to set 19 | unit: str 20 | 'K' or 'eV' as unit for the temperature 21 | """ 22 | 23 | eps_temp = 1e-12 24 | 25 | if unit == "K": 26 | E_temp = temperature * units.kB 27 | elif unit == "eV": 28 | E_temp = temperature 29 | else: 30 | raise ValueError("'{}' is not supported, use 'K' or 'eV'.".format(unit)) 31 | 32 | # check DOF 33 | ndof = 3*len(atoms) 34 | for constraint in atoms._constraints: 35 | ndof -= constraint.get_removed_dof(atoms) 36 | 37 | # calculate kinetic energy and get the scale 38 | if temperature > eps_temp: 39 | E_kin0 = atoms.get_kinetic_energy() / (0.5 * ndof) 40 | gamma = E_temp / E_kin0 41 | else: 42 | gamma = 0.0 43 | 44 | atoms.set_momenta(atoms.get_momenta() * np.sqrt(gamma)) 45 | 46 | return 47 | 48 | -------------------------------------------------------------------------------- /src/gdpx/computation/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | from typing import List, Callable 6 | 7 | from ase import Atoms 8 | from ase.calculators.singlepoint import SinglePointCalculator 9 | 10 | 11 | def copy_minimal_frames(prev_frames: List[Atoms]): 12 | """Copy atoms without extra information. 13 | 14 | Do not copy atoms.info since it is a dict and does not maintain order.
15 | 16 | """ 17 | curr_frames, curr_info = [], [] 18 | for prev_atoms in prev_frames: 19 | # - copy geometry 20 | curr_atoms = Atoms( 21 | symbols=copy.deepcopy(prev_atoms.get_chemical_symbols()), 22 | positions=copy.deepcopy(prev_atoms.get_positions()), 23 | cell=copy.deepcopy(prev_atoms.get_cell(complete=True)), 24 | pbc=copy.deepcopy(prev_atoms.get_pbc()), 25 | ) 26 | curr_frames.append(curr_atoms) 27 | # - save info 28 | confid = prev_atoms.info.get("confid", -1) 29 | dynstep = prev_atoms.info.get("step", -1) 30 | prev_wdir = prev_atoms.info.get("wdir", "null") 31 | curr_info.append((confid, dynstep, prev_wdir)) 32 | 33 | return curr_frames, curr_info 34 | 35 | 36 | def make_clean_atoms(atoms_: Atoms, results: dict = None): 37 | """Create a clean atoms from the input.""" 38 | atoms = Atoms( 39 | symbols=atoms_.get_chemical_symbols(), 40 | positions=atoms_.get_positions().copy(), 41 | cell=atoms_.get_cell().copy(), 42 | pbc=copy.deepcopy(atoms_.get_pbc()), 43 | ) 44 | if results is not None: 45 | spc = SinglePointCalculator(atoms, **results) 46 | atoms.calc = spc 47 | 48 | return atoms 49 | 50 | 51 | def parse_type_list(atoms): 52 | """parse type list for read and write structure of lammps""" 53 | # elements 54 | type_list = list(set(atoms.get_chemical_symbols())) 55 | type_list.sort() # by alphabet 56 | 57 | return type_list 58 | 59 | 60 | def get_composition_from_atoms(atoms): 61 | """""" 62 | from collections import Counter 63 | 64 | chemical_symbols = atoms.get_chemical_symbols() 65 | composition = Counter(chemical_symbols) 66 | sorted_composition = sorted(composition.items(), key=lambda x: x[0]) 67 | 68 | return sorted_composition 69 | 70 | 71 | def get_formula_from_atoms(atoms): 72 | """""" 73 | from collections import Counter 74 | 75 | chemical_symbols = atoms.get_chemical_symbols() 76 | composition = Counter(chemical_symbols) 77 | sorted_composition = sorted(composition.items(), key=lambda x: x[0]) 78 | 79 | return "".join([str(k) + str(v) for k, v in sorted_composition]) 80 | 81 | 82 | if __name__ == "__main__": 83 | ... 84 | -------------------------------------------------------------------------------- /src/gdpx/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | """Some shared configuration parameters. 5 | """ 6 | 7 | import logging 8 | from typing import Union, List, Callable 9 | 10 | #: 11 | logger = logging.getLogger("GDP") 12 | logger.setLevel(logging.INFO) 13 | 14 | formatter = logging.Formatter( 15 | "%(asctime)s - %(levelname)s: %(message)s", 16 | datefmt="%Y%b%d-%H:%M:%S" 17 | #"%(levelname)s: %(module)s - %(message)s" 18 | ) 19 | ch = logging.StreamHandler() 20 | ch.setFormatter(formatter) 21 | logger.addHandler(ch) 22 | 23 | _print: Callable = logger.info 24 | _debug: Callable = logger.debug 25 | 26 | LOGO_LINES = [ 27 | " ____ ____ ______ __ ", 28 | " / ___| _ \| _ \ \/ / ", 29 | "| | _| | | | |_) \ / ", 30 | "| |_| | |_| | __// \ ", 31 | " \____|____/|_| /_/\_\ ", 32 | " ", 33 | ] 34 | 35 | #: Number of parallel jobs for joblib. 
36 | NJOBS: int = 1 37 | 38 | #: Global random number generator 39 | GRNG = None 40 | 41 | # - find default vasp settings 42 | #gdpconfig = Path.home() / ".gdp" 43 | #if gdpconfig.exists() and gdpconfig.is_dir(): 44 | # # find vasp config 45 | # vasprc = gdpconfig / "vasprc.json" 46 | # with open(vasprc, "r") as fopen: 47 | # input_dict = json.load(fopen) 48 | #else: 49 | # input_dict = {} 50 | 51 | #: Model deviations by the committee model. 52 | VALID_DEVI_FRAME_KEYS: List[str] = [ 53 | "devi_te", 54 | "max_devi_v", "min_devi_v", "avg_devi_v", 55 | "max_devi_f", "min_devi_f", "avg_devi_f", 56 | "max_devi_ae", "min_devi_ae", "avg_devi_ae", 57 | ] 58 | 59 | #: Model deviations by the committee model. 60 | VALID_DEVI_ATOMIC_KEYS: List[str] = [ 61 | "devi_f", 62 | ] 63 | 64 | if __name__ == "__main__": 65 | ... -------------------------------------------------------------------------------- /src/gdpx/core/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .. import config 6 | 7 | 8 | if __name__ == "__main__": 9 | ... 10 | -------------------------------------------------------------------------------- /src/gdpx/core/placeholder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pathlib 5 | from typing import NoReturn, Union 6 | 7 | class Placeholder: # Placeholder 8 | 9 | """Placeholder for input structures that may be from external files. 10 | """ 11 | 12 | #: Working directory for the operation. 13 | _directory: Union[str,pathlib.Path] = pathlib.Path.cwd() 14 | 15 | #: Working status that should be always finished. 16 | status = "finished" 17 | 18 | def __init__(self): 19 | """""" 20 | self.consumers = [] 21 | 22 | return 23 | 24 | @property 25 | def directory(self): 26 | """""" 27 | 28 | return self._directory 29 | 30 | @directory.setter 31 | def directory(self, directory_) -> NoReturn: 32 | """""" 33 | self._directory = pathlib.Path(directory_) 34 | 35 | return 36 | 37 | def reset(self): 38 | """Reset node's output and status.""" 39 | ... 40 | 41 | return 42 | 43 | 44 | if __name__ == "__main__": 45 | ... 46 | -------------------------------------------------------------------------------- /src/gdpx/core/session/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .interface import run_session 6 | 7 | __all__ = ["run_session"] 8 | 9 | 10 | if __name__ == "__main__": 11 | ... 
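config.py above wires a single shared logger ("GDP") and exposes `_print`/`_debug`, which the node classes bind as their printing functions. Turning on debug output from user code therefore only needs the standard logging module:

    import logging

    from gdpx import config

    config.logger.setLevel(logging.DEBUG)   # make config._debug messages visible
    config._print("starting a structure search")
    config._debug("this message now shows up as well")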
12 | -------------------------------------------------------------------------------- /src/gdpx/core/session/basic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | 7 | from ..placeholder import Placeholder 8 | from ..variable import Variable 9 | from .session import AbstractSession 10 | from .utils import traverse_postorder 11 | 12 | 13 | class Session(AbstractSession): 14 | 15 | def __init__(self, directory="./") -> None: 16 | """""" 17 | self.directory = pathlib.Path(directory) 18 | 19 | return 20 | 21 | def run(self, operation, feed_dict: dict = {}) -> None: 22 | """""" 23 | # - find forward order 24 | nodes_postorder = traverse_postorder(operation) 25 | for node in nodes_postorder: 26 | if hasattr(node, "_active") and node._active: 27 | node._active = False 28 | self._print( 29 | f"Set {node} active to false as it is not supported in a basic session" 30 | ) 31 | 32 | self._print( 33 | "\x1b[1;34;40m" 34 | + "[{:^24s}] NUM_NODES: {} AT MAIN: ".format( 35 | "START", len(nodes_postorder) 36 | ) 37 | + "\x1b[0m" 38 | ) 39 | self._print("\x1b[1;34;40m" + " {}".format(str(self.directory)) + "\x1b[0m") 40 | 41 | # - run nodes 42 | for i, node in enumerate(nodes_postorder): 43 | # NOTE: reset directory since it maybe changed 44 | prev_name = node.directory.name 45 | if not prev_name: 46 | prev_name = node.__class__.__name__ 47 | node.directory = self.directory / f"{str(i).zfill(4)}.{prev_name}" 48 | if node.__class__.__name__.endswith("Variable"): 49 | node_type = "VX" 50 | else: 51 | node_type = "OP" 52 | self._print( 53 | "[{:^24s}] NAME: {} AT {}".format( 54 | node_type, node.__class__.__name__.upper(), node.directory.name 55 | ) 56 | ) 57 | 58 | if isinstance(node, Placeholder): 59 | node.output = feed_dict[node] 60 | elif isinstance(node, Variable): 61 | node.output = node.value 62 | else: # Operation 63 | self._debug(f"node: {node}") 64 | self._process_operation(node) 65 | 66 | return 67 | 68 | 69 | if __name__ == "__main__": 70 | ... 71 | -------------------------------------------------------------------------------- /src/gdpx/core/session/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | 6 | from .. import config 7 | from ..register import registers 8 | from ..operation import Operation 9 | 10 | def traverse_postorder(operation): 11 | 12 | nodes_postorder = [] 13 | identifiers = [] 14 | 15 | def recurse(node): 16 | if isinstance(node, Operation): 17 | for input_node in node.input_nodes: 18 | recurse(input_node) 19 | if id(node) not in identifiers: 20 | nodes_postorder.append(node) 21 | identifiers.append(id(node)) 22 | 23 | recurse(operation) 24 | 25 | return nodes_postorder 26 | 27 | def create_variable(node_name, node_params_: dict): 28 | """""" 29 | node_params = copy.deepcopy(node_params_) 30 | node_type = node_params.pop("type", None) 31 | assert node_type is not None, f"{node_name} has no type." 
32 | node_template = node_params.pop("template", None) 33 | # -- special keywords 34 | #random_seed = node_params.pop("random_seed", None) 35 | #if random_seed is not None: 36 | # rng = np.random.default_rng(random_seed) 37 | # node_params.update(rng=rng) 38 | config._debug(node_name) 39 | config._debug(node_params) 40 | 41 | # -- 42 | node = None 43 | if node_template is not None: 44 | node_params.update(**node_template) 45 | node_cls = registers.get("variable", node_type, convert_name=True) 46 | node = node_cls(**node_params) 47 | 48 | return node 49 | 50 | def create_operation(op_name, op_params_: dict): 51 | """""" 52 | op_params = copy.deepcopy(op_params_) 53 | op_type = op_params.pop("type", None) 54 | assert op_type is not None, f"{op_name} has no type." 55 | op_template = op_params.pop("template", None) 56 | # -- special keywords 57 | #random_seed = op_params.pop("random_seed", None) 58 | #if random_seed is not None: 59 | # rng = np.random.default_rng(random_seed) 60 | # op_params.update(rng=rng) 61 | config._debug(op_name) 62 | config._debug(op_params) 63 | 64 | # -- 65 | op_cls = registers.get("operation", op_type, convert_name=False) 66 | operation = op_cls(**op_params) 67 | 68 | return operation 69 | 70 | 71 | if __name__ == "__main__": 72 | ... 73 | -------------------------------------------------------------------------------- /src/gdpx/core/variable.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pathlib 5 | from typing import Callable, NoReturn, Union 6 | 7 | from .. import config 8 | 9 | 10 | class Variable: 11 | """Intrinsic, changeable parameter of a graph.""" 12 | 13 | #: Node ID. 14 | identifier: str = "vx" 15 | 16 | #: Working directory for the operation. 17 | _directory: Union[str, pathlib.Path] = pathlib.Path.cwd() 18 | 19 | #: Working status that should be always finished. 20 | status = "finished" 21 | 22 | #: Standard print function. 23 | _print: Callable = config._print 24 | 25 | #: Standard debug function. 26 | _debug: Callable = config._debug 27 | 28 | def __init__(self, initial_value=None, directory: Union[str, pathlib.Path] = "./"): 29 | """""" 30 | self.value = initial_value 31 | self.consumers = [] 32 | 33 | self.directory = directory 34 | 35 | return 36 | 37 | @property 38 | def directory(self): 39 | """""" 40 | 41 | return self._directory 42 | 43 | @directory.setter 44 | def directory(self, directory_) -> None: 45 | """""" 46 | self._directory = pathlib.Path(directory_) 47 | 48 | return 49 | 50 | def reset(self): 51 | """Reset node's output and status.""" 52 | if hasattr(self, "output"): 53 | delattr(self, "output") 54 | 55 | return 56 | 57 | def reset_random_seed(self, mode="init"): 58 | """""" 59 | if hasattr(self, "_reset_random_seed"): 60 | self._reset_random_seed(mode=mode) 61 | 62 | return 63 | 64 | 65 | class DummyVariable(Variable): ... 66 | 67 | 68 | if __name__ == "__main__": 69 | ... 70 | -------------------------------------------------------------------------------- /src/gdpx/data/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | 7 | from ..core.register import registers 8 | 9 | from .correction import correct 10 | registers.operation.register(correct) 11 | 12 | from .convert import convert_dataset 13 | 14 | 15 | 16 | if __name__ == "__main__": 17 | ... 
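The session machinery above executes a graph of Variables and Operations in post-order, so each node's inputs are evaluated before the node itself. A small illustration of the traversal using the `compare` operation defined earlier (the Variables here hold empty placeholder lists):

    from gdpx.comparator.interface import compare
    from gdpx.core.session.utils import traverse_postorder
    from gdpx.core.variable import Variable

    ref = Variable(initial_value=[])
    pred = Variable(initial_value=[])
    op = compare(reference=ref, prediction=pred)   # third input defaults to a DummyVariable
    nodes = traverse_postorder(op)
    # post-order: ref, pred, the default DummyVariable comparator, then the compare operation
    print([type(n).__name__ for n in nodes])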
-------------------------------------------------------------------------------- /src/gdpx/data/extatoms.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | 7 | from ase import Atoms 8 | 9 | 10 | class ScfErrAtoms(Atoms): 11 | 12 | def __init__( 13 | self, 14 | symbols=None, 15 | positions=None, 16 | numbers=None, 17 | tags=None, 18 | momenta=None, 19 | masses=None, 20 | magmoms=None, 21 | charges=None, 22 | scaled_positions=None, 23 | cell=None, 24 | pbc=None, 25 | celldisp=None, 26 | constraint=None, 27 | calculator=None, 28 | info=None, 29 | velocities=None, 30 | ): 31 | super().__init__( 32 | symbols, 33 | positions, 34 | numbers, 35 | tags, 36 | momenta, 37 | masses, 38 | magmoms, 39 | charges, 40 | scaled_positions, 41 | cell, 42 | pbc, 43 | celldisp, 44 | constraint, 45 | calculator, 46 | info, 47 | velocities, 48 | ) 49 | 50 | self.info["scf_error"] = True 51 | 52 | return 53 | 54 | @staticmethod 55 | def from_atoms(atoms: Atoms) -> "ScfErrAtoms": 56 | """""" 57 | scferr_atoms = ScfErrAtoms( 58 | symbols=atoms.get_chemical_symbols(), 59 | positions=atoms.get_positions(), 60 | # tags=atoms.get_tags(), 61 | # magmoms 62 | # charges 63 | momenta=atoms.get_momenta(), 64 | cell=atoms.get_cell(), 65 | pbc=atoms.get_pbc(), 66 | constraint=atoms.constraints, 67 | calculator=atoms.calc, 68 | info=copy.deepcopy(atoms.info), 69 | ) 70 | 71 | return scferr_atoms 72 | 73 | 74 | if __name__ == "__main__": 75 | ... 76 | -------------------------------------------------------------------------------- /src/gdpx/data/system.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import re 5 | from typing import Any, Optional, List 6 | 7 | from ase import Atoms 8 | from ase.io import read, write 9 | 10 | from ..core.node import AbstractNode 11 | 12 | 13 | class DataSystem(AbstractNode): 14 | 15 | """This contains a fixed-composition system. 16 | """ 17 | 18 | prefix: str = None 19 | 20 | _images: Optional[List[Atoms]] = None 21 | 22 | _tags: Optional[List[str]] = None 23 | 24 | def __init__(self, directory="./", pattern: str="*.xyz", *args, **kwargs) -> None: 25 | """""" 26 | super().__init__(directory=directory, *args, **kwargs) 27 | 28 | self.pattern = pattern 29 | self._process_dataset() 30 | 31 | return 32 | 33 | def _process_dataset(self): 34 | """""" 35 | wdir = self.directory 36 | self.prefix = wdir.name 37 | 38 | images = [] 39 | xyzpaths = sorted(list(wdir.glob(self.pattern))) 40 | 41 | if self._images is None: 42 | self._images = [] 43 | 44 | if self._tags is None: 45 | self._tags = [] 46 | 47 | for p in xyzpaths: 48 | # -- read structures 49 | curr_frames = read(p, ":") 50 | n_curr_frames = len(curr_frames) 51 | self._debug(f"{p.name} nframes: {n_curr_frames}") 52 | self._images.extend(curr_frames) 53 | # -- add file prefix 54 | curr_tag = p.name.split(".")[0] 55 | self._tags.extend([curr_tag for _ in range(n_curr_frames)]) 56 | 57 | return 58 | 59 | def get_matched_indices(self, pattern=r".*") -> List[Atoms]: 60 | """Get structures with given criteria. 61 | 62 | Args: 63 | origin: How these structures are created. 64 | generation: The model generatoin. 
65 | 66 | """ 67 | matched_indices = [] 68 | for i, tag in enumerate(self._tags): 69 | if re.match(fr"{pattern}", tag) is not None: 70 | matched_indices.append(i) 71 | 72 | return matched_indices 73 | 74 | def __repr__(self) -> str: 75 | return f"DataSystem(nimages={len(self._images)})" 76 | 77 | 78 | if __name__ == "__main__": 79 | ... -------------------------------------------------------------------------------- /src/gdpx/data/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from ase.formula import Formula 6 | 7 | 8 | def get_atomic_number_list(system_name, type_list): 9 | """""" 10 | name_parts = system_name.split("-") 11 | if len(name_parts) == 1: 12 | composition = name_parts[0] 13 | elif len(name_parts) == 2: 14 | desc, composition = name_parts 15 | elif len(name_parts) == 3: 16 | desc, composition, substrate = name_parts 17 | else: 18 | raise ValueError( 19 | f"System name must be as xxx, xxx-xxx, or xxx-xxx-xxx instead of `{system_name}`." 20 | ) 21 | 22 | # print(composition) 23 | formula = Formula(composition) 24 | count = formula.count() 25 | 26 | return [count.get(s, 0) for s in type_list][::-1] 27 | 28 | 29 | def is_a_valid_system_name(system_name: str) -> bool: 30 | """""" 31 | is_valid = True 32 | 33 | name_parts = system_name.split("-") 34 | if len(name_parts) == 1: 35 | composition = name_parts[0] 36 | elif len(name_parts) == 2: 37 | desc, composition = name_parts 38 | elif len(name_parts) == 3: 39 | desc, composition, substrate = name_parts 40 | else: 41 | composition = "" 42 | 43 | try: 44 | formula = Formula(composition) 45 | except: 46 | is_valid = False 47 | 48 | return is_valid 49 | 50 | 51 | if __name__ == "__main__": 52 | ... 53 | -------------------------------------------------------------------------------- /src/gdpx/describer/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | """This module aims to offer a unified interface to the computation of descriptors.""" 5 | 6 | from ..core.register import registers 7 | 8 | # --- 9 | from .interface import DescriberVariable, describe 10 | registers.variable.register(DescriberVariable) 11 | registers.operation.register(describe) 12 | 13 | # --- 14 | from .spc import SpcDescriber 15 | registers.describer.register("spc")(SpcDescriber) 16 | 17 | 18 | if __name__ == "__main__": 19 | ... -------------------------------------------------------------------------------- /src/gdpx/describer/interface.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | from typing import Union 7 | 8 | import omegaconf 9 | 10 | from . 
import registers 11 | from ..core.variable import Variable, DummyVariable 12 | from ..core.operation import Operation 13 | 14 | 15 | class DescriberVariable(Variable): 16 | 17 | def __init__(self, directory="./", *args, **kwargs): 18 | """""" 19 | name = kwargs.pop("name", "soap") 20 | describer = registers.create("describer", name, convert_name=False, **kwargs) 21 | 22 | super().__init__(initial_value=describer, directory=directory) 23 | 24 | return 25 | 26 | 27 | class describe(Operation): 28 | 29 | def __init__( 30 | self, 31 | structures, 32 | describer, 33 | worker=DummyVariable(), 34 | directory: Union[str, pathlib.Path] = "./", 35 | ) -> None: 36 | """""" 37 | super().__init__( 38 | input_nodes=[structures, describer, worker], directory=directory 39 | ) 40 | 41 | return 42 | 43 | def _preprocess_input_nodes(self, input_nodes): 44 | """""" 45 | structures, describer, worker = input_nodes 46 | 47 | if isinstance(describer, dict) or isinstance( 48 | describer, omegaconf.dictconfig.DictConfig 49 | ): 50 | describer = DescriberVariable( 51 | directory=self.directory / "describer", **describer 52 | ) 53 | 54 | return structures, describer, worker 55 | 56 | def forward(self, structures, describer, workers): 57 | """""" 58 | super().forward() 59 | 60 | # - verify the worker 61 | if workers is not None: 62 | nworkers = len(workers) 63 | assert ( 64 | nworkers == 1 65 | ), f"{self.__class__.__name__} only accepts one worker but {nworkers} were given." 66 | worker = workers[0] 67 | worker.directory = self.directory/"worker" 68 | else: 69 | worker = None 70 | 71 | # - dataset? 72 | 73 | # - compute descriptors... 74 | describer.directory = self.directory 75 | status = describer.run(structures, worker, ) 76 | 77 | #self.status = "finished" 78 | self.status = status 79 | 80 | return structures 81 | 82 | 83 | if __name__ == "__main__": 84 | ... 85 | -------------------------------------------------------------------------------- /src/gdpx/describer/soap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | import pathlib 6 | from typing import Optional, List, Mapping 7 | 8 | import numpy as np 9 | from sklearn.decomposition import PCA 10 | 11 | from ase import Atoms 12 | from ase.io import read, write 13 | 14 | from ..core.register import registers 15 | from .describer import AbstractDescriber 16 | 17 | #try: 18 | # from dscribe.descriptors import SOAP 19 | #except Exception as e: 20 | # print(e) 21 | 22 | # NOTE: If there is no dscribe, this class will not be registered. 23 | from dscribe.descriptors import SOAP 24 | 25 | 26 | @registers.describer.register("soap") 27 | class SoapDescriber(AbstractDescriber): 28 | 29 | cache_features = "features.npy" 30 | 31 | def __init__(self, params, *args, **kwargs) -> None: 32 | """""" 33 | super().__init__(*args, **kwargs) 34 | 35 | self.descriptor = copy.deepcopy(params) 36 | 37 | return 38 | 39 | def run(self, dataset, *args, **kwargs): 40 | """""" 41 | ... 
42 | self._debug(f"n_jobs: {self.njobs}") 43 | 44 | # - for single system 45 | features = [] 46 | for system in dataset: 47 | curr_frames = system._images 48 | if not (self.directory/system.prefix).exists(): 49 | (self.directory/system.prefix).mkdir(parents=True) 50 | cache_features = self.directory/system.prefix/self.cache_features 51 | if not cache_features.exists(): 52 | curr_features = self._compute_descripter(frames=curr_frames) 53 | np.save(cache_features, curr_features) 54 | else: 55 | curr_features = np.load(cache_features) 56 | features.extend(curr_features.tolist()) 57 | features = np.array(features) 58 | self._debug(f"shape of features: {features.shape}") 59 | 60 | return features 61 | 62 | def _compute_descripter(self, frames: List[Atoms]) -> np.array: 63 | """Calculate vector-based descriptors. 64 | 65 | Each structure is represented by a vector. 66 | 67 | """ 68 | self._print("start calculating features...") 69 | desc_params = copy.deepcopy(self.descriptor) 70 | 71 | soap = SOAP(**desc_params) 72 | ndim = soap.get_number_of_features() 73 | self._print(f"soap descriptor dimension: {ndim}") 74 | features = soap.create(frames, n_jobs=self.njobs) 75 | self._print("finished calculating features...") 76 | 77 | # - save calculated features 78 | features = features.reshape(-1,ndim) 79 | 80 | return features 81 | 82 | 83 | if __name__ == "__main__": 84 | ... -------------------------------------------------------------------------------- /src/gdpx/expedition/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .. import config 6 | from ..builder.builder import StructureBuilder 7 | from ..builder.group import create_a_group, create_a_molecule_group 8 | from ..builder.utils import convert_string_to_atoms 9 | from ..core.register import registers 10 | from ..data.array import AtomsNDArray 11 | from ..graph.molecule import MolecularAdsorbate, find_molecules 12 | from ..potential.interface import create_mixer 13 | from ..utils.command import convert_indices, dict2str 14 | from ..utils.strconv import str2array 15 | from ..worker.drive import DriverBasedWorker 16 | from ..worker.grid import GridDriverBasedWorker 17 | from ..worker.interface import ComputerVariable 18 | from ..worker.single import SingleWorker 19 | 20 | if __name__ == "__main__": 21 | ... 22 | -------------------------------------------------------------------------------- /src/gdpx/expedition/expedition.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import abc 6 | import copy 7 | import logging 8 | import pathlib 9 | 10 | from . import config 11 | from . import registers 12 | from . import ComputerVariable, DriverBasedWorker, SingleWorker 13 | 14 | from ..core.node import AbstractNode 15 | 16 | 17 | class AbstractExpedition(AbstractNode): 18 | 19 | #: Name of the expedition. 20 | name: str = "expedition" 21 | 22 | @abc.abstractmethod 23 | def read_convergence(self): 24 | 25 | return 26 | 27 | @abc.abstractmethod 28 | def get_workers(self): 29 | 30 | return 31 | 32 | def run(self, *args, **kwargs) -> None: 33 | """""" 34 | # - some imported packages change `logging.basicConfig` 35 | # and accidently add a StreamHandler to logging.root 36 | # so remove it... 
37 | for h in logging.root.handlers: 38 | if isinstance(h, logging.StreamHandler) and not isinstance( 39 | h, logging.FileHandler 40 | ): 41 | logging.root.removeHandler(h) 42 | 43 | assert self.worker is not None, f"{self.name} has not set its worker properly." 44 | 45 | return 46 | 47 | def register_builder(self, builder: dict) -> None: 48 | """Register StructureBuilder for this expedition.""" 49 | if isinstance(builder, dict): 50 | builder_params = copy.deepcopy(builder) 51 | builder_method = builder_params.pop("method") 52 | builder = registers.create( 53 | "builder", builder_method, convert_name=False, **builder_params 54 | ) 55 | else: 56 | builder = builder 57 | 58 | self.builder = builder 59 | 60 | return 61 | 62 | def register_worker(self, worker: dict, *args, **kwargs) -> None: 63 | """Register DriverBasedWorker for this expedition.""" 64 | if isinstance(worker, dict): 65 | worker_params = copy.deepcopy(worker) 66 | worker = registers.create( 67 | "variable", "computer", convert_name=True, **worker_params 68 | ).value[0] 69 | elif isinstance(worker, list): # assume it is from a computervariable 70 | worker = worker[0] 71 | elif isinstance(worker, ComputerVariable): 72 | worker = worker.value[0] 73 | elif isinstance(worker, DriverBasedWorker) or isinstance(worker, SingleWorker): 74 | worker = worker 75 | else: 76 | raise RuntimeError(f"Unknown worker type {worker}") 77 | 78 | self.worker = worker 79 | 80 | return 81 | 82 | 83 | if __name__ == "__main__": 84 | ... 85 | -------------------------------------------------------------------------------- /src/gdpx/expedition/monte_carlo/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .. import registers 6 | from .. import convert_string_to_atoms 7 | 8 | 9 | if __name__ == "__main__": 10 | ... -------------------------------------------------------------------------------- /src/gdpx/expedition/monte_carlo/basin_hopping.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | from typing import List 7 | 8 | from .monte_carlo import MonteCarlo 9 | 10 | 11 | class BasinHopping(MonteCarlo): 12 | 13 | def __init__( 14 | self, builder: dict, operators: List[dict], 15 | convergence: dict, random_seed=None, dump_period: int=1, ckpt_period: int=100, 16 | restart: bool = False, directory="./", *args, **kwargs 17 | ) -> None: 18 | """""" 19 | super().__init__( 20 | builder, operators, convergence, random_seed, dump_period, ckpt_period, 21 | restart, directory, *args, **kwargs 22 | ) 23 | 24 | return 25 | 26 | def as_dict(self) -> dict: 27 | """""" 28 | engine_params = super().as_dict() 29 | engine_params["method"] = "basin_hopping" 30 | 31 | return engine_params 32 | 33 | 34 | if __name__ == "__main__": 35 | ... -------------------------------------------------------------------------------- /src/gdpx/graph/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .. import config 6 | from ..utils.command import CustomTimer 7 | 8 | 9 | if __name__ == "__main__": 10 | ... 
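# A round-trip sketch with the node-name helpers defined in graph/utils.py below
# (illustrative only; the import path assumes the package layout of this repository):
from gdpx.graph.utils import node_symbol, unpack_node_name

name = node_symbol("Cu", 3, (0, 1, 0))                 # -> "Cu:3:[0,1,0]"
assert unpack_node_name(name) == ("Cu", 3, [0, 1, 0])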
-------------------------------------------------------------------------------- /src/gdpx/graph/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import time 5 | import networkx as nx 6 | 7 | import matplotlib as mpl 8 | mpl.use("Agg") #silent mode 9 | from matplotlib import pyplot as plt 10 | #plt.style.use("presentation") 11 | 12 | def node_symbol(symbol, idx, offset): 13 | """""" 14 | return "{}:{}:[{},{},{}]".format(symbol, idx, offset[0], offset[1], offset[2]) 15 | 16 | def unpack_node_name(node_symbol): 17 | #print("xxx: ", node_symbol) 18 | chem_sym, idx, offset = node_symbol.split(":") 19 | idx = int(idx) 20 | offset = eval(offset) 21 | 22 | return chem_sym, idx, offset 23 | 24 | def bond_symbol(sym1, sym2, a1, a2): 25 | return "{}{}".format(*sorted((sym1, sym2))) 26 | 27 | def grid_iterator(grid): 28 | """Yield all of the coordinates in a 3D grid as tuples 29 | 30 | Args: 31 | grid (tuple[int] or int): The grid dimension(s) to 32 | iterate over (x or (x, y, z)) 33 | 34 | Yields: 35 | tuple: (x, y, z) coordinates 36 | """ 37 | if isinstance(grid, int): # Expand to 3D grid 38 | grid = (grid, grid, grid) 39 | 40 | for x in range(-grid[0], grid[0]+1): 41 | for y in range(-grid[1], grid[1]+1): 42 | for z in range(-grid[2], grid[2]+1): 43 | yield (x, y, z) 44 | 45 | def show_edges(graph): 46 | print("----- See Edges -----") 47 | for (u, v, d) in graph.edges.data(): 48 | print(u, v, d) 49 | 50 | return 51 | 52 | def show_nodes(graph): 53 | print("----- See Nodes -----") 54 | for (u, d) in graph.nodes.data(): 55 | print(u, d) 56 | 57 | return 58 | 59 | def show_components(): 60 | #print("----- connected components -----") 61 | #for c in nx.connected_components(graph): 62 | # print(c) 63 | pass 64 | 65 | return 66 | 67 | def plot_graph(graph, fig_name="graph.png"): 68 | # plot graph 69 | fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(12,8)) 70 | ax.set_title("Graph") 71 | 72 | nx.draw(graph, with_labels=True) 73 | 74 | plt.savefig(fig_name) 75 | 76 | return 77 | 78 | if __name__ == "__main__": 79 | ... -------------------------------------------------------------------------------- /src/gdpx/potential/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | """ potential wrappers 5 | general potential format: deepmd, eann, lasp, vasp 6 | dynamics backend: ase, lammps, lasp, vasp 7 | """ 8 | 9 | 10 | from .. import config 11 | from ..core.register import registers 12 | 13 | 14 | if __name__ == "__main__": 15 | ... 
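# A construction sketch that mirrors `potter_from_dict` in potential/utils.py
# (illustrative only; the params dict and how the base manager consumes it are assumptions):
from gdpx.core.register import registers

potter = registers.create("manager", "emt", convert_name=True)
# potter.register_calculator({"backend": "ase"})  # params schema depends on the chosen manager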
16 | -------------------------------------------------------------------------------- /src/gdpx/potential/calculators/dummy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | from ase.calculators.calculator import Calculator, all_properties, all_changes 6 | 7 | 8 | class DummyCalculator(Calculator): 9 | 10 | name = "dummy" 11 | 12 | def __init__( 13 | self, restart=None, label="dummy", atoms=None, directory=".", **kwargs 14 | ): 15 | super().__init__( 16 | restart, label=label, atoms=atoms, directory=directory, **kwargs 17 | ) 18 | 19 | return 20 | 21 | def calculate( 22 | self, atoms=None, properties=all_properties, system_changes=all_changes 23 | ): 24 | """""" 25 | raise NotImplementedError("DummyCalculator is unable to calculate.") 26 | 27 | 28 | if __name__ == "__main__": 29 | ... 30 | -------------------------------------------------------------------------------- /src/gdpx/potential/interface.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from ..core.variable import Variable, DummyVariable 6 | from ..core.operation import Operation 7 | from ..core.register import registers 8 | 9 | from ..potential.managers.mixer import MixerManager 10 | 11 | 12 | @registers.variable.register 13 | class PotterVariable(Variable): 14 | 15 | def __init__(self, directory="./", **kwargs): 16 | """""" 17 | # manager = PotentialRegister() 18 | name = kwargs.get("name", None) 19 | # potter = manager.create_potential(pot_name=name) 20 | # potter.register_calculator(kwargs.get("params", {})) 21 | # potter.version = kwargs.get("version", "unknown") 22 | 23 | potter = registers.create( 24 | "manager", 25 | name, 26 | convert_name=True, 27 | # **kwargs.get("params", {}) 28 | ) 29 | potter.register_calculator(kwargs.get("params", {})) 30 | 31 | super().__init__(initial_value=potter, directory=directory) 32 | 33 | return 34 | 35 | 36 | def create_mixer(basic_params, *args, **kwargs): 37 | """""" 38 | potters = [basic_params] 39 | for x in args: 40 | potters.append(x) 41 | calc_params = dict(backend="ase", potters=potters) 42 | 43 | mixer = MixerManager() 44 | mixer.register_calculator(calc_params=calc_params) 45 | 46 | return mixer 47 | 48 | 49 | if __name__ == "__main__": 50 | ... 51 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/abacus.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pathlib 5 | 6 | from ase.calculators.abacus import Abacus, AbacusProfile 7 | from ase.io.abacus import read_input 8 | 9 | from . import AbstractPotentialManager 10 | 11 | 12 | class AbacusWrapper(Abacus): 13 | 14 | def reset(self): 15 | """Clear all information from old calculation.""" 16 | 17 | self.atoms = None 18 | self.results = {} 19 | 20 | return 21 | 22 | 23 | class AbacusManager(AbstractPotentialManager): 24 | 25 | name = "abacus" 26 | 27 | implemented_backends = ["abacus"] 28 | valid_combinations = ( 29 | ("abacus", "abacus"), 30 | ("abacus", "ase"), 31 | ) 32 | 33 | def register_calculator(self, calc_params: dict): 34 | """Register the calculator. 35 | 36 | The input parameters may contain values as: 37 | 38 | command: 'mpirun -n 2 abacus' 39 | template: INPUT_ABACUS 40 | pseudo_dir: ... 41 | basis_dir: ... 
42 | 43 | """ 44 | super().register_calculator(calc_params) 45 | 46 | command = calc_params.pop("command", None) 47 | 48 | pseudo_dir = str(pathlib.Path(calc_params.pop("pseudo_dir", None)).resolve()) 49 | self.calc_params.update(pseudo_dir=pseudo_dir) 50 | basis_dir = str(pathlib.Path(calc_params.pop("basis_dir", None)).resolve()) 51 | self.calc_params.update(basis_dir=basis_dir) 52 | 53 | template_fpath = str(pathlib.Path(calc_params.pop("template")).resolve()) 54 | self.calc_params.update(basis_dir=basis_dir) 55 | kpts = calc_params.pop("kpts", (1, 1, 1)) 56 | 57 | pp, basis = {}, {} 58 | for s, data in calc_params.get("type_info", {}).items(): 59 | pp[s] = data["pseudo"] 60 | basis[s] = data["basis"] 61 | 62 | if self.calc_backend == "abacus": 63 | profile = AbacusProfile( 64 | command=command, pseudo_dir=pseudo_dir, basis_dir=basis_dir 65 | ) 66 | calc = AbacusWrapper(profile, pp=pp, basis=basis, kpts=kpts) 67 | inp_params = read_input(template_fpath) 68 | calc.parameters.update(**inp_params) 69 | calc_type = calc.parameters.get("calculation", "scf") 70 | if calc_type != "scf": 71 | raise RuntimeError("ABACUS only supports `scf` for now.") 72 | 73 | self.calc = calc 74 | else: 75 | raise NotImplementedError( 76 | f"Unimplemented backend {self.calc_backend} for abacus." 77 | ) 78 | 79 | return 80 | 81 | 82 | if __name__ == "__main__": 83 | ... 84 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/asepot.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | import copy 6 | 7 | from . import AbstractPotentialManager 8 | 9 | 10 | class AsePotManager(AbstractPotentialManager): 11 | 12 | name = "ase" 13 | 14 | implemented_backends = ["ase"] 15 | valid_combinations = ( 16 | ("ase", "ase"), 17 | ) 18 | 19 | """Here is an interface to ase built-in calculators.""" 20 | 21 | def __init__(self, *args, **kwargs): 22 | """""" 23 | super().__init__() 24 | 25 | return 26 | 27 | def register_calculator(self, calc_params: dict, *args, **kwargs): 28 | """""" 29 | super().register_calculator(calc_params, *args, **kwargs) 30 | 31 | calc_params = copy.deepcopy(calc_params) 32 | method = calc_params.pop("method", "") 33 | 34 | if self.calc_backend == "ase": 35 | if method == "lj": 36 | from ase.calculators.lj import LennardJones 37 | calc_cls = LennardJones 38 | elif method == "morse": 39 | from ase.calculators.morse import MorsePotential 40 | calc_cls = MorsePotential 41 | elif method == "tip3p": 42 | from ase.calculators.tip3p import TIP3P 43 | calc_cls = TIP3P 44 | else: 45 | raise NotImplementedError(f"Unsupported potential {method}.") 46 | else: 47 | raise NotImplementedError(f"Unsupported backend {self.calc_backend}.") 48 | 49 | calc = calc_cls(**calc_params) 50 | 51 | self.calc = calc 52 | 53 | return 54 | 55 | 56 | if __name__ == "__main__": 57 | ... 58 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/deepmd/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .deepmd import DeepmdManager, DeepmdDataloader, DeepmdTrainer 6 | 7 | __all__ = [ 8 | "DeepmdManager", "DeepmdDataloader", "DeepmdTrainer" 9 | ] 10 | 11 | 12 | if __name__ == "__main__": 13 | ... 
14 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/dftd3.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | from . import AbstractPotentialManager, DummyCalculator 6 | 7 | 8 | """Check https://dftd3.readthedocs.io/en/latest/api/ase.html 9 | 10 | To install, use conda install dftd3-python -c conda-forge. 11 | 12 | Calculator parameters should have `method` (xc e.g. PBE) and `damping` (e.g. d3bj). 13 | 14 | """ 15 | 16 | 17 | class Dftd3Manager(AbstractPotentialManager): 18 | 19 | name = "dftd3" 20 | 21 | implemented_backends = ["ase"] 22 | valid_combinations = ( 23 | ("ase", "ase") 24 | ) 25 | 26 | """See ASE documentation for calculator parameters. 27 | """ 28 | 29 | def __init__(self, *args, **kwargs): 30 | """""" 31 | super().__init__() 32 | 33 | return 34 | 35 | def register_calculator(self, calc_params, *agrs, **kwargs): 36 | """""" 37 | super().register_calculator(calc_params, *agrs, **kwargs) 38 | 39 | calc = DummyCalculator() 40 | if self.calc_backend == "ase": 41 | from dftd3.ase import DFTD3 as calc_cls 42 | else: 43 | raise NotImplementedError(f"Unsupported backend {self.calc_backend}.") 44 | 45 | calc = calc_cls(**calc_params) 46 | 47 | self.calc = calc 48 | 49 | return 50 | 51 | 52 | if __name__ == "__main__": 53 | ... -------------------------------------------------------------------------------- /src/gdpx/potential/managers/eam.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import os 5 | import pathlib 6 | from typing import NoReturn 7 | 8 | from . import AbstractPotentialManager, DummyCalculator 9 | 10 | 11 | class EamManager(AbstractPotentialManager): 12 | 13 | name = "eam" 14 | 15 | implemented_backends = ["lammps"] 16 | valid_combinations = ( 17 | ("lammps", "lammps") 18 | ) 19 | 20 | """See LAMMPS documentation for calculator parameters. 21 | """ 22 | 23 | def __init__(self, *args, **kwargs): 24 | """""" 25 | super().__init__() 26 | 27 | return 28 | 29 | def register_calculator(self, calc_params, *agrs, **kwargs): 30 | """""" 31 | super().register_calculator(calc_params, *agrs, **kwargs) 32 | 33 | calc = DummyCalculator() 34 | 35 | # - some shared params 36 | command = calc_params.pop("command", None) 37 | directory = calc_params.pop("directory", pathlib.Path.cwd()) 38 | 39 | type_list = calc_params.pop("type_list", []) 40 | type_map = {} 41 | for i, a in enumerate(type_list): 42 | type_map[a] = i 43 | 44 | # --- model files 45 | model_ = calc_params.get("model", []) 46 | if not isinstance(model_, list): 47 | model_ = [model_] 48 | 49 | models = [] 50 | for m in model_: 51 | m = pathlib.Path(m).resolve() 52 | if not m.exists(): 53 | raise FileNotFoundError(f"Cant find model file {str(m)}") 54 | models.append(str(m)) 55 | 56 | if self.calc_backend == "lammps": 57 | from gdpx.computation.lammps import Lammps 58 | if models: 59 | pair_style = "eam" 60 | pair_coeff = calc_params.pop("pair_coeff", "* *") 61 | pair_coeff += " {} ".format(models[0]) 62 | 63 | pair_style_name = pair_style.split()[0] 64 | assert pair_style_name == "eam", "Incorrect pair_style for lammps eam..." 
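                # NOTE: pair_coeff ends up as "* * <model-file>"; only the first
                # model file is used here even if several were given.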
65 | 66 | calc = Lammps( 67 | command=command, directory=directory, 68 | pair_style=pair_style, pair_coeff=pair_coeff, 69 | **calc_params 70 | ) 71 | # - update several params 72 | calc.units = "metal" 73 | calc.atom_style = "atomic" 74 | else: 75 | ... 76 | 77 | self.calc = calc 78 | 79 | return 80 | 81 | 82 | if __name__ == "__main__": 83 | ... -------------------------------------------------------------------------------- /src/gdpx/potential/managers/emt.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import os 5 | import pathlib 6 | from typing import NoReturn 7 | 8 | from . import AbstractPotentialManager 9 | 10 | 11 | class EmtManager(AbstractPotentialManager): 12 | 13 | name = "emt" 14 | 15 | implemented_backends = ["emt", "ase"] 16 | valid_combinations = ( 17 | ("ase", "ase"), 18 | ) 19 | 20 | """See ASE documentation for calculator parameters. 21 | """ 22 | 23 | def __init__(self, *args, **kwargs): 24 | """""" 25 | super().__init__() 26 | 27 | return 28 | 29 | def register_calculator(self, calc_params, *agrs, **kwargs): 30 | """""" 31 | super().register_calculator(calc_params, *agrs, **kwargs) 32 | if self.calc_backend == "emt": 33 | self.calc_backend = "ase" 34 | 35 | # NOTE: emt backend is just an alias of ase backend, they are the same. 36 | if self.calc_backend == "ase": 37 | from ase.calculators.emt import EMT 38 | calc_cls = EMT 39 | else: 40 | raise NotImplementedError(f"Unsupported backend {self.calc_backend}.") 41 | 42 | calc = calc_cls(**calc_params) 43 | 44 | self.calc = calc 45 | 46 | return 47 | 48 | 49 | if __name__ == "__main__": 50 | ... 51 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/espresso.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | import pathlib 6 | 7 | 8 | from . 
import AbstractPotentialManager, DummyCalculator 9 | 10 | 11 | class EspressoManager(AbstractPotentialManager): 12 | 13 | name = "espresso" 14 | 15 | implemented_backends = ["espresso"] 16 | valid_combinations = ( 17 | ("espresso", "ase") 18 | ) 19 | 20 | def __init__(self, *args, **kwargs): 21 | """""" 22 | super().__init__() 23 | 24 | return 25 | 26 | def register_calculator(self, calc_params, *agrs, **kwargs): 27 | """""" 28 | # - check backends 29 | super().register_calculator(calc_params, *agrs, **kwargs) 30 | 31 | # - parse params 32 | command = calc_params.get("command", "pw.x -in PREFIX.pwi > PREFIX.pwo") 33 | self.calc_params.update(command = command) 34 | 35 | pp_path = pathlib.Path(calc_params.pop("pp_path", "./")).resolve() 36 | if not pp_path.exists(): 37 | raise FileNotFoundError("Pseudopotentials for espresso does not exist.") 38 | self.calc_params.update(pp_path=str(pp_path)) 39 | 40 | pp_name = calc_params.get("pp_name", None) 41 | if pp_name is None: 42 | raise RuntimeError("Must set name for pseudopotentials.") 43 | 44 | template = calc_params.pop("template", "./espresso.pwi") 45 | template = pathlib.Path(template) 46 | if not template.exists(): 47 | raise FileNotFoundError("Template espresso input file does not exist.") 48 | self.calc_params.update(template=str(template)) 49 | 50 | from gdpx.computation.espresso import EspressoParser 51 | ep = EspressoParser(template=template) 52 | 53 | # -- kpoints that maybe read from the file 54 | kpts = calc_params.pop("kpts", None) 55 | kspacing = calc_params.pop("kspacing", None) 56 | koffset = calc_params.pop("koffset", 0) 57 | 58 | assert kpts is None or kspacing is None, "Cannot set kpts and kspacing at the same time." 59 | 60 | # - NOTE: check 61 | calc = DummyCalculator() 62 | 63 | if self.calc_backend == "espresso": 64 | from gdpx.computation.espresso import Espresso 65 | calc = Espresso( 66 | command = command, 67 | input_data = ep.parameters, 68 | pseudopotentials = pp_name, 69 | kspacing = kspacing, 70 | kpts = kpts, 71 | koffset = koffset 72 | ) 73 | else: 74 | ... 75 | 76 | self.calc = calc 77 | 78 | return 79 | 80 | 81 | if __name__ == "__main__": 82 | ... -------------------------------------------------------------------------------- /src/gdpx/potential/managers/gp/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | if __name__ == "__main__": 5 | ... 
6 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/gp/fgp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from typing import List 6 | from gdpx.potential.trainer import AbstractTrainer 7 | 8 | 9 | class FGPTrainer(AbstractTrainer): 10 | 11 | name = "fgp" 12 | 13 | def __init__( 14 | self, config: dict, type_list: List[str] = None, train_epochs: int = 200, 15 | directory=".", command="train", freeze_command="freeze", random_seed: int = None, 16 | *args, **kwargs 17 | ) -> None: 18 | """""" 19 | super().__init__( 20 | config, type_list, train_epochs, 21 | directory, command, freeze_command, 22 | random_seed, *args, **kwargs 23 | ) 24 | 25 | return 26 | 27 | @property 28 | def frozen_name(self): 29 | """""" 30 | return f"{self.name}.pb" 31 | 32 | def _resolve_freeze_command(self, *args, **kwargs): 33 | return super()._resolve_freeze_command(*args, **kwargs) 34 | 35 | def _resolve_train_command(self, *args, **kwargs): 36 | """""" 37 | command = self.command 38 | 39 | return command 40 | 41 | def train(self, dataset, init_model=None, *args, **kwargs): 42 | """""" 43 | self._print("miaow") 44 | from .representation import train 45 | train() 46 | 47 | return 48 | 49 | def write_input(self, dataset, *args, **kwargs): 50 | return super().write_input(dataset, *args, **kwargs) 51 | 52 | def read_convergence(self) -> bool: 53 | return super().read_convergence() 54 | 55 | 56 | if __name__ == "__main__": 57 | ... -------------------------------------------------------------------------------- /src/gdpx/potential/managers/lasp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | from pathlib import Path 5 | 6 | from . 
import AbstractPotentialManager 7 | 8 | 9 | class LaspManager(AbstractPotentialManager): 10 | 11 | name = "lasp" 12 | implemented_backends = ["lasp"] 13 | valid_combinations = ( 14 | ("lasp", "lasp"), # calculator, dynamics 15 | ("lasp", "ase") 16 | ) 17 | 18 | def __init__(self): 19 | 20 | return 21 | 22 | def register_calculator(self, calc_params): 23 | """ params 24 | command 25 | directory 26 | pot 27 | """ 28 | super().register_calculator(calc_params) 29 | 30 | self.calc_params["pot_name"] = self.name 31 | 32 | command = calc_params.pop("command", None) 33 | directory = calc_params.pop("directory", Path.cwd()) 34 | atypes = calc_params.pop("type_list", []) 35 | 36 | # --- model files 37 | model_ = calc_params.get("model", []) 38 | if not isinstance(model_, list): 39 | model_ = [model_] 40 | 41 | models = [] 42 | for m in model_: 43 | m = Path(m).resolve() 44 | if not m.exists(): 45 | raise FileNotFoundError(f"Cant find model file {str(m)}") 46 | models.append(str(m)) 47 | 48 | # update to resolved paths 49 | self.calc_params["model"] = models 50 | 51 | pot = {} 52 | if len(models) == len(atypes): 53 | for t, m in zip(atypes,models): 54 | pot[t] = m 55 | else: 56 | # use first model for all types 57 | for t in atypes: 58 | pot[t] = models[0] 59 | 60 | self.calc = None 61 | if self.calc_backend == "lasp": 62 | from gdpx.computation.lasp import LaspNN 63 | self.calc = LaspNN( 64 | command=command, directory=directory, pot=pot, 65 | **calc_params 66 | ) 67 | elif self.calc_backend == "lammps": 68 | # TODO: add lammps calculator 69 | pass 70 | else: 71 | raise NotImplementedError(f"{self.name} does not have {self.calc_backend}.") 72 | 73 | return 74 | 75 | 76 | if __name__ == "__main__": 77 | pass -------------------------------------------------------------------------------- /src/gdpx/potential/managers/reax.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | from pathlib import Path 5 | 6 | from . 
import AbstractPotentialManager 7 | 8 | 9 | class ReaxManager(AbstractPotentialManager): 10 | 11 | name = "reax" 12 | implemented_backends = ["lammps"] 13 | 14 | valid_combinations = (("lammps", "ase"), ("lammps", "lammps")) 15 | 16 | def __init__(self, *args, **kwargs): 17 | """""" 18 | 19 | return 20 | 21 | def register_calculator(self, calc_params, *agrs, **kwargs): 22 | """""" 23 | super().register_calculator(calc_params, *agrs, **kwargs) 24 | 25 | command = calc_params.pop("command", None) 26 | directory = calc_params.pop("directory", Path.cwd()) 27 | 28 | model = calc_params.get("model", None) 29 | model = str(Path(model).resolve()) 30 | self.calc_params["model"] = model 31 | 32 | if self.calc_backend == "lammps": 33 | from gdpx.computation.lammps import Lammps 34 | 35 | if model: 36 | pair_style = "reax/c NULL" 37 | pair_coeff = f"* * {model}" 38 | calc = Lammps( 39 | command=command, 40 | directory=directory, 41 | pair_style=pair_style, 42 | pair_coeff=pair_coeff, 43 | **calc_params, 44 | ) 45 | # - update several params 46 | calc.set(units="real") 47 | calc.set(atom_style="charge") 48 | else: 49 | calc = None 50 | self.calc = calc 51 | 52 | return 53 | 54 | 55 | if __name__ == "__main__": 56 | pass 57 | -------------------------------------------------------------------------------- /src/gdpx/potential/managers/xtb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import os 5 | import pathlib 6 | from typing import NoReturn 7 | 8 | from . import AbstractPotentialManager 9 | 10 | 11 | class XtbManager(AbstractPotentialManager): 12 | 13 | name = "xtb" 14 | 15 | implemented_backends = ["xtb", "tblite"] 16 | valid_combinations = ( 17 | ("xtb", "ase"), 18 | ("tblite", "ase"), 19 | ) 20 | 21 | """See XTB documentation for calculator parameters. 22 | 23 | method: "GFN2-xTB" 24 | accuracy: 1.0 25 | electronic_temperature: 300.0 26 | max_iterations: 250 27 | solvent: "none" 28 | cache_api: True 29 | 30 | """ 31 | 32 | def __init__(self, *args, **kwargs): 33 | """""" 34 | super().__init__() 35 | 36 | return 37 | 38 | def register_calculator(self, calc_params, *agrs, **kwargs): 39 | """""" 40 | super().register_calculator(calc_params, *agrs, **kwargs) 41 | 42 | if self.calc_backend == "xtb": 43 | try: 44 | from xtb.ase.calculator import XTB 45 | except: 46 | print("Please install xtb python to use this module.") 47 | exit() 48 | calc_cls = XTB 49 | elif self.calc_backend == "tblite": 50 | try: 51 | from tblite.ase import TBLite 52 | except: 53 | print("Please install xtb python to use this module.") 54 | exit() 55 | calc_cls = TBLite 56 | else: 57 | raise NotImplementedError(f"Unsupported backend {self.calc_backend}.") 58 | 59 | calc = calc_cls(**calc_params) 60 | 61 | self.calc = calc 62 | 63 | return 64 | 65 | 66 | if __name__ == "__main__": 67 | ... 
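# A minimal configuration sketch for XtbManager (keys taken from the docstring
# above; the "backend" key and its handling by the base manager are assumptions):
xtb_calc_params = {
    "backend": "tblite",
    "method": "GFN2-xTB",
    "accuracy": 1.0,
    "electronic_temperature": 300.0,
}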
68 | -------------------------------------------------------------------------------- /src/gdpx/potential/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | import pathlib 7 | 8 | from typing import Callable, Optional, List 9 | 10 | import omegaconf 11 | 12 | from ..core.variable import Variable 13 | from ..core.register import registers 14 | from ..utils.command import parse_input_file 15 | from .manager import AbstractPotentialManager 16 | 17 | 18 | def potter_from_dict(inp_dict: dict): 19 | """""" 20 | name = inp_dict.get("name", None) 21 | potter = registers.create( 22 | "manager", 23 | name, 24 | convert_name=True, 25 | ) 26 | potter.register_calculator(inp_dict.get("params", {})) 27 | potter.version = inp_dict.get("version", "unknown") 28 | 29 | return potter 30 | 31 | def convert_input_to_potter(inp) -> "AbstractPotentialManager": 32 | """Convert an input to a potter and adjust its behaviour.""" 33 | potter = None 34 | if isinstance(inp, AbstractPotentialManager): 35 | potter = inp 36 | elif isinstance(inp, Variable): 37 | potter = inp.value 38 | elif isinstance(inp, dict) or isinstance(inp, omegaconf.dictconfig.DictConfig): 39 | # DictConfig must be cast to dict as sometimes cannot be overwritten. 40 | if isinstance(inp, omegaconf.dictconfig.DictConfig): 41 | inp = omegaconf.OmegaConf.to_object(inp) 42 | potter_params = copy.deepcopy(inp) 43 | potter = potter_from_dict(potter_params) 44 | elif isinstance(inp, str) or isinstance(inp, pathlib.Path): 45 | if pathlib.Path(inp).exists(): 46 | potter_params = parse_input_file(input_fpath=inp) 47 | potter = potter_from_dict(potter_params) 48 | else: 49 | raise RuntimeError(f"The potter configuration `{inp}` does not exist.") 50 | else: 51 | raise RuntimeError(f"Unknown {inp} of type {type(inp)} for the potter.") 52 | 53 | return potter 54 | 55 | 56 | if __name__ == "__main__": 57 | ... 58 | -------------------------------------------------------------------------------- /src/gdpx/reactor/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | 6 | from .. import config 7 | from ..core.register import registers 8 | 9 | from ..builder.constraints import parse_constraint_info 10 | from ..potential.calculators.mixer import EnhancedCalculator 11 | 12 | from ..utils.strucopy import read_sort, resort_atoms_with_spc 13 | from ..utils.cmdrun import run_ase_calculator 14 | 15 | 16 | """ This submodule is for exploring, sampling, 17 | and performing (chemical) reactions with 18 | various advanced algorithms. 19 | """ 20 | 21 | # - string methods... 22 | from .string import ( 23 | AseStringReactor, Cp2kStringReactor, VaspStringReactor, 24 | ZeroStringReactor 25 | ) 26 | registers.reactor.register("ase")(AseStringReactor) 27 | registers.reactor.register("cp2k")(Cp2kStringReactor) 28 | registers.reactor.register("vasp")(VaspStringReactor) 29 | registers.reactor.register("grid")(ZeroStringReactor) 30 | 31 | 32 | if __name__ == "__main__": 33 | ... 
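# A creation sketch (illustrative only; it assumes the reactor register accepts the
# same `registers.create` call used for describers above and that the "ase" string
# reactor takes a `calc` keyword as in AbstractReactor):
#
#   from ase.calculators.emt import EMT
#   neb_reactor = registers.create("reactor", "ase", convert_name=False, calc=EMT())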
34 | -------------------------------------------------------------------------------- /src/gdpx/reactor/future/diffusion3.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import numpy as np 5 | 6 | from ase.io import read 7 | from ase.constraints import FixAtoms 8 | from ase.calculators.emt import EMT 9 | from ase.neb import NEB 10 | #from ase.neb import SingleCalculatorNEB 11 | from ase.optimize import BFGS 12 | from ase.io import read, write 13 | 14 | from eann.interface.ase.calculator import Eann 15 | 16 | #initial = read('initial.traj') 17 | #final = read('final.traj') 18 | 19 | #prepared_images = read( 20 | # "/users/40247882/scratch2/validations/structures/surfaces-2x2/relaxed/hcp2fcc-brg_opt.xyz", ":" # read neb optimised trajectory 21 | #) 22 | # 23 | #for atoms in prepared_images: 24 | # atoms.wrap() 25 | #write("wrapped_traj.xyz", prepared_images) 26 | #exit() 27 | 28 | #prepared_images = read("./start_images.xyz", ":") 29 | prepared_images = read("./hcp2fcc-top_opt.xyz", ":") 30 | 31 | initial = prepared_images[0].copy() 32 | final = prepared_images[-1].copy() 33 | 34 | constraint = FixAtoms(indices=[1,2,3,4]) # first atom is O 35 | #constraint = FixAtoms(indices=[1,2,3,4,5,6,7,8]) # first atom is O 36 | 37 | nimages = 7 38 | images = [initial] 39 | images += [initial.copy() for i in range(nimages-2)] 40 | images.append(final) 41 | 42 | # set calculator 43 | cur_model = "/users/40247882/scratch2/oxides/eann-main/reduce-13/ensemble/model-0/eann_best_DOUBLE.pt" 44 | calc = Eann( 45 | type_map = {'O': 0, 'Pt': 1}, 46 | model = cur_model 47 | ) 48 | 49 | for atoms in images: 50 | calc = Eann( 51 | type_map = {'O': 0, 'Pt': 1}, 52 | #model = "/users/40247882/projects/oxides/eann-main/it-0012/ensemble/model-3/eann_best-3_DOUBLE.pt" 53 | model = cur_model 54 | ) 55 | atoms.calc = calc 56 | atoms.set_constraint(constraint) 57 | 58 | print(initial.get_potential_energy()) 59 | #print(initial.get_forces()) 60 | print(final.get_potential_energy()) 61 | 62 | neb = NEB( 63 | images, allow_shared_calculator=False, 64 | k=0.1 65 | # dynamic_relaxation = False 66 | ) 67 | #neb = SingleCalculatorNEB(images) 68 | 69 | neb.interpolate() 70 | #print(neb.images) 71 | 72 | qn = BFGS(neb, trajectory="neb.traj") 73 | qn.run(fmax=0.05, steps=50) 74 | 75 | opt_images = read("./neb.traj", "-%s:" %nimages) 76 | for a in opt_images: 77 | calc = Eann( 78 | type_map = {'O': 0, 'Pt': 1}, 79 | model = cur_model 80 | ) 81 | a.calc = calc 82 | 83 | energies = np.array( 84 | [a.get_potential_energy() for a in opt_images] 85 | ) 86 | energies = energies - energies[0] 87 | print(energies) 88 | 89 | if __name__ == '__main__': 90 | pass 91 | -------------------------------------------------------------------------------- /src/gdpx/reactor/future/test_mh.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from ase import Atoms, Atom 5 | from ase.build import fcc110 6 | from ase.optimize.minimahopping import MinimaHopping 7 | from ase.calculators.emt import EMT 8 | from ase.constraints import FixAtoms, Hookean 9 | 10 | # Make the Pt 110 slab. 11 | atoms = fcc110('Pt', (2, 2, 2), vacuum=7.) 12 | 13 | # Add the Cu2 adsorbate. 
14 | adsorbate = Atoms( 15 | [Atom('Cu', atoms[7].position + (0., 0., 2.5)), 16 | Atom('Cu', atoms[7].position + (0., 0., 5.0))] 17 | ) 18 | atoms.extend(adsorbate) 19 | 20 | # Constrain the surface to be fixed and a Hookean constraint between 21 | # the adsorbate atoms. 22 | constraints = [ 23 | FixAtoms(indices=[atom.index for atom in atoms if 24 | atom.symbol == 'Pt']), 25 | Hookean(a1=8, a2=9, rt=2.6, k=15.), 26 | Hookean(a1=8, a2=(0., 0., 1., -15.), k=15.), 27 | ] 28 | atoms.set_constraint(constraints) 29 | 30 | # Set the calculator. 31 | calc = EMT() 32 | atoms.calc = calc 33 | 34 | # Instantiate and run the minima hopping algorithm. 35 | hop = MinimaHopping( 36 | atoms, 37 | Ediff0=2.5, 38 | T0=4000. 39 | ) 40 | hop(totalsteps=10) 41 | 42 | from ase.optimize.minimahopping import MHPlot 43 | 44 | mhplot = MHPlot() 45 | mhplot.save_figure('summary.png') 46 | 47 | if __name__ == '__main__': 48 | pass 49 | -------------------------------------------------------------------------------- /src/gdpx/reactor/reactor.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import abc 6 | import dataclasses 7 | import pathlib 8 | from typing import Union 9 | 10 | import numpy as np 11 | 12 | from ase import Atoms 13 | 14 | from ..core.node import AbstractNode 15 | 16 | 17 | """Find possible reaction pathways in given structures. 18 | """ 19 | 20 | 21 | class AbstractReactor(AbstractNode): 22 | 23 | """Base class of an arbitrary reactor. 24 | 25 | A valid reactor may contain the following components: 26 | - A builder that offers input structures 27 | - A worker that manages basic dynamics task (minimisation and MD) 28 | - driver with two calculator for PES and BIAS 29 | - scheduler 30 | - A miner that finds saddle points and MEPs 31 | and the results would be trajectories and pathways for further analysis. 32 | 33 | """ 34 | 35 | def __init__(self, calc, directory: Union[str, pathlib.Path] = "./", random_seed: int = None, *args, **kwargs): 36 | """""" 37 | super().__init__(directory, random_seed, *args, **kwargs) 38 | self.calc = calc 39 | 40 | return 41 | 42 | def reset(self): 43 | """""" 44 | self.calc.reset() 45 | 46 | return 47 | 48 | @abc.abstractmethod 49 | def run(self, structures, read_cache: bool=True, *args, **kwargs): 50 | """""" 51 | if not self.directory.exists(): 52 | self.directory.mkdir(parents=True) 53 | 54 | return 55 | 56 | @abc.abstractmethod 57 | def read_convergence(self, *args, **kwargs) -> bool: 58 | """""" 59 | converged = False 60 | 61 | return converged 62 | 63 | 64 | if __name__ == "__main__": 65 | ... -------------------------------------------------------------------------------- /src/gdpx/reactor/string/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from .pathway import AseStringReactor 6 | from .cp2k import Cp2kStringReactor 7 | from .vasp import VaspStringReactor 8 | from .grid import ZeroStringReactor 9 | 10 | 11 | if __name__ == "__main__": 12 | ... 13 | -------------------------------------------------------------------------------- /src/gdpx/scheduler/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | 5 | """Create scheduler based on parameters 6 | 7 | This module includes several schedulers. 8 | 9 | Example: 10 | 11 | .. 
code-block:: python 12 | 13 | >>> from gdpx.scheduler.local import LocalScheduler 14 | >>> params = dict() 15 | >>> scheduler = LocalScheduler(**params) 16 | 17 | """ 18 | 19 | from .. import config 20 | from ..core.register import registers 21 | 22 | from .local import LocalScheduler 23 | 24 | registers.scheduler.register(LocalScheduler) 25 | 26 | from .lsf import LsfScheduler 27 | 28 | registers.scheduler.register(LsfScheduler) 29 | 30 | from .pbs import PbsScheduler 31 | 32 | registers.scheduler.register(PbsScheduler) 33 | 34 | from .slurm import SlurmScheduler 35 | 36 | registers.scheduler.register(SlurmScheduler) 37 | 38 | try: 39 | from .remote import RemoteSlurmScheduler 40 | registers.scheduler.register(RemoteSlurmScheduler) 41 | except Exception as e: 42 | config._print(f"Module {'remote'} import failed: {e}") 43 | 44 | 45 | if __name__ == "__main__": 46 | ... 47 | -------------------------------------------------------------------------------- /src/gdpx/scheduler/interface.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | from gdpx.core.variable import Variable 6 | from gdpx.core.register import registers 7 | 8 | 9 | @registers.variable.register 10 | class SchedulerVariable(Variable): 11 | 12 | def __init__(self, directory="./", **kwargs): 13 | """""" 14 | scheduler_params = copy.deepcopy(kwargs) 15 | backend = scheduler_params.pop("backend", "local") 16 | scheduler = registers.create( 17 | "scheduler", backend, convert_name=True, **scheduler_params 18 | ) 19 | super().__init__(initial_value=scheduler, directory=directory) 20 | 21 | return 22 | 23 | 24 | if __name__ == "__main__": 25 | ... -------------------------------------------------------------------------------- /src/gdpx/scheduler/local.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | from typing import NoReturn 5 | 6 | from .scheduler import AbstractScheduler 7 | 8 | 9 | class LocalScheduler(AbstractScheduler): 10 | """Local scheduler.""" 11 | 12 | name: str = "local" 13 | 14 | @AbstractScheduler.job_name.setter 15 | def job_name(self, job_name_: str): 16 | self._job_name = job_name_ 17 | return 18 | 19 | def submit(self) -> NoReturn: 20 | """No submit is performed.""" 21 | 22 | return 23 | 24 | def is_finished(self) -> bool: 25 | """Check if the job were finished. 26 | 27 | Returns: 28 | Always return true. 29 | 30 | """ 31 | 32 | return True 33 | 34 | 35 | if __name__ == "__main__": 36 | ... 
37 | -------------------------------------------------------------------------------- /src/gdpx/scheduler/pbs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import re 5 | import subprocess 6 | import json 7 | import pathlib 8 | 9 | from .scheduler import AbstractScheduler 10 | 11 | 12 | class PbsScheduler(AbstractScheduler): 13 | 14 | name = "pbs" 15 | 16 | PREFIX = "#" 17 | SUFFIX = ".pbs" 18 | SHELL = "#!/bin/bash -l" 19 | 20 | SUBMIT_COMMAND = "qsub" 21 | ENQUIRE_COMMAND = "qstat" 22 | 23 | default_parameters = {} 24 | 25 | running_status = ["R", "Q", "PD", "CG"] 26 | 27 | def __str__(self): 28 | """Return the content of the job script.""" 29 | # - slurm params 30 | content = self.SHELL + "\n" 31 | for key, value in self.parameters.items(): 32 | if value: 33 | content += "{} --{}={}\n".format(self.PREFIX, key, value) 34 | # else: 35 | # raise ValueError("Keyword *%s* not properly set." %key) 36 | 37 | if self.environs: 38 | content += "\n\n" 39 | content += self.environs 40 | 41 | if self.user_commands: 42 | content += "\n\n" 43 | content += self.user_commands 44 | 45 | return content 46 | 47 | def is_finished(self) -> bool: 48 | """Not implemented yet.""" 49 | 50 | raise NotImplementedError() 51 | 52 | 53 | if __name__ == "__main__": 54 | ... 55 | -------------------------------------------------------------------------------- /src/gdpx/selector/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import warnings 5 | 6 | 7 | from ..core.register import registers 8 | 9 | from .basin import BasinSelector 10 | 11 | registers.selector.register(BasinSelector) 12 | 13 | from .compare import CompareSelector 14 | 15 | registers.selector.register(CompareSelector) 16 | 17 | from .interval import IntervalSelector 18 | 19 | registers.selector.register(IntervalSelector) 20 | 21 | from .invariant import InvariantSelector 22 | 23 | registers.selector.register(InvariantSelector) 24 | 25 | from .locate import LocateSelector 26 | 27 | registers.selector.register(LocateSelector) 28 | 29 | from .property import PropertySelector 30 | 31 | registers.selector.register(PropertySelector) 32 | 33 | from .random import RandomSelector 34 | 35 | registers.selector.register(RandomSelector) 36 | 37 | from .scf import ScfSelector 38 | 39 | registers.selector.register(ScfSelector) 40 | 41 | try: 42 | # TODO: This selector depends on an external package dscribe. 43 | from .descriptor import DescriptorSelector 44 | 45 | registers.selector.register(DescriptorSelector) 46 | except ImportError as e: 47 | warnings.warn(f"Module DescriptorSelector import failed: {e}", UserWarning) 48 | 49 | 50 | if __name__ == "__main__": 51 | ... 
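# A selection sketch (illustrative only; keyword handling is inferred from
# `default_parameters` in selector/random.py and `select` from selector/composition.py):
#
#   from gdpx.data.array import AtomsNDArray
#   selector = RandomSelector(number=[2, 0.5])
#   data = AtomsNDArray(frames)   # frames: a list of ase.Atoms
#   selector.select(data)         # keeps only the randomly chosen markers on `data`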
52 | -------------------------------------------------------------------------------- /src/gdpx/selector/composition.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | from pathlib import Path 6 | from typing import Union, List, NoReturn 7 | 8 | import numpy as np 9 | 10 | from ase import Atoms 11 | from ase.io import read, write 12 | 13 | from ..data.array import AtomsNDArray 14 | from .selector import AbstractSelector 15 | 16 | 17 | class ComposedSelector(AbstractSelector): 18 | """Perform several selections consecutively.""" 19 | 20 | name = "composed" 21 | 22 | default_parameters = dict(selectors=[]) 23 | 24 | def __init__( 25 | self, selectors: List[AbstractSelector], directory="./", *args, **kwargs 26 | ): 27 | """""" 28 | super().__init__(directory=directory, *args, **kwargs) 29 | 30 | self.selectors = selectors 31 | 32 | return 33 | 34 | def _mark_structures(self, frames: AtomsNDArray, *args, **kwargs) -> None: 35 | """Return selected indices.""" 36 | # - update selectors' directories 37 | for s in self.selectors: 38 | s.directory = self._directory 39 | 40 | # - initial index stuff 41 | curr_frames = frames 42 | 43 | # - run selection 44 | for i, node in enumerate(self.selectors): 45 | # - adjust name 46 | prev_fname = node._fname 47 | node.fname = str(i) + "-" + prev_fname 48 | # - map indices 49 | # TODO: use _select_indices instead? 50 | node.select(curr_frames) 51 | 52 | node.fname = prev_fname 53 | 54 | return 55 | 56 | 57 | if __name__ == "__main__": 58 | ... 59 | -------------------------------------------------------------------------------- /src/gdpx/selector/invariant.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | from typing import List 6 | from pathlib import Path 7 | 8 | import numpy as np 9 | 10 | from ase import Atoms 11 | 12 | from .selector import AbstractSelector 13 | 14 | 15 | class InvariantSelector(AbstractSelector): 16 | """Perform an invariant selection.""" 17 | 18 | name = "invariant" 19 | 20 | default_parameters = dict() 21 | 22 | def __init__(self, *args, **kwargs): 23 | """""" 24 | super().__init__(*args, **kwargs) 25 | 26 | return 27 | 28 | def _mark_structures(self, frames: List[Atoms], *args, **kwargs) -> None: 29 | """Return selected indices.""" 30 | 31 | return 32 | 33 | 34 | if __name__ == "__main__": 35 | ... 
36 | -------------------------------------------------------------------------------- /src/gdpx/selector/random.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import itertools 5 | 6 | from typing import Optional 7 | 8 | import numpy as np 9 | 10 | from ..data.array import AtomsNDArray 11 | from .selector import AbstractSelector 12 | 13 | 14 | class RandomSelector(AbstractSelector): 15 | 16 | name = "random" 17 | 18 | default_parameters = dict(number=[4, 0.2]) 19 | 20 | """""" 21 | 22 | def __init__( 23 | self, directory="./", axis: Optional[int] = None, *args, **kwargs 24 | ) -> None: 25 | """""" 26 | super().__init__(directory, axis, *args, **kwargs) 27 | 28 | return 29 | 30 | def _mark_structures(self, data: AtomsNDArray, *args, **kwargs) -> None: 31 | """""" 32 | marker_groups = self.group_structures_by_axis(data, self.axis) 33 | 34 | selected_markers = [] 35 | for grp_name, markers in marker_groups.items(): 36 | num_markers = len(markers) 37 | num_selected = self._parse_selection_number(num_markers) 38 | if num_selected > 0: 39 | curr_selected_markers = self.rng.choice( 40 | markers, size=num_selected, replace=False 41 | ) 42 | selected_markers.extend(curr_selected_markers) 43 | else: 44 | ... 45 | 46 | data.markers = np.array(selected_markers) 47 | 48 | return 49 | 50 | @staticmethod 51 | def group_structures_by_axis(data: AtomsNDArray, axis: Optional[int] = None): 52 | # - group markers 53 | if axis is not None: 54 | ndim = len(data.shape) 55 | if axis < -ndim or axis > ndim: 56 | raise IndexError(f"axis {axis} is out of dimension {ndim}.") 57 | if axis < 0: 58 | axis = ndim + axis 59 | 60 | marker_groups = {} 61 | for k, v in itertools.groupby(data.markers, key=lambda x: x[axis]): 62 | if k in marker_groups: 63 | marker_groups[k].extend(list(v)) 64 | else: 65 | marker_groups[k] = list(v) 66 | else: 67 | marker_groups = dict(all=data.markers) 68 | 69 | return marker_groups 70 | 71 | 72 | if __name__ == "__main__": 73 | ... 74 | -------------------------------------------------------------------------------- /src/gdpx/selector/scf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import numpy as np 6 | 7 | from ..data.array import AtomsNDArray 8 | from ..data.extatoms import ScfErrAtoms 9 | from .selector import AbstractSelector 10 | 11 | 12 | class ScfSelector(AbstractSelector): 13 | 14 | name = "scf" 15 | 16 | default_parameters = dict(scf_converged=True) 17 | 18 | def _mark_structures(self, data: AtomsNDArray, *args, **kwargs) -> None: 19 | """""" 20 | markers, structures = data.markers, data.get_marked_structures() 21 | if self.parameters["scf_converged"]: 22 | selected_indices = [ 23 | i for i, a in enumerate(structures) if not isinstance(a, ScfErrAtoms) 24 | ] 25 | else: 26 | selected_indices = [ 27 | i for i, a in enumerate(structures) if not isinstance(a, ScfErrAtoms) 28 | ] 29 | selected_markers = [markers[i] for i in selected_indices] 30 | 31 | data.markers = np.array(selected_markers) 32 | 33 | return 34 | 35 | 36 | if __name__ == "__main__": 37 | ... 
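# NOTE (illustrative, not part of the original file): both branches of the
# `scf_converged` check above apply the same filter; a sketch of the presumably
# intended behaviour, where `scf_converged=False` keeps the SCF-failed structures:
#
#   if self.parameters["scf_converged"]:
#       selected_indices = [
#           i for i, a in enumerate(structures) if not isinstance(a, ScfErrAtoms)
#       ]
#   else:
#       selected_indices = [
#           i for i, a in enumerate(structures) if isinstance(a, ScfErrAtoms)
#       ]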
38 | -------------------------------------------------------------------------------- /src/gdpx/trainer/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import itertools 5 | import pathlib 6 | from typing import NoReturn, List, Union 7 | 8 | import numpy as np 9 | 10 | from ase.io import read, write 11 | 12 | from .. import config 13 | from ..core.register import registers 14 | from ..utils.command import parse_input_file 15 | 16 | 17 | """""" 18 | 19 | 20 | def run_newtrainer(configuration, directory): 21 | """""" 22 | config._print(f"{configuration = }") 23 | params = parse_input_file(configuration) 24 | 25 | # - create trainer 26 | name = params["trainer"].get("name", None) 27 | trainer = registers.create( 28 | "trainer", name, convert_name=True, **params["trainer"] 29 | ) 30 | trainer.directory = directory 31 | 32 | # - create dataset 33 | name = params["dataset"].get("name", None) 34 | dataset = registers.create( 35 | "dataloader", name, convert_name=True, **params["dataset"] 36 | ) 37 | 38 | # - other options 39 | init_model = params.get("init_model", None) 40 | 41 | # TODO: merge below two into one func? 42 | trainer.train(dataset, init_model=init_model) 43 | trainer.freeze() 44 | 45 | return 46 | 47 | 48 | if __name__ == "__main__": 49 | ... 50 | -------------------------------------------------------------------------------- /src/gdpx/utils/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | if __name__ == "__main__": 6 | ... -------------------------------------------------------------------------------- /src/gdpx/utils/cmdrun.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import os 6 | import subprocess 7 | 8 | from ase.calculators.calculator import EnvironmentError, CalculationFailed 9 | 10 | 11 | def run_ase_calculator(name: str, command, directory): 12 | """Run vasp from the command. 13 | 14 | ASE Vasp does not treat restart of a MD simulation well. Therefore, we run 15 | directly from the command if some input files aready exist. 16 | 17 | For example, we use existed INCAR for VASP. 18 | 19 | """ 20 | try: 21 | proc = subprocess.Popen(command, shell=True, cwd=directory) 22 | except OSError as err: 23 | # Actually this may never happen with shell=True, since 24 | # probably the shell launches successfully. But we soon want 25 | # to allow calling the subprocess directly, and then this 26 | # distinction (failed to launch vs failed to run) is useful. 27 | msg = f"Failed to execute `{command}`" 28 | raise EnvironmentError(msg) from err 29 | 30 | errorcode = proc.wait() 31 | 32 | if errorcode: 33 | path = os.path.abspath(directory) 34 | msg = ( 35 | f"Calculator `{name}` failed with command `{command}` " 36 | + f"failed in `{path}` with error code `{errorcode}`" 37 | ) 38 | raise CalculationFailed(msg) 39 | 40 | return 41 | 42 | 43 | if __name__ == "__main__": 44 | ... 
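# A usage sketch (the command and directory below are placeholders, not defaults):
#
#   run_ase_calculator("vasp", command="mpirun -np 4 vasp_std", directory="./0000.run")
#
# On a non-zero exit code this raises ase's CalculationFailed with the command,
# the working directory, and the error code in the message.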
45 | -------------------------------------------------------------------------------- /src/gdpx/utils/geometry.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import copy 5 | from typing import List 6 | 7 | from ase import Atoms 8 | from ase.geometry import find_mic 9 | from ase.calculators.singlepoint import SinglePointCalculator 10 | 11 | 12 | def wrap_traj(frames: List[Atoms]): 13 | """Align positions according to the first frame. 14 | 15 | This is necessary for computing physical quantities base on atomic positions 16 | with periodic boundary conditions. 17 | 18 | NOTE: 19 | This only works for fixed cell systems. 20 | 21 | TODO: 22 | Variable cell systems? 23 | 24 | """ 25 | cell = frames[0].get_cell(complete=True) 26 | nframes = len(frames) 27 | for i in range(1,nframes): 28 | results = copy.deepcopy(frames[i].calc.results) 29 | prev_positions = frames[i-1].get_positions() 30 | curr_positions = frames[i].get_positions() 31 | shift = curr_positions - prev_positions 32 | curr_vectors, curr_distances = find_mic(shift, cell, pbc=True) 33 | frames[i].positions = prev_positions + curr_vectors 34 | frames[i].calc = SinglePointCalculator(frames[i], **results) 35 | 36 | return frames 37 | 38 | 39 | if __name__ == "__main__": 40 | ... -------------------------------------------------------------------------------- /src/gdpx/utils/plot_dimer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | plot dimer curve 6 | """ 7 | 8 | import argparse 9 | from pathlib import Path 10 | 11 | import numpy as np 12 | 13 | import matplotlib 14 | matplotlib.use('Agg') #silent mode 15 | import matplotlib.pyplot as plt 16 | 17 | from ase.io import read, write 18 | 19 | def parse_dimer_frames(xyzfile): 20 | frames = read(xyzfile, ':16') 21 | dimer_symbols = frames[0].get_chemical_symbols() 22 | 23 | data = [] 24 | for atoms in frames: 25 | assert len(atoms) == 2 26 | energy = atoms.get_potential_energy() 27 | dist = np.linalg.norm(atoms[0].position-atoms[1].position) 28 | data.append([dist,energy]) 29 | data = np.array(data) 30 | 31 | return dimer_symbols, data 32 | 33 | def parse_dimer_from_files(prefix): 34 | distances, energies = [], [] 35 | for p in Path.cwd().glob(prefix): 36 | vasprun = p / 'vasprun.xml' 37 | atoms = read(vasprun, format='vasp-xml') 38 | distances.append(np.linalg.norm(atoms[0].position-atoms[1].position)) 39 | energies.append(atoms.get_potential_energy()) 40 | 41 | return distances, energies 42 | 43 | def harvest_dimer_from_files(prefix): 44 | dimer_dirs = [] 45 | for p in Path.cwd().glob(prefix): 46 | dimer_dirs.append(str(p)) 47 | 48 | dimer_dirs.sort() 49 | 50 | dimer_frames = [] 51 | for p in dimer_dirs: 52 | p = Path(p) 53 | vasprun = p / 'vasprun.xml' 54 | atoms = read(vasprun, format='vasp-xml') 55 | dimer_frames.append(atoms) 56 | 57 | write('dimer.xyz', dimer_frames) 58 | 59 | return 60 | 61 | if __name__ == '__main__': 62 | parser = argparse.ArgumentParser() 63 | parser.add_argument( 64 | '-p', '--prefix', 65 | help='time series files' 66 | ) 67 | 68 | args = parser.parse_args() 69 | 70 | harvest_dimer_from_files('Pt2-*') 71 | exit() 72 | 73 | #symbols, data = parse_dimer_frames('./evaluated.xyz') 74 | #distances, energies = parse_dimer_from_files('O2-*') 75 | distances, energies = parse_dimer_from_files(args.prefix) 76 | 77 | fig, ax = plt.subplots(nrows=1, ncols=1, 
figsize=(12,8)) 78 | ax.set_title( 79 | 'Dimer', 80 | fontsize=20, 81 | fontweight='bold' 82 | ) 83 | 84 | ax.set_xlabel('Distance [Å]', fontsize=16) 85 | ax.set_ylabel('Energyr [eV]', fontsize=16) 86 | 87 | ax.scatter(distances, energies) 88 | 89 | plt.savefig('dimer.png') 90 | -------------------------------------------------------------------------------- /src/gdpx/utils/strconv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from typing import List 6 | 7 | import numpy as np 8 | 9 | 10 | def str2list_int(inp: str, convention: str = "lmp") -> List[int]: 11 | """Convert a string to a List of int. 12 | 13 | Args: 14 | inp: A string contains numbers and colons. 15 | convention: The input convention either `lmp` or `ase`. 16 | lmp index starts from 1 and includes the last. 17 | 18 | Examples: 19 | >>> str2list_int("1:2 4:6", "lmp") 20 | >>> [0, 1, 3, 4, 5] 21 | >>> str2list_int("1:2 4:6", "ase") 22 | >>> [1, 4, 5] 23 | 24 | Returns: 25 | A List of integers. 26 | 27 | """ 28 | ret = [] 29 | for x in inp.strip().split(): 30 | curr_range = list(map(int, x.split(":"))) 31 | if len(curr_range) == 1: 32 | start, end = curr_range[0], curr_range[0] 33 | else: 34 | start, end = curr_range 35 | if convention == "lmp": 36 | ret.extend([i - 1 for i in list(range(start, end + 1))]) 37 | elif convention == "ase": 38 | ret.extend(list(range(start, end))) 39 | else: 40 | ... 41 | 42 | # remove duplicates 43 | # ret = sorted(list(set(ret))) 44 | ret = list(set(ret)) 45 | 46 | return ret 47 | 48 | 49 | def str2array(inp: str): 50 | """Convert a string to a np.array using np.arange. 51 | 52 | The endpoint is always included. 53 | 54 | """ 55 | ret = [] 56 | for x in inp.strip().split(): 57 | curr_range = list(map(float, x.split(":"))) 58 | if len(curr_range) == 1: 59 | start, end, step = curr_range[0], curr_range[0] + 0.01, 1e8 60 | elif len(curr_range) == 3: 61 | start, end, step = curr_range 62 | end += step * 1e-8 63 | else: 64 | raise RuntimeError(f"Invalid range `{curr_range}`.") 65 | ret.extend(np.arange(start, end, step).tolist()) 66 | 67 | # Donot sort entries and just keep it as what it is 68 | # ret = np.array(sorted(ret)) 69 | ret = np.array(ret) 70 | 71 | return ret 72 | 73 | 74 | if __name__ == "__main__": 75 | ... 
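# Examples for `str2array` (illustrative; the endpoint is included because the
# implementation pads `end` by `step * 1e-8`):
#
#   >>> str2array("1.0:2.0:0.5")
#   array([1. , 1.5, 2. ])
#   >>> str2array("0.5")
#   array([0.5])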
76 | -------------------------------------------------------------------------------- /src/gdpx/validator/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import warnings 5 | import pathlib 6 | from pathlib import Path 7 | from typing import NoReturn, List, Union 8 | 9 | from ..core.register import registers 10 | 11 | from .dimer import DimerValidator 12 | registers.validator.register("dimer")(DimerValidator) 13 | 14 | from .trimer import TrimerValidator 15 | registers.validator.register("trimer")(TrimerValidator) 16 | 17 | from .mdf import MassDistributionValidator 18 | registers.validator.register("mass_distribution")(MassDistributionValidator) 19 | 20 | from .rdf import RdfValidator 21 | registers.validator.register("radial_distribution")(RdfValidator) 22 | 23 | from .eos import EquationOfStateValidator 24 | registers.validator.register("equation_of_state")(EquationOfStateValidator) 25 | 26 | from .melting_point import MeltingPointValidator 27 | registers.validator.register("melting_point")(MeltingPointValidator) 28 | 29 | from .minima import MinimaValidator 30 | registers.validator.register("minima")(MinimaValidator) 31 | 32 | from .spc import SinglepointValidator 33 | registers.validator.register("spc")(SinglepointValidator) 34 | 35 | from .surface_energy import SurfaceEnergyValidator 36 | registers.validator.register("surface_energy")(SurfaceEnergyValidator) 37 | 38 | from .diffusion_coefficient import DiffusionCoefficientValidator 39 | registers.validator.register("diffusion_coefficient")(DiffusionCoefficientValidator) 40 | 41 | from .rank import RankValidator 42 | registers.validator.register("rank")(RankValidator) 43 | 44 | from .rxn import PathwayValidator 45 | registers.validator.register("mep")(PathwayValidator) 46 | 47 | 48 | """ 49 | Various properties to be validated 50 | 51 | Atomic Energy and Crystal Lattice constant 52 | 53 | Elastic Constants 54 | 55 | Phonon Calculations 56 | 57 | Point Defects (vacancies, self interstitials, ...) 58 | 59 | Surface energies 60 | 61 | Diffusion Coefficient 62 | 63 | Adsorption, Reaction, ... 64 | """ 65 | 66 | 67 | def run_validation(params: dict, directory: Union[str, pathlib.Path], potter): 68 | """ This is a factory to deal with various validations... 69 | """ 70 | # run over validations 71 | directory = pathlib.Path(directory) 72 | 73 | raise NotImplementedError("Command Line Validation is NOT Suppoted.") 74 | 75 | 76 | if __name__ == "__main__": 77 | ... 78 | -------------------------------------------------------------------------------- /src/gdpx/validator/rank.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | from typing import Mapping 6 | 7 | from ..data.array import AtomsNDArray 8 | from .validator import AbstractValidator 9 | 10 | class RankValidator(AbstractValidator): 11 | 12 | def run(self, dataset: Mapping[str, AtomsNDArray], *args, **kwargs): 13 | """""" 14 | super().run(*args, **kwargs) 15 | 16 | # - 17 | prediction = dataset["prediction"].get_marked_structures() 18 | reference = dataset["reference"].get_marked_structures() 19 | self._print(prediction) 20 | self._print(reference) 21 | 22 | return 23 | 24 | 25 | if __name__ == "__main__": 26 | ... 
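Each validator above is wired into the global registry with the same decorator-style call. The snippet below is a toy, self-contained illustration of that idiom, included only to make registers.validator.register("dimer")(DimerValidator) explicit; it is not the implementation in gdpx.core.register:

class ToyRegistry:
    def __init__(self):
        self._entries = {}

    def register(self, name):
        def decorator(cls):
            self._entries[name] = cls
            return cls  # the class is returned unchanged, so plain imports keep working
        return decorator

    def create(self, name, *args, **kwargs):
        return self._entries[name](*args, **kwargs)

validators = ToyRegistry()

class ToyDimerValidator:
    ...

validators.register("dimer")(ToyDimerValidator)
assert isinstance(validators.create("dimer"), ToyDimerValidator)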
-------------------------------------------------------------------------------- /src/gdpx/validator/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | from typing import List 5 | 6 | import numpy as np 7 | from scipy.interpolate import make_interp_spline, BSpline 8 | 9 | from ase import Atoms 10 | from ase.geometry import find_mic 11 | 12 | def get_properties(frames: List[Atoms], other_props = []): 13 | """Get properties of frames for comparison. 14 | 15 | Currently, only total energy and forces are considered. 16 | 17 | Returns: 18 | tot_symbols: shape (nframes,) 19 | tot_energies: shape (nframes,) 20 | tot_forces: shape (nframes,3) 21 | 22 | """ 23 | tot_symbols, tot_energies, tot_forces = [], [], [] 24 | 25 | for atoms in frames: # free energy per atom 26 | # -- basic info 27 | symbols = atoms.get_chemical_symbols() 28 | tot_symbols.extend(symbols) 29 | 30 | # -- energy 31 | energy = atoms.get_potential_energy() 32 | tot_energies.append(energy) 33 | 34 | # -- force 35 | forces = atoms.get_forces(apply_constraint=False) 36 | tot_forces.extend(forces.tolist()) 37 | 38 | return tot_symbols, tot_energies, tot_forces 39 | 40 | def wrap_traj(frames): 41 | """Align positions according to the first frame. 42 | 43 | This is necessary for computing physical quantities base on atomic positions 44 | with periodic boundary conditions. 45 | 46 | NOTE: 47 | This only works for fixed cell systems. 48 | 49 | TODO: 50 | Variable cell systems? 51 | 52 | """ 53 | cell = frames[0].get_cell(complete=True) 54 | nframes = len(frames) 55 | for i in range(1,nframes): 56 | prev_positions = frames[i-1].get_positions() 57 | curr_positions = frames[i].get_positions() 58 | shift = curr_positions - prev_positions 59 | curr_vectors, curr_distances = find_mic(shift, cell, pbc=True) 60 | frames[i].positions = prev_positions + curr_vectors 61 | 62 | return frames 63 | 64 | def smooth_curve(bins, points): 65 | """""" 66 | spl = make_interp_spline(bins, points, k=3) 67 | bins = np.linspace(bins.min(), bins.max(), 300) 68 | points= spl(bins) 69 | 70 | for i, d in enumerate(points): 71 | if d < 1e-6: 72 | points[i] = 0.0 73 | 74 | return bins, points 75 | 76 | 77 | if __name__ == "__main__": 78 | ... -------------------------------------------------------------------------------- /src/gdpx/validator/validator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import abc 5 | import logging 6 | import pathlib 7 | from typing import NoReturn, Union, Callable 8 | 9 | from gdpx import config 10 | 11 | 12 | class AbstractValidator(abc.ABC): 13 | 14 | _print: Callable = config._print 15 | _debug: Callable = config._debug 16 | 17 | _directory = pathlib.Path.cwd() 18 | 19 | def __init__(self, directory: Union[str,pathlib.Path]="./", *args, **kwargs): 20 | """ 21 | """ 22 | self.directory = directory 23 | 24 | self.njobs = config.NJOBS 25 | 26 | return 27 | 28 | @property 29 | def directory(self): 30 | """""" 31 | 32 | return self._directory 33 | 34 | @directory.setter 35 | def directory(self, directory_): 36 | """""" 37 | directory_ = pathlib.Path(directory_) 38 | self._directory = directory_ 39 | 40 | return 41 | 42 | @abc.abstractmethod 43 | def run(self, *args, **kwargs): 44 | """""" 45 | if not self.directory.exists(): 46 | self.directory.mkdir(parents=True) 47 | 48 | return 49 | 50 | 51 | if __name__ == "__main__": 52 | ... 
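Note that wrap_traj in validator/utils.py mirrors the copy in src/gdpx/utils/geometry.py except that it does not restore the single-point calculator results. The base class in validator.py asks subclasses only to implement run() and to call super().run() so the working directory exists; a hypothetical minimal subclass, for illustration only:

from gdpx.validator.validator import AbstractValidator

class EchoValidator(AbstractValidator):
    """Toy validator that records how many structures it received."""

    def run(self, frames, *args, **kwargs):
        super().run(*args, **kwargs)  # creates self.directory if needed
        (self.directory / "nframes.txt").write_text(f"{len(frames)}\n")
        return

validator = EchoValidator(directory="./_echo")
validator.run([])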
-------------------------------------------------------------------------------- /src/gdpx/worker/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | if __name__ == "__main__": 6 | pass -------------------------------------------------------------------------------- /src/gdpx/worker/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | from typing import List 7 | 8 | from ase import Atoms 9 | 10 | 11 | def get_file_md5(f): 12 | import hashlib 13 | m = hashlib.md5() 14 | while True: 15 | # if not using binary 16 | #data = f.read(1024).encode('utf-8') 17 | data = f.read(1024) # read in block 18 | if not data: 19 | break 20 | m.update(data) 21 | return m.hexdigest() 22 | 23 | 24 | def copy_minimal_frames(prev_frames: List[Atoms]): 25 | """Copy atoms without extra information. 26 | 27 | Do not copy atoms.info since it is a dict and does not maitain order. 28 | 29 | """ 30 | curr_frames, curr_info = [], [] 31 | for prev_atoms in prev_frames: 32 | # - copy geometry 33 | curr_atoms = Atoms( 34 | symbols=copy.deepcopy(prev_atoms.get_chemical_symbols()), 35 | positions=copy.deepcopy(prev_atoms.get_positions()), 36 | cell=copy.deepcopy(prev_atoms.get_cell(complete=True)), 37 | pbc=copy.deepcopy(prev_atoms.get_pbc()), 38 | tags = prev_atoms.get_tags() # retain this for molecules 39 | ) 40 | if prev_atoms.get_kinetic_energy() > 0.: # retain this for MD 41 | curr_atoms.set_momenta(prev_atoms.get_momenta()) 42 | curr_frames.append(curr_atoms) 43 | # - save info 44 | confid = prev_atoms.info.get("confid", -1) 45 | dynstep = prev_atoms.info.get("step", -1) 46 | prev_wdir = prev_atoms.info.get("wdir", "null") 47 | curr_info.append((confid,dynstep,prev_wdir)) 48 | 49 | return curr_frames, curr_info 50 | 51 | 52 | def read_cache_info(wdir, length=36): 53 | # - read extra info data 54 | _info_data = [] 55 | for p in (wdir/"_data").glob("*_info.txt"): 56 | identifier = p.name[:length] # MD5 57 | with open(p, "r") as fopen: 58 | for line in fopen.readlines(): 59 | if not line.startswith("#"): 60 | _info_data.append(line.strip().split()) 61 | _info_data = sorted(_info_data, key=lambda x: int(x[0])) 62 | 63 | return _info_data 64 | 65 | 66 | if __name__ == "__main__": 67 | ... 
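A short usage sketch for the helpers above, illustrative only and assuming the module is importable as gdpx.worker.utils:

from ase.build import molecule
from ase.io import write
from gdpx.worker.utils import copy_minimal_frames, get_file_md5

frames = [molecule("H2O"), molecule("CH4")]
clean_frames, info = copy_minimal_frames(frames)
# No confid/step/wdir entries were set on atoms.info, so the defaults come back.
print(info)  # [(-1, -1, 'null'), (-1, -1, 'null')]

# get_file_md5 expects a file object opened in binary mode.
write("structures.xyz", clean_frames)  # temporary file, only for the checksum demo
with open("structures.xyz", "rb") as fopen:
    print(get_file_md5(fopen))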
-------------------------------------------------------------------------------- /tests/assets/Cu-fcc-s111p22.xyz: -------------------------------------------------------------------------------- 1 | 16 2 | Lattice="5.08575768899054 0.0 0.0 0.0 4.40439535615784 1.24766366054662e-15 0.0 0.0 30.3812594112192" Properties=species:S:1:pos:R:3 pbc="T T T" 3 | Cu -0.00000000 1.46813179 0.00000000 4 | Cu -0.00000000 2.93626357 2.07625188 5 | Cu 0.00000000 0.00000000 4.15250376 6 | Cu -0.00000000 1.46813179 6.22875565 7 | Cu 1.27143942 3.67032946 0.00000000 8 | Cu 1.27143942 0.73406589 2.07625188 9 | Cu 1.27143942 3.67032946 6.22875565 10 | Cu 1.27143942 2.20219768 4.15250376 11 | Cu 2.54287884 1.46813179 0.00000000 12 | Cu 2.54287884 2.93626357 2.07625188 13 | Cu 2.54287884 0.00000000 4.15250376 14 | Cu 2.54287884 1.46813179 6.22875565 15 | Cu 3.81431827 3.67032946 0.00000000 16 | Cu 3.81431827 0.73406589 2.07625188 17 | Cu 3.81431827 3.67032946 6.22875565 18 | Cu 3.81431827 2.20219768 4.15250376 19 | -------------------------------------------------------------------------------- /tests/assets/dpmd-AlCuO-m0.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/tests/assets/dpmd-AlCuO-m0.pb -------------------------------------------------------------------------------- /tests/assets/dpmd-AlCuO-m1.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/tests/assets/dpmd-AlCuO-m1.pb -------------------------------------------------------------------------------- /tests/bias/gaussian/test_distance_gaussian.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import jax 6 | import jax.numpy as jnp 7 | import numpy as np 8 | 9 | from gdpx.bias.gaussian.distance import (compute_bias_forces, 10 | compute_colvar_and_gradient, 11 | compute_gaussian_and_gradient) 12 | 13 | 14 | def compute_distance_gaussian_jax(positions, a0, a1, sigma, omega, s_t): 15 | """""" 16 | s = jnp.linalg.norm(positions[a0, :] - positions[a1, :]) 17 | 18 | s1 = s - s_t 19 | s2 = s1**2 / 2.0 / sigma**2 20 | v = jnp.sum(omega * jnp.exp(-jnp.sum(s2, axis=1))) 21 | 22 | return v, s 23 | 24 | 25 | if __name__ == "__main__": 26 | from ase.io import read, write 27 | 28 | atoms = read("./packed-init-344.xyz") 29 | 30 | sigma, omega = np.array([0.2]), 0.5 31 | history_records = np.array([[2.4], [3.9]]) 32 | 33 | # compute by numpy 34 | s, dsdx = compute_colvar_and_gradient( 35 | atoms.cell, atoms.positions[[37, 38], :], pbc=True 36 | ) 37 | print(f"{s =}") 38 | print(f"{dsdx =}") 39 | 40 | v, dvds = compute_gaussian_and_gradient(s, history_records, sigma, omega) 41 | print(f"{v =}") 42 | print(f"{dvds =}") 43 | forces = compute_bias_forces(dvds, dsdx) 44 | print(f"{forces =}") 45 | 46 | # compute by jax 47 | (energy, distance), gradients = jax.value_and_grad( 48 | compute_distance_gaussian_jax, argnums=0, has_aux=True 49 | )(atoms.get_positions(), 37, 38, sigma=sigma, omega=omega, s_t=history_records) 50 | print(f"{distance =}") 51 | print(f"{energy =}") 52 | forces = -np.array(gradients)[[37, 38]] 53 | print(f"{forces =}") 54 | -------------------------------------------------------------------------------- /tests/bias/harmonic/test_distance_harmonic.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import jax 6 | import jax.numpy as jnp 7 | import numpy as np 8 | 9 | from gdpx.bias.harmonic.distance import ( 10 | compute_distance, compute_distance_harmonic_energy_and_forces) 11 | 12 | 13 | def compute_distance_harmonic_jax(positions, a0, a1, center, kspring): 14 | """""" 15 | distance = jnp.linalg.norm(positions[a0, :] - positions[a1, :]) 16 | 17 | energy = 0.5 * kspring * (distance - center) ** 2 18 | 19 | return energy, distance 20 | 21 | 22 | if __name__ == "__main__": 23 | from ase.io import read, write 24 | 25 | atoms = read("./packed-init-344.xyz") 26 | 27 | center, kspring = 2.8, 3.6 28 | 29 | # compute by numpy 30 | vec, dis = compute_distance(atoms.cell, atoms.positions[[37, 38], :], pbc=True) 31 | print(f"{dis =}") 32 | 33 | energy, forces = compute_distance_harmonic_energy_and_forces( 34 | vec, dis, center=center, kspring=kspring 35 | ) 36 | print(f"{energy =}") 37 | print(f"{forces =}") 38 | 39 | # compute by jax 40 | (energy, distance), gradients = jax.value_and_grad( 41 | compute_distance_harmonic_jax, argnums=0, has_aux=True 42 | )(atoms.get_positions(), 37, 38, center=center, kspring=kspring) 43 | print(f"{distance =}") 44 | print(f"{energy =}") 45 | forces = -np.array(gradients)[[37, 38]] 46 | print(f"{forces =}") 47 | -------------------------------------------------------------------------------- /tests/builder/assets/Pd38.xyz: -------------------------------------------------------------------------------- 1 | 38 2 | Lattice="23.0 0.0 0.0 0.0 24.0 0.0 0.0 0.0 25.0" Properties=species:S:1:pos:R:3 pbc="T T T" 3 | Pd 12.86841188 13.36841188 12.50000000 4 | Pd 15.51036085 13.33667194 12.50000000 5 | Pd 12.83667194 16.01036085 12.50000000 6 | Pd 10.13158812 13.36841188 12.50000000 7 | Pd 7.48963915 13.33667194 12.50000000 8 | Pd 10.16332806 16.01036085 12.50000000 9 | Pd 10.13158812 10.63158812 12.50000000 10 | Pd 7.48963915 10.66332806 12.50000000 11 | Pd 10.16332806 7.98963915 12.50000000 12 | Pd 12.86841188 10.63158812 12.50000000 13 | Pd 15.51036085 10.66332806 12.50000000 14 | Pd 12.83667194 7.98963915 12.50000000 15 | Pd 11.50000000 12.00000000 14.42230356 16 | Pd 14.30172608 12.00000000 14.47346917 17 | Pd 8.69827392 12.00000000 14.47346917 18 | Pd 11.50000000 14.80172608 14.47346917 19 | Pd 11.50000000 9.19827392 14.47346917 20 | Pd 14.17515143 14.67515143 14.38418179 21 | Pd 8.82484857 14.67515143 14.38418179 22 | Pd 8.82484857 9.32484857 14.38418179 23 | Pd 14.17515143 9.32484857 14.38418179 24 | Pd 11.50000000 12.00000000 10.57769644 25 | Pd 14.30172608 12.00000000 10.52653083 26 | Pd 8.69827392 12.00000000 10.52653083 27 | Pd 11.50000000 14.80172608 10.52653083 28 | Pd 11.50000000 9.19827392 10.52653083 29 | Pd 14.17515143 14.67515143 10.61581821 30 | Pd 8.82484857 14.67515143 10.61581821 31 | Pd 8.82484857 9.32484857 10.61581821 32 | Pd 14.17515143 9.32484857 10.61581821 33 | Pd 12.83894489 13.33894489 16.26074853 34 | Pd 10.16105511 13.33894489 16.26074853 35 | Pd 10.16105511 10.66105511 16.26074853 36 | Pd 12.83894489 10.66105511 16.26074853 37 | Pd 12.83894489 13.33894489 8.73925147 38 | Pd 10.16105511 13.33894489 8.73925147 39 | Pd 10.16105511 10.66105511 8.73925147 40 | Pd 12.83894489 10.66105511 8.73925147 41 | -------------------------------------------------------------------------------- /tests/builder/constraints/test_constraints.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | import numpy as np 7 | 8 | from ase import Atoms 9 | 10 | from gdpx.builder.constraints import convert_indices, parse_constraint_info 11 | 12 | @pytest.fixture(scope="function") 13 | def rng(): 14 | """Initialise a random number generator.""" 15 | rng = np.random.default_rng(seed=1112) 16 | 17 | return rng 18 | 19 | @pytest.fixture(autouse=True) 20 | def H2(): 21 | """Create a H2 molecule.""" 22 | atoms = Atoms( 23 | symbols="H2", positions=[[0., 0., 0.], [0., 0., 1.]], 24 | cell=np.eye(3)*10. 25 | ) 26 | 27 | return atoms 28 | 29 | def test_convert_indices(): 30 | """""" 31 | ret = convert_indices([1,2,3,6,7,8], index_convention="lmp") 32 | 33 | assert ret == "1:3 6:8" 34 | 35 | def test_parse_constraint_info(H2): 36 | """""" 37 | mobile_text, frozen_text = parse_constraint_info( 38 | atoms=H2, cons_text="1:2", ignore_ase_constraints=True, ret_text=True 39 | ) 40 | 41 | assert mobile_text == "" 42 | assert frozen_text == "1:2" 43 | 44 | if __name__ == "__main__": 45 | ... 46 | -------------------------------------------------------------------------------- /tests/builder/region/test_region.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | import numpy as np 7 | 8 | from ase import Atoms 9 | 10 | @pytest.fixture(scope="function") 11 | def rng(): 12 | """Initialise a random number generator.""" 13 | rng = np.random.default_rng(seed=1112) 14 | 15 | return rng 16 | 17 | @pytest.fixture(autouse=True) 18 | def H2(): 19 | """Create a H2 molecule.""" 20 | atoms = Atoms( 21 | symbols="H2", positions=[[0., 0., 0.], [0., 0., 1.]], 22 | cell=np.eye(3)*10. 23 | ) 24 | 25 | return atoms 26 | 27 | 28 | class TestRegion(): 29 | 30 | def test_sphere(self, rng): 31 | """Test sphere region.""" 32 | print("start one...") 33 | print(rng) 34 | print(rng.random(3)) 35 | x = "this" 36 | assert "h" in x 37 | 38 | def test_lattice(self, rng): 39 | print("start one...") 40 | print(rng) 41 | print(rng.random(3)) 42 | x = "this" 43 | assert "h" in x 44 | 45 | 46 | if __name__ == "__main__": 47 | ... -------------------------------------------------------------------------------- /tests/builder/test_dimer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | from gdpx.builder.dimer import DimerBuilder 7 | 8 | 9 | class TestDimerBuilder: 10 | 11 | def test_dimer(self): 12 | """""" 13 | elements = ["H", "H"] 14 | distances = [0.6, 1.0, 0.05] 15 | 16 | builder = DimerBuilder(elements, distances) 17 | frames = builder.run() 18 | 19 | nframes = len(frames) 20 | 21 | assert nframes == 9 22 | 23 | 24 | if __name__ == "__main__": 25 | ... -------------------------------------------------------------------------------- /tests/builder/test_molecule.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | from gdpx.builder.species import MoleculeBuilder 7 | 8 | 9 | def test_molecule(): 10 | """""" 11 | builder = MoleculeBuilder(name="H2O") 12 | structures = builder.run() 13 | 14 | water = structures[0] 15 | 16 | assert len(water) == 3 17 | 18 | 19 | if __name__ == "__main__": 20 | ... 
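test_dimer only checks the number of generated frames; combined with any cheap calculator the same builder yields a quick dimer curve. A sketch, illustrative only (ASE's EMT stands in for whatever potential is actually under study and is not what these tests target):

import numpy as np
from ase.calculators.emt import EMT
from gdpx.builder.dimer import DimerBuilder

builder = DimerBuilder(["H", "H"], [0.6, 1.0, 0.05])
frames = builder.run()
assert len(frames) == 9

for atoms in frames:
    atoms.calc = EMT()
    distance = np.linalg.norm(atoms[0].position - atoms[1].position)
    print(f"{distance:.2f} Ang  {atoms.get_potential_energy():.4f} eV")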
-------------------------------------------------------------------------------- /tests/builder/test_packer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | from ase.io import write 7 | 8 | from gdpx.builder.species import MoleculeBuilder 9 | from gdpx.builder.packer import PackerBuilder 10 | 11 | 12 | def test_packer(): 13 | """""" 14 | water = MoleculeBuilder(name="H2O").run()[0] 15 | builder = PackerBuilder( 16 | substrates=[water], numbers=[4], intermoleculer_distance=[1., 8.], 17 | random_seed=1112 18 | ) 19 | structures = builder.run(size=5) 20 | n_structures = len(structures) 21 | 22 | #write("./xxx.xyz", structures) 23 | 24 | assert n_structures == 5 25 | 26 | 27 | def test_packer_mixed(): 28 | """""" 29 | water = MoleculeBuilder(name="H2O").run()[0] 30 | methanol = MoleculeBuilder(name="CH3OH").run()[0] 31 | builder = PackerBuilder( 32 | substrates=[water, methanol], numbers=[4,2], 33 | intermoleculer_distance=[1., 10.], random_seed=1112 34 | ) 35 | structures = builder.run(size=5) 36 | n_structures = len(structures) 37 | 38 | #write("./xxx.xyz", structures) 39 | 40 | assert n_structures == 5 41 | 42 | 43 | if __name__ == "__main__": 44 | ... -------------------------------------------------------------------------------- /tests/builder/test_perturbator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | from ase.io import read, write 7 | 8 | from gdpx.builder.species import MoleculeBuilder 9 | from gdpx.builder.perturbator import PerturbatorBuilder 10 | from gdpx.builder.insert import InsertModifier 11 | 12 | 13 | @pytest.mark.basic 14 | def test_molecule(): 15 | """""" 16 | inp = MoleculeBuilder(name="H2O").run() 17 | 18 | builder = PerturbatorBuilder(eps=0.2, ceps=0.02, random_seed=1112) 19 | structures = builder.run(substrates=inp, size=10) 20 | n_structures = len(structures) 21 | 22 | assert n_structures == 10 23 | 24 | 25 | @pytest.mark.basic 26 | def test_cluster(): 27 | """""" 28 | substrates = read("./assets/Pd38.xyz", ":") 29 | 30 | builder = PerturbatorBuilder(eps=0.2, ceps=None, random_seed=1112) 31 | structures = builder.run(substrates=substrates, size=10) 32 | n_structures = len(structures) 33 | 34 | assert n_structures == 10 35 | 36 | 37 | @pytest.mark.basic 38 | def test_insert(): 39 | """""" 40 | substrates = read("./assets/Pd38.xyz", ":") 41 | 42 | builder = InsertModifier( 43 | # region = dict(method="sphere", origin=[12., 12., 12.], radius=10), 44 | region=dict( 45 | method="intersect", 46 | regions=[ 47 | dict(method="sphere", origin=[12.0, 12.0, 12.0], radius=8.0), 48 | dict(method="sphere", origin=[12.0, 12.0, 12.0], radius=6.0), 49 | ], 50 | ), 51 | composition=dict(O=4), 52 | max_times_size=100, 53 | random_seed=1112, 54 | ) 55 | structures = builder.run(substrates=substrates, size=10) 56 | n_structures = len(structures) 57 | 58 | # write("./xxx.xyz", structures) 59 | 60 | assert n_structures == 10 61 | 62 | 63 | if __name__ == "__main__": 64 | ... 
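The packer and perturbator tests keep their write() calls commented out; when inspecting these builders locally it helps to dump the generated structures. A sketch mirroring test_packer (the keyword is spelled intermoleculer_distance throughout this repository, so it is kept verbatim; the output path is a placeholder):

from ase.io import write
from gdpx.builder.packer import PackerBuilder
from gdpx.builder.species import MoleculeBuilder

water = MoleculeBuilder(name="H2O").run()[0]
builder = PackerBuilder(
    substrates=[water], numbers=[4],
    intermoleculer_distance=[1.0, 8.0], random_seed=1112,
)
structures = builder.run(size=5)
write("packed_water.xyz", structures)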
65 | -------------------------------------------------------------------------------- /tests/computation/asedriver/assets/Pd38_oct.xyz: -------------------------------------------------------------------------------- 1 | 38 2 | Lattice="23.0 0.0 0.0 0.0 24.0 0.0 0.0 0.0 25.0" Properties=species:S:1:pos:R:3 pbc="T T T" 3 | Pd 12.86841188 13.36841188 12.50000000 4 | Pd 15.51036085 13.33667194 12.50000000 5 | Pd 12.83667194 16.01036085 12.50000000 6 | Pd 10.13158812 13.36841188 12.50000000 7 | Pd 7.48963915 13.33667194 12.50000000 8 | Pd 10.16332806 16.01036085 12.50000000 9 | Pd 10.13158812 10.63158812 12.50000000 10 | Pd 7.48963915 10.66332806 12.50000000 11 | Pd 10.16332806 7.98963915 12.50000000 12 | Pd 12.86841188 10.63158812 12.50000000 13 | Pd 15.51036085 10.66332806 12.50000000 14 | Pd 12.83667194 7.98963915 12.50000000 15 | Pd 11.50000000 12.00000000 14.42230356 16 | Pd 14.30172608 12.00000000 14.47346917 17 | Pd 8.69827392 12.00000000 14.47346917 18 | Pd 11.50000000 14.80172608 14.47346917 19 | Pd 11.50000000 9.19827392 14.47346917 20 | Pd 14.17515143 14.67515143 14.38418179 21 | Pd 8.82484857 14.67515143 14.38418179 22 | Pd 8.82484857 9.32484857 14.38418179 23 | Pd 14.17515143 9.32484857 14.38418179 24 | Pd 11.50000000 12.00000000 10.57769644 25 | Pd 14.30172608 12.00000000 10.52653083 26 | Pd 8.69827392 12.00000000 10.52653083 27 | Pd 11.50000000 14.80172608 10.52653083 28 | Pd 11.50000000 9.19827392 10.52653083 29 | Pd 14.17515143 14.67515143 10.61581821 30 | Pd 8.82484857 14.67515143 10.61581821 31 | Pd 8.82484857 9.32484857 10.61581821 32 | Pd 14.17515143 9.32484857 10.61581821 33 | Pd 12.83894489 13.33894489 16.26074853 34 | Pd 10.16105511 13.33894489 16.26074853 35 | Pd 10.16105511 10.66105511 16.26074853 36 | Pd 12.83894489 10.66105511 16.26074853 37 | Pd 12.83894489 13.33894489 8.73925147 38 | Pd 10.16105511 13.33894489 8.73925147 39 | Pd 10.16105511 10.66105511 8.73925147 40 | Pd 12.83894489 10.66105511 8.73925147 41 | -------------------------------------------------------------------------------- /tests/computation/asedriver/assets/emtmin.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: emt 3 | params: 4 | backend: ase 5 | driver: 6 | task: min 7 | backend: ase 8 | init: 9 | dump_period: 1 10 | run: 11 | fmax: 0.000000000001 12 | steps: 23 13 | random_seed: 1112 14 | -------------------------------------------------------------------------------- /tests/computation/asedriver/assets/emtnvt.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: emt 3 | params: 4 | backend: ase 5 | driver: 6 | task: md 7 | backend: ase 8 | init: 9 | dump_period: 1 10 | run: 11 | steps: 23 12 | random_seed: 1112 13 | -------------------------------------------------------------------------------- /tests/computation/assets/broken_ase_spc/dyn.log: -------------------------------------------------------------------------------- 1 | Step Time Energy fmax 2 | BFGS: 0 02:50:34 2.803056 0.0613 3 | -------------------------------------------------------------------------------- /tests/computation/assets/broken_ase_spc/dyn.traj: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/tests/computation/assets/broken_ase_spc/dyn.traj -------------------------------------------------------------------------------- /tests/computation/assets/finished_ase_spc/dyn.log: 
-------------------------------------------------------------------------------- 1 | Step Time Energy fmax 2 | BFGS: 0 02:50:34 2.803056 0.0613 3 | -------------------------------------------------------------------------------- /tests/computation/assets/finished_ase_spc/dyn.traj: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hsulab/GDPy/e6d56cf70bdeef1bbe973cad32bf87b66263a0bd/tests/computation/assets/finished_ase_spc/dyn.traj -------------------------------------------------------------------------------- /tests/computation/bias/test_harmonic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pathlib 6 | import pytest 7 | 8 | import numpy as np 9 | 10 | from ase.io import read, write 11 | 12 | 13 | @pytest.fixture 14 | def water(): 15 | """""" 16 | xyz_fpath = pathlib.Path(__file__).resolve().parent.parent/"assets"/"H2O.xyz" 17 | atoms = read(xyz_fpath) 18 | 19 | return atoms 20 | 21 | 22 | def test_harmonic(water): 23 | """""" 24 | atoms = water 25 | print(atoms.positions) 26 | 27 | colvar = dict( 28 | name = "position", 29 | axis = 2 30 | ) 31 | 32 | calc = HarmonicBias(colvar=colvar, k=100., s=0.2) 33 | atoms.calc = calc 34 | 35 | forces = atoms.get_forces() 36 | 37 | assert np.allclose( 38 | forces, 39 | [ 40 | [0., 0., -19.6309], 41 | [0., 0., 99.630905], 42 | [0., 0., 99.630905] 43 | ] 44 | ) 45 | 46 | 47 | if __name__ == "__main__": 48 | ... 49 | -------------------------------------------------------------------------------- /tests/computation/cp2k/test_cp2k.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import pytest 6 | 7 | from gdpx.core.register import import_all_modules_for_register 8 | from gdpx.potential.interface import PotterVariable 9 | from gdpx.worker.interface import ComputerVariable 10 | 11 | import_all_modules_for_register() 12 | 13 | @pytest.fixture 14 | def cp2k_config(): 15 | """""" 16 | params = dict( 17 | potential = dict( 18 | name = "cp2k", 19 | params = dict( 20 | backend = "cp2k", 21 | command = "srun /mnt/scratch2/chemistry-apps/dkb01416/cp2k/developed/cp2k-9.1/exe/local/cp2k.psmp", 22 | template = "/mnt/scratch2/users/40247882/porous/inputs/PBE+D3_RKS.inp", 23 | basis_set = "DZVP-MOLOPT-SR-GTH", 24 | basis_set_file = "/mnt/scratch2/chemistry-apps/dkb01416/cp2k/developed/cp2k-9.1/data/BASIS_MOLOPT", 25 | pseudo_potential = "GTH-PBE", 26 | potential_file = "/mnt/scratch2/chemistry-apps/dkb01416/cp2k/developed/cp2k-9.1/data/GTH_POTENTIALS" 27 | ), 28 | ), 29 | driver = dict( 30 | backend = "ase", 31 | ignore_convergence = True, 32 | ) 33 | ) 34 | 35 | return params 36 | 37 | def test_empty(cp2k_config): 38 | """""" 39 | worker = ComputerVariable(cp2k_config["potential"], cp2k_config["driver"]).value[0] 40 | print(worker) 41 | 42 | driver = worker.driver 43 | driver.directory = "./assets/empty_cand" 44 | 45 | print(driver.read_convergence()) 46 | 47 | return 48 | 49 | def test_broken(cp2k_config): 50 | """""" 51 | worker = ComputerVariable(cp2k_config["potential"], cp2k_config["driver"]).value[0] 52 | print(worker) 53 | 54 | driver = worker.driver 55 | driver.directory = "/mnt/scratch2/users/40247882/porous/nqtrain/r0/_explore/mimescn/0008.run_cp2k/cand18" 56 | 57 | print(driver.read_convergence()) 58 | 59 | return 60 | 61 | def test_broken_by_abort(cp2k_config): 62 | """""" 63 | 
worker = ComputerVariable(cp2k_config["potential"], cp2k_config["driver"]).value[0] 64 | print(worker) 65 | print(worker.driver.ignore_convergence) 66 | 67 | driver = worker.driver 68 | driver.directory = "/mnt/scratch2/users/40247882/porous/nqtrain/r0/_explore/mimescn/0008.run_cp2k/cand74" 69 | 70 | print("calc: ", driver.calc.read_convergence()) 71 | print("driver: ", driver.read_convergence()) 72 | 73 | return 74 | 75 | 76 | if __name__ == "__main__": 77 | ... -------------------------------------------------------------------------------- /tests/computation/espresso/test_espresso.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import copy 5 | 6 | import pytest 7 | import tempfile 8 | 9 | 10 | from ase.io import read, write 11 | 12 | from gdpx.worker.interface import ComputerVariable 13 | 14 | 15 | 16 | @pytest.fixture 17 | def espresso_spc_config(): 18 | """""" 19 | params = dict( 20 | potter = dict( 21 | name = "espresso", 22 | params = dict( 23 | backend = "espresso", 24 | command = "mpirun -n 2 pw.x -in PREFIX.pwi > PREFIX.pwo", 25 | pp_path = "/mnt/scratch2/chemistry-apps/dkb01416/espresso/pseudo/oncv_upf", 26 | pp_name = "_ONCV_PBE-1.2.upf", 27 | #kpts = [1, 1, 1], 28 | kspacing = 0.04, 29 | ) 30 | ), 31 | driver = dict( 32 | backend = "ase" 33 | ) 34 | ) 35 | 36 | return params 37 | 38 | def test_spc(espresso_spc_config): 39 | """""" 40 | config = espresso_spc_config 41 | atoms = read("../assets/H2.xyz") 42 | 43 | config = copy.deepcopy(config) 44 | worker = ComputerVariable(**config).value[0] 45 | 46 | driver = worker.driver 47 | driver.directory = "./xxx" # tmpdir 48 | 49 | _ = driver.run(atoms, ...) 50 | 51 | return 52 | 53 | 54 | if __name__ == "__main__": 55 | ... 
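The CP2K and Espresso tests above depend on external binaries and site-specific paths. The same ComputerVariable plumbing can be smoke-tested locally with the EMT settings shipped under tests/computation/asedriver/assets/emtmin.yaml; the sketch below is illustrative only, and the dict layout and the driver.run(atoms, ...) call simply mirror test_spc:

import tempfile

from ase.build import molecule
from gdpx.worker.interface import ComputerVariable

params = dict(
    potter=dict(name="emt", params=dict(backend="ase")),
    driver=dict(task="min", backend="ase", run=dict(fmax=0.05, steps=50)),
)
worker = ComputerVariable(**params).value[0]

driver = worker.driver
with tempfile.TemporaryDirectory() as tmpdirname:
    driver.directory = tmpdirname
    _ = driver.run(molecule("H2O"), ...)
    print(driver.read_convergence())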
56 | -------------------------------------------------------------------------------- /tests/computation/lammps/assets/reaxmin.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: reax 3 | params: 4 | backend: lammps 5 | command: lmp_mpi -in ./in.lammps 2>&1 > lmp.out 6 | model: ./assets/ffield.PdO 7 | driver: 8 | task: min 9 | run: 10 | fmax: 0.08 11 | steps: 23 12 | -------------------------------------------------------------------------------- /tests/computation/lammps/assets/reaxnvt.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: reax 3 | params: 4 | backend: lammps 5 | command: lmp_mpi -in ./in.lammps 2>&1 > lmp.out 6 | model: ./assets/ffield.PdO 7 | driver: 8 | task: md 9 | init: 10 | ckpt_period: 7 11 | ensemble: nvt 12 | controller: 13 | name: nose_hoover_chain 14 | run: 15 | steps: 17 16 | random_seed: 45869826 17 | -------------------------------------------------------------------------------- /tests/computation/lammps/assets/reaxspc.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: reax 3 | params: 4 | backend: lammps 5 | command: lmp_mpi -in ./in.lammps 2>&1 > lmp.out 6 | model: ./assets/ffield.PdO 7 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/H2.xyz: -------------------------------------------------------------------------------- 1 | 2 2 | Lattice="9.0 0.0 0.0 0.0 10.0 0.0 0.0 0.0 11.0" Properties=species:S:1:pos:R:3 pbc="T T T" 3 | H 5.00000000 5.00000000 5.72000000 4 | H 5.00000000 5.00000000 5.00000000 5 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/INCAR: -------------------------------------------------------------------------------- 1 | # GENERAL 2 | SYSTEM = TEST # system name 3 | NWRITE = 2 # verbosity flag, 0|1|*2|3|4 4 | ISTART = 0 # 0 New | 1 Restart 5 | 6 | # WRITING 7 | LCHARG = .FALSE. # if write CHGCAR 8 | LWAVE = .FALSE. # if write WAVECAR 9 | LORBIT = 10 10 | 11 | # PARALLEL 12 | NPAR = 4 # NCORE=ncores/NPAR 13 | 14 | # ELECTRONIC 15 | ENCUT = 300 # energy cutoff 16 | PREC = NORMAL # precision 17 | EDIFF = 1.0E-5 # stopping criterion for electronic updates 18 | NELM = 180 # maximium number of ionic updates 19 | NELMIN = 4 # minimium number of ionic updates 20 | 21 | # MAGNETIC 22 | ISPIN = 1 # *1 no | 2 yes 23 | 24 | # SMEARING 25 | ISMEAR = 0 # -5 DOS | 0 large cell | 1 metal 26 | SIGMA = 0.1 # smearing parameter 27 | 28 | # ALGO 29 | ALGO = Fast # algorithms for electronic self-consistent 30 | LREAL = Auto # if calculation done in real spcae 31 | ISYM = 0 # 0 off | 1 on | 2 charge | 3 no charge 32 | 33 | # IONIC (This part will be automatically set by gdpx!!) 34 | EDIFFG = 0.00001 # stopping criterion for ionic updates 35 | NSW = 0 # number of steps for ionic updates 36 | IBRION = 2 # 0 MD | 1 quasi-Newton | 2 CG | 3 damped-MD | 5 FC 37 | ISIF = 2 # 0 MD | *2 | 3 lat opt 38 | POTIM = 0.2 # ionic step size / MD time step 39 | 40 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/potpaw_PBE/Cu/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE Cu 22Jun2005 2 | LPAW = T 3 | RPACOR = 2.00 4 | RWIGS = 2.20 nn distance 5 | 6 | ICORE = 3 7 | EMIN = -3 8 | NE = 100 9 | LCOR = .TRUE. 
10 | QCUT = 0 11 | 12 | Description 13 | l E TYP RCUT TYP RCUT 14 | 0 0.0 23 2.2 15 | 0 0.0 23 2.2 16 | 1 -0.2 23 2.2 17 | 1 0.0 23 2.2 18 | 2 0.0 23 2.2 19 | 2 0.8 23 2.2 20 | 3 0.2 23 2.3 21 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/potpaw_PBE/H/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE H 15Jun2001 2 | LPAW = T 3 | RWIGS = 0.70 nn distance 4 | 5 | RCLOC = .70 6 | NE = 100 7 | LCOR = .TRUE. 8 | QCUT = 0 9 | RAUG = 1.2 10 | 11 | Description 12 | l E TYP RCUT TYP RCUT 13 | 0 0.0 23 1.1 14 | 0 0.5 23 1.1 15 | 1 -0.3 23 1.1 16 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/potpaw_PBE/O/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE O 08Apr2002 2 | LPAW = T 3 | RWIGS = 1.55 nn distance 4 | 5 | ICORE = 2 6 | RPACOR = 1.2 7 | NE = 100 8 | LCOR = .TRUE. 9 | QCUT = 0 10 | 11 | Description 12 | l E TYP RCUT TYP RCUT 13 | 0 0.0 23 1.2 14 | 0 -0.7 23 1.2 15 | 1 0.0 23 1.52 16 | 1 0.6 23 1.52 17 | 2 0.0 7 1.5 18 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/vaspmd.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: vasp 3 | params: 4 | backend: vasp 5 | command: mpirun -n 4 vasp_gam 2>&1 > vasp.out 6 | incar: ./assets/INCAR 7 | kpts: [1, 1, 1] 8 | pp_path: ./assets 9 | # vdw_path: /home/jx1279/apps/vasp/potpaw 10 | driver: 11 | task: md 12 | random_seed: 1112 13 | init: 14 | velocity_seed: 1112 15 | ensemble: nvt 16 | controller: 17 | name: langevin 18 | params: 19 | friction: 0.01 # fs^-1 20 | run: 21 | steps: 5 22 | #constraint: "1:4" 23 | -------------------------------------------------------------------------------- /tests/computation/vasp/assets/vaspspc.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: vasp 3 | params: 4 | backend: vasp 5 | command: mpirun -n 4 vasp_gam 2>&1 > vasp.out 6 | incar: ./assets/INCAR 7 | kpts: [1, 1, 1] 8 | pp_path: ./assets 9 | # vdw_path: /home/jx1279/apps/vasp/potpaw 10 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import os 6 | import logging 7 | 8 | import numpy as np 9 | 10 | import pytest 11 | 12 | 13 | @pytest.fixture(autouse=True) 14 | def change_test_dir(request): 15 | os.chdir(request.fspath.dirname) 16 | yield 17 | os.chdir(request.config.invocation_params.dir) 18 | 19 | return 20 | 21 | 22 | # NOTE: We assigan a random_seed and register all necessary modules here!! 23 | from gdpx import config 24 | 25 | config.logger.setLevel(logging.DEBUG) 26 | config.GRNG = np.random.Generator(np.random.PCG64()) 27 | 28 | from gdpx.core.register import import_all_modules_for_register 29 | 30 | import_all_modules_for_register() 31 | 32 | 33 | if __name__ == "__main__": 34 | ... 35 | -------------------------------------------------------------------------------- /tests/potential/nequip/test_nequip.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | try: 5 | import torch 6 | from nequip.ase import NequIPCalculator 7 | except: 8 | ... 
9 | 10 | 11 | from ase.io import read, write 12 | 13 | from gdpx.potential.calculators.mixer import CommitteeCalculator 14 | 15 | 16 | def test_nequip_committee(): 17 | """""" 18 | atypes = ["C", "H", "N", "O", "S"] 19 | models = [ 20 | "/mnt/scratch2/users/40247882/porous/nqtrain/r0/_ensemble/0004.train/m0/nequip.pth", 21 | "/mnt/scratch2/users/40247882/porous/nqtrain/r0/_ensemble/0004.train/m1/nequip.pth", 22 | "/mnt/scratch2/users/40247882/porous/nqtrain/r0/_ensemble/0004.train/m2/nequip.pth", 23 | "/mnt/scratch2/users/40247882/porous/nqtrain/r0/_ensemble/0004.train/m3/nequip.pth", 24 | ] 25 | 26 | calcs = [] 27 | for m in models: 28 | curr_calc = NequIPCalculator.from_deployed_model( 29 | model_path=m, 30 | species_to_type_name={k: k for k in atypes}, 31 | device=torch.device("cuda" if torch.cuda.is_available() else "cpu"), 32 | ) 33 | calcs.append(curr_calc) 34 | 35 | calc = CommitteeCalculator(calcs) 36 | 37 | atoms = read("/mnt/scratch2/users/40247882/porous/init/structures/methanol.xyz") 38 | 39 | atoms.calc = calc 40 | 41 | # print(atoms.get_potential_energy()) 42 | print(atoms.get_forces()) 43 | print(atoms.calc.results) 44 | 45 | return 46 | 47 | 48 | if __name__ == "__main__": 49 | ... 50 | -------------------------------------------------------------------------------- /tests/potential/plumed/test_plumed.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -* 3 | 4 | import pytest 5 | 6 | import numpy as np 7 | 8 | from ase.io import read, write 9 | 10 | ASE_NEQUIP_PLUMED_CONFIG = """potential: 11 | name: mixer 12 | params: 13 | backend: ase 14 | potters: 15 | - name: nequip 16 | params: 17 | backend: ase 18 | type_list: ["C", "H", "N", "O", "S"] 19 | model: 20 | - /mnt/scratch2/users/40247882/porous/nqtrain/r0/_ensemble/0004.train/m0/nequip.pth 21 | - name: plumed 22 | params: 23 | backend: ase 24 | driver: 25 | task: md 26 | init: 27 | md_style: nvt 28 | timestep: 0.5 29 | temp: 360 30 | Tdamp: 100 31 | remove_translation: true 32 | remove_rotation: true 33 | dump_period: 2 34 | run: 35 | steps: 10 36 | """ 37 | 38 | def xxx(): 39 | frames = read("./xxx/cand0/traj.xyz", ":") 40 | for atoms in frames: 41 | positions = atoms.positions 42 | dis = np.linalg.norm(positions[0] - positions[1]) 43 | print(dis) 44 | 45 | def run(): 46 | 47 | return 48 | 49 | if __name__ == "__main__": 50 | ... 
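The committee wrapper itself does not depend on NequIP; any list of ASE calculators can stand in for the deployed models used above. Illustrative only, and note that two identical EMT members give a zero-spread committee, which is enough to exercise the wrapper but not to demonstrate uncertainty estimates:

from ase.build import molecule
from ase.calculators.emt import EMT
from gdpx.potential.calculators.mixer import CommitteeCalculator

calcs = [EMT(), EMT()]  # replaces the four NequIPCalculator instances
calc = CommitteeCalculator(calcs)

atoms = molecule("CH3OH")
atoms.calc = calc
print(atoms.get_forces())
print(atoms.calc.results)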
-------------------------------------------------------------------------------- /tests/potential/reax/assets/reaxmd.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: reax 3 | params: 4 | backend: lammps 5 | command: lmp_mpi -in ./in.lammps 2>&1 > lmp.out 6 | model: ./assets/ffield.PdO 7 | driver: 8 | task: md 9 | init: 10 | ensemble: nvt 11 | run: 12 | steps: 17 13 | random_seed: 45869826 14 | -------------------------------------------------------------------------------- /tests/potential/reax/assets/reaxmin.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: reax 3 | params: 4 | backend: lammps 5 | command: lmp_mpi -in ./in.lammps 2>&1 > lmp.out 6 | model: ./assets/ffield.PdO 7 | driver: 8 | task: min 9 | run: 10 | fmax: 0.08 11 | steps: 23 12 | -------------------------------------------------------------------------------- /tests/potential/reax/assets/reaxspc.yaml: -------------------------------------------------------------------------------- 1 | potter: 2 | name: reax 3 | params: 4 | backend: lammps 5 | command: lmp_mpi -in ./in.lammps 2>&1 > lmp.out 6 | model: ./assets/ffield.PdO 7 | -------------------------------------------------------------------------------- /tests/potential/reax/test_reax.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | import tempfile 7 | 8 | import pytest 9 | 10 | import numpy as np 11 | 12 | from ase.io import read, write 13 | 14 | 15 | from gdpx.cli.compute import convert_config_to_potter 16 | 17 | 18 | def run_computation(structures, worker): 19 | """""" 20 | with tempfile.TemporaryDirectory() as tmpdirname: 21 | worker.directory = tmpdirname 22 | # worker.directory = "./_xxx" 23 | worker.run(structures) 24 | worker.inspect(structures) 25 | if worker.get_number_of_running_jobs() == 0: 26 | results = worker.retrieve(include_retrieved=True) 27 | else: 28 | results = [] 29 | 30 | return results 31 | 32 | 33 | @pytest.mark.lammps 34 | def test_reax_spc(): 35 | """""" 36 | atoms = read("./assets/Pd38_oct.xyz") 37 | structures = [atoms] 38 | 39 | worker = convert_config_to_potter("./assets/reaxspc.yaml")[0] 40 | 41 | results = run_computation(structures, worker) 42 | 43 | energy = results[-1][-1].get_potential_energy() 44 | 45 | assert np.allclose(energy, -114.443082558) 46 | 47 | 48 | @pytest.mark.lammps 49 | def test_reax_min(): 50 | """""" 51 | atoms = read("./assets/Pd38_oct.xyz") 52 | structures = [atoms] 53 | 54 | worker = convert_config_to_potter("./assets/reaxmin.yaml")[0] 55 | 56 | results = run_computation(structures, worker) 57 | 58 | energy = results[-1][-1].get_potential_energy() 59 | 60 | assert np.allclose(energy, -116.122977589) 61 | 62 | 63 | @pytest.mark.lammps 64 | def test_reax_nvt(): 65 | """""" 66 | atoms = read("./assets/Pd38_oct.xyz") 67 | structures = [atoms] 68 | 69 | worker = convert_config_to_potter("./assets/reaxmd.yaml")[0] 70 | 71 | results = run_computation(structures, worker) 72 | 73 | energy = results[-1][-1].get_potential_energy() 74 | 75 | assert np.allclose(energy, -115.205969893) 76 | 77 | 78 | if __name__ == "__main__": 79 | ... 
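The ReaxFF tests above require a LAMMPS build; the surrounding convert_config_to_potter / run / inspect / retrieve cycle can be exercised with the EMT configs from tests/computation/asedriver/assets instead. Illustrative only; both file paths below are assumed local copies:

import tempfile

from ase.io import read
from gdpx.cli.compute import convert_config_to_potter

worker = convert_config_to_potter("./assets/emtmin.yaml")[0]

structures = [read("./assets/Pd38_oct.xyz")]  # EMT has parameters for Pd
with tempfile.TemporaryDirectory() as tmpdirname:
    worker.directory = tmpdirname
    worker.run(structures)
    worker.inspect(structures)
    if worker.get_number_of_running_jobs() == 0:
        results = worker.retrieve(include_retrieved=True)
        print(results[-1][-1].get_potential_energy())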
80 | -------------------------------------------------------------------------------- /tests/reactor/asedriver/assets/aseneb.yaml: -------------------------------------------------------------------------------- 1 | type: reactor 2 | potter: 3 | name: emt 4 | driver: 5 | init: 6 | dump_period: 2 7 | nimages: 3 8 | mic: false 9 | run: 10 | fmax: 0.08 11 | steps: 5 12 | constraint: "1:8" 13 | -------------------------------------------------------------------------------- /tests/reactor/asedriver/test_ase_neb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import copy 6 | import tempfile 7 | 8 | import pytest 9 | import yaml 10 | 11 | import numpy as np 12 | 13 | from ase.build import add_adsorbate, fcc100 14 | from ase.calculators.emt import EMT 15 | from ase.constraints import FixAtoms 16 | from ase.optimize import QuasiNewton 17 | 18 | from gdpx.cli.compute import convert_config_to_potter 19 | 20 | 21 | @pytest.mark.basic 22 | def test_vasp_neb(): 23 | """""" 24 | # - prepare structures 25 | ini = fcc100("Al", size=(2, 2, 3)) 26 | add_adsorbate(ini, "Au", 1.7, "hollow") 27 | ini.center(axis=2, vacuum=4.0) 28 | 29 | # FIXME: Check whether IS and FS are the same... 30 | fin = copy.deepcopy(ini) 31 | fin[-1].x += fin.get_cell()[0, 0] / 2.0 32 | 33 | # FIXME: Better pathway input structures... 34 | structures = [ini, fin] 35 | 36 | for atoms in structures: 37 | atoms.set_constraint(FixAtoms(indices=range(8))) 38 | atoms.calc = EMT() 39 | qn = QuasiNewton(atoms, trajectory=None) 40 | qn.run(fmax=0.08) 41 | 42 | # - 43 | with open("./assets/aseneb.yaml", "r") as fopen: 44 | emt_params = yaml.safe_load(fopen) 45 | 46 | worker = convert_config_to_potter(emt_params) 47 | print(f"{worker =}") 48 | 49 | with tempfile.TemporaryDirectory() as tmpdirname: 50 | worker.directory = tmpdirname 51 | # worker.directory = "./test_ase_neb" 52 | worker.run(structures) 53 | worker.inspect(structures) 54 | if worker.get_number_of_running_jobs() == 0: 55 | results = worker.retrieve(include_retrieved=True) 56 | else: 57 | results = [] 58 | 59 | mid_atoms = results[0][-1][1] 60 | final_energy = mid_atoms.get_potential_energy() 61 | print(f"{final_energy = }") 62 | 63 | assert np.allclose([final_energy], [3.698085]) 64 | 65 | ... 66 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/INCAR: -------------------------------------------------------------------------------- 1 | # GENERAL 2 | SYSTEM = TEST # system name 3 | NWRITE = 2 # verbosity flag, 0|1|*2|3|4 4 | ISTART = 0 # 0 New | 1 Restart 5 | 6 | # WRITING 7 | LCHARG = .FALSE. # if write CHGCAR 8 | LWAVE = .FALSE. # if write WAVECAR 9 | LORBIT = 10 10 | 11 | # PARALLEL 12 | NPAR = 4 # NCORE=ncores/NPAR 13 | 14 | # ELECTRONIC 15 | ENCUT = 300 # energy cutoff 16 | PREC = NORMAL # precision 17 | EDIFF = 1.0E-5 # stopping criterion for electronic updates 18 | NELM = 180 # maximium number of ionic updates 19 | NELMIN = 4 # minimium number of ionic updates 20 | 21 | # MAGNETIC 22 | ISPIN = 1 # *1 no | 2 yes 23 | 24 | # SMEARING 25 | ISMEAR = 0 # -5 DOS | 0 large cell | 1 metal 26 | SIGMA = 0.1 # smearing parameter 27 | 28 | # ALGO 29 | ALGO = Fast # algorithms for electronic self-consistent 30 | LREAL = Auto # if calculation done in real spcae 31 | ISYM = 0 # 0 off | 1 on | 2 charge | 3 no charge 32 | 33 | # IONIC (This part will be automatically set by gdpx!!) 
34 | EDIFFG = 0.00001 # stopping criterion for ionic updates 35 | NSW = 0 # number of steps for ionic updates 36 | IBRION = 2 # 0 MD | 1 quasi-Newton | 2 CG | 3 damped-MD | 5 FC 37 | ISIF = 2 # 0 MD | *2 | 3 lat opt 38 | POTIM = 0.2 # ionic step size / MD time step 39 | 40 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/potpaw_PBE/C/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE C 08Apr2002 2 | LPAW = T 3 | RWIGS = 1.63 nn distance 4 | 5 | ICORE = 2 6 | RPACOR = 1.2 7 | NE = 100 8 | LCOR = .TRUE. 9 | QCUT = 0 10 | RDEPT = 1.3 11 | 12 | Description 13 | l E TYP RCUT TYP RCUT 14 | 0 0.0 23 1.2 15 | 0 0.0 23 1.2 16 | 1 0.0 23 1.5 17 | 1 2.5 23 1.5 18 | 2 0.0 7 1.5 19 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/potpaw_PBE/Cu/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE Cu 22Jun2005 2 | LPAW = T 3 | RPACOR = 2.00 4 | RWIGS = 2.20 nn distance 5 | 6 | ICORE = 3 7 | EMIN = -3 8 | NE = 100 9 | LCOR = .TRUE. 10 | QCUT = 0 11 | 12 | Description 13 | l E TYP RCUT TYP RCUT 14 | 0 0.0 23 2.2 15 | 0 0.0 23 2.2 16 | 1 -0.2 23 2.2 17 | 1 0.0 23 2.2 18 | 2 0.0 23 2.2 19 | 2 0.8 23 2.2 20 | 3 0.2 23 2.3 21 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/potpaw_PBE/H/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE H 15Jun2001 2 | LPAW = T 3 | RWIGS = 0.70 nn distance 4 | 5 | RCLOC = .70 6 | NE = 100 7 | LCOR = .TRUE. 8 | QCUT = 0 9 | RAUG = 1.2 10 | 11 | Description 12 | l E TYP RCUT TYP RCUT 13 | 0 0.0 23 1.1 14 | 0 0.5 23 1.1 15 | 1 -0.3 23 1.1 16 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/potpaw_PBE/O/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE O 08Apr2002 2 | LPAW = T 3 | RWIGS = 1.55 nn distance 4 | 5 | ICORE = 2 6 | RPACOR = 1.2 7 | NE = 100 8 | LCOR = .TRUE. 9 | QCUT = 0 10 | 11 | Description 12 | l E TYP RCUT TYP RCUT 13 | 0 0.0 23 1.2 14 | 0 -0.7 23 1.2 15 | 1 0.0 23 1.52 16 | 1 0.6 23 1.52 17 | 2 0.0 7 1.5 18 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/potpaw_PBE/Pt/PSCTR: -------------------------------------------------------------------------------- 1 | TITEL = PAW_PBE Pt 04Feb2005 2 | LPAW = T 3 | RPACOR = 2.33 4 | RWIGS = 2.75 nn distance 5 | 6 | ICORE = 3 7 | EMIN = -3 8 | NE = 400 9 | LCOR = .TRUE. 
10 | QCUT = 0 11 | 12 | Description 13 | l E TYP RCUT TYP RCUT 14 | 0 0.0 23 2.5 15 | 0 0.0 23 2.5 16 | 1 -0.2 23 2.6 17 | 1 1.5 23 2.6 18 | 2 0.0 23 2.5 19 | 2 0.0 23 2.5 20 | 3 0.0 23 2.5 21 | -------------------------------------------------------------------------------- /tests/reactor/vasp/assets/vaspneb.yaml: -------------------------------------------------------------------------------- 1 | type: reactor 2 | potter: 3 | name: vasp 4 | params: 5 | backend: vasp 6 | command: mpirun -n 4 vasp_gam 2>&1 > vasp.out 7 | incar: ./assets/INCAR 8 | kpts: [1, 1, 1] 9 | pp_path: ./assets 10 | driver: 11 | init: 12 | nimages: 3 13 | run: 14 | fmax: 0.08 15 | steps: 5 16 | constraint: "lowest 8" 17 | -------------------------------------------------------------------------------- /tests/reactor/vasp/test_vaspneb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | import tempfile 6 | 7 | import pytest 8 | import yaml 9 | 10 | import numpy as np 11 | 12 | from ase.io import read, write 13 | 14 | from gdpx.cli.compute import convert_config_to_potter 15 | 16 | 17 | @pytest.mark.vasp_rxn 18 | def test_vasp_neb(): 19 | """""" 20 | structures = read("./assets/CO+O_mep.xyz", ":") 21 | 22 | with open("./assets/vaspneb.yaml", "r") as fopen: 23 | vasp_params = yaml.safe_load(fopen) 24 | 25 | worker = convert_config_to_potter(vasp_params) 26 | print(f"{worker =}") 27 | 28 | with tempfile.TemporaryDirectory() as tmpdirname: 29 | worker.directory = tmpdirname 30 | # worker.directory = "./test_vasp_neb" 31 | worker.run(structures) 32 | worker.inspect(structures) 33 | if worker.get_number_of_running_jobs() == 0: 34 | results = worker.retrieve(include_retrieved=True) 35 | else: 36 | results = [] 37 | 38 | mid_atoms = results[0][-1][1] 39 | final_energy = mid_atoms.get_potential_energy() 40 | print(f"{final_energy = }") 41 | 42 | assert np.allclose([final_energy], [-82.195963]) 43 | 44 | 45 | if __name__ == "__main__": 46 | ... 47 | -------------------------------------------------------------------------------- /tests/selector/test_cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | import tempfile 5 | 6 | from gdpx.selector.selector import save_cache, load_cache 7 | 8 | temp_info = """# index confid step natoms ene aene maxfrc score 9 | 3,1 1 10 40 -283.3720 -7.0843 2.2355 nan 10 | 1,1 1 10 40 -283.2048 -7.0801 2.4317 nan 11 | 1,7 1 70 40 -285.4730 -7.1368 2.2214 nan 12 | 1,4 1 40 40 -284.0159 -7.1004 2.3146 nan 13 | 0,1 0 10 40 -283.1768 -7.0794 2.3500 nan 14 | 0,7 0 70 40 -285.4587 -7.1365 2.0297 nan 15 | 1,10 1 100 40 -285.8283 -7.1457 1.3351 nan 16 | 3,10 1 100 40 -285.0987 -7.1275 2.2206 nan 17 | 3,7 1 70 40 -284.3956 -7.1099 2.2663 nan 18 | random_seed 6094 19 | """ 20 | 21 | def test_load_cache(): 22 | """""" 23 | with tempfile.NamedTemporaryFile() as tmp: 24 | with open(tmp.name, "w") as fopen: 25 | fopen.write(temp_info) 26 | 27 | markers = load_cache(tmp.name) 28 | 29 | #assert raw_unmasks == [[0, [1, 7]], [1, [1, 4, 7, 10]], [3,[1, 7, 10]]] 30 | assert markers == [[3,1],[1,1],[1,7],[1,4],[0,1],[0,7],[1,10],[3,10],[3,7]] 31 | 32 | 33 | if __name__ == "__main__": 34 | ... --------------------------------------------------------------------------------
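The cache file parsed above is a plain whitespace table: the first column carries comma-joined marker indices and the file ends with a random_seed record. A toy reader that reproduces the assertion in test_load_cache, illustrative only (the real implementation is gdpx.selector.selector.load_cache):

def parse_markers(text: str):
    """Toy reader for the selector cache format shown above."""
    markers, random_seed = [], None
    for line in text.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        if line.startswith("random_seed"):
            random_seed = int(line.split()[1])
            continue
        markers.append([int(i) for i in line.split()[0].split(",")])
    return markers, random_seed

markers, seed = parse_markers(temp_info)  # temp_info as defined in the test above
assert markers == [[3, 1], [1, 1], [1, 7], [1, 4], [0, 1], [0, 7], [1, 10], [3, 10], [3, 7]]
assert seed == 6094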