├── .github
├── workflows
│ ├── docs.yml
│ ├── publish_to_pypi.yml
│ └── pytests.yml
└── workflows_assets
│ ├── config.json
│ ├── job.test_github_CI.slurm
│ └── slurm.conf
├── .gitignore
├── LICENSE
├── README.md
├── deprecated
├── cli_plotting.py
├── devtools
│ ├── NOTES
│ ├── docker
│ │ └── Dockerfile
│ └── scripts
│ │ ├── CLI_tests
│ │ ├── GAP_RSS_ITER_FIT_test
│ │ ├── gap_rss_archive
│ │ ├── monitor_gap_rss_iter_fit
│ │ └── pretty_pca.py
├── examples
│ ├── DFT-calculations
│ │ ├── castep.py
│ │ ├── periodic_structures.xyz
│ │ ├── quantum_espresso.py
│ │ └── vasp.py
│ ├── ace_fit_params.json
│ ├── ace_fit_params.yaml
│ ├── expyre
│ │ └── config.json
│ ├── gap-rss
│ │ ├── LiCu
│ │ │ ├── LiCu.json
│ │ │ └── job.gap_rss_iter_fit.LiCu.pbs
│ │ ├── length_scales.yaml
│ │ └── multistage_GAP_fit_settings.short_range_2b_SE_universal_SOAPs_12_3.template.yaml
│ ├── organic_reactions
│ │ ├── CHO_methane_burning
│ │ │ ├── config.json
│ │ │ ├── fragments.xyz
│ │ │ ├── length_scales.json
│ │ │ └── multistage_GAP_fit_settings.fixed_3_6_SOAPs_10_6.json.template
│ │ └── input_structure.txt
│ └── simple_gap_fit_parameters.yml
├── plotting
│ ├── __init__.py
│ ├── maxveit_plottools.py
│ ├── normal_modes.py
│ ├── plot_2b.py
│ ├── plot_ef_correlation.py
│ └── reactions_plotting.py
└── user
│ └── reactions
│ ├── cli_cli.py
│ ├── cli_reactions_iter_fit.py
│ ├── generate
│ ├── collision.py
│ ├── irc.py
│ ├── neb.py
│ ├── radicals.py
│ └── ts.py
│ ├── reactions_processing
│ ├── __init__.py
│ └── trajectory_processing.py
│ ├── select
│ ├── simple_filters.py
│ └── weighted_cur.py
│ └── tests
│ └── test_molecules_radicals.py
├── docs
├── .gitignore
├── Makefile
├── source
│ ├── attic
│ │ ├── command_line.rst
│ │ └── operations.utils.md
│ ├── command_line.automatic_docs.rst
│ ├── conf.py
│ ├── examples.buildcell.ipynb
│ ├── examples.contributions.md
│ ├── examples.daisy_chain_mlip_fitting.ipynb
│ ├── examples.dimers.ipynb
│ ├── examples.fhiaims_calculator.ipynb
│ ├── examples.index.md
│ ├── examples.md.md
│ ├── examples.mlip_fitting.md
│ ├── examples.normal_modes.md
│ ├── examples.orca_python.md
│ ├── examples.rst
│ ├── examples.select_fps.ipynb
│ ├── examples.smiles.md
│ ├── examples_files
│ │ ├── fhiaims_calculator
│ │ │ └── Input_Structures.xyz
│ │ └── select_fps
│ │ │ ├── md.traj
│ │ │ └── params.yaml
│ ├── first_example.md
│ ├── index.rst
│ ├── modules.rst
│ ├── operations.ace_fitting.md
│ ├── operations.calculators.md
│ ├── operations.descriptors.md
│ ├── operations.fitting.rst
│ ├── operations.gap_fitting.md
│ ├── operations.generate.md
│ ├── operations.multistage_gap_fitting.rst
│ ├── operations.rst
│ ├── operations.select.md
│ ├── overview.configset.rst
│ ├── overview.overall_design.rst
│ ├── overview.parallelisation.rst
│ ├── overview.queued.md
│ ├── overview.rst
│ └── workflows.rss.rst
├── wf_logo_final.png
└── wf_logo_final.svg
├── examples
└── iterative_gap_fit
│ ├── EMT_atoms.xyz
│ ├── batch_gap_fit.py
│ ├── init_md.traj
│ └── multistage_gap_params.json
├── pyproject.toml
├── pytest.ini
├── tests
├── .coveragerc
├── TODO
├── __init__.py
├── assets
│ ├── B_DFT_data.xyz
│ ├── B_DFT_data_mace_ftting.xyz
│ ├── QE
│ │ └── Si.pz-vbc.UPF
│ ├── cli_rss
│ │ ├── LiCu.json
│ │ ├── job.test_cli_rss_create_ref.slurm
│ │ ├── length_scales.yaml
│ │ ├── multistage_GAP_fit_settings.template.yaml
│ │ ├── run_iter_0
│ │ │ ├── DFT_evaluated_fitting.ALL.xyz
│ │ │ ├── DFT_evaluated_testing.ALL.xyz
│ │ │ ├── cli_rss_test_energies
│ │ │ ├── initial_random_configs.Z_29_1.narrow_even.xyz
│ │ │ ├── initial_random_configs.Z_29_1.narrow_odd.xyz
│ │ │ ├── initial_random_configs.Z_29_1.wide_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1.narrow_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1.narrow_odd.xyz
│ │ │ ├── initial_random_configs.Z_3_1.wide_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_1.narrow.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_1.wide.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_3.narrow.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_3.wide.xyz
│ │ │ ├── initial_random_configs.Z_3_3__29_1.narrow.xyz
│ │ │ └── initial_random_configs.Z_3_3__29_1.wide.xyz
│ │ ├── run_iter_1
│ │ │ ├── DFT_evaluated_fitting.ALL.xyz
│ │ │ ├── DFT_evaluated_testing.ALL.xyz
│ │ │ ├── cli_rss_test_energies
│ │ │ ├── initial_random_configs.Z_29_1.narrow_even.xyz
│ │ │ ├── initial_random_configs.Z_29_1.narrow_odd.xyz
│ │ │ ├── initial_random_configs.Z_29_1.wide_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1.narrow_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1.narrow_odd.xyz
│ │ │ ├── initial_random_configs.Z_3_1.wide_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_1.narrow.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_1.wide.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_3.narrow.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_3.wide.xyz
│ │ │ ├── initial_random_configs.Z_3_3__29_1.narrow.xyz
│ │ │ └── initial_random_configs.Z_3_3__29_1.wide.xyz
│ │ ├── run_iter_2
│ │ │ ├── DFT_evaluated_fitting.ALL.xyz
│ │ │ ├── DFT_evaluated_testing.ALL.xyz
│ │ │ ├── cli_rss_test_energies
│ │ │ ├── initial_random_configs.Z_29_1.narrow_even.xyz
│ │ │ ├── initial_random_configs.Z_29_1.narrow_odd.xyz
│ │ │ ├── initial_random_configs.Z_29_1.wide_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1.narrow_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1.narrow_odd.xyz
│ │ │ ├── initial_random_configs.Z_3_1.wide_even.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_1.narrow.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_1.wide.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_3.narrow.xyz
│ │ │ ├── initial_random_configs.Z_3_1__29_3.wide.xyz
│ │ │ ├── initial_random_configs.Z_3_3__29_1.narrow.xyz
│ │ │ └── initial_random_configs.Z_3_3__29_1.wide.xyz
│ │ └── test_cli_rss_create_ref.stdout
│ ├── configs_for_error_test.xyz
│ ├── descriptor_heuristics.json
│ ├── length_scales.yaml
│ ├── mace_fit_parameters.yaml
│ ├── normal_modes
│ │ ├── displaced_out.xyz
│ │ ├── nm_7.xyz
│ │ ├── water_dftb_evecs.npy
│ │ └── water_dftb_nms.xyz
│ ├── orca
│ │ ├── orca.janpa
│ │ ├── orca.out
│ │ ├── orca_scf_converged.out
│ │ ├── orca_scf_unconverged.out
│ │ └── orca_scf_unfinished.out
│ ├── simple_gap_fit_parameters.yml
│ └── simple_gap_training_set.xyz
├── calculators
│ ├── test_aims.py
│ ├── test_ase_fileio_caching.py
│ ├── test_basin_hopping_orca.py
│ ├── test_calc_generic.py
│ ├── test_calculator_committee.py
│ ├── test_castep.py
│ ├── test_mopac.py
│ ├── test_orca.py
│ ├── test_qe.py
│ ├── test_vasp.py
│ └── test_wrapped_calculator.py
├── cli
│ ├── test_descriptor.py
│ ├── test_error.py
│ ├── test_generate.py
│ └── test_select.py
├── conda-build
│ └── meta.yaml
├── conftest.py
├── local_scripts
│ ├── complete_pytest.tin
│ └── gelzinyte.workstation.sh
├── mpi
│ ├── README.md
│ └── test_autopara_thread_vs_mpi.py
├── prep_test_cli_rss.sh
├── test_ace_fitting.py
├── test_atoms_and_dimers.py
├── test_autoparallelize.py
├── test_batch_gap_fit.py
├── test_buildcell.py
├── test_calc_descriptor.py
├── test_cli_rss.py
├── test_configset.py
├── test_descriptor_heuristics.py
├── test_doc_examples.py
├── test_error.py
├── test_flat_histo_to_nearby.py
├── test_gap_fitting_multistage.py
├── test_gap_fitting_simple.py
├── test_kspacing.py
├── test_list_with_nested_configset_info.py
├── test_mace_fitting.py
├── test_map.py
├── test_md.py
├── test_minimahopping.py
├── test_molecules.py
├── test_ndim_neigh_list.py
├── test_neb.py
├── test_normal_modes.py
├── test_optimize.py
├── test_outputspec.py
├── test_phonopy.py
├── test_point_defects.py
├── test_remote_run.py
├── test_rng_determinism.py
├── test_select_greedy_fps.py
├── test_select_simple.py
├── test_utils.py
└── test_version_str.py
└── wfl
├── __init__.py
├── __version__.py
├── autoparallelize
├── __init__.py
├── autoparainfo.py
├── base.py
├── mpipool_support.py
├── pool.py
├── remote.py
├── remoteinfo.py
└── utils.py
├── calculators
├── __init__.py
├── aims.py
├── castep.py
├── committee.py
├── espresso.py
├── generic.py
├── kpts.py
├── mopac.py
├── orca
│ ├── __init__.py
│ └── basinhopping.py
├── utils.py
├── vasp.py
└── wfl_fileio_calculator.py
├── cli
├── __init__.py
├── cli.py
├── cli_options.py
├── commands
│ ├── __init__.py
│ ├── descriptor.py
│ ├── error.py
│ ├── eval.py
│ ├── generate.py
│ └── select.py
├── dft_convergence_test.py
└── gap_rss_iter_fit.py
├── configset.py
├── descriptor_heuristics.py
├── descriptors
├── __init__.py
└── quippy.py
├── fit
├── __init__.py
├── ace.py
├── error.py
├── gap
│ ├── __init__.py
│ ├── glue_2b.py
│ ├── multistage.py
│ ├── relocate.py
│ └── simple.py
├── mace.py
├── modify_database
│ ├── __init__.py
│ ├── gap_rss_set_config_sigmas_from_convex_hull.py
│ ├── scale_orig.py
│ └── simple_factor_nonperiodic.py
└── utils.py
├── generate
├── __init__.py
├── atoms_and_dimers.py
├── buildcell.py
├── md
│ ├── __init__.py
│ ├── abort.py
│ └── abort_base.py
├── minimahopping.py
├── neb.py
├── normal_modes.py
├── optimize.py
├── phonopy.py
├── smiles.py
├── supercells.py
└── utils.py
├── map.py
├── select
├── __init__.py
├── by_descriptor.py
├── convex_hull.py
├── flat_histogram.py
├── selection_space.py
└── simple.py
└── utils
├── __init__.py
├── configs.py
├── convex_hull.py
├── file_utils.py
├── find_voids.py
├── gap_xml_tools.py
├── julia.py
├── logging.py
├── misc.py
├── ndim_neighbor_list.py
├── parallel.py
├── params.py
├── pressure.py
├── quip_cli_strings.py
├── replace_eval_in_strs.py
├── round_sig_figs.py
├── save_calc_results.py
├── version.py
└── vol_composition_space.py
/.github/workflows/publish_to_pypi.yml:
--------------------------------------------------------------------------------
1 | # https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
2 | name: publish to pypi
3 |
4 | on: push
5 |
6 | jobs:
7 | build:
8 | name: Build distribution
9 | runs-on: ubuntu-latest
10 |
11 | steps:
12 | - uses: actions/checkout@v4
13 | - name: Set up Python
14 | uses: actions/setup-python@v5
15 | with:
16 |           python-version: "3.x"
17 |
18 | - name: Install pypa/build
19 | run: python3 -m pip install build --user
20 | - name: Build a binary wheel and source tarball
21 | run: python3 -m build
22 | - name: Store distribution packages
23 | uses: actions/upload-artifact@v4
24 | with:
25 | name: python-package-distributions
26 | path: dist/
27 |
28 | publish-to-pypi:
29 | name: Publish distribution to PyPI
30 | if: startsWith(github.ref, 'refs/tags/')
31 | needs:
32 | - build
33 | runs-on: ubuntu-latest
34 | environment:
35 | name: pypi
36 | url: https://pypi.org/p/wfl
37 | permissions:
38 | id-token: write # for trusted publishing
39 | steps:
40 | - name: Download all dists
41 | uses: actions/download-artifact@v4
42 | with:
43 | name: python-package-distributions
44 | path: dist/
45 | - name: Publish distribution to PyPI
46 | uses: pypa/gh-action-pypi-publish@release/v1
47 |
48 | # could add signing into github release here
49 |
--------------------------------------------------------------------------------
/.github/workflows_assets/config.json:
--------------------------------------------------------------------------------
1 | { "systems": {
2 | "github": { "host": null,
3 | "scheduler": "slurm",
4 | "header": ["#SBATCH --nodes={num_nodes}",
5 | "#SBATCH --ntasks={num_cores}",
6 | "#SBATCH --ntasks-per-node={num_cores_per_node}"],
7 | "partitions": { "standard": { "num_cores" : 2, "max_time" : null, "max_mem" : "10GB" } }
8 | }
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/.github/workflows_assets/job.test_github_CI.slurm:
--------------------------------------------------------------------------------
1 | #!/bin/bash -l
2 | #SBATCH --nodes=1
3 | #SBATCH --ntasks=2
4 | #SBATCH --ntasks-per-node=2
5 | #SBATCH --nice=20
6 | #SBATCH --hint=nomultithread
7 | #SBATCH --job-name=test
8 | #SBATCH --partition=standard
9 | #SBATCH --time=0:01:00
10 | #SBATCH --output=stdout
11 | #SBATCH --error=stderr
12 |
13 | hostname
14 |
--------------------------------------------------------------------------------
/.github/workflows_assets/slurm.conf:
--------------------------------------------------------------------------------
1 | SlurmUser=root
2 | SlurmdUser=root
3 | SlurmctldPort=6817
4 | SlurmdPort=6818
5 | AuthType=auth/munge
6 | CryptoType=crypto/munge
7 | StateSaveLocation=/var/spool/slurm.state
8 | SlurmdSpoolDir=/var/spool/slurmd
9 | SwitchType=switch/none
10 | MpiDefault=none
11 | SlurmctldPidFile=/var/run/slurmctld.pid
12 | SlurmdPidFile=/var/run/slurmd.pid
13 | ProctrackType=proctrack/linuxproc
14 | #PluginDir=/usr/lib64/slurm
15 | CacheGroups=0
16 | JobCheckpointDir=/var/spool/slurm.checkpoint
17 | #SallocDefaultCommand = "xterm"
18 | #GresTypes=gpu
19 | #FirstJobId=
20 | ReturnToService=2
21 | #MaxJobCount=
22 | #PlugStackConfig=
23 | #PropagatePrioProcess=
24 | #PropagateResourceLimits=
25 | PropagateResourceLimitsExcept=MEMLOCK
26 | #Prolog=
27 | #Epilog=
28 | #SrunProlog=
29 | #SrunEpilog=
30 | #TaskProlog=/share/apps/sbin/slurm_std_TaskProlog.sh
31 | #TaskEpilog=
32 | #TaskPlugin=task/affinity,task/cgroup
33 | TrackWCKey=yes
34 | TopologyPlugin=topology/none
35 | #TreeWidth=50
36 | TmpFs=/state/partition1
37 | #UsePAM=
38 | SlurmctldTimeout=300
39 | SlurmdTimeout=300
40 | InactiveLimit=30
41 | MinJobAge=300
42 | KillWait=60
43 | WaitTime=60
44 | SelectType=select/cons_res
45 | SelectTypeParameters=CR_Core_Memory
46 | VSizeFactor=0
47 | # FastSchedule=0
48 | SchedulerParameters=enable_user_top
49 |
50 | JobCompType=jobcomp/none
51 | JobAcctGatherType=jobacct_gather/linux
52 | JobAcctGatherFrequency=30
53 |
54 | SlurmctldDebug=error
55 | SlurmctldLogFile=/var/log/slurm/slurmctld.log
56 | SlurmdDebug=error
57 | SlurmdLogFile=/var/log/slurm/slurmd.log
58 | DebugFlags=Priority,NO_CONF_HASH,backfill,BackfillMap
59 |
60 | NodeName=DEFAULT State=UNKNOWN
61 | NodeName=_HOST_ CPUs=2 Weight=1 Sockets=1 CoresPerSocket=2 ThreadsPerCore=1 RealMemory=1000
62 |
63 | PartitionName=DEFAULT AllocNodes=ALL State=UP
64 |
65 | JobRequeue=0
66 |
67 | ################ Do not edit below #############################################################
68 | # include /etc/slurm/head.conf
69 | ClusterName=github_expyre_test
70 | SlurmctldHost=_HOST_
71 | DefaultStorageType=none
72 | # DefaultStorageHost=_HOST_
73 |
74 | # include /etc/slurm/node.conf
75 | # include /etc/slurm/parts.conf
76 | PartitionName=standard Default=YES AllocNodes=_HOST_ Nodes=ALL State=UP
77 |
78 | ################################################################################################
79 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ._*
2 | __pycache__
3 | *xyz.idx
4 | *.egg-info
5 | build
6 | dist
7 | test_cli_create_ref
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Overview
4 |
5 | Workflow is a Python toolkit for building interatomic potential creation and atomistic simulation workflows.
6 |
7 | The main function of Workflow is to efficiently parallelise operations over a set of atomic configurations (Atomic Simulation Environment's "Atoms" objects). Given an operation that is defined to act on a single configuration (e.g. evaluate energy of a structure with CASTEP ASE calculator), Workflow may apply the operation to multiple configurations in parallel. Workflow also interfaces with [ExPyRe](https://github.com/libAtoms/ExPyRe/tree/main/expyre) to manage evaluation of (autoparallelized) Python functions via a queueing system on a (remote) cluster.
8 |
9 | For examples and more information see [documentation](https://libatoms.github.io/workflow/)
10 |
11 | `wfl` and its dependencies may be installed via `pip install wfl`.
12 |
13 |
14 | # Recent changes
15 |
16 | v0.3.2:
17 |
18 | - Add `+` operator for combining `ConfigSet` objects
19 | - Improved `wfl.generate.md` logging
20 | - Little bug fixes / error message improvements
21 |
22 | v0.3.1:
23 |
24 | - additional updates to file-based calculators for ASE v3.23.
25 | - fixes to parity plots
26 |
27 | v0.3.0:
28 |
29 | - Update the file-based calculators (Orca, FHI-Aims, Vasp, Quantum Espresso, Castep) to work
30 | with ASE v3.23. This update breaks backwards-compatibility. For compatibility with
31 | the ASE v3.22, use wfl v0.2.8 or earlier.
32 |
33 | v0.2.8:
34 |
35 | - Latest version compatible with ASE v3.22.x. To install, use `pip install wfl==0.2.8`.
36 |
37 | For older changes see [documentation](https://libatoms.github.io/workflow).
38 |
39 |
--------------------------------------------------------------------------------
/deprecated/devtools/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM libatomsquip/quip-minimal
2 | MAINTAINER "Tamas K. Stenczel &2
5 | exit 1
6 | fi
7 |
8 | sys=$1
9 | archive_dir=$2
10 |
11 | if [ ! -f ${sys}.json ]; then
12 | echo "ERROR: ${sys}.json file does not exist, refusing to try to archive" 1>&2
13 | exit 2
14 | fi
15 |
16 | mkdir ${archive_dir}
17 | if [ $? != 0 ]; then
18 | echo "ERROR: Failed to make archive_dir '$archive_dir'" 1>&2
19 | exit 3
20 | fi
21 |
22 | mv gap_rss_*out run_iter_* run_dft_convergence_test multistage_GAP_fit_settings.json buildcell.*input ACTIVE_ITER gap_rss_${sys}_new.stderr gap_rss_iter_fit.prep.* atoms_and_dimers.xyz ${archive_dir}/
23 | cp ${sys}.json job.gap_rss_iter_fit.${sys}.pbs ${archive_dir}/
24 |
--------------------------------------------------------------------------------
/deprecated/devtools/scripts/monitor_gap_rss_iter_fit:
--------------------------------------------------------------------------------
#!/bin/bash
# Monitor progress of one or more gap_rss_iter_fit runs.  For each directory
# given on the command line (default '.'), report on the newest run_iter_*
# subdirectory: xyz config counts, status of in-flight VASP runs, and tails of
# the most recent output/stderr logs.
#
# Usage: monitor_gap_rss_iter_fit [-min] [dir ...]
#   -min  minimal report: only the single most recent file, skip log tails

# parse optional -min flag; it reduces how much is listed
if [[ $# -ge 1 && $1 == '-min' ]]; then
    min=1
    nrecent=1
    nvasp=1
    shift
else
    nrecent=3
    nvasp=3
fi

# with no directories given, monitor the current directory
if [ $# == 0 ]; then
    set -- '.'
fi

for d in $*; do
    echo "#### DIR $d"
    pushd $d 2>&1 > /dev/null

    # most recently modified iteration directory
    last_iter=`ls -tdr run_iter_* | tail -1`

    if [ -z $min ]; then
        # full report: count configs (one "Lattice" header per config) in every xyz file
        echo "XYZ FILES IN $last_iter"
        fgrep -c Lattice `ls -tr $last_iter/*xyz`
    else
        echo "RECENT FILES IN $last_iter"
        ls -ltr $last_iter | tail -$nrecent
    fi
    echo ""

    # newest file overall (ignoring temporary DFT eval files) and newest xyz file
    last_file=`ls -tr $last_iter | grep -v 'tmp.DFT_eval' | tail -1`
    last_xyz_file=`ls -tr $last_iter/*xyz | tail -1`

    echo LAST_FILE $last_file
    if echo $last_file | egrep -q 'minim_traj'; then
        # relaxation trajectory file: count completed relaxations
        echo -n "relaxation trajectories, num "
        egrep -c 'minim_config_type=[^ ]*last' $last_iter/$last_file
    elif echo $last_file | egrep -q 'xyz$'; then
        # other xyz file: count configs
        echo -n "misc xyz file, config num "
        fgrep -c Lat $last_iter/$last_file
    elif echo $last_file | egrep -q '^run_VASP_'; then
        # VASP runs in progress: for each OUTCAR report SCF iteration count,
        # last energy change, and seconds per ionic step, sorted by iterations
        # echo "vasp runs going" `ls -d $last_iter/run_VASP_* | wc -l`
        for OUTCAR in $last_iter/run_VASP_*/OUTCAR; do
            dE=`fgrep energy-ch $OUTCAR | tail -1 | sed 's/:/ /' | awk '{print $5}'`
            niter=`fgrep -c LOOP $OUTCAR`
            steptime=`fgrep LOOP $OUTCAR | tail -1 | awk '{print $7}'`
            echo `dirname $OUTCAR` "niter $niter dE $dE steptime $steptime"
        done | sort -k2n | nl | tail -$nvasp
        # ls $last_iter/run_VASP_*/*/OUTCAR | wc -l
        # echo -n "vasps runs done "
        # cat $last_iter/run_VASP_*/*/OUTCAR | fgrep -c 'free '
    fi

    # if the newest file was not an xyz file, also report the newest xyz file
    if [ $last_xyz_file != $last_file ]; then
        echo ""
        echo -n "LAST_XYZ_FILE $last_xyz_file # of Lat "
        fgrep -c Lat $last_xyz_file
    fi

    # minimal mode stops here for this directory
    if [ ! -z $min ]; then
        popd 2>&1 > /dev/null
        echo ""
        continue
    fi

    # tail of the most recent numbered output file (excluding job stdout)
    outfile=`ls *.[0-9]*.*out | grep -v stdout | tail -1`
    echo "OUTFILE $outfile"
    if [ ! -z $outfile ]; then
        tail -10 $outfile
    fi
    echo ""

    # tail of the most recent stderr file
    stderr=`ls -tr *.stderr | tail -1`
    echo "STDERR $stderr"
    if [ ! -z $stderr ]; then
        tail -3 $stderr
    fi
    echo ""

    popd 2>&1 > /dev/null
done
83 |
--------------------------------------------------------------------------------
/deprecated/examples/DFT-calculations/castep.py:
--------------------------------------------------------------------------------
1 | """
2 | This is a simple example of how to use Quantum Espresso
3 | """
4 | from pprint import pprint
5 |
6 | from wfl.configset import ConfigSet, OutputSpec
7 | from wfl.utils.logging import print_log
8 | from wfl.calculators.dft import evaluate_dft
9 |
10 |
11 | def main(verbose=True):
12 | # settings
13 | # replace this with your local configuration in productions
14 | workdir_root = "CASTEP-calculations"
15 | castep_kwargs = {
16 | "ecut": 400.0,
17 | "kpoint_mp_spacing": 0.1,
18 | "xc": "pbesol",
19 | "SPIN_POLARIZED": False,
20 | "PERC_EXTRA_BANDS": 100,
21 | "MAX_SCF_CYCLES": 200,
22 | "NUM_DUMP_CYCLES": 0,
23 | "MIXING_SCHEME": "pulay",
24 | "MIX_HISTORY_LENGTH": 20,
25 | "SMEARING_WIDTH": 0.2,
26 | "FIX_OCCUPANCY": False,
27 | }
28 | castep_command = "mpirun -n 2 castep.mpi"
29 |
30 | # IO
31 | configs_in = ConfigSet(input_files="periodic_structures.xyz")
32 | configs_out = OutputSpec(
33 | output_files="DFT_evaluated.CASTEP.periodic_structures.xyz",
34 | force=True,
35 | all_or_none=True,
36 | )
37 |
38 | if verbose:
39 | print_log("Quantum Espresso example calculation")
40 | print(configs_in)
41 | print(configs_out)
42 | print(f"workdir_root: {workdir_root}")
43 | print(f"castep_command: {castep_command}")
44 | pprint(castep_kwargs)
45 |
46 | # run the calculation
47 | _ = evaluate_dft(
48 | calculator_name="CASTEP",
49 | inputs=configs_in,
50 | outputs=configs_out,
51 | workdir_root=workdir_root, # directory where to put the calculation directories
52 | calculator_command=castep_command,
53 | calculator_kwargs=castep_kwargs,
54 | keep_files="default", # keeps the .pwo file only
55 | )
56 |
57 |
58 | if __name__ == "__main__":
59 | main()
60 |
--------------------------------------------------------------------------------
/deprecated/examples/DFT-calculations/periodic_structures.xyz:
--------------------------------------------------------------------------------
1 | 2
2 | Lattice="0.0 3.0 3.0 3.0 0.0 3.0 3.0 3.0 0.0" Properties=species:S:1:pos:R:3 pbc="T T T"
3 | Cu 0.00000000 0.00000000 0.00000000
4 | Cu 1.5 1.5 1.5
5 | 1
6 | Lattice="3.5 0.0 0.0 0.0 3.5 0.0 1.75 1.75 1.75" Properties=species:S:1:pos:R:3 pbc="T T T"
7 | Li 0.00000000 0.00000000 0.00000000
8 | 2
9 | Lattice="0.0 2.8 2.8 2.8 0.0 2.8 2.8 2.8 0.0" Properties=species:S:1:pos:R:3 pbc="T T T"
10 | Li 0.00000000 0.00000000 0.00000000
11 | Cu 1.4 1.4 1.4
12 |
--------------------------------------------------------------------------------
/deprecated/examples/DFT-calculations/vasp.py:
--------------------------------------------------------------------------------
1 | """
2 | This is a simple example of how to use VASP
3 | """
4 | import os
5 | from pprint import pprint
6 |
7 | from wfl.calculators.dft import evaluate_dft
8 | from wfl.configset import ConfigSet, OutputSpec
9 | from wfl.utils.logging import print_log
10 |
11 |
12 | def main(verbose=True):
13 | # settings
14 | # replace this with your local configuration in productions
15 | workdir_root = "VASP-calculations"
16 | vasp_kwargs = {
17 | "encut": 200.0,
18 | "kspacing": 0.3,
19 | "xc": "pbesol",
20 | "nelm": 200,
21 | "sigma": 0.2,
22 | }
23 | os.environ["MPIRUN_EXTRA_ARGS"] = "-np 2"
24 | vasp_command = "vasp.para"
25 |
26 | # path for your pseudo-potential directory
27 | assert "VASP_PP_PATH" in os.environ
28 |
29 | # IO
30 | configs_in = ConfigSet(input_files="periodic_structures.xyz")
31 | configs_out = OutputSpec(
32 | output_files="DFT_evaluated.VASP.periodic_structures.xyz",
33 | force=True,
34 | all_or_none=True,
35 | )
36 |
37 | if verbose:
38 | print_log("VASP example calculation")
39 | print(configs_in)
40 | print(configs_out)
41 | print(f"workdir_root: {workdir_root}")
42 | print(f"vasp_command: {vasp_command}")
43 | pprint(vasp_kwargs)
44 |
45 | # run the calculation
46 | _ = evaluate_dft(
47 | calculator_name="VASP",
48 | inputs=configs_in,
49 | outputs=configs_out,
50 | workdir_root=workdir_root, # directory where to put the calculation directories
51 | calculator_command=vasp_command,
52 | calculator_kwargs=vasp_kwargs,
53 | keep_files="default", # keeps files minimum for NOMAD upload
54 | )
55 |
56 |
57 | if __name__ == "__main__":
58 | main()
59 |
--------------------------------------------------------------------------------
/deprecated/examples/ace_fit_params.json:
--------------------------------------------------------------------------------
1 | {
2 | "data": {
3 | "fname": "",
4 | "energy_key": "energy",
5 | "force_key": "force",
6 | "virial_key": "virial"
7 | },
8 | "basis": {
9 | "rpi_basis": {
10 | "type": "rpi",
11 | "species": [
12 | "Ti",
13 | "Al"
14 | ],
15 | "N": 3,
16 | "maxdeg": 6,
17 | "r0": 2.88,
18 | "rad_basis": {
19 | "type": "rad",
20 | "rcut": 5.0,
21 | "rin": 1.44,
22 | "pcut": 2,
23 | "pin": 2
24 | },
25 | "transform": {
26 | "type": "polynomial",
27 | "p": 2,
28 | "r0": 2.88
29 | },
30 | "degree": {
31 | "type": "sparse",
32 | "wL": 1.5,
33 | "csp": 1.0,
34 | "chc": 0.0,
35 | "ahc": 0.0,
36 | "bhc": 0.0,
37 | "p": 1.0
38 | }
39 | },
40 | "pair_basis": {
41 | "type": "pair",
42 | "species": [
43 | "Ti",
44 | "Al"
45 | ],
46 | "maxdeg": 6,
47 | "r0": 2.88,
48 | "rcut": 5.0,
49 | "rin": 0.0,
50 | "pcut": 2,
51 | "pin": 0,
52 | "transform": {
53 | "type": "polynomial",
54 | "p": 2,
55 | "r0": 2.88
56 | }
57 | }
58 | },
59 | "solver": {
60 | "solver": "lsqr",
61 | "lsqr_damp": 0.005,
62 | "lsqr_atol": 1e-06
63 | },
64 | "P": {
65 | "type": "laplacian",
66 | "rlap_scal": 3.0
67 | },
68 | "e0": {
69 | "Ti": -1586.0195,
70 | "Al": -105.5954
71 | },
72 | "weights": {
73 | "default": {
74 | "E": 5.0,
75 | "F": 1.0,
76 | "V": 1.0
77 | },
78 | "FLD_TiAl": {
79 | "E": 5.0,
80 | "F": 1.0,
81 | "V": 1.0
82 | },
83 | "TiAl_T5000": {
84 | "E": 30.0,
85 | "F": 1.0,
86 | "V": 1.0
87 | }
88 | },
89 | "ACE_fname_stem": "tial_ace"
90 | }
91 |
--------------------------------------------------------------------------------
/deprecated/examples/ace_fit_params.yaml:
--------------------------------------------------------------------------------
1 | ACE_fname_stem: tial_ace
2 | data:
3 | energy_key: energy
4 | force_key: force
5 | virial_key: virial
6 | fname: ""
7 | e0:
8 | Al: -105.5954
9 | Ti: -1586.0195
10 | basis:
11 | rpi_basis:
12 | type: rpi
13 | N: 3
14 | maxdeg: 6
15 | r0: 2.88
16 | rad_basis:
17 | type: rad
18 | pin: 2
19 | pcut: 2
20 | rcut: 5.0
21 | rin: 1.44
22 | species:
23 | - Ti
24 | - Al
25 | pair_basis:
26 | type: pair
27 | maxdeg: 6
28 | r0: 2.88
29 | species:
30 | - Ti
31 | - Al
32 | rcut: 5.0
33 | rin: 0.0
34 | pcut: 2
35 | pin: 0
36 | solver:
37 | solver: lsqr
38 | lsqr_damp: 0.005
39 |   lsqr_atol: 1.0e-6
40 | P:
41 | type: laplacian
42 | rlap_scal: 3.0
43 | weights:
44 | FLD_TiAl:
45 | E: 5.0
46 | F: 1.0
47 | V: 1.0
48 | TiAl_T5000:
49 | E: 30.0
50 | F: 1.0
51 | V: 1.0
52 | default:
53 | E: 5.0
54 | F: 1.0
55 | V: 1.0
56 |
--------------------------------------------------------------------------------
/deprecated/examples/expyre/config.json:
--------------------------------------------------------------------------------
1 | { "systems": {
2 | "womble0_local": { "host": null,
3 | "scheduler": "sge",
4 | "commands": [ "echo $(date)", "hostname", "conda activate wfl_dev"],
5 | "header": ["#$ -pe smp {ncores_per_node}"],
6 | "partitions": { "any@node15,any@node19,any@node21,any@node24,any@node25": {
7 | "ncores" : 16, "max_time" : "168h", "max_mem" : "47GB" },
8 | "any@node18": { "ncores" : 16, "max_time" : "168h", "max_mem" : "62GB" },
9 | "any@node22": { "ncores" : 16, "max_time" : "168h", "max_mem" : "54GB" },
10 | "any@node23": { "ncores" : 16, "max_time" : "168h", "max_mem" : "23GB" },
11 | "any@node27,any@node28,any@node29,any@node30,any@node31,any@node32,any@node33": {
12 | "ncores" : 32, "max_time" : "168h", "max_mem" : "125GB" },
13 | "any@node34": { "ncores" : 32, "max_time" : "4h", "max_mem" : "125GB" },
14 | "any@node35": { "ncores" : 32, "max_time" : null, "max_mem" : "125GB" },
15 | "any@node5,any@node6,any@node7,any@node8": {
16 | "ncores" : 28, "max_time" : "168h", "max_mem" : "62GB" },
17 | "any@node9": { "ncores" : 28, "max_time" : null, "max_mem" : "62GB" },
18 | "mem1000G@node36": { "ncores" : 32, "max_time" : "168h", "max_mem" : "1000GB" },
19 | "mem1000G@node37": { "ncores" : 32, "max_time" : "48h", "max_mem" : "1000GB" },
20 | "mem1500G@node16": { "ncores" : 32, "max_time" : "168h", "max_mem" : "1500GB" },
21 | "mem200G@node17": { "ncores" : 16, "max_time" : "168h", "max_mem" : "220GB" },
22 | "mem200G@node26": { "ncores" : 16, "max_time" : "168h", "max_mem" : "204GB" }
23 |
24 |
25 | }
26 | }
27 | }}
28 |
29 |
30 |
--------------------------------------------------------------------------------
/deprecated/examples/gap-rss/LiCu/LiCu.json:
--------------------------------------------------------------------------------
1 | {
2 | "global": {
3 | "compositions": [
4 | [ "Cu", 1.0 ], ["LiCu7", 1.0 ], ["LiCu3", 1.0], [ "Li3Cu5", 1.0 ], [ "LiCu", 1.0 ], [ "Li5Cu3", 1.0 ], [ "Li3Cu", 1.0 ], ["Li7Cu", 1.0 ], [ "Li", 1.0 ]
5 | ],
6 | "config_selection_descriptor": {
7 | "soap": true,
8 | "n_max": 10,
9 | "l_max": 3,
10 | "atom_sigma": "_EVAL_ {BOND_LEN_Z_MAX}/10",
11 | "cutoff": "_EVAL_ {BOND_LEN_Z_MAX}*2.0",
12 | "cutoff_transition_width": "_EVAL_ {BOND_LEN_Z_MAX}/4.0",
13 | "average": true,
14 | "central_weight": 1.0,
15 | "Z": "_EVAL_ {Zcenter}",
16 | "n_species": "_EVAL_ {nZ}",
17 | "species_Z": "_EVAL_ {Zs}",
18 | "add_species": "manual_Zcenter"
19 | },
20 | "config_selection_descriptor_add_species": "manual_Zcenter"
21 | },
22 | "prep": {
23 | "length_scales_file": "length_scales.yaml",
24 | "dimer_n_steps": 40
25 | },
26 | "initial_step": {
27 | "buildcell_total_N": 10000,
28 | "fitting_by_desc_select_N": 200,
29 | "testing_by_desc_select_N": 50
30 | },
31 | "rss_step": {
32 | "buildcell_total_N": 10000,
33 | "minima_flat_histo_N": 2500,
34 | "minima_by_desc_select_N": 1000,
35 | "final_flat_histo_N": 2500,
36 | "fitting_by_desc_select_N": 200,
37 | "testing_by_desc_select_N": 50,
38 | "select_convex_hull": false,
39 | "minim_kwargs": {
40 | "pressure": [ "exponential", 0.2 ]
41 | },
42 | "iter_specific": {
43 | "flat_histo_kT": {
44 | "1": 0.3,
45 | "2": 0.2,
46 | "3:": 0.1
47 | }
48 | }
49 | },
50 | "MD_bulk_defect_step": {
51 | "buildcell_total_N": 10000,
52 | "minima_flat_histo_N": 2500,
53 | "minima_by_desc_select_N": 1000,
54 | "final_flat_histo_N": 2500,
55 | "fitting_by_desc_select_N": 200,
56 | "testing_by_desc_select_N": 50,
57 | "flat_histo_kT": 0.1,
58 | "minim_kwargs": {
59 | "pressure": [ "exponential", 0.2 ]
60 | },
61 | "N_bulk": 16,
62 | "N_vacancy": 16,
63 | "N_interstitial": 16,
64 | "N_surface": 16,
65 | "max_n_atoms": 64,
66 | "MD_dt": 1.0,
67 | "bulk_MD_n_steps": 5000,
68 | "bulk_MD_T_range": [
69 | 100.0,
70 | 2500.0
71 | ],
72 | "defect_MD_n_steps": 2000,
73 | "defect_MD_T_range": [
74 | 50.0,
75 | 1250.0
76 | ]
77 | },
78 | "DFT_evaluate": {
79 | "calculator": "VASP",
80 | "kwargs": {
81 | "encut": 500.0,
82 | "kspacing": 0.15,
83 | "ediff": 1.0e-7,
84 | "prec": "acc",
85 | "lreal": false,
86 | "ismear": 0,
87 | "sigma": 0.05,
88 | "algo": "normal",
89 | "amix": 0.1,
90 | "nelm": 150,
91 | "isym": 0,
92 | "lplane": false,
93 | "lscalapack": false,
94 | "VASP_PP_PATH": "/share/apps/vasp/pot/rev_54/PBE"
95 | }
96 | },
97 | "fit": {
98 | "GAP_template_file": "multistage_GAP_fit_settings.short_range_2b_SE_universal_SOAPs_12_3.template.yaml",
99 | "universal_SOAP_sharpness": 0.5,
100 | "database_modify_mod": "wfl.fit.modify_database.gap_rss_set_config_sigmas_from_convex_hull"
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/deprecated/examples/gap-rss/LiCu/job.gap_rss_iter_fit.LiCu.pbs:
--------------------------------------------------------------------------------
#!/bin/bash
# GAP-RSS iterative fit driver for the LiCu system: prep, initial step,
# 24 RSS iterations, then one bulk/defect supercell MD iteration.
# Carries both SLURM (#SBATCH) and PBS (#PBS) headers so it can be submitted
# to either scheduler.
#### #CCMS nprocs=72:network=fast_loose:bigmem
#SBATCH --partition=bigmem
#SBATCH --nodes=1
#SBATCH --exclusive
####
#PBS -l walltime=2400:00:00
#PBS -N gap_rss_LiCu
#PBS -o gap_rss_LiCu.stdout
#PBS -e gap_rss_LiCu.stderr
#PBS -S /bin/bash

cd $PBS_O_WORKDIR

# when specifying SBATCH --nodes, end up with one SLURM_TASK per hyperthread,
# so divide by two for real cores
export WFL_AUTOPARA_NPOOL=$((SLURM_TASKS_PER_NODE / 2))

module unload compilers mpi lapack python ase quip vasp
module load compilers/gnu mpi lapack python ase quip_variant/openmp quip
module load vasp

module list
which gap_fit

export VASP_COMMAND=vasp.serial
export VASP_COMMAND_GAMMA=vasp.gamma_serial

export GRIF_BUILDCELL_CMD=$HOME/src/work/AIRSS/airss-0.9.1/src/buildcell/src/buildcell

# worker tasks run serial; gap_fit gets its own OpenMP thread count
export OMP_NUM_THREADS=1
if [[ $WFL_AUTOPARA_NPOOL -gt 16 ]]; then
export GAP_FIT_OMP_NUM_THREADS=$WFL_AUTOPARA_NPOOL
else
export GAP_FIT_OMP_NUM_THREADS=$((WFL_AUTOPARA_NPOOL / 2))
fi

system_json=LiCu.json

# warn (once) if leftovers of a previous run are present
for f in run_iter_* ACTIVE_ITER atoms_and_dimers.xyz gap_rss_LiCu.*.out; do
    echo "WARNING: Trace of old run found file '$f'" 1>&2
    break
done

rm -f ACTIVE_ITER

# abort if any fail
set -e

# prep
gap_rss_iter_fit -c ${system_json} prep >>gap_rss_LiCu.prep.out

# dft_convergence_test -c LiCu.json \
#    -r '{ "encut" : [ 250, 601, 50 ], "kspacing" : [ 0.35, 0.049, -0.05 ] }' \
#    buildcell.narrow_vol_range.Z_3_1__29_1.input > gap_rss_LiCu.dft_convergence_test.out

# RSS iters
gap_rss_iter_fit -c ${system_json} initial_step >>gap_rss_LiCu.0.initial_step.out
for iter_i in $(seq 1 24); do
    gap_rss_iter_fit -c ${system_json} rss_step >>gap_rss_LiCu.${iter_i}.rss_step.out
done

iter_i=25
# bulk/defect supercell MD iter
# FIX: output file was "gap_rss_Li_new.*" (copy-paste from another system);
# renamed to match the gap_rss_LiCu.* convention used throughout this script
gap_rss_iter_fit -c ${system_json} MD_bulk_defect_step >>gap_rss_LiCu.${iter_i}.MD_step.out
66 |
--------------------------------------------------------------------------------
/deprecated/examples/gap-rss/length_scales.yaml:
--------------------------------------------------------------------------------
1 | 3:
2 | bond_len:
3 | - 3.0
4 | min_bond_len:
5 | - 2.4
6 | vol_per_atom:
7 | - 20.0
8 | source: NB gap-rss
9 | 5:
10 | bond_len:
11 | - 1.7
12 | min_bond_len:
13 | - 1.7
14 | vol_per_atom:
15 | - 8.2
16 | source: NB VASP auto_length_scale
17 | 32:
18 | bond_len:
19 | - 2.5
20 | min_bond_len:
21 | - 2.3
22 | vol_per_atom:
23 | - 23
24 | source: NB gap-rss
25 | 52:
26 | bond_len:
27 | - 3.1
28 | min_bond_len:
29 | - 2.6
30 | vol_per_atom:
31 | - 31
32 | source: NB gap-rss
--------------------------------------------------------------------------------
/deprecated/examples/gap-rss/multistage_GAP_fit_settings.short_range_2b_SE_universal_SOAPs_12_3.template.yaml:
--------------------------------------------------------------------------------
1 | stages:
2 | - error_scale_factor: 10.0
3 | descriptors:
4 | - descriptor:
5 | distance_Nb: True
6 | order: 2
7 | cutoff: "_EVAL_ {BOND_LEN_Z1_Z2}*1.5"
8 | cutoff_transition_width: "_EVAL_ {BOND_LEN_Z1_Z2}*1.5/5.0"
9 | compact_clusters: True
10 | Z: [ "_EVAL_ {Z1}", "_EVAL_ {Z2}" ]
11 | add_species: "manual_Z_pair"
12 | fit:
13 | n_sparse: 15
14 | covariance_type: "ard_se"
15 | theta_uniform: "_EVAL_ {BOND_LEN_Z1_Z2}*1.5/5.0"
16 | sparse_method: "uniform"
17 | f0: 0.0
18 | count_cutoff: "_EVAL_ {BOND_LEN_Z1_Z2}*1.4"
19 | - error_scale_factor: 1.0
20 | descriptors:
21 | - descriptor:
22 | soap: true
23 | n_max: 12
24 | l_max: 3
25 | atom_sigma: "_EVAL_ {ATOM_SIGMA}"
26 | cutoff: "_EVAL_ {R_CUT}"
27 | cutoff_transition_width: "_EVAL_ {R_TRANS}"
28 | central_weight: 1.0
29 | Z: "_EVAL_ {Zcenter}"
30 | n_species: "_EVAL_ {nZ}"
31 | species_Z: "_EVAL_ {Zs}"
32 | add_species: "manual_universal_SOAP"
33 | fit:
34 | n_sparse: 1000
35 | f0: 0.0
36 | covariance_type: "dot_product"
37 | zeta: 4
38 | sparse_method: "cur_points"
39 | print_sparse_index: true
40 |
41 | gap_params:
42 | default_sigma: [0.0025, 0.0625, 0.125, 0.125]
43 | sparse_jitter: 1.0e-8
44 | do_copy_at_file: false
45 | sparse_separate_file: true
46 |
--------------------------------------------------------------------------------
/deprecated/examples/organic_reactions/CHO_methane_burning/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "global": {
3 | "fragments_file": "fragments.xyz",
4 | "config_selection_descriptor": [
5 | "soap n_max=10 l_max=6 atom_sigma=0.3 cutoff=3.0 cutoff_transition_width=0.6 central_weight=1.0 Z=${Zcenter} n_species=${nZ} species_Z={${Zs}}",
6 | "soap n_max=10 l_max=6 atom_sigma=0.6 cutoff=6.0 cutoff_transition_width=1.2 central_weight=1.0 Z=${Zcenter} n_species=${nZ} species_Z={${Zs}}"
7 | ],
8 | "config_select_descriptor_add_species": "manual_Zcenter"
9 | },
10 | "prep": {
11 | "length_scales_file": "length_scales.json"
12 | },
13 | "initial_step": {
14 | "e0_mode": "dimer",
15 | "dimer": {
16 | "cutoff": 6.0,
17 | "n_steps": 40,
18 | "r_min": 0.2,
19 | "inclusion_energy_upper_limit": -1.0
20 | },
21 | "NormalModes": {
22 | "n_free": 2,
23 | "num_per_mode": 4
24 | }
25 | },
26 | "collision_step": {
27 | "kwargs": {
28 | "nsteps": 1000,
29 | "T": 1000.0,
30 | "d0": 6.0,
31 | "trajectory_interval": 8,
32 | "velocity_params": [
33 | [0.10, 0.10],
34 | [0.10, 0.15],
35 | [0.10, 0.20],
36 | [0.15, 0.10],
37 | [0.15, 0.15],
38 | [0.15, 0.20]
39 | ],
40 | "min_atoms": 4
41 | },
42 | "selection": {
43 | "cut_threshold": 0.85,
44 | "lower_energy_limit": 0.01,
45 | "num_select": [
46 | [1, 50],
47 | [6, 50],
48 | [8, 50]
49 | ]
50 | }
51 | },
52 | "neb_step": {
53 | "minim_interval": 50,
54 | "minim_kwargs": {
55 | "fmax": 0.05
56 | },
57 | "neb_kwargs": {
58 | "nimages": 17,
59 | "interpolation_method": false,
60 | "fmax": 0.1,
61 | "steps": 50,
62 | "k": 0.01
63 | },
64 | "ts_kwargs": {
65 | "fmax": 0.05,
66 | "steps": 50
67 | },
68 | "irc_kwargs": {
69 | "fmax": 0.05,
70 | "steps": 100
71 | },
72 | "selection": {
73 | "lower_energy_limit": 0.10,
74 | "num_select": [
75 | [1, 25],
76 | [6, 25],
77 | [8, 10]
78 | ]
79 | }
80 | },
81 | "DFT_evaluate": {
82 | "calculator": "ORCA",
83 | "kwargs": {
84 | "n_run": 3,
85 | "n_hop": 15,
86 | "n_orb": 10,
87 | "max_angle": 60.0,
88 | "smearing": 5000,
89 | "maxiter": 500,
90 | "scratch_path": "/tmp/",
91 | "orca_simple_input": "UHF revPBE def2-TZVP def2/J D3BJ slowconv",
92 | "orca_command": "/opt/womble/orca/orca_4_2_1_linux_x86-64_openmpi314/orca"
93 | }
94 | },
95 | "fit": {
96 | "GAP_template_file": "multistage_GAP_fit_settings.fixed_3_6_SOAPs_10_6.json.template",
97 | "universal_SOAP_sharpness": 0.5,
98 | "num_committee": 3,
99 | "database_modify_mod": "wfl.fit.modify_database.simple_factor_nonperiodic",
100 | "field_error_scale_factors": {
101 | "default_sigma": [0.01, 0.150, false, false],
102 | "extra_space": 6.0,
103 | "config_type_sigma": {
104 | "dimer": [0.1, 0.5, false, false],
105 | "fragment": [0.001, 0.01, false, false]
106 | }
107 | }
108 | }
109 | }
--------------------------------------------------------------------------------
/deprecated/examples/organic_reactions/CHO_methane_burning/fragments.xyz:
--------------------------------------------------------------------------------
1 | 5
2 | Properties=species:S:1:pos:R:3 pbc="F F F" config_type=fragment
3 | C 0.00000000 0.00000000 0.00000000
4 | H 0.63365859 0.63365753 0.63366070
5 | H -0.63365421 -0.63365475 0.63366519
6 | H 0.63366576 -0.63365396 -0.63365405
7 | H -0.63365484 0.63366669 -0.63365227
8 | 4
9 | Properties=species:S:1:pos:R:3 pbc="F F F" config_type=fragment
10 | C 0.00000000 0.00000000 0.00000000
11 | H 0.00000003 1.08779863 -0.00000065
12 | H 0.94217990 -0.54331121 -0.00000026
13 | H -0.94217967 -0.54331158 0.00000050
14 | 2
15 | Properties=species:S:1:pos:R:3 pbc="F F F" config_type=fragment
16 | O 0.00000000 0.00000000 0.00000000
17 | H 0.98631848 0.00000000 0.00000000
18 | 2
19 | Properties=species:S:1:pos:R:3 pbc="F F F" config_type=fragment
20 | C 0.00000000 0.00000000 0.00000000
21 | O 1.14006251 0.00000000 0.00000000
22 | 2
23 | Properties=species:S:1:pos:R:3 pbc="F F F" config_type=fragment
24 | O 0.00000000 0.00000000 0.00000000
25 | O 1.22449087 -0.00000000 -0.00000000
26 | 1
27 | Properties=species:S:1:pos:R:3 pbc="F F F" config_type=fragment
28 | O 0.00000000 0.00000000 0.00000000
--------------------------------------------------------------------------------
/deprecated/examples/organic_reactions/CHO_methane_burning/length_scales.json:
--------------------------------------------------------------------------------
1 | {
2 | "1": {
3 | "bond_len": [1.2, "NB VASP auto_length_scale"],
4 | "min_bond_len": [0.75, "NB VASP auto_length_scale"],
5 | "other links": {"wikipedia": "https://en.wikipedia.org/wiki/Bond_length"},
6 | "vol_per_atom": [3.4, "NB VASP auto_length_scale"]
7 | },
8 | "6": {
9 | "bond_len": [1.5, "NB VASP auto_length_scale"],
10 | "min_bond_len": [1.37, "wikipedia organic bond lengths"],
11 | "other links": {"wikipedia": "https://en.wikipedia.org/wiki/Bond_length"},
12 | "vol_per_atom": [5.7, "NB VASP auto_length_scale"]
13 | },
14 | "8": {
15 | "bond_len": [1.7, "NB VASP auto_length_scale"],
16 | "min_bond_len": [1.2, "NB VASP auto_length_scale"],
17 | "other links": {"wikipedia": "https://en.wikipedia.org/wiki/Bond_length"},
18 | "vol_per_atom": [11, "NB VASP auto_length_scale"]
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/deprecated/examples/organic_reactions/CHO_methane_burning/multistage_GAP_fit_settings.fixed_3_6_SOAPs_10_6.json.template:
--------------------------------------------------------------------------------
1 | { "stages" : [
2 | { "error_scale_factor" : 10.0, "add_species" : "manual_Z_pair",
3 | "descriptors" : [ { "desc_str" : "distance_2b cutoff=6.0 cutoff_transition_width=1.0 Z={{${Z1} ${Z2}}}",
4 | "fit_str" : "n_sparse=30 covariance_type=ard_se theta_uniform=1.0 sparse_method=uniform f0=0.0 add_species=F",
5 | "count_cutoff" : "_F_ 3.0" } ] } ,
6 |
7 | { "error_scale_factor" : 2.0, "add_species" : "manual_Zcenter",
8 | "descriptors" : [ { "desc_str" : "soap n_max=10 l_max=6 atom_sigma=0.4 cutoff=3.0 cutoff_transition_width=0.6 central_weight=1.0 Z=${Zcenter} n_species=${nZ} species_Z={{${Zs}}}",
9 | "fit_str" : "n_sparse=1000 f0=0.0 covariance_type=dot_product zeta=3 sparse_method=cur_points print_sparse_index add_species=F" } ] } ,
10 |
11 | { "error_scale_factor" : 1.0, "add_species" : "manual_Zcenter",
12 | "descriptors" : [ { "desc_str" : "soap n_max=10 l_max=6 atom_sigma=0.6 cutoff=6.0 cutoff_transition_width=1.2 central_weight=1.0 Z=${Zcenter} n_species=${nZ} species_Z={{${Zs}}}",
13 | "fit_str" : "n_sparse=1000 f0=0.0 covariance_type=dot_product zeta=4 sparse_method=cur_points print_sparse_index add_species=F" } ] }
14 | ],
15 | "gap_params" : "default_sigma='{0.010 0.150 0. 0.}' sparse_jitter=1.0e-8 do_copy_at_file=F sparse_separate_file=T",
16 | "core_ip_file" : "glue.xml",
17 | "core_ip_args" : "IP Glue"
18 | }
19 |
--------------------------------------------------------------------------------
/deprecated/examples/simple_gap_fit_parameters.yml:
--------------------------------------------------------------------------------
1 | # mandatory parameters
2 | default_sigma: [ 0.001, 0.03, 0.0, 0.0 ]
3 |
4 | _gap:
5 | - soap: True
6 | l_max: 6
7 | n_max: 12
8 | cutoff: 3
9 | delta: 1
10 | covariance_type: dot_product
11 | zeta: 4
12 | n_sparse: 200
13 | sparse_method: cur_points
14 | atom_gaussian_width: 0.3
15 | cutoff_transition_width: 0.5
16 | add_species: True
17 |
18 | - soap: True
19 | l_max: 6
20 | n_max: 12
21 | cutoff: 6
22 | delta: 1
23 | covariance_type: dot_product
24 | zeta: 4
25 | n_sparse: 200
26 | sparse_method: cur_points
27 | atom_gaussian_width: 0.6
28 | cutoff_transition_width: 1
29 | add_species: True
30 |
31 | # optional parameters
32 | config_type_sigma:
33 | isolated_atom: [ 0.0001, 0.0, 0.0, 0.0 ]
34 | sparse_separate_file: False
35 | energy_parameter_name: dft_energy
36 | force_parameter_name: dft_forces
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/deprecated/plotting/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/deprecated/plotting/__init__.py
--------------------------------------------------------------------------------
/deprecated/user/reactions/generate/radicals.py:
--------------------------------------------------------------------------------
1 | import warnings
2 |
3 | import numpy as np
4 | from ase import neighborlist
5 | from wfl.utils.misc import atoms_to_list
6 |
7 | from wfl.generate.utils import config_type_append
8 |
9 |
def abstract_sp3_hydrogen_atoms(input_mol, label_config_type=True,
                                cutoffs=None):
    """ Removes molecule's sp3 hydrogen atoms one at a time to give a number
    of corresponding unsaturated (radical) structures.

    Method of determining sp3: the H atom's single neighbor is a carbon
    that itself has 4 neighbors of any kind. Only removes H bound to Carbon.

    Parameters
    ----------

    input_mol: Atoms
        structure to remove sp3 hydrogen atoms from
    label_config_type: bool, default True
        whether to append config_type with 'rad{idx}', idx being the index
        of the abstracted H in the parent molecule
    cutoffs: dict, default None
        per-chemical-symbol cutoffs (symbol -> cutoff) overriding default
        values from neighborlist.natural_cutoffs()

    Returns
    -------
    list(Atoms): All sp3 radicals corresponding to input molecule; empty
    list (with a warning) if no sp3 hydrogens are found

    Raises
    ------
    RuntimeError
        if any H atom has zero or multiple neighbors within the cutoffs

    """

    # FIX: natural_cutoffs() returns a plain list (one cutoff per atom), so
    # the previous natural_cutoffs.update(cutoffs) raised AttributeError
    # whenever cutoffs was supplied; per-symbol overrides must be passed as
    # keyword arguments instead.
    if cutoffs is None:
        natural_cutoffs = neighborlist.natural_cutoffs(input_mol)
    else:
        natural_cutoffs = neighborlist.natural_cutoffs(input_mol, **cutoffs)

    neighbor_list = neighborlist.NeighborList(natural_cutoffs,
                                              self_interaction=False,
                                              bothways=True)
    _ = neighbor_list.update(input_mol)

    symbols = np.array(input_mol.symbols)
    sp3_hs = []
    for at in input_mol:
        if at.symbol != 'H':
            continue
        h_idx = at.index

        indices, _offsets = neighbor_list.get_neighbors(h_idx)
        if len(indices) != 1:
            raise RuntimeError("Got no or more than one hydrogen "
                               "neighbors")

        # index of the (single) atom H is bound to
        h_neighbor_idx = indices[0]

        if symbols[h_neighbor_idx] != 'C':
            continue

        # sp3 test: the carbon H is bound to has exactly four neighbors
        indices, _offsets = neighbor_list.get_neighbors(h_neighbor_idx)
        if len(indices) == 4:
            sp3_hs.append(h_idx)

    if len(sp3_hs) == 0:
        warnings.warn("No sp3 hydrogens were found; no radicals returned")
        return []

    radicals = []
    for h_idx in sp3_hs:
        at = input_mol.copy()
        del at[h_idx]
        radicals.append(at)

    if label_config_type:
        # record which H (index in parent molecule) was abstracted
        for rad, h_id in zip(radicals, sp3_hs):
            config_type_append(rad, f'rad{h_id}')

    return radicals
84 |
85 |
--------------------------------------------------------------------------------
/deprecated/user/reactions/generate/ts.py:
--------------------------------------------------------------------------------
1 | from tempfile import NamedTemporaryFile
2 |
3 | import ase.io
4 |
5 | import wfl.utils.misc
6 | from wfl.generate import optimize
7 | from wfl.utils.parallel import construct_calculator_picklesafe
8 |
9 | try:
10 | from sella import Sella, IRC
11 | except ModuleNotFoundError:
12 | Sella = None
13 | IRC = None
14 |
15 |
16 | # noinspection PyProtectedMember
17 | def calc_ts(atoms, calculator, fmax=1.0e-3, steps=200, traj_step_interval=1, traj_equispaced_n=None, verbose=False):
18 | """Runs TS calculation
19 |
20 | Notes
21 | -----
22 | - Pressure and cell movement is not supported yet
23 | - Constraints are not implemented, Sella needs special treatment of them,
24 | see: https://github.com/zadorlab/sella/wiki/Constraints
25 | - Keeping the symmetry is not supported by the Sella optimiser
26 |
27 | Parameters
28 | ----------
29 | atoms: list(Atoms)
30 | input configs
31 | calculator: Calculator / (initializer, args, kwargs)
32 | ASE calculator or routine to call to create calculator
33 | fmax: float, default 1e-3
34 | force convergence tolerance
35 | steps: int, default 200
36 | max number of steps
37 | traj_step_interval: int, default 1
38 | if present, interval between trajectory snapshots
39 | traj_equispaced_n: int, default None
40 | if present, number of configurations to save from trajectory,
41 | trying to be equispaced in Cartesian path length
42 | verbose: bool, default False
43 | optimisation logs are not printed unless this is True
44 |
45 | Returns
46 | -------
47 | list(Atoms) trajectories
48 | """
49 | if not Sella or not IRC:
50 | raise RuntimeError('Need Sella, IRC from sella module')
51 |
52 | if verbose:
53 | logfile = '-'
54 | else:
55 | logfile = None
56 |
57 | calculator = construct_calculator_picklesafe(calculator)
58 |
59 | all_trajs = []
60 |
61 | for at in wfl.utils.misc.atoms_to_list(atoms):
62 |
63 | at.calc = calculator
64 | at.constraints = None
65 | at.info['config_type'] = 'minim_traj'
66 |
67 | with NamedTemporaryFile(prefix="sella_", suffix="_.traj") as trajfile:
68 | opt = Sella(at, trajectory=trajfile.name, logfile=logfile)
69 | opt.run(fmax=fmax, steps=steps)
70 |
71 | traj = ase.io.read(trajfile.name, ":")
72 | if traj_step_interval is not None and traj_step_interval > 0:
73 | # enforce having the last frame in it
74 | traj = traj[:-1:traj_step_interval] + [traj[-1]]
75 |
76 | if opt.converged():
77 | traj[-1].info['config_type'] = 'ts_last_converged'
78 | else:
79 | traj[-1].info['config_type'] = 'ts_last_unconverged'
80 |
81 | traj = optimize._resample_traj(traj, traj_equispaced_n)
82 |
83 | all_trajs.append(traj)
84 |
85 | return all_trajs
86 |
--------------------------------------------------------------------------------
/deprecated/user/reactions/reactions_processing/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/deprecated/user/reactions/reactions_processing/__init__.py
--------------------------------------------------------------------------------
/deprecated/user/reactions/select/simple_filters.py:
--------------------------------------------------------------------------------
1 | # STUB deprecated from wfl.select.simple_filters
def by_energy(inputs, outputs, lower_limit, upper_limit, energy_parameter_name=None, e0=None):
    """Filter configurations by per-atom binding energy.

    NOTE(review): this deprecated stub uses `np`, `sys`, `Atoms` and
    `ConfigSet` which are never imported in this file -- it will raise
    NameError if executed as-is. Kept for reference only.

    Parameters
    ----------
    inputs: ConfigSet
        source configurations
    outputs: OutputSpec
        output configurations
    lower_limit: float / None
        lower energy limit for binding energy, None is -inf
    upper_limit: float / None
        upper energy limit for binding energy, None is +inf
    energy_parameter_name: str / None, default None
        parameter name to use for energy, if None then atoms.get_potential_energy() is used
    e0 : dict / None
        energy of isolated atoms, to use for binding energy calculation, with chemical symbols as keys
        None triggers all zero

    Returns
    -------
    ConfigSet pointing to selected configurations

    """

    # open-ended limits: None means unbounded on that side
    if lower_limit is None:
        lower_limit = - np.inf

    if upper_limit is None:
        upper_limit = np.inf

    def get_energy(at: Atoms):
        # per-atom binding energy: (total energy - sum of isolated-atom
        # energies from e0) / number of atoms
        if e0 is None:
            shift = 0.
        else:
            shift = np.sum([e0[symbol] for symbol in at.get_chemical_symbols()])

        if energy_parameter_name is None:
            return (at.get_potential_energy() - shift) / len(at)
        else:
            return (at.info.get(energy_parameter_name) - shift) / len(at)

    # skip all work if a previous run already produced the output
    if outputs.is_done():
        sys.stderr.write(f'Returning before by_energy since output is done\n')
        return outputs.to_ConfigSet()

    # note: strict inequalities -- configs exactly at a limit are excluded
    selected_any = False
    for atoms in inputs:
        if lower_limit < get_energy(atoms) < upper_limit:
            outputs.write(atoms)
            selected_any = True

    outputs.end_write()
    if selected_any:
        return outputs.to_ConfigSet()
    else:
        # nothing selected: return an explicitly empty ConfigSet
        return ConfigSet(input_configs=[])
59 |
60 |
--------------------------------------------------------------------------------
/deprecated/user/reactions/tests/test_molecules_radicals.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from wfl.configset import OutputSpec, ConfigSet
3 |
4 | import pytest
5 |
6 | # tested modules
7 | from user.generate import radicals
8 | from wfl.generate import smiles
9 |
10 | # wfl.generate_configs.smiles depends on rdkit.Chem
11 | pytest.importorskip("rdkit.Chem")
12 |
def test_abstract_sp3_hydrogens():
    """Check sp3-H abstraction for methane and allyl alcohol.

    FIX: the original test built a ConfigSet of input molecules but discarded
    it, then iterated over `cfs_out.output_configs` of a freshly created
    (empty) OutputSpec -- so the loop body and all assertions never ran.
    Iterate the actual input molecules instead.
    """

    smiles_list = ['C', 'C=CCO']
    input_mols = [smiles.smi_to_atoms(smi) for smi in smiles_list]

    # methane: 4 equivalent sp3 hydrogens -> four CH3 radicals;
    # allyl alcohol: two sp3 hydrogens (labelled rad7/rad8 below)
    expected_formuli = [['CH3'] * 4,
                        ['C3OH5'] * 2]

    expected_config_types = [['rad1', 'rad2', 'rad3', 'rad4'],
                             ['rad7', 'rad8']]

    for mol, ref_formuli, ref_cfg_types in zip(input_mols,
                                               expected_formuli,
                                               expected_config_types):

        rads = radicals.abstract_sp3_hydrogen_atoms(mol)
        formuli = [str(at.symbols) for at in rads]
        assert np.all(formuli == ref_formuli)
        config_types = [at.info['config_type'] for at in rads]
        assert np.all(config_types == ref_cfg_types)
34 |
35 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | build
2 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | export PYTHONPATH := $(PWD)/..:$(PYTHONPATH)
18 |
19 | # Catch-all target: route all unknown targets to Sphinx using the new
20 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
21 | %: Makefile
22 | sphinx-apidoc -o source ../wfl
23 | mv source/wfl.generate.md.rst source/wfl.generate.md_PROTECT.rst
24 | sed -i -e 's/wfl.generate.md/wfl.generate.md_PROTECT/' source/wfl.generate.rst
25 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
26 |
--------------------------------------------------------------------------------
/docs/source/attic/operations.utils.md:
--------------------------------------------------------------------------------
1 | # Utility functions
2 |
3 | Little functions in `wfl.utils` that might come handy.
4 |
5 | ## Find Voids
6 |
7 | `find_voids.find_voids()` returns positions of Voronoi centers and distances to the nearest atom.
8 |
9 | ## e0 from GAP XML
10 |
11 | `gap_xml_tools.extract_e0()` gives e0 values used for the GAP fit.
12 |
13 | ...
14 |
--------------------------------------------------------------------------------
/docs/source/command_line.automatic_docs.rst:
--------------------------------------------------------------------------------
1 | .. _auto_command_line:
2 |
3 | .. click:: wfl.cli.cli:cli
4 | :prog: wfl
5 | :nested: full
6 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from wfl import __version__

# NOTE(review): `from wfl import __version__` above runs *before* this insert,
# so the insert cannot affect that import; wfl is presumably already importable
# (docs/Makefile prepends the repo root to PYTHONPATH) -- confirm whether this
# insert is still needed.
sys.path.insert(0, os.path.abspath('../../wfl'))


# -- Project information -----------------------------------------------------

project = 'workflow'
#copyright = '2021, N. Bernstein, T. K. Stenczel, E. Gelzinyte'
author = 'N. Bernstein, T. K. Stenczel, E. Gelzinyte'

# docs version tracks the installed package's version string
release = __version__


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.napoleon',
              'sphinx_click.ext',
              'nbsphinx',
              'myst_parser',
              'IPython.sphinxext.ipython_console_highlighting'
              ]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# which files done in which language
source_suffix = {
    '.rst': 'restructuredtext',
    '.md': 'markdown'
}


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_book_theme"
# html_theme = 'sphinx_material'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []

# Set nbsphinx Configuration Values
# notebooks are rendered as-is; their code is never executed during the build
nbsphinx_execute = 'never'
nbsphinx_input_prompt = '%s'
nbsphinx_prompt_width = '0'

html_logo = "../wf_logo_final.png"
html_favicon = "../wf_logo_final.png"
77 |
--------------------------------------------------------------------------------
/docs/source/examples.buildcell.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "be6185f2-8bfa-4499-bf7e-faadc6166383",
6 | "metadata": {},
7 | "source": [
8 | "# Random Structures via buildcell\n",
9 | "\n",
10 | "In Workflow random structures can be generated via the `wfl.generate.buildcell.buildcell()` routine. \n",
11 |     "Its functionality builds on [AIRSS](https://airss-docs.github.io/technical-reference/buildcell-manual)’s `buildcell` to construct sensible random structures.\n",
12 | "\n",
13 | "The example below illustrates its application for the random generation of aluminum unit cells.\n",
14 | "Here, we are aiming for a total of 20 structures and, thus, set `inputs` to an iterable of that length.\n",
15 | "Next, we define an `OutputSpec` to handle the output structures that will be generated.\n",
16 | "In order to have a proper `buildcell_input` available, we are using the `wfl.generate.buildcell.create_input()` routine in this example\n",
17 | "where we pass arguments that characterise the systems we are aiming to generate.\n",
18 | "Finally, we set the `buildcell_cmd` appropriately to the `buildcell` executable we use on our machine and run the script\n",
19 | "to obtain the desired number of random Al-based unit cells."
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": null,
25 | "id": "4fd84da0-9483-4a01-9aa6-8de6d53aed9d",
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "import os\n",
30 | "\n",
31 | "from wfl.generate import buildcell\n",
32 | "from wfl.configset import OutputSpec\n",
33 | "\n",
34 | "\n",
35 | "inputs = range(20)\n",
36 | "outputspec = OutputSpec('buildcell_output.xyz')\n",
37 | "\n",
38 | "buildcell_input = buildcell.create_input(z=13, vol_per_atom=10, bond_lengths=2)\n",
39 | "\n",
40 | "outputs = buildcell.buildcell(\n",
41 | " inputs=inputs,\n",
42 | " outputs=outputspec,\n",
43 | " buildcell_input=buildcell_input,\n",
44 | " buildcell_cmd='buildcell',\n",
45 | " )"
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": null,
51 | "id": "082b6fa6-523c-487f-a07a-3c91aa520ebc",
52 | "metadata": {
53 | "nbsphinx": "hidden"
54 | },
55 | "outputs": [],
56 | "source": [
57 | "assert len(list(outputs)) == len(inputs)\n",
58 | "assert os.path.exists(outputspec.files[0]), f'{outputspec.files[0]} not found!'"
59 | ]
60 | }
61 | ],
62 | "metadata": {
63 | "kernelspec": {
64 | "display_name": "Python 3 (ipykernel)",
65 | "language": "python",
66 | "name": "python3"
67 | },
68 | "language_info": {
69 | "codemirror_mode": {
70 | "name": "ipython",
71 | "version": 3
72 | },
73 | "file_extension": ".py",
74 | "mimetype": "text/x-python",
75 | "name": "python",
76 | "nbconvert_exporter": "python",
77 | "pygments_lexer": "ipython3",
78 | "version": "3.9.13"
79 | }
80 | },
81 | "nbformat": 4,
82 | "nbformat_minor": 5
83 | }
84 |
--------------------------------------------------------------------------------
/docs/source/examples.contributions.md:
--------------------------------------------------------------------------------
1 | # Contributing Examples
2 |
3 | Examples complementing the ones provided in the current documentation are always welcome!
4 | In case you have an example and you would like to see it here, feel free to contribute it.
5 | In doing so please make sure to account for the remarks described in the following.
6 |
7 | ## Preparing the Example
8 |
9 | An example should be provided in terms of a jupyter notebook. Besides a descriptive title (and possibly sub-titles) these notebooks should include some explanatory text (```cell type``` is ```Markdown```)
10 | and two types of code blocks (```cell type``` is ```Code```). The first type is used to provide the actual code for the example illustrating the explicit commands used to achieve the operation described in the text.
11 | Code blocks of the second type will not be displayed in the online documentation, but are used for testing purposes only. Consequently, these need to contain some sort of checks (e.g. ```assert``` statements)
12 | that raise an Error in case executing the code of the first type is not working as intended.
13 |
14 | ## Hiding Tests in the Online Documentation
15 |
16 | Before saving the jupyter notebook in ```./docs/source/``` under ```examples..ipynb``` make sure to ```Restart Kernel and Clear All Outputs``` in order to provide it in a defined state.
17 | Afterwards open the file with your favorite text editor, search for all of your code blocks of the second type and add ```"nbsphinx": "hidden"``` to the ```"metadata": {}```.
18 |
19 | ## Updating Related Files
20 |
21 | Update the ```examples.rst``` file by adding a line with your ```examples..ipynb```, as well as the ```examples.index.md``` file by updating the list of common operations with links to your example.
22 |
23 | To have the examples actually tested, open the file ```./tests/test_doc_examples.py``` and add a test for the code blocks in your jupyter notebook. In the simplest case this only requires adding a new ```pytest.param('examples..ipynb', 'all', id='')``` to the ```@pytest.mark.parametrize```.
24 |
25 |
--------------------------------------------------------------------------------
/docs/source/examples.md.md:
--------------------------------------------------------------------------------
1 | # Molecular Dynamics
2 |
3 | The following script takes atomic structures from "configs.xyz", runs a Berendsen NVT molecular dynamics simulation for 6000 steps (3 ps) and writes snapshots to "configs.sample.xyz" every 1000 steps (0.5 ps). The script submits jobs to the "standard" queue of the "local" cluster on 4 cores each, each job containing 4 MD simulations running in parallel.
4 |
5 | ```
6 | import os
7 | from xtb.ase.calculator import XTB
8 | from expyre.resources import Resources
9 | from wfl.autoparallelize import RemoteInfo
10 | from wfl.autoparallelize import AutoparaInfo
11 | from wfl.generate import md
12 | from wfl.configset import ConfigSet, OutputSpec
13 |
14 | temp = 300
15 |
16 | num_cores=4
17 | steps = 6000
18 | sample_interval = 1000
19 | input_fname = "configs.xyz"
20 | out_fname = "configs.sample.xyz"
21 |
22 | num_inputs_per_queued_job = num_cores
23 | max_time = '48h'
24 | partitions="standard"
25 | sysname="local"
26 |
27 | # remote info
28 | resources = Resources(
29 | max_time = max_time,
30 | num_cores = num_cores,
31 | partitions = partitions)
32 |
33 | remote_info = RemoteInfo(
34 | sys_name = sysname,
35 | job_name = "md",
36 | resources = resources,
37 | num_inputs_per_queued_job=num_inputs_per_queued_job,
38 | exact_fit=False,
39 | pre_cmds = ["conda activate my-env"]
40 | )
41 |
42 | calc = (XTB, [], {'method':'GFN2-xTB'})
43 |
44 | ci = ConfigSet(input_fname)
45 | co = OutputSpec(out_fname)
46 |
47 | # Needed for the script to be re-runnable, otherwise a different random seed is generated.
48 | # Without this, if this script is interrupted while the jobs are running, re-starting this
49 | # script would make it create and submit new jobs rather than monitor the ones already running.
50 | os.environ["WFL_DETERMINISTIC_HACK"] = "true"
51 |
52 | # xTB has some internal parallelisation that needs turning off by setting this env. variable.
53 | os.environ["OMP_NUM_THREADS"] = "1"
54 |
55 | ci = md.md(
56 | inputs=ci,
57 | outputs=co,
58 | calculator=calc,
59 | steps = steps,
60 | dt = 0.5,
61 | temperature = temp,
62 | temperature_tau = 500,
63 | traj_step_interval = sample_interval,
64 | results_prefix = "xtb2_",
65 | update_config_type = False,
66 | autopara_info = AutoparaInfo(
67 | remote_info=remote_info,
68 | num_inputs_per_python_subprocess=1)
69 | )
70 |
71 | ```
72 |
73 | expyre config.json:
74 |
75 | ```
76 | { "systems": {
77 | "local": { "host": null,
78 | "scheduler": "slurm",
79 | "commands": ["source ~/.bashrc", "echo $(date)", "hostname"],
80 | "header": ["#SBATCH --nodes={nnodes}",
81 | "#SBATCH --tasks-per-node={num_cores_per_node}",
82 | "#SBATCH --cpus-per-task=1",
83 | "#SBATCH --account=change-me",
84 | "#SBATCH --qos=standard"],
85 | "partitions": {"standard" : {"num_cores": 128, "max_time" : "24h", "max_mem": "256G"},
86 | "highmem" : {"num_cores": 128, "max_time" : "24h", "max_mem": "512G"}}
87 | }
88 | }}
89 |
90 | ```
91 |
--------------------------------------------------------------------------------
/docs/source/examples.rst:
--------------------------------------------------------------------------------
1 | .. _examples:
2 |
3 | ########################################
4 | Examples
5 | ########################################
6 |
7 | Self-contained code snippets of common things that can be done with Workflow.
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 | :caption: Contents:
12 |
13 | examples.index.md
14 | examples.dimers.ipynb
15 | examples.buildcell.ipynb
16 | examples.orca_python.md
17 | examples.fhiaims_calculator.ipynb
18 | examples.mace.md
19 | examples.mlip_fitting.md
20 | examples.daisy_chain_mlip_fitting.ipynb
21 | examples.normal_modes.md
22 | examples.smiles.md
23 | examples.md.md
24 | examples.select_fps.ipynb
25 | examples.contributions.md
26 |
27 |
--------------------------------------------------------------------------------
/docs/source/examples.smiles.md:
--------------------------------------------------------------------------------
1 | # SMILES to `Atoms`
2 |
3 | Conversion of [SMILES](https://en.wikipedia.org/wiki/Simplified_molecular-input_line-entry_system) to ASE's `Atoms` is done via [RDKit](http://rdkit.org/). To install:
4 |
5 | ```
6 | conda install -c conda-forge rdkit
7 | ```
8 |
9 | ## Command line
10 |
11 | ```
12 | wfl generate-configs smiles -o configs.xyz CCCCCC CC c1ccccc1
13 | ```
14 |
15 | ## Python script
16 |
17 | Single operation:
18 |
19 | ```python
20 | from wfl.generate import smiles
21 | atoms = smiles.smi_to_atoms("CCCCCC")
22 | ```
23 |
24 | With Workflow's parallelization:
25 |
26 | ```python
27 | from wfl.configset import ConfigSet, OutputSpec
28 | from wfl.generate import smiles
29 | 
30 | outputspec = OutputSpec("compounds.xyz")
31 | smiles_list = ["CO", "CCCC", "c1ccccc1"]
32 | smiles.smiles(smiles_list, outputs=outputspec)
33 | ```
34 |
35 | NB `smiles` has to be given as the first argument.
36 |
--------------------------------------------------------------------------------
/docs/source/examples_files/fhiaims_calculator/Input_Structures.xyz:
--------------------------------------------------------------------------------
1 | 8
2 | Properties=species:S:1:pos:R:3 ID_mol=mol_01 pbc="F F F"
3 | O 0.18287400 -0.40349600 -1.66669000
4 | O 1.42266000 -0.70203500 0.98701400
5 | O -1.66526000 -0.15640600 -0.61028000
6 | O -0.09728560 0.99613300 1.29070000
7 | C -0.48849000 -0.29202800 -0.53908400
8 | C 0.40672000 0.11351500 0.60257700
9 | H 1.07322000 -0.49406600 -1.21222000
10 | H 1.25592000 -1.37704000 1.63601000
11 | 8
12 | Properties=species:S:1:pos:R:3 ID_mol=mol_02 pbc="F F F"
13 | O 1.06736000 0.42835500 -0.58504100
14 | O -1.64895000 0.64090300 -0.00389491
15 | O 0.90024900 -1.64785000 0.35815600
16 | O -1.51007000 -1.42555000 -1.07198000
17 | C 0.31790100 -0.64231400 0.02288090
18 | C -1.06843000 -0.57166500 -0.35741500
19 | H 2.09480000 0.39991800 -0.60420500
20 | H -1.04190000 1.37716000 0.26904900
21 | 8
22 | Lattice="10.0 0.0 0.0 0.0 10.0 0.0 0.0 0.0 10.0" Properties=species:S:1:pos:R:3 ID_mol=mol_03 pbc="T T T"
23 | O 1.57900000 -0.32773800 -0.08223020
24 | O -0.82956400 1.04070000 -0.78563100
25 | O 0.56772300 0.27693100 1.79026000
26 | O -1.09194000 -1.10555000 -0.95812000
27 | C 0.49712700 -0.23995400 0.61337700
28 | C -0.80033100 -0.19725000 -0.19251100
29 | H 1.26690000 -0.44885400 -0.98820300
30 | H -1.26842000 0.61321800 -1.56205000
31 | 8
32 | Lattice="20.0 0.0 0.0 0.0 20.0 0.0 0.0 0.0 20.0" Properties=species:S:1:pos:R:3 ID_mol=mol_04 pbc="T T T"
33 | O 0.78118500 -1.14763000 -1.28070000
34 | O -0.50048600 0.87194000 1.34350000
35 | O 0.59600000 1.10911000 -1.02344000
36 | O -1.03142000 -1.13277000 0.95403200
37 | C 0.35422700 -0.03683980 -0.69259500
38 | C -0.36310800 -0.23146800 0.55996800
39 | H 0.89113800 -1.63271000 -0.46617400
40 | H -0.54317300 0.23428600 2.10803000
41 |
--------------------------------------------------------------------------------
/docs/source/examples_files/select_fps/md.traj:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/docs/source/examples_files/select_fps/md.traj
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | .. _api:
2 |
3 | wfl
4 | ===
5 |
6 | .. toctree::
7 | :maxdepth: 4
8 |
9 | wfl
10 |
--------------------------------------------------------------------------------
/docs/source/operations.ace_fitting.md:
--------------------------------------------------------------------------------
1 | # Fitting ACE
2 |
3 | Workflow provides a wrapper for the [ACE1pack](https://acesuit.github.io/ACE1pack.jl/stable/) package. The function makes use of Workflow's atomic structure handling to fit in with the rest of potential fitting infrastructure and makes use of [ExPyRe](https://libatoms.github.io/ExPyRe/) for submitting just this fitting function as a (remotely) queued cluster job.
4 |
5 | The ace fitting function takes in a parameter dictionary, writes it to (temporary) JSON file and calls ACE1pack's fitting script, `ace_fit.jl`. The executable (e.g. `/path/to/julia $HOME/.julia/packages/ACE1pack/ChRvA/scripts/ace_fit.jl`) is found automatically, unless specified as an argument or via `WFL_ACE_FIT_COMMAND` variable.
6 |
7 | Examples of parameters may be found in the [ACE1pack documentation](https://acesuit.github.io/ACE1pack.jl/stable/command_line/). `wfl.fit.ace.fit()` does some preparation:
8 |
9 | - converts stress to virial
10 | - sets `energy_key`, etc based on `ref_property_prefix`, i.e. `ace_fit_params["data"]["energy_key"] = f"{ref_property_prefix}energy"`
11 | - parses isolated atom values from the isolated atoms present among the fitting configs
12 | - updates energy/force/virial weights from "energy/force/virial_sigma" `Atoms.info` entries.
13 |
14 | To avoid these modifications, `wfl.fit.ace.run_ace_fit()` can be called directly (which is what `wfl.fit.ace.fit()` calls after the modifications).
15 |
--------------------------------------------------------------------------------
/docs/source/operations.calculators.md:
--------------------------------------------------------------------------------
1 |
2 | # Calculators in Workflow
3 |
4 | In principle, any ASE calculator can be parallelized using Workflow. The parallelization happens at `Atoms` object level. That is, if we wanted to get single-point energies & forces on 16 `Atoms` structures and using 16 cores, all 16 `Atoms` objects would get processed at the same time, each on a single core.
5 |
6 |
7 | ## Parallelize with `generic.calculate()`
8 |
9 | In ASE, we iterate over all `Atoms` objects, initialize a calculator, set it to an `Atoms` object and call it to evaluate energies and forces sequentially. In Workflow, with `generic.calculate` we define a way to initialize a calculator, define where from and to read and write configurations (`ConfigSet` and `OutputSpec`) and set a directive for how many cores to parallelize over.
10 |
11 | The calculator has to be defined as a tuple of `(Calculator, [args], {kwargs})`, for example
12 |
13 | ```python
14 | dftb_calc = (
15 | quippy.potentials.Potential,
16 | ["TB DFTB"],
17 |     {"param_filename": "tightbind.parms.DFTB.mio-0-1.xm"}
18 | )
19 | ```
20 |
21 | Further see [autoparallelization page](overview.parallelisation.rst) and [examples page](examples.index.md).
22 |
23 |
24 | ## File-based calculators
25 |
26 | ASE's calculators that write & read files to & from disk must be modified if they were to be parallelized via Workflow's `generic` calculator. Specifically, each instance of the calculator must execute the calculation in a separate folder so processes running in parallel don't attempt to read and write to the same files. Workflow handles the files, as well as creation and clean-up of temporary directories.
27 |
28 | Currently, ORCA, VASP, QuantumEspresso, CASTEP and FHI-Aims are compatible with the `generic` calculator.
29 |
30 |
--------------------------------------------------------------------------------
/docs/source/operations.descriptors.md:
--------------------------------------------------------------------------------
1 | # Descriptors
2 |
3 | Workflow has a parallelized interface (`wfl.descriptors.quippy.calculate()`) for calculating quippy descriptors via `quippy.descriptors.Descriptors`. See [Fitting a MLIP](examples.mlip_fitting.md) example that generates a global SOAP descriptor.
--------------------------------------------------------------------------------
/docs/source/operations.fitting.rst:
--------------------------------------------------------------------------------
1 | .. _fitting:
2 |
3 |
4 | ########################################
5 | Fitting potentials
6 | ########################################
7 |
8 | Workflow has interface to (multistage) GAP and ACE fitting that integrates with the rest of Workflow's infrastructure, e.g. working with ConfigSets, remote execution, etc.
9 |
10 |
11 |
12 | .. toctree::
13 | :maxdepth: 2
14 | :caption: Contents:
15 |
16 | operations.gap_fitting.md
17 | operations.multistage_gap_fitting.rst
18 | operations.ace_fitting.md
19 |
--------------------------------------------------------------------------------
/docs/source/operations.generate.md:
--------------------------------------------------------------------------------
1 | # Generating Atomic Structures
2 |
3 | This page (and submodules of `wfl.generate`) gives a brief overview of self-contained operations in Workflow that loosely cover creating new atomic structures or modifying existing ones. All but the "atoms and dimers" functions below make use of Workflow's autoparallelization functionality.
4 |
5 | ## Atoms and Dimers
6 |
7 | `wfl.generate.atoms_and_dimers.prepare()` makes a set of dimer (two-atom) configurations for specified elements and specified distance range. See documentation and example [Generating Dimer Structures](examples.dimers.ipynb).
8 |
9 |
10 | ## SMILES to Atoms
11 |
12 | `wfl.generate.smiles.smiles()` converts [SMILES](https://en.wikipedia.org/wiki/Simplified_molecular-input_line-entry_system) (e.g. "CCCC" for n-butane) to ASE's `Atoms`. See example [SMILES to `Atoms`](examples.smiles.md).
13 |
14 |
15 | ## BuildCell
16 |
17 | `wfl.generate.buildcell.buildcell()` wraps [AIRSS](https://airss-docs.github.io/technical-reference/buildcell-manual)'s `buildcell` that constructs sensible random structures. See documentation and example [Random Structures via buildcell](examples.buildcell.ipynb).
18 |
19 |
20 | ## Super Cells
21 |
22 | Functions in `wfl.generate.supercells` create supercells from given primitive cells. These include
23 |
24 | - `largest_bulk()` - makes largest bulk-like supercell with no more than specified number of atoms.
25 | - `vacancy()` - makes a vacancy in a largest bulk-like supercell from above.
26 | - `antisite()` - makes antisites in a largest bulk-like supercell from above.
27 | - `interstitial()` - makes interstitials in a largest bulk-like supercell from above.
28 | - `surface()` - makes a surface supercell.
29 |
30 |
31 | ## Molecular Dynamics
32 |
33 | Molecular dynamics submodule aimed at sampling atomic configurations. Allows for NVE (Velocity Verlet), NPT (Berendsen) and NVT (Berendsen) integrators. Has hooks for custom functions that sample configs from the trajectory on-the-fly and/or at the end of the individual simulation and also for stopping the simulation early if some condition is met (e.g. MD is unstable).
34 |
35 |
36 | ## Geometry Optimization
37 |
38 | `wfl.generate.optimize.optimize()` optimizes geometry with the given calculator and PreconLBFGS, including symmetry constraints.
39 |
40 |
41 | ## Minima Hopping
42 |
43 | `wfl.generate.minimahopping.minimahopping()` wraps ASE's [Minima hopping](https://wiki.fysik.dtu.dk/ase/ase/optimize.html#minima-hopping) code. This algorithm utilizes a series of alternating steps of NVE molecular dynamics and local optimizations.
44 |
45 |
46 | ## Structures for phonons
47 |
48 | `wfl.generate.phonopy.phonopy()` creates displaced configs with phonopy or phono3py.
49 |
50 |
51 | ## Normal Modes of Molecules
52 |
53 | Calculates normal mode directions and frequencies of molecules. From these can generate a Boltzmann sample of random displacements along multiple normal modes. See example on [Normal modes of molecules](examples.normal_modes.md).
54 |
55 |
56 |
57 |
--------------------------------------------------------------------------------
/docs/source/operations.rst:
--------------------------------------------------------------------------------
1 | .. _operations:
2 |
3 | ########################################
4 | Operations
5 | ########################################
6 |
7 | This page is for the functions/modules/code operations that are somewhat more modular. These might be useful for every day messing about with fitting potentials or to construct more complex workflows.
8 |
9 |
10 |
11 | .. toctree::
12 | :maxdepth: 2
13 | :caption: Contents:
14 |
15 | operations.calculators.md
16 | operations.generate.md
17 | operations.descriptors.md
18 | operations.select.rst
19 | operations.fitting.rst
20 |
21 |
--------------------------------------------------------------------------------
/docs/source/operations.select.md:
--------------------------------------------------------------------------------
1 | # Selecting Configs
2 |
3 | ## CUR on global descriptor
4 |
5 | `wfl.select.by_descriptor.CUR_conf_global()` selects atomic structures based on the global (per-config) descriptors.
6 |
7 |
8 | ## Furthest point sampling
9 |
10 | `wfl.select.by_descriptor.greedy_fps_conf_global()` selects atomic structures using greedy farthest point selection on global (per-config) descriptors
11 |
12 |
13 | ## Flat histogram
14 |
15 | `wfl.select.flat_histogram.biased_select_conf()` selects configurations by Boltzmann biased flat histogram on a given quantity (e.g. per-atom enthalpy). The method first constructs a histogram of the given quantity. The probability of selecting each atomic configuration is then inversely proportional to the density of a given histogram bin, multiplied by a Boltzmann biasing factor. The biasing factor is exponential in the quantity relative to its lowest value divided by a "temperature" in the same units as the quantity.
16 |
17 |
18 | ## Convex hull
19 |
20 | `wfl.select.convex_hull.select()` finds the convex hull in the space of volume, composition and another per-atom property (mainly per-atom energy) and returns configs at the vertices of the convex hull, but only the half that lies below the rest of the points.
21 |
22 |
23 | ## Simple select
24 |
25 | - `wfl.select.simple.by_bool_function()` - applies a boolean filter function to all input configs and returns those that were evaluated as `True`.
26 | - `wfl.select.simple.by_index()` - returns structures based on the index.
27 |
28 |
--------------------------------------------------------------------------------
/docs/source/overview.overall_design.rst:
--------------------------------------------------------------------------------
1 | .. _overall_design:
2 |
3 | ######################################
4 | Overall design
5 | ######################################
6 |
7 | The main goal of the Workflow package is to make it easy to operate on sets of input atomic
8 | configurations, typically doing the same operation to each one, and
9 | returning corresponding sets of output configurations. There are also
10 | functions that do not fit this structure, but use the same data types,
11 | or are otherwise useful.
12 |
13 | Most of the :ref:`operations <operations>` in Workflow take in
14 |
15 | * an iterator, usually a ``ConfigSet`` (see below) which returns ASE ``Atoms`` objects
16 | * an ``OutputSpec`` (see below) indicating where to store the returned ASE ``Atoms`` objects
17 |
18 | and return
19 |
20 | * a ``ConfigSet`` containing the output configurations.
21 |
22 | These two classes abstract the storage of atomic configurations in memory,
23 | files (CURRENTLY UNSUPPORTED: or the ABCD database). A ``ConfigSet`` used for input may be
24 | initialised with
25 |
26 | * a list (or list of lists) of ASE ``Atoms`` objects in memory
27 | * one or more filenames that can be read by ``ase.io.read()``, such as ``.extxyz``
28 | * a list of ``ConfigSet`` objects that use the same type of storage
29 | * [CURRENTLY UNSUPPORTED] a query to an `ABCD database <https://github.com/libAtoms/abcd>`_
30 |
31 | Similarly, returned configurations can be held in memory or
32 | file(s) [currently unsupported: ABCD], depending on the arguments to the ``OutputSpec``
33 | constructor. The workflow function returns a ``ConfigSet`` generated by
34 | ``OutputSpec.to_ConfigSet()``, which can be used to access the output
35 | configs. This way, an operation may iterate over a ``ConfigSet``
36 | and write ``Atoms`` to ``OutputSpec``, regardless of how the input
37 | configs were supplied or how or where to the output configs are going
38 | to be collected.
39 |
40 | In addition to this abstraction of ``Atoms`` storage, the workflow makes
41 | it easy to parallelize operations over sets of configurations and/or
42 | run them as (possibly remote) queued jobs, and this has been implemented
43 | for most of its operations. This is achieved by wrapping the operation in a
44 | call to ``wfl.pipeline.autoparallelize``. In addition to parallelising
45 | on readily accessible cores, the operations may be executed in a number
46 | of independently queued jobs on an HPC cluster with the help of
47 | `ExPyRe <https://libatoms.github.io/ExPyRe/>`_.
48 |
49 | Some parts of Workflow (e.g. how many parallel processes to run) are controlled via environment variables. The
50 | most commonly used ones are
51 |
52 | * ``WFL_NUM_PYTHON_SUBPROCESSES`` which controls how many python processes (on the
53 | same node) are used to parallelize a single operation
54 | * ``WFL_EXPYRE_INFO`` which controls what HPC resources will be used for a remote job
55 |
56 |
--------------------------------------------------------------------------------
/docs/source/overview.rst:
--------------------------------------------------------------------------------
1 | .. _overview:
2 |
3 | ######################################
4 | Overview
5 | ######################################
6 |
7 | .. toctree::
8 | :maxdepth: 1
9 | :caption: This section outlines:
10 |
11 | overview.overall_design.rst
12 | overview.configset.rst
13 | overview.parallelisation.rst
14 | overview.queued.md
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/docs/wf_logo_final.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/docs/wf_logo_final.png
--------------------------------------------------------------------------------
/examples/iterative_gap_fit/init_md.traj:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/examples/iterative_gap_fit/init_md.traj
--------------------------------------------------------------------------------
/examples/iterative_gap_fit/multistage_gap_params.json:
--------------------------------------------------------------------------------
1 | {
2 | "stages": [
3 | {
4 | "error_scale_factor": 10.0,
5 | "descriptors": [
6 | {
7 | "descriptor": {
8 | "distance_Nb": true,
9 | "order": 2,
10 | "cutoff": "_EVAL_ {BOND_LEN_Z1_Z2}*1.5",
11 | "cutoff_transition_width": "_EVAL_ {BOND_LEN_Z1_Z2}*1.5/5.0",
12 | "compact_clusters": true,
13 | "Z": ["_EVAL_ {Z1}", "_EVAL_ {Z2}"]
14 | },
15 | "fit": {
16 | "n_sparse": 20,
17 | "covariance_type": "ard_se",
18 | "theta_uniform": "_EVAL_ {BOND_LEN_Z1_Z2}*1.5/5.0",
19 | "sparse_method": "uniform",
20 | "f0": 0.0
21 | },
22 | "count_cutoff": "_EVAL_ {BOND_LEN_Z1_Z2}*1.4",
23 | "add_species": "manual_Z_pair"
24 | }
25 | ]
26 | },
27 | {
28 | "error_scale_factor": 1.0,
29 | "descriptors": [
30 | {
31 | "descriptor": {
32 | "soap": true,
33 | "n_max": 4,
34 | "l_max": 2,
35 | "atom_sigma": "_EVAL_ {ATOM_SIGMA}",
36 | "cutoff": "_EVAL_ {R_CUT}",
37 | "cutoff_transition_width": "_EVAL_ {R_TRANS}",
38 | "central_weight": 1.0,
39 | "Z": "_EVAL_ {Zcenter}",
40 | "n_species": "_EVAL_ {nZ}",
41 | "species_Z": "_EVAL_ {Zs}"
42 | },
43 | "add_species": "manual_universal_SOAP",
44 | "fit": {
45 | "n_sparse": 50,
46 | "f0": 0.0,
47 | "covariance_type": "dot_product",
48 | "zeta": 4.0,
49 | "sparse_method": "cur_points",
50 | "print_sparse_index": true
51 | }
52 | }
53 | ]
54 | }
55 | ],
56 | "gap_params": {
57 | "default_sigma": [0.0025, 0.0625, 0.125, 0.125],
58 | "sparse_jitter": 1e-08,
59 | "do_copy_at_file": false,
60 | "sparse_separate_file": true,
61 | "energy_parameter_name": "DFT_energy",
62 | "force_parameter_name": "DFT_forces"
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 |
3 | name = "wfl"
4 | requires-python = ">=3.9"
5 | dependencies = [ "click>=7.0", "numpy<2", "ase>=3.22.1", "pyyaml", "spglib",
6 | "docstring_parser", "expyre-wfl", "universalSOAP", "pandas" ]
7 | readme = "README.md"
8 | license = { file = "LICENSE" }
9 | dynamic = ["version"]
10 |
11 | [project.scripts]
12 | wfl = "wfl.cli.cli:cli"
13 | gap_rss_iter_fit = "wfl.cli.gap_rss_iter_fit:cli"
14 | dft_convergence_test = "wfl.cli.dft_convergence_test:cli"
15 | reactions_iter_fit = "wfl.cli.reactions_iter_fit:cli"
16 |
17 | [tool.setuptools.packages.find]
18 | exclude = [ "test*" ]
19 |
20 | [tool.setuptools.dynamic]
21 | version = {attr = "wfl.__version__"}
22 |
23 | [project.optional-dependencies]
24 | test = [
25 | "pytest",
26 | "mace-torch",
27 | "quippy-ase",
28 | "requests",
29 | "rdkit==2024.3.3",
30 | ]
31 | doc = [
32 | "sphinx",
33 | "sphinx_click",
34 | "nbsphinx",
35 | "myst_parser",
36 | "sphinx_book_theme",
37 | ]
38 | all = ["wfl[test,doc]"]
39 |
40 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | testpaths = tests/
3 | #addopts = --cov=wfl --cov-report=html --cov-config=.coveragerc
--------------------------------------------------------------------------------
/tests/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 |
4 | [report]
5 | precision = 2
6 | omit =
7 | ../Attic
8 | ../simple_tests
9 | ../examples
10 |
11 | [html]
12 | directory = coverage-html
13 |
--------------------------------------------------------------------------------
/tests/TODO:
--------------------------------------------------------------------------------
1 | configset.py
2 | ConfigSet_in -> _out, in various combinations
3 | non-files in -> 1
4 | files 1-1, N-1, N-dict
5 | is_done() and interactions with all_or_none
6 |
7 | pipeline.py
8 | pipeline (of simple or dummy op) with and without autopara, to/from various ConfigSet_in, _out formats
9 | interaction with is_done()
10 |
11 | ABCD (need ABCD server in CI)
12 | configset.py: simple ABCD database I/O functionality
13 | pipeline.py + configset.py: pipelines to/from ABCD
14 |
15 | generate_configs:
16 | buildcell (need AIRSS buildcell in CI)
17 | md
18 |
19 | select_configs:
20 | CUR select
21 | convex hull
22 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # only needed for pytest-cov
2 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/LiCu.json:
--------------------------------------------------------------------------------
1 | {
2 | "global": {
3 | "compositions": [
4 | ["Cu", 1.0], ["LiCu3", 1.0], ["LiCu", 1.0], ["Li3Cu", 1.0], ["Li", 1.0]
5 | ],
6 | "config_selection_descriptor": {
7 | "soap": true,
8 | "n_max": 10,
9 | "l_max": 4,
10 | "atom_sigma": "_EVAL_ {BOND_LEN_Z_MAX}/10",
11 | "cutoff": "_EVAL_ {BOND_LEN_Z_MAX}*2.0",
12 | "cutoff_transition_width": "_EVAL_ {BOND_LEN_Z_MAX}/4.0",
13 | "average": true,
14 | "central_weight": 1.0,
15 | "Z": "_EVAL_ {Zcenter}",
16 | "n_species": "_EVAL_ {nZ}",
17 | "species_Z": "_EVAL_ {Zs}",
18 | "add_species": "manual_Zcenter"
19 | },
20 | "config_selection_descriptor_add_species": "manual_Zcenter"
21 | },
22 | "prep": {
23 | "length_scales_file": "length_scales.yaml",
24 | "dimer_n_steps": 40,
25 | "buildcell" : {
26 | "default": [6, 24],
27 | "MD_bulk_defect" : [4, 8]
28 | }
29 | },
30 | "initial_step": {
31 | "buildcell_total_N": 100,
32 | "fitting_by_desc_select_N": 30,
33 | "testing_by_desc_select_N": 10
34 | },
35 | "rss_step": {
36 | "buildcell_total_N": 100,
37 | "minima_flat_histo_N": 50,
38 | "minima_by_desc_select_N": 20,
39 | "final_flat_histo_N": 40,
40 | "fitting_by_desc_select_N": 30,
41 | "testing_by_desc_select_N": 10,
42 | "select_convex_hull": false,
43 | "optimize_kwargs": {
44 | "pressure" : [ "exponential", 0.2 ],
45 | "steps" : 20
46 | },
47 | "iter_specific": {
48 | "flat_histo_kT": {
49 | "1": 0.3,
50 | "2": 0.2,
51 | "3:": 0.1
52 | }
53 | }
54 | },
55 | "MD_bulk_defect_step": {
56 | "buildcell_total_N": 100,
57 | "minima_flat_histo_N": 50,
58 | "minima_by_desc_select_N": 20,
59 | "final_flat_histo_N": 40,
60 | "fitting_by_desc_select_N": 30,
61 | "testing_by_desc_select_N": 10,
62 | "flat_histo_kT": 0.1,
63 | "optimize_kwargs": {
64 | "pressure" : [ "exponential", 0.2 ],
65 | "steps" : 20
66 | },
67 | "N_bulk": 2,
68 | "N_vacancy": 2,
69 | "N_interstitial": 2,
70 | "N_surface": 2,
71 | "max_n_atoms": 8,
72 | "MD_dt": 1.0,
73 | "bulk_MD_n_steps": 200,
74 | "bulk_MD_T_range": [
75 | 100.0,
76 | 2500.0
77 | ],
78 | "defect_MD_n_steps": 200,
79 | "defect_MD_T_range": [
80 | 50.0,
81 | 1250.0
82 | ]
83 | },
84 | "DFT_evaluate": {
85 | "calculator": "VASP",
86 | "kwargs": {
87 | "encut": 200.0,
88 | "kspacing": 0.5,
89 | "ediff": 1.0e-7,
90 | "prec": "acc",
91 | "lreal": false,
92 | "ismear": 0,
93 | "sigma": 0.05,
94 | "algo": "normal",
95 | "amix": 0.1,
96 | "nelm": 150,
97 | "isym": 0,
98 | "lplane": false,
99 | "lscalapack": false
100 | }
101 | },
102 | "fit": {
103 | "GAP_template_file": "multistage_GAP_fit_settings.template.yaml",
104 | "universal_SOAP_sharpness": 0.5,
105 | "database_modify_mod": "wfl.fit.modify_database.gap_rss_set_config_sigmas_from_convex_hull"
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/job.test_cli_rss_create_ref.slurm:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #SBATCH --job-name=test_cli_rss_create_ref
3 | #SBATCH --partition=CPU_16
4 | #SBATCH --nodes=1
5 | #SBATCH --ntasks=16
6 | #SBATCH --exclusive
7 | #SBATCH --output=test_cli_rss_create_ref.stdout
8 | #SBATCH --error=test_cli_rss_create_ref.stderr
9 | #SBATCH --time=6:00:00
10 |
11 | pwd
12 |
13 | rm -rf test_cli_rss_create_ref
14 | mkdir test_cli_rss_create_ref
15 | export GAP_RSS_TEST_SETUP=$PWD/test_cli_rss_create_ref
16 |
17 | module purge
18 | module load compiler/gnu python/system python_extras/quippy dft/vasp
19 | # make sure it's not OpenMP enabled, so gap_fit is deterministic
20 | export OMP_NUM_THREADS=1
21 | export OPENBLAS_NUM_THREADS=1
22 | export MKL_NUM_THREADS=1
23 |
24 | export ASE_VASP_COMMAND=vasp.serial
25 | export ASE_VASP_COMMAND_GAMMA=vasp.gamma.serial
26 | export VASP_PP_PATH=$VASP_PATH/pot/rev_54/PBE
27 | export GRIF_BUILDCELL_CMD=$HOME/src/work/AIRSS/airss-0.9.1/src/buildcell/src/buildcell
28 |
29 | export WFL_NUM_PYTHON_SUBPROCESSES=16
30 |
31 | pytest --runslow -rxXs -s tests/test_cli_rss.py
32 |
33 | cp job.test_cli_rss_create_ref.slurm test_cli_rss_create_ref.stdout tests/assets/cli_rss
34 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/length_scales.yaml:
--------------------------------------------------------------------------------
1 | 3:
2 | bond_len:
3 | - 3.0
4 | min_bond_len:
5 | - 2.4
6 | vol_per_atom:
7 | - 20.0
8 | source: NB gap-rss
9 | 5:
10 | bond_len:
11 | - 1.7
12 | min_bond_len:
13 | - 1.7
14 | vol_per_atom:
15 | - 8.2
16 | source: NB VASP auto_length_scale
17 | 29:
18 | bond_len:
19 | - 2.6
20 | min_bond_len:
21 | - 2.2
22 | vol_per_atom:
23 | - 12
24 | source: NB VASP auto_length_scale
25 | 32:
26 | bond_len:
27 | - 2.5
28 | min_bond_len:
29 | - 2.3
30 | vol_per_atom:
31 | - 23
32 | source: NB gap-rss
33 | 52:
34 | bond_len:
35 | - 3.1
36 | min_bond_len:
37 | - 2.6
38 | vol_per_atom:
39 | - 31
40 | source: NB gap-rss
--------------------------------------------------------------------------------
/tests/assets/cli_rss/multistage_GAP_fit_settings.template.yaml:
--------------------------------------------------------------------------------
1 | stages:
2 | - error_scale_factor: 10.0
3 | descriptors:
4 | - descriptor:
5 | distance_Nb: True
6 | order: 2
7 | cutoff: "_EVAL_ {BOND_LEN_Z1_Z2}*1.5"
8 | cutoff_transition_width: "_EVAL_ {BOND_LEN_Z1_Z2}*1.5/5.0"
9 | compact_clusters: True
10 | Z: [ "_EVAL_ {Z1}", "_EVAL_ {Z2}" ]
11 | add_species: "manual_Z_pair"
12 | fit:
13 | n_sparse: 15
14 | covariance_type: "ard_se"
15 | theta_uniform: "_EVAL_ {BOND_LEN_Z1_Z2}*1.5/5.0"
16 | sparse_method: "uniform"
17 | f0: 0.0
18 | count_cutoff: "_EVAL_ {BOND_LEN_Z1_Z2}*1.4"
19 | - error_scale_factor: 1.0
20 | descriptors:
21 | - descriptor:
22 | soap: true
23 | n_max: 12
24 | l_max: 3
25 | atom_sigma: "_EVAL_ {BOND_LEN_Z}/4"
26 | cutoff: "_EVAL_ {BOND_LEN_Z_MAX}*2"
27 | cutoff_transition_width: "_EVAL_ {BOND_LEN_Z_MAX}/6"
28 | central_weight: 1.0
29 | Z: "_EVAL_ {Zcenter}"
30 | n_species: "_EVAL_ {nZ}"
31 | species_Z: "_EVAL_ {Zs}"
32 | add_species: "manual_Zcenter"
33 | fit:
34 | n_sparse: 1000
35 | f0: 0.0
36 | covariance_type: "dot_product"
37 | zeta: 4
38 | sparse_method: "cur_points"
39 | print_sparse_index: true
40 |
41 | gap_params:
42 | default_sigma: [0.0025, 0.0625, 0.125, 0.125]
43 | sparse_jitter: 1.0e-8
44 | do_copy_at_file: false
45 | sparse_separate_file: true
46 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_0/cli_rss_test_energies:
--------------------------------------------------------------------------------
1 | -5.295965626273918
2 | -22.135978757823587
3 | -25.81037897028164
4 | 6.752403121411502
5 | -41.459295069299316
6 | -19.64485863161421
7 | -14.48116260816163
8 | -17.078386164702472
9 | -27.213268025226775
10 | 20.474129387341033
11 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_0/initial_random_configs.Z_29_1.narrow_odd.xyz:
--------------------------------------------------------------------------------
1 | 15
2 | Lattice="6.33468 0.0 0.0 -0.8274204238560217 6.280409711522482 0.0 -2.7536301236451273 -3.1402052384446373 4.823921632966168" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=88 RSS_min_vol_per_atom=6.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=0 buildcell_symmetry="1 P1 P 1 @ 0.01" _ConfigSet_loc=" / 0" pbc="T T T"
3 | Cu 1.27611623 -1.61124547 3.69140520
4 | Cu 1.16200404 4.12407639 1.17387159
5 | Cu -1.06852404 1.67416194 3.57968890
6 | Cu 4.55181436 2.08657603 1.15071231
7 | Cu 1.89417521 -0.04053106 0.75340305
8 | Cu 3.43007422 -1.36783292 3.92945153
9 | Cu 4.37389848 -0.29202748 1.07245971
10 | Cu 2.82579885 1.27330201 3.75555198
11 | Cu -1.33216383 4.37214648 1.19377626
12 | Cu 0.94710515 2.38299199 3.48587664
13 | Cu 0.39013429 2.10458310 1.30752590
14 | Cu -1.84272736 3.91960778 3.39296165
15 | Cu 2.77451599 2.94423766 2.17534685
16 | Cu 0.01968595 0.07384646 2.53289300
17 | Cu 2.74206496 3.11009449 -0.14785220
18 | 17
19 | Lattice="6.15296 0.0 0.0 1.5449018060815527 5.9558538574385755 0.0 1.5449018060815527 1.1952941755651412 5.834677969264971" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=89 RSS_min_vol_per_atom=6.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=0 buildcell_symmetry="1 P1 P 1 @ 0.01" _ConfigSet_loc=" / 1" pbc="T T T"
20 | Cu 1.27156681 3.31473213 0.37023817
21 | Cu 3.68103010 0.34548110 0.25523010
22 | Cu 1.40100268 1.01663134 3.15258400
23 | Cu 1.84681806 3.02062732 2.47210580
24 | Cu 3.44299824 2.46978979 0.26746919
25 | Cu 3.37804389 0.85977953 2.53051475
26 | Cu 7.01971648 5.36133361 1.37502248
27 | Cu 7.08904350 2.31425308 5.00019809
28 | Cu 3.96474772 6.31438503 5.02896921
29 | Cu 5.31958614 4.04754541 0.82680997
30 | Cu 5.24207690 1.84290953 3.82528240
31 | Cu 2.83011048 4.77753198 3.73016433
32 | Cu 3.51422297 5.12284282 1.52245328
33 | Cu 6.04522767 2.10605113 1.68927852
34 | Cu 3.46844851 2.67614837 4.73054247
35 | Cu 4.01765288 3.11809816 2.68737693
36 | Cu 6.99869733 5.21201881 4.42878340
37 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_0/initial_random_configs.Z_3_1.narrow_odd.xyz:
--------------------------------------------------------------------------------
1 | 7
2 | Lattice="5.50106 0.0 0.0 -1.7229480776879935 5.224280911780228 0.0 -1.7229480776879935 -2.3824505306728927 4.649412921443706" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=28 RSS_min_vol_per_atom=10.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=0 buildcell_symmetry="1 P1 P 1 @ 0.01" _ConfigSet_loc=" / 0" pbc="T T T"
3 | Li -1.65300546 1.68913619 3.74986099
4 | Li 2.30286650 3.75035263 0.69271910
5 | Li 3.06825396 -0.75632344 3.14915292
6 | Li 2.54947091 0.93984068 0.47545753
7 | Li 0.17925151 -0.94824924 2.44937725
8 | Li -0.82363018 2.27304062 1.14715365
9 | Li 1.15241573 1.57786132 2.95910680
10 | 11
11 | Lattice="4.14424 0.0 0.0 -0.3204149683394467 7.044717045280387 0.0 -1.7195239575325498 -2.4406367757703165 7.7438942243957" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=29 RSS_min_vol_per_atom=10.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=0 buildcell_symmetry="1 P1 P 1 @ 0.01" _ConfigSet_loc=" / 1" pbc="T T T"
12 | Li 1.28170835 2.31007721 0.35408384
13 | Li 0.32945950 4.66987451 1.01639383
14 | Li 2.88042013 2.33807965 2.49283768
15 | Li 2.96474169 0.27433955 5.42432954
16 | Li 2.29426979 5.80785417 2.99155111
17 | Li 0.14644299 4.25201413 3.74961683
18 | Li 0.43974880 5.56975602 6.17540336
19 | Li -1.65082696 3.91848095 5.51309635
20 | Li 0.75086056 1.99207521 4.74289562
21 | Li 0.48351466 3.21463710 7.24042836
22 | Li -0.29241822 0.42875146 1.19234447
23 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_1/cli_rss_test_energies:
--------------------------------------------------------------------------------
1 | -21.413459738184233
2 | -19.925531205822555
3 | -20.384403988433622
4 | -27.640899618934032
5 | -36.19531134525743
6 | -36.177882041761556
7 | -33.419141520229
8 | -36.820901715652006
9 | -23.035818481059017
10 | -19.370666421211205
11 | -9.659546941936476
12 | -34.502910169119886
13 | -32.31995469784405
14 | 2.259827131647998
15 | -42.98171723585547
16 | -18.807087993493475
17 | -19.614354991130394
18 | -30.31803960444498
19 | -27.933745885108046
20 | 3.968422891292665
21 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_1/initial_random_configs.Z_29_1.narrow_odd.xyz:
--------------------------------------------------------------------------------
1 | 11
2 | Lattice="3.59532 0.0 0.0 -1.7438602178191682 3.79219285695067 0.0 -0.3518289039606815 -1.4032808613357275 9.921760482341162" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=88 RSS_min_vol_per_atom=6.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=1 buildcell_symmetry="1 P1 P 1 @ 0.01" _ConfigSet_loc=" / 0" pbc="T T T"
3 | Cu 0.93425927 1.70881861 9.83859133
4 | Cu -0.42951986 2.63122328 6.32572555
5 | Cu 1.36927084 1.14726040 2.40245211
6 | Cu 0.83060428 -0.58864577 9.86928330
7 | Cu 1.42127184 0.28360566 4.53570998
8 | Cu 1.36814763 2.55796301 4.89289634
9 | Cu 3.06642892 -0.36450219 2.76417171
10 | Cu -0.42779816 2.01344601 1.25745416
11 | Cu -0.48332756 -1.05978190 7.75684822
12 | Cu -0.42950413 1.75646699 3.70294190
13 | Cu -0.86339050 1.06250466 8.55770395
14 | 17
15 | Lattice="3.98393 0.0 0.0 1.9919661892706193 6.95016734337402 0.0 1.9919661892706193 0.20181292910210516 6.947236691127625" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=89 RSS_min_vol_per_atom=6.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=1 buildcell_symmetry="23 I222 I 2 2 @ 0.01" _ConfigSet_loc=" / 1" pbc="T T T"
16 | Cu 4.78431470 1.89408362 0.53173038
17 | Cu 3.18354573 0.58650492 1.87784502
18 | Cu 4.78431665 5.25789665 6.41550631
19 | Cu 3.18354768 6.56547535 5.06939167
20 | Cu 2.96993879 1.14145302 5.10908957
21 | Cu 4.99792330 5.14007976 0.99261840
22 | Cu 2.96993908 6.01052725 1.83814712
23 | Cu 4.99792359 2.01190052 5.95461829
24 | Cu 5.16872759 6.07952043 4.01426273
25 | Cu 6.78306583 4.18910189 5.96039422
26 | Cu 1.18479655 1.07245984 2.93297397
27 | Cu 2.79913479 2.96287839 0.98684247
28 | Cu 3.00100638 2.45414016 3.22075699
29 | Cu 4.96685553 3.29066010 2.35958379
30 | Cu 3.00100685 4.69784011 3.72647970
31 | Cu 4.96685600 3.86132017 4.58765291
32 | Cu 1.99196500 0.00000000 0.00000000
33 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_1/initial_random_configs.Z_3_1.narrow_odd.xyz:
--------------------------------------------------------------------------------
1 | 17
2 | Lattice="5.7233 0.0 0.0 -2.8616499999999987 4.9565231934794785 0.0 0.0 0.0 11.7223" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=28 RSS_min_vol_per_atom=10.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=1 buildcell_symmetry="143 P3 P 3 @ 0.01" _ConfigSet_loc=" / 0" pbc="T T T"
3 | Li -1.22409683 4.26954462 8.75923196
4 | Li -0.22383540 1.76165144 8.75923196
5 | Li 1.44793309 3.88184983 8.75923196
6 | Li 4.32508665 0.71477971 6.56494048
7 | Li 2.94173928 3.38824506 6.56494048
8 | Li 1.31812407 0.85349842 6.56494048
9 | Li 1.43321419 2.27633135 3.78785962
10 | Li 3.03533184 0.10303472 3.78785962
11 | Li 4.11640311 2.57715762 3.78785962
12 | Li -1.45155622 4.29013302 1.70662270
13 | Li -0.12793607 1.55437212 1.70662270
14 | Li 1.57949230 4.06854125 1.70662270
15 | Li 4.14834227 2.53313923 11.37020900
16 | Li 1.45536594 2.32600018 11.37020900
17 | Li 2.98124179 0.09738379 11.37020900
18 | Li 0.00000000 0.00000000 4.07656111
19 | Li -2.86165000 4.95652319 -0.51195152
20 | 19
21 | Lattice="5.94301 0.0 0.0 -2.9715049999999987 5.146797634944957 0.0 0.0 0.0 11.92748" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=29 RSS_min_vol_per_atom=10.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=1 buildcell_symmetry="174 P-6 P -6 @ 0.01" _ConfigSet_loc=" / 1" pbc="T T T"
22 | Li -1.54057766 7.05848143 4.63868404
23 | Li -2.37103040 0.28337753 4.63868404
24 | Li 3.91160806 2.95173631 4.63868404
25 | Li -1.54057766 7.05848143 7.28879596
26 | Li -2.37103040 0.28337753 7.28879596
27 | Li 3.91160806 2.95173631 7.28879596
28 | Li -0.60282328 1.42495783 8.94561000
29 | Li 2.03886725 3.91225793 8.94561000
30 | Li -1.43604308 4.95637899 8.94561000
31 | Li -0.60282328 1.42495783 2.98187000
32 | Li 2.03886725 3.91225793 2.98187000
33 | Li -1.43604308 4.95637899 2.98187000
34 | Li 1.43092764 1.91168328 10.60242404
35 | Li 3.57197990 0.28337805 10.60242404
36 | Li 3.91160747 2.95173631 10.60242404
37 | Li 1.43092764 1.91168328 1.32505596
38 | Li 3.57197990 0.28337805 1.32505596
39 | Li 3.91160747 2.95173631 1.32505596
40 | Li 0.00000000 0.00000000 0.00000000
41 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_2/cli_rss_test_energies:
--------------------------------------------------------------------------------
1 | -14.34994513455185
2 | -14.390010768177474
3 | -14.527916154600694
4 | -13.658846730580697
5 | -14.353457379585029
6 | -6.685501277146269
7 | -10.20483136915988
8 | -99.14794227107457
9 | 5175.854473584192
10 | 6784.439208318476
11 | 17.8082040011205
12 | -121.37944250510824
13 | -12.586180597730035
14 | -34.696761624237666
15 | -40.20509362867415
16 | -15.013360267927046
17 | -20.895221778423835
18 | -31.582419369711392
19 | -1.4269143760690586
20 | 13.283898701514037
21 | -18.951740338975927
22 | -18.738776205036476
23 | -19.948506369362587
24 | -27.55444787223385
25 | -37.23438770951067
26 | -37.733517092711224
27 | -32.5038462679593
28 | -18.166434930792995
29 | -22.337401513851354
30 | -18.149282478051504
31 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_2/initial_random_configs.Z_29_1.narrow_odd.xyz:
--------------------------------------------------------------------------------
1 | 7
2 | Lattice="3.73095 0.0 0.0 0.0 5.26274 0.0 0.0 -2.6313699999999987 4.557666533512518" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=88 RSS_min_vol_per_atom=6.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=2 buildcell_symmetry="143 P3 P 3 @ 0.01" _ConfigSet_loc=" / 0" pbc="T T T"
3 | Cu 1.86378003 -1.29307995 3.34060549
4 | Cu 1.86378003 0.38486049 1.76752415
5 | Cu 1.86378003 0.90821867 4.00720388
6 | Cu 0.34537516 2.26265533 2.93485513
7 | Cu 0.34537516 1.58975324 0.49208943
8 | Cu 0.34537516 4.04170143 1.13072197
9 | Cu 2.28984482 2.63137026 1.51922203
10 | 7
11 | Lattice="4.02514 0.0 0.0 -0.185687002701142 4.88066900916543 0.0 -0.185687002701142 -1.615894086469291 4.605411629631106" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=89 RSS_min_vol_per_atom=6.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=2 buildcell_symmetry="12 C2/m -C 2y @ 0.01" _ConfigSet_loc=" / 1" pbc="T T T"
12 | Cu 2.79161897 -0.05779112 2.73350477
13 | Cu 0.86214702 0.66629940 3.75493349
14 | Cu 0.86214702 3.32256604 1.87190686
15 | Cu 2.79161897 2.59847553 0.85047814
16 | Cu 3.60018266 2.56782155 3.62226355
17 | Cu 0.05358333 0.69695338 0.98314808
18 | Cu 2.01257000 0.00000000 0.00000000
19 |
--------------------------------------------------------------------------------
/tests/assets/cli_rss/run_iter_2/initial_random_configs.Z_3_1.narrow_odd.xyz:
--------------------------------------------------------------------------------
1 | 7
2 | Lattice="3.51065 0.0 0.0 -1.313604405065434 6.631957486820364 0.0 -0.5232364472739529 -3.313746695876906 6.151911747734115" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=28 RSS_min_vol_per_atom=10.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=2 buildcell_symmetry="1 P1 P 1 @ 0.01" _ConfigSet_loc=" / 0" pbc="T T T"
3 | Li -0.06149200 4.59285730 2.22251592
4 | Li 1.86866089 0.40373157 0.11849935
5 | Li 0.13582887 0.42148218 2.18906613
6 | Li 2.22625734 -0.56357685 4.26336220
7 | Li 1.69432944 2.58915168 1.69442905
8 | Li 3.10064659 -3.11784318 4.22935197
9 | Li 0.47080737 0.83973895 5.76009587
10 | 5
11 | Lattice="4.83253 0.0 0.0 1.6638661291199632 4.537058045172814 0.0 1.6638661291199632 1.1620376986212064 4.385722755971982" Properties=species:S:1:pos:R:3 config_type=buildcell buildcell_config_i=29 RSS_min_vol_per_atom=10.0 buildcell_type=narrow_odd gap_rss_group=ALL gap_rss_iter=2 buildcell_symmetry="146 R3 R 3 @ 0.01" _ConfigSet_loc=" / 1" pbc="T T T"
12 | Li 4.54522347 1.59071657 -0.39142181
13 | Li 1.59221949 4.25731575 1.63790912
14 | Li 3.05840146 0.57431001 3.69580778
15 | Li 4.12773075 2.88279127 2.21844374
16 | Li 2.05419343 1.43464078 1.10402369
17 |
--------------------------------------------------------------------------------
/tests/assets/descriptor_heuristics.json:
--------------------------------------------------------------------------------
1 | {
2 | "desc_Zs": [
3 | [
4 | 6,
5 | 6
6 | ],
7 | [
8 | 6,
9 | 14
10 | ],
11 | [
12 | 14,
13 | 14
14 | ],
15 | 6,
16 | 14,
17 | 6,
18 | 6,
19 | 14
20 | ],
21 | "descs": [
22 | {
23 | "neighbors_Nb": true,
24 | "order": 2,
25 | "cutoff": 30,
26 | "Z": [
27 | 6,
28 | 6
29 | ],
30 | "add_species": false
31 | },
32 | {
33 | "neighbors_Nb": true,
34 | "order": 2,
35 | "cutoff": 45,
36 | "Z": [
37 | 6,
38 | 14
39 | ],
40 | "add_species": false
41 | },
42 | {
43 | "neighbors_Nb": true,
44 | "order": 2,
45 | "cutoff": 60,
46 | "Z": [
47 | 14,
48 | 14
49 | ],
50 | "add_species": false
51 | },
52 | {
53 | "soap": true,
54 | "n_max": 3,
55 | "cutoff": 30,
56 | "atom_sigma": 3.3,
57 | "Z": 6,
58 | "n_species": 2,
59 | "species_Z": [
60 | 6,
61 | 14
62 | ],
63 | "add_species": false
64 | },
65 | {
66 | "soap": true,
67 | "n_max": 3,
68 | "cutoff": 40,
69 | "atom_sigma": 6.7,
70 | "Z": 14,
71 | "n_species": 2,
72 | "species_Z": [
73 | 6,
74 | 14
75 | ],
76 | "add_species": false
77 | },
78 | {
79 | "soap": true,
80 | "n_max": 3,
81 | "cutoff": 3,
82 | "atom_sigma": 0.5,
83 | "Z": 6,
84 | "n_species": 2,
85 | "species_Z": [
86 | 6,
87 | 14
88 | ],
89 | "add_species": false
90 | },
91 | {
92 | "soap": true,
93 | "n_max": 3,
94 | "cutoff": 4,
95 | "atom_sigma": 1,
96 | "Z": 6,
97 | "n_species": 2,
98 | "species_Z": [
99 | 6,
100 | 14
101 | ],
102 | "add_species": false
103 | },
104 | {
105 | "soap": true,
106 | "n_max": 3,
107 | "cutoff": 5,
108 | "atom_sigma": 1.5,
109 | "Z": 14,
110 | "n_species": 2,
111 | "species_Z": [
112 | 6,
113 | 14
114 | ],
115 | "add_species": false
116 | }
117 | ]
118 | }
119 |
120 |
--------------------------------------------------------------------------------
/tests/assets/length_scales.yaml:
--------------------------------------------------------------------------------
1 | 3:
2 | bond_len:
3 | - 3.0
4 | min_bond_len:
5 | - 2.4
6 | vol_per_atom:
7 | - 20.0
8 | source: NB gap-rss
9 | 5:
10 | bond_len:
11 | - 1.7
12 | min_bond_len:
13 | - 1.7
14 | vol_per_atom:
15 | - 8.2
16 | source: NB VASP auto_length_scale
17 | 32:
18 | bond_len:
19 | - 2.5
20 | min_bond_len:
21 | - 2.3
22 | vol_per_atom:
23 | - 23
24 | source: NB gap-rss
25 | 52:
26 | bond_len:
27 | - 3.1
28 | min_bond_len:
29 | - 2.6
30 | vol_per_atom:
31 | - 31
32 | source: NB gap-rss
--------------------------------------------------------------------------------
/tests/assets/mace_fit_parameters.yaml:
--------------------------------------------------------------------------------
1 | valid_fraction: 0.05
2 | config_type_weights: '{"Default":1.0}'
3 | E0s:
4 | 5: -0.0046603
5 | hidden_irreps: '128x0e + 128x1o'
6 | device: cpu
7 | ema: null
8 | max_num_epochs: 10
9 | model: MACE
10 | r_max: 4.0
11 | restart_latest: null
12 | save_cpu: null
13 | batch_size: 4
14 |
--------------------------------------------------------------------------------
/tests/assets/normal_modes/water_dftb_evecs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/tests/assets/normal_modes/water_dftb_evecs.npy
--------------------------------------------------------------------------------
/tests/assets/normal_modes/water_dftb_nms.xyz:
--------------------------------------------------------------------------------
1 | 3
2 | Properties=species:S:1:pos:R:3:REF_normal_mode_displacements_0:R:3:REF_normal_mode_displacements_1:R:3:REF_normal_mode_displacements_2:R:3:REF_normal_mode_displacements_3:R:3:REF_normal_mode_displacements_4:R:3:REF_normal_mode_displacements_5:R:3:REF_normal_mode_displacements_6:R:3:REF_normal_mode_displacements_7:R:3:REF_normal_mode_displacements_8:R:3 REF_normal_mode_frequencies="-0.006403119982148006 -6.89681528400446e-05 -2.860510796156689e-06 3.472030353154315e-05 0.0029448640505536877 0.0029812416162021253 0.17062138489870082 0.4413592261041818 0.4765042821349567" pbc="F F F"
3 | O 0.00000000 -0.00000000 0.11442792 0.00000000 -0.04964776 -0.00000000 -0.23358138 0.00000000 -0.00000000 0.00000000 -0.00000001 -0.23560519 -0.00000000 -0.23533841 0.00000001 0.08912714 -0.00000000 0.00000000 -0.00000000 -0.00000000 0.00000000 0.00000000 0.00000000 -0.06645510 0.00000000 0.00000000 -0.05077227 -0.00000000 0.06822638 0.00000000
4 | H -0.00000000 0.78302105 -0.47462996 0.00000000 0.38258819 0.57454030 -0.25107943 0.00000000 -0.00000000 0.00000000 0.00000751 -0.23559560 -0.00000000 -0.23768178 -0.00311609 -0.65802041 -0.00000000 0.00000000 0.70429521 -0.00000000 -0.00000000 -0.00000000 0.42756113 0.52742353 -0.00000000 -0.55966350 0.40292789 0.00000000 -0.54144750 0.40733952
5 | H 0.00000000 -0.78302105 -0.47462996 -0.00000000 0.38258819 -0.57454030 -0.25107943 0.00000000 -0.00000000 0.00000000 -0.00000753 -0.23559560 -0.00000000 -0.23768178 0.00311612 -0.65802042 -0.00000000 0.00000000 -0.70429521 -0.00000000 0.00000000 0.00000000 -0.42756113 0.52742353 -0.00000000 0.55966350 0.40292789 -0.00000000 -0.54144750 -0.40733952
6 |
--------------------------------------------------------------------------------
/tests/assets/simple_gap_fit_parameters.yml:
--------------------------------------------------------------------------------
1 | # mandatory parameters
2 | default_sigma: [ 0.001, 0.03, 0.0, 0.0 ]
3 |
4 | _gap:
5 | - soap: True
6 | l_max: 6
7 | n_max: 12
8 | cutoff: 3
9 | delta: 1
10 | covariance_type: dot_product
11 | zeta: 4
12 | n_sparse: 20
13 | sparse_method: cur_points
14 | atom_gaussian_width: 0.3
15 | cutoff_transition_width: 0.5
16 | add_species: True
17 |
18 | - soap: True
19 | l_max: 6
20 | n_max: 12
21 | cutoff: 6
22 | delta: 1
23 | covariance_type: dot_product
24 | zeta: 4
25 | n_sparse: 20
26 | sparse_method: cur_points
27 | atom_gaussian_width: 0.6
28 | cutoff_transition_width: 1
29 | add_species: False
30 | n_species: 3
31 | Z: 8
32 | species_Z: [ 8, 1, 6 ]
33 |
34 | # optional parameters
35 | config_type_sigma: "isolated_atom:0.0001:0.0:0.0:0.0"
36 | sparse_separate_file: True
37 | energy_parameter_name: dft_energy
38 | force_parameter_name: dft_forces
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/tests/calculators/test_ase_fileio_caching.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 |
5 | from ase.atoms import Atoms
6 |
7 |
8 | ########################
9 | # test Vasp calculator
10 |
11 | from tests.calculators.test_vasp import pytestmark as vasp_pytestmark
@vasp_pytestmark
def test_vasp_cache_timing(tmp_path, monkeypatch):
    """Time a bare-ASE Vasp evaluation against the wfl-wrapped one on a
    single Si config via the shared cache_timing() helper."""
    from ase.calculators.vasp import Vasp as Vasp_ase
    from wfl.calculators.vasp import Vasp as Vasp_wrap

    atoms = Atoms('Si', positions=[[0, 0, 9]], cell=[2, 2, 2], pbc=[True, True, True])

    ase_kwargs = {'encut': 200, 'pp': os.environ['PYTEST_VASP_POTCAR_DIR']}
    wrapper_kwargs = {'workdir': tmp_path}

    # make sure 'pp' is relative to correct dir (see wfl.calculators.vasp)
    pp_root = "/." if os.environ['PYTEST_VASP_POTCAR_DIR'].startswith('/') else "."
    monkeypatch.setenv("VASP_PP_PATH", pp_root)

    cache_timing(atoms, Vasp_ase, ase_kwargs, Vasp_wrap, wrapper_kwargs, tmp_path, monkeypatch)
26 |
27 | ########################
28 | # test quantum espresso calculator
29 | from tests.calculators.test_qe import espresso_avail, qe_pseudo
@espresso_avail
def test_qe_cache_timing(tmp_path, monkeypatch, qe_pseudo):
    """Time a bare-ASE Quantum Espresso evaluation against the wfl-wrapped
    one on a single Si config via the shared cache_timing() helper."""
    from ase.calculators.espresso import Espresso as Espresso_ASE
    from wfl.calculators.espresso import Espresso as Espresso_wrap

    atoms = Atoms('Si', positions=[[0, 0, 9]], cell=[2, 2, 2], pbc=[True, True, True])

    pseudo = qe_pseudo
    ase_kwargs = dict(
        pseudopotentials=dict(Si=pseudo.name),
        pseudo_dir=pseudo.parent,
        input_data={"SYSTEM": {"ecutwfc": 40, "input_dft": "LDA",}},
        kpts=(2, 3, 4),
        conv_thr=0.0001,
        workdir=tmp_path,
    )

    # no extra wrapper-only kwargs needed here
    cache_timing(atoms, Espresso_ASE, ase_kwargs, Espresso_wrap, {}, tmp_path, monkeypatch)
49 |
50 |
51 | ########################
52 | # generic code used by all calculators
53 |
54 | import time
55 |
56 | from wfl.configset import ConfigSet, OutputSpec
57 | from wfl.calculators import generic
58 |
def cache_timing(config, calc_ase, kwargs_ase, calc_wfl, kwargs_wrapper, rundir, monkeypatch):
    """Run one bare-ASE and one wfl generic.calculate evaluation of `config`
    and assert the wrapper's wall-clock overhead stays below 25%.

    `calc_ase`/`kwargs_ase` build the plain ASE calculator, while
    `calc_wfl`/`kwargs_wrapper` build the wfl wrapper (which also receives
    `kwargs_ase`). Directory changes go through `monkeypatch` so they are
    undone after the test.
    """
    ase_dir = rundir / "run_calc_ase"
    ase_dir.mkdir()

    config.calc = calc_ase(**kwargs_ase)

    # bare-ASE timing, run inside its own scratch directory
    monkeypatch.chdir(ase_dir)
    start = time.time()
    _ = config.get_potential_energy()
    ase_time = time.time() - start

    # wfl-wrapped timing from the run dir
    monkeypatch.chdir(rundir)
    start = time.time()
    _ = generic.calculate(inputs=ConfigSet(config), outputs=OutputSpec(),
                          calculator=calc_wfl(**kwargs_wrapper, **kwargs_ase))
    wfl_time = time.time() - start

    print("ASE", ase_time, "WFL", wfl_time)

    assert wfl_time < ase_time * 1.25
79 |
--------------------------------------------------------------------------------
/tests/calculators/test_basin_hopping_orca.py:
--------------------------------------------------------------------------------
1 | from os import path
2 | from shutil import rmtree
3 |
4 | import numpy as np
5 | from ase import Atoms
6 | from ase.build import molecule
7 | from ase.calculators.calculator import CalculationFailed
8 | from pytest import approx, raises
9 |
10 | from wfl.calculators.orca.basinhopping import BasinHoppingORCA
11 |
12 |
def test_orca_utils(tmp_path):
    """Check HOMO/multiplicity bookkeeping, atom copying and scratch-dir
    creation of BasinHoppingORCA without running any actual calculation."""
    methane = molecule("CH4")
    methyl = molecule("CH3")

    closed_shell = BasinHoppingORCA(scratchdir=tmp_path, rng=np.random.default_rng(1))
    closed_shell.atoms = methane
    open_shell = BasinHoppingORCA(rng=np.random.default_rng(1))
    open_shell.atoms = methyl

    # HOMO indices (alpha, beta): CH4 has 8 valence electrons, CH3 has 7
    assert closed_shell.get_homo() == (4, 4)
    assert open_shell.get_homo() == (4, 3)

    # spin multiplicity: singlet for CH4, doublet for the CH3 radical
    assert closed_shell.get_multiplicity() == 1
    assert open_shell.get_multiplicity() == 2

    # the copied Atoms object must preserve positions and species
    copied: Atoms = closed_shell._copy_atoms()
    assert copied.positions == approx(methane.positions)
    assert copied.get_atomic_numbers() == approx(methane.get_atomic_numbers())

    # temp dirs are created under the requested scratchdir with an "orca_" prefix
    scratch = closed_shell._make_tempdir()
    assert f"{tmp_path}/orca_" in scratch

    if path.isdir(scratch):
        rmtree(scratch)
42 |
43 |
def test_orca_process_results(tmp_path):
    """Exercise BasinHoppingORCA.process_results: input-shape validation,
    the minimum-successful-runs requirement, and the energy/force
    consistency thresholds between basin-hopping runs."""
    # setup
    at_ch4 = molecule("CH4")
    calc = BasinHoppingORCA(scratchdir=tmp_path, forces_tol=0.05, rng=np.random.default_rng(1))
    calc.atoms = at_ch4

    # shape errors, correct are (3, 10) and (3, 10, 5, 3)
    # NOTE: each bad-shape call needs its own `raises` block -- inside a single
    # block the second call is dead code, because the first call raises and
    # exits the block, so the bad forces shape was previously never tested
    with raises(AssertionError):
        _ = calc.process_results(np.zeros(shape=(2, 10)), np.zeros(shape=(3, 10, 5, 3)))
    with raises(AssertionError):
        _ = calc.process_results(np.zeros(shape=(3, 10)), np.zeros(shape=(3, 100, 5, 3)))

    # none succeeding: all-inf energies mean no run converged
    e = np.zeros(shape=(3, 10)) + np.inf
    f = np.zeros(shape=(3, 10, 5, 3))
    with raises(CalculationFailed, match="Not enough runs succeeded.*0.*in wavefunction basin hopping"):
        _ = calc.process_results(e, f)

    # one of three
    e[0, 1] = 0.999
    with raises(CalculationFailed, match="Not enough runs succeeded.*1.*in wavefunction basin hopping"):
        _ = calc.process_results(e, f)

    # 2 of three passes
    e[1, 2] = 1.0001
    _ = calc.process_results(e, f)

    # energy difference between successful runs above tolerance
    e[2, 4] = 0.0
    with raises(CalculationFailed, match="Too high energy difference found: *1.0.*"):
        _ = calc.process_results(e, f)

    # force error not triggered on non-minimum frame's force diff
    e[2, 4] = np.inf
    f[0, -1] = 100
    _ = calc.process_results(e, f)

    # error on forces
    f[1, 2, 0, 0] = 0.051
    with raises(CalculationFailed, match="Too high force difference found: *0.051.*"):
        _ = calc.process_results(e, f)

    # results tested: lowest-energy frame's energy and its force entry returned
    f[0, 1] = 0.025
    e_out, force_out = calc.process_results(e, f)
    assert e_out == 0.999
    assert force_out == approx(0.025)
90 |
--------------------------------------------------------------------------------
/tests/calculators/test_castep.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import numpy as np
4 | from pytest import approx
5 | import pytest
6 | from ase import Atoms
7 | from ase.build import bulk
8 | import wfl.calculators.castep
9 | from wfl.configset import ConfigSet, OutputSpec
10 | from wfl.autoparallelize import AutoparaInfo
11 | from wfl.calculators import generic
12 |
13 | pytestmark = pytest.mark.skipif("CASTEP_COMMAND" not in os.environ, reason="'CASTEP_COMMAND' not given.")
14 |
def test_castep_calculation(tmp_path):
    """Run the wfl CASTEP calculator directly on bcc Al and compare energy,
    forces and stress against stored reference values."""
    cell = bulk("Al", "bcc", a=4.05, cubic=True)

    cell.calc = wfl.calculators.castep.Castep(
        workdir=tmp_path,
        write_checkpoint="none",
        cut_off_energy=400,
        calculate_stress=True,
        kpoints_mp_spacing=0.04,
    )

    assert cell.get_potential_energy() == approx(-217.2263559019, 2e-3)
    assert cell.get_forces() == approx(np.array([[-0., -0., 0.], [ 0., 0., -0.]]), abs=1e-4)
    assert cell.get_stress() == approx(np.array([ 0.06361731, 0.06361731, 0.06361731,-0., 0., 0.]), abs=1e-5)
35 |
36 |
def test_castep_calc_via_generic(tmp_path):
    """Evaluate bcc Al through wfl's generic.calculate, passing CASTEP as a
    (class, args, kwargs) calculator spec tuple."""
    atoms = bulk("Al", "bcc", a=4.05, cubic=True)

    castep_kwargs = {
        'write_checkpoint': "none",
        'cut_off_energy': 400,
        'calculate_stress': True,
        'kpoints_mp_spacing': 0.04,
        'workdir': tmp_path,
    }
    calc_spec = (wfl.calculators.castep.Castep, [], castep_kwargs)

    # run in the main process, without a python subprocess pool
    autopara = AutoparaInfo(num_python_subprocesses=0)

    _ = generic.calculate(
        inputs=ConfigSet([atoms]),
        outputs=OutputSpec(),
        calculator=calc_spec,
        output_prefix='castep_',
        autopara_info=autopara,
    )
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/tests/calculators/test_mopac.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | import pytest
4 | import shutil
5 |
6 | from ase.io import write
7 | from ase.build import molecule
8 | from ase.calculators.mopac import MOPAC
9 |
10 | from wfl.calculators import generic
11 | from wfl.calculators.mopac import MOPAC
12 | from wfl.configset import ConfigSet, OutputSpec
13 |
# Reference results for the rattled CH4 molecule built by the `atoms` fixture
# below, obtained with MOPAC's AM1 method (the MOPAC calculator reports the
# final heat of formation as "energy")
ref_energy = -0.38114618938803013
ref_forces = np.array([[ 0.00598923, -0.00306901, -0.01411094],
                       [ 0.00374263, -0.00137642,  0.01242343],
                       [ 0.00710343, -0.00450089,  0.00556635],
                       [-0.00226356, -0.00497725,  0.00713461],
                       [-0.01457173,  0.01392352, -0.0110134 ]])
# rattled positions matching the reference values above; not referenced in the
# visible tests -- TODO confirm it is still needed
ref_pos = np.array([[-0.14171658, -0.08539037, -0.06833327],
                    [ 0.68255623,  0.39319224,  0.51566672],
                    [-0.64855418, -0.61104146,  0.50794689],
                    [ 0.50823426, -0.74095259, -0.65120197],
                    [-0.7545869,  0.4412089, -0.47989706]])
25 |
@pytest.fixture
def atoms():
    """CH4 molecule with deterministically rattled positions (fixed seed)."""
    mol = molecule("CH4")
    mol.rattle(stdev=0.1, seed=1305)
    return mol
31 |
@pytest.mark.skipif(not shutil.which("mopac") and "ASE_MOPAC_COMMAND" not in os.environ,
                    reason="mopac not in PATH and command not given")
def test_ase_mopac(tmp_path, atoms):
    """test that the regular MOPAC works, since there was a recent bug fix
    Should be returning final heat of formation for "energy", e.g.:
    self.results['energy'] = self.results['final_hof']
    This step was missing at some point which failed with
    PropertyNotImplementedError"""
    # NB: the module-level `from wfl.calculators.mopac import MOPAC` shadows the
    # earlier ase import of the same name, so the plain ASE calculator must be
    # re-imported locally under an alias to actually be the one under test here
    from ase.calculators.mopac import MOPAC as ASE_MOPAC

    os.chdir(tmp_path)

    atoms.calc = ASE_MOPAC(label='tmp', method="AM1", task="GRADIENTS")

    assert np.allclose(atoms.get_potential_energy(), ref_energy)
    assert np.allclose(atoms.get_forces(), ref_forces)
47 |
@pytest.mark.skipif(not shutil.which("mopac") and "ASE_MOPAC_COMMAND" not in os.environ,
                    reason="mopac not in PATH and command not given")
def test_wfl_mopac(tmp_path, atoms):
    """Evaluate the rattled CH4 through wfl's generic.calculate with the
    wrapped MOPAC calculator and compare against the AM1 reference values."""
    input_file = tmp_path / 'mopac_in.xyz'
    write(input_file, atoms)

    # calculator spec tuple: (class, args, kwargs)
    calc_spec = (MOPAC, [], {"workdir": tmp_path, "method": "AM1", "task": "GRADIENTS"})

    evaluated = generic.calculate(
        inputs=ConfigSet(input_file),
        outputs=OutputSpec(),
        calculator=calc_spec,
        output_prefix="mopac_",
    )

    result = next(iter(evaluated))
    assert np.allclose(result.info["mopac_energy"], ref_energy)
    assert np.allclose(result.arrays["mopac_forces"], ref_forces, rtol=1e-4)
67 |
68 |
--------------------------------------------------------------------------------
/tests/calculators/test_wrapped_calculator.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from ase.atoms import Atoms
3 | from wfl.configset import ConfigSet, OutputSpec
4 | from wfl.calculators import generic
5 |
6 | ########################
7 | # test a RuntimeWarning is raised when using the Espresso Calculator directly from ase
8 | from tests.calculators.test_qe import espresso_avail, qe_pseudo
@espresso_avail
def test_wrapped_qe(tmp_path, qe_pseudo):
    """Passing the plain ASE Espresso (instead of the wfl-wrapped one) to
    generic.calculate must raise a RuntimeWarning."""
    from ase.calculators.espresso import Espresso as Espresso_ASE
    from wfl.calculators.espresso import Espresso as Espresso_wrap

    config = Atoms('Si', positions=[[0, 0, 9]], cell=[2, 2, 2], pbc=[True, True, True])

    calc_kwargs = {
        "pseudopotentials": {"Si": qe_pseudo.name},
        "pseudo_dir": qe_pseudo.parent,
        "input_data": {"SYSTEM": {"ecutwfc": 40, "input_dft": "LDA"}},
        "kpts": (2, 3, 4),
        "conv_thr": 0.0001,
        "workdir": tmp_path,
        "tstress": True,
        "tprnfor": True,
    }

    direct_calc = (Espresso_ASE, [], calc_kwargs)
    pytest.warns(RuntimeWarning, generic.calculate,
                 inputs=ConfigSet(config), outputs=OutputSpec(), calculator=direct_calc)
31 |
--------------------------------------------------------------------------------
/tests/cli/test_descriptor.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 |
4 | import click
5 | from click.testing import CliRunner
6 |
7 | import pytest
8 | from pytest import approx
9 |
10 | from ase import Atoms
11 | from ase.io import write, read
12 |
13 | from wfl.cli.cli import cli
14 |
15 | try:
16 | from quippy.descriptors import Descriptor
17 | except ModuleNotFoundError:
18 | pytestmark = pytest.mark.skip(reason='no quippy')
19 |
20 |
def get_ats():
    """Single Si3C periodic cell with reproducible random fractional positions."""
    np.random.seed(5)
    frac_positions = np.random.uniform(size=(4, 3))
    return [Atoms('Si3C1', cell=(2, 2, 2), pbc=[True] * 3, scaled_positions=frac_positions)]
24 |
25 |
def test_descriptor_quippy(tmp_path):
    """Run the `descriptor quippy` CLI on a small config and check the stored
    per-config (averaged) SOAP vector against a direct quippy calculation."""
    ats = get_ats()

    fn_in = tmp_path / "ats_in.xyz"
    fn_out = tmp_path / "ats_out.xyz"

    write(fn_in, ats)

    descriptor_str = 'soap n_max=4 l_max=4 cutoff=5.0 atom_sigma=0.5 average n_species=2 species_Z={6 14}'

    params = [
        '-v',
        'descriptor',
        'quippy',
        f'-i {str(fn_in)}',
        f'-o {str(fn_out)}',
        f'--descriptor "{descriptor_str}" ',
        f'--key soap'
    ]

    runner = CliRunner()
    result = runner.invoke(cli, ' '.join(params))
    # bug fix: the CLI result was never checked, so a failing command only
    # surfaced as a confusing downstream read error (cf. test_error.py)
    assert result.exit_code == 0, result.output

    # reference descriptor computed directly with quippy
    target = Descriptor(
        descriptor_str).calc(ats[0])['data'][0]
    assert target.shape[0] == 181

    ats_out = read(fn_out, ":")

    # check that shape matches
    for at in ats_out:
        assert 'soap' in at.info and at.info['soap'].shape == (181,)

    # check one manually
    assert target == approx(list(ats_out)[0].info['soap'])
61 |
--------------------------------------------------------------------------------
/tests/cli/test_error.py:
--------------------------------------------------------------------------------
1 | import os
2 | import warnings
3 | from click.testing import CliRunner
4 | from wfl.cli.cli import cli
5 |
6 |
def test_error_table(tmp_path):
    """just makes sure code runs without error"""
    assets_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../assets')
    ats_filename = os.path.join(assets_dir, 'configs_for_error_test.xyz')
    fig_name = tmp_path / "error_scatter.png"
    warnings.warn(f"error plots in {fig_name}")

    # assemble the CLI invocation piece by piece
    command = " ".join([
        "-v error",
        f"--inputs {ats_filename}",
        "--calc-property-prefix mace_",
        "--ref-property-prefix dft_",
        "--config-properties energy/atom",
        "--config-properties energy",
        "--atom-properties forces/comp/Z",
        "--atom-properties forces/comp",
        "--category-keys mol_or_rad",
        f"--fig-name {fig_name}",
    ]) + " "

    print(command)

    runner = CliRunner()
    result = runner.invoke(cli, command)
    assert result.exit_code == 0
32 |
--------------------------------------------------------------------------------
/tests/cli/test_generate.py:
--------------------------------------------------------------------------------
1 | from click.testing import CliRunner
2 | from ase.io import read
3 | from wfl.cli.cli import cli
4 |
def test_generate_smiles(tmp_path):
    """Generate propane from a SMILES string via the CLI and check the output
    file contents and the extra info entry."""
    fn_out = tmp_path / "ats_out.xyz"

    params = [
        '-v',
        'generate',
        'smiles',
        f'-o {str(fn_out)}',
        '-ei "config_type=rdkit" '
        'CCC'
    ]

    print(" ".join(params))

    runner = CliRunner()
    result = runner.invoke(cli, " ".join(params))
    # bug fix: result was never checked; a CLI failure only showed up later as
    # a missing output file
    assert result.exit_code == 0, result.output

    ats = read(fn_out)
    # propane (C3H8) has 11 atoms
    assert len(ats) == 11
    assert ats.info["config_type"] == "rdkit"
26 |
27 |
28 |
def test_generate_buildcell():
    # TODO: placeholder -- the `generate buildcell` CLI command is not tested yet
    pass
31 |
32 |
--------------------------------------------------------------------------------
/tests/cli/test_select.py:
--------------------------------------------------------------------------------
1 | from click.testing import CliRunner
2 | from ase.io import read, write
3 | from wfl.cli.cli import cli
4 | from wfl.generate import smiles
5 |
6 |
def test_select_by_lambda(tmp_path):
    """Use the `select lambda` CLI to keep only structures with an even number
    of carbon atoms, and check the expected count comes back."""
    fn_out = tmp_path / "ats_out.xyz"
    fn_in = tmp_path / "ats_in.xyz"
    # alkane chains with 1-10 carbon atoms
    ats_in = [smiles.smi_to_atoms("C"*i) for i in range(1, 11)]
    write(fn_in, ats_in)

    params = [
        '-v',
        'select',
        'lambda',
        f'--outputs {fn_out}',
        f'--inputs {fn_in}',
        # select only structures with even number of carbon atoms
        f'--exec-code "len([sym for sym in list(atoms.symbols) if sym == \'C\']) % 2 == 0"',
    ]

    print(' '.join(params))
    runner = CliRunner()
    result = runner.invoke(cli, " ".join(params))
    # bug fix: result was never checked; assert the CLI actually succeeded
    assert result.exit_code == 0, result.output

    # C2, C4, C6, C8, C10 -- 5 of the 10 chains have an even carbon count
    ats_out = read(fn_out, ":")
    assert len(ats_out) == 5
31 |
--------------------------------------------------------------------------------
/tests/conda-build/meta.yaml:
--------------------------------------------------------------------------------
package:
  name: wfl
  version: "0.0.1"

source:
  path: ../..

# bug fix: `build:` appeared twice at top level (the second occurrence of a
# duplicate key silently clobbers the first in most YAML loaders), and the
# build/run dependency lists plus `noarch` were nested under the wrong keys.
# Restructured to the conda-build meta.yaml schema.
build:
  number: 0
  noarch: python
  script: "{{ PYTHON }} -m pip install . -vv"

# NOTE(review): conda-build does not read channels from meta.yaml (they belong
# in .condarc or on the command line); kept here for reference only.
channels:
  - defaults
  - conda-forge

requirements:
  build:
    - python
    - setuptools
  run:
    - python
    - numpy=1.19
    - ase
    - scipy
    - click

test:
  requires:
    - pytest
    - pytest-cov
  imports:
    - wfl

about:
  home: https://github.com/gabor1/workflow
39 |
--------------------------------------------------------------------------------
/tests/local_scripts/gelzinyte.workstation.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Local test-runner script: set up calculator environment variables, then run
# the Aims and ORCA calculator test files.

# ASE calculator configuration used by the tests
export ASE_CONFIG_PATH=${HOME}/.config/ase/pytest.config.ini

# MOPAC isn't updated with the profile
export ASE_MOPAC_COMMAND="${HOME}/programs/mopac-22.1.1-linux/bin/mopac PREFIX.mop 2> /dev/null"

# JANPA install location -- presumably needed by the ORCA tests; confirm
export JANPA_HOME_DIR="${HOME}/programs/janpa"

# Aims
pytest -v -s -rxXs ../calculators/test_aims.py
pytest -v -s -rxXs ../calculators/test_orca.py
14 |
--------------------------------------------------------------------------------
/tests/mpi/README.md:
--------------------------------------------------------------------------------
1 | # MPI tests
2 |
3 | These tests are intended to be run with MPI, and test the parallelism of the
4 | code in this way.
5 |
How to run them:
7 | - install: `mpi4py` and `mpipool` linked to your MPI installation
8 | - run the tests with:
9 | ```bash
10 | export WFL_MPIPOOL=2
11 |
12 | # no coverage
13 | mpirun -n 2 pytest --with-mpi -k mpi
14 |
15 | # with coverage
16 | mpirun -n 2 pytest --cov=wfl --cov-report term --cov-config=tests/.coveragerc --cov-report term-missing --cov-report term:skip-covered --with-mpi --cov-append -k mpi
17 | ```
18 | - there will be duplicate output in the terminal window
19 |
20 | The latter appends the coverage to any done before, which should be helpful.
21 | The GitHub CI is set up such that this is happening automatically on the chosen version of
22 | python where we are doing coverage as well.
23 |
24 | ## Gotchas:
25 | - these tests need the pytest mpi decorator
26 | - any test in this directory will be run with the MPI (due to `-k mpi` in pytest)
- any test in this directory *not* having the mpi decorator on top of it will be run without MPI
28 | as well
29 | - not all tests elsewhere are compatible with MPI
- any test elsewhere that has MPI in its name will be run with MPI as well, so be careful
--------------------------------------------------------------------------------
/tests/mpi/test_autopara_thread_vs_mpi.py:
--------------------------------------------------------------------------------
1 | # for this test to work WFL_MPIPOOL must be in os.environ, because
2 | # wfl.mpipool_support.init() requires it to actually do something
3 |
4 | import os
5 | from pathlib import Path
6 |
7 | import numpy as np
8 | import pytest
9 | from ase import Atoms
10 | from ase.build import molecule
11 | from ase.calculators.lj import LennardJones
12 |
13 | import wfl.autoparallelize.mpipool_support
14 | from wfl.calculators import generic
15 | from wfl.configset import ConfigSet, OutputSpec
16 |
17 |
def get_atoms():
    """1000 copies of CH4, each rattled using a single seeded RNG stream."""
    configs = [molecule("CH4").copy() for _ in range(1000)]
    rng = np.random.RandomState(5)
    for config in configs:
        config.rattle(0.1, rng=rng)
    return configs
24 |
25 |
@pytest.mark.skipif('WFL_MPIPOOL' not in os.environ,
                    reason="only if WFL_MPIPOOL is in env")
@pytest.mark.mpi(minsize=2)
def test_run(tmp_path):
    """Run generic.calculate serially and then with the MPI pool enabled, and
    check both return ConfigSets of Atoms with identical per-config results.

    Only rank 0 drives the test; other ranks return immediately (they are
    serving the MPI pool, per wfl.autoparallelize.mpipool_support).
    """
    from mpi4py import MPI

    if MPI.COMM_WORLD.rank > 0:
        return

    ## assert MPI.COMM_WORLD.size > 1

    # on one thread, run a serial reference calc
    os.environ['WFL_NUM_PYTHON_SUBPROCESSES'] = '0'

    mol_in = get_atoms()

    serial_mol_out = generic.calculate(mol_in, OutputSpec(tmp_path / "run_serial.xyz"),
                                       LennardJones(),
                                       properties=["energy", "forces"], output_prefix="_auto_")
    # check that serial output is correct type of object
    assert isinstance(serial_mol_out, ConfigSet)
    for at in serial_mol_out:
        assert isinstance(at, Atoms)

    # re-enable mpi pool based parallelism (although actual value is ignored if > 0)
    os.environ['WFL_NUM_PYTHON_SUBPROCESSES'] = str(MPI.COMM_WORLD.size)

    # fresh identical inputs for the parallel run
    mol_in = get_atoms()

    mpi_mol_out = generic.calculate(mol_in, OutputSpec(tmp_path / "run_mpi.xyz"), LennardJones(),
                                    properties=["energy", "forces"], output_prefix="_auto_")
    # check that MPI parallel output is correct type of object
    assert isinstance(mpi_mol_out, ConfigSet)
    for at in mpi_mol_out:
        assert isinstance(at, Atoms)

    # check that serial and MPI parallel outputs agree
    for at_t, at_m in zip(serial_mol_out, mpi_mol_out):
        # print(at_t == at_m, at_t.info['LennardJones_energy'], at_m.info['LennardJones_energy'])
        assert at_t == at_m
66 |
67 |
if __name__ == '__main__':
    import conftest
    conftest.do_init_mpipool()

    rundir = Path('.')
    test_run(rundir)
    # best-effort cleanup of the files written by the run above
    for leftover in ('run_serial.xyz', 'run_mpi.xyz'):
        try:
            os.unlink(rundir / leftover)
        except FileNotFoundError:
            pass
82 |
--------------------------------------------------------------------------------
/tests/prep_test_cli_rss.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Prepare the environment for the CLI gap-rss test and launch it in the
# background with stdout/stderr captured to files.

module load vasp

export GAP_RSS_TEST_SETUP=${PWD}/setup_rss_test

# rm -r $GAP_RSS_TEST_SETUP
# bug fix: quote $GAP_RSS_TEST_SETUP in all tests/commands so paths containing
# spaces or glob characters do not word-split (ShellCheck SC2086)
if [ -e "$GAP_RSS_TEST_SETUP" ]; then
    if [ ! -d "$GAP_RSS_TEST_SETUP" ]; then
        echo "GAP_RSS_TEST_SETUP=$GAP_RSS_TEST_SETUP but is not a directory" 1>&2
        exit 1
    fi
    echo "WARNING: run dir $GAP_RSS_TEST_SETUP exists, trying to resume from current point." 1>&2
    echo "Type enter to continue or ^C to abort if you need to delete it" 1>&2
    read dummy
else
    mkdir -p "$GAP_RSS_TEST_SETUP"
fi

# VASP executables and pseudopotential path
export VASP_COMMAND=vasp.serial
export VASP_COMMAND_GAMMA=vasp.gamma_serial
export VASP_PP_PATH=${VASP_PATH}/pot/rev_54/PBE
# AIRSS buildcell binary used for random structure generation
export GRIF_BUILDCELL_CMD=${HOME}/src/work/AIRSS/airss-0.9.1/src/buildcell/src/buildcell

nohup pytest -s tests/test_cli_rss.py 1> prep_test_cli_rss.stdout 2> prep_test_cli_rss.stderr &
26 |
--------------------------------------------------------------------------------
/tests/test_autoparallelize.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import time
3 |
4 | import numpy as np
5 |
6 | import ase.io
7 | from ase.atoms import Atoms
8 | from ase.calculators.emt import EMT
9 |
10 | from wfl.configset import ConfigSet, OutputSpec
11 | from wfl.generate import buildcell
12 | from wfl.calculators import generic
13 | from wfl.autoparallelize import AutoparaInfo
14 |
15 |
def test_empty_iterator(tmp_path):
    """buildcell over an empty input range must yield an empty output."""
    out = buildcell.buildcell(range(0), OutputSpec(tmp_path / 'dummy.xyz'),
                              buildcell_cmd='dummy', buildcell_input='dummy')
    assert sum(1 for _ in out) == 0
20 |
21 |
def test_autopara_info_dict():
    """autopara_info may be passed as a plain dict instead of AutoparaInfo."""
    np.random.seed(5)

    nconf = 60
    ats = [Atoms(['Al'] * nconf, scaled_positions=np.random.uniform(size=(nconf, 3)),
                 cell=[10, 10, 10], pbc=[True] * 3)
           for _ in range(nconf)]

    co = generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_",
                           autopara_info={"num_python_subprocesses": 1})
    assert len(list(co)) == nconf
32 |
33 |
@pytest.mark.perf
def test_pool_speedup():
    """Two python subprocesses must beat one by a clear margin (perf test)."""
    np.random.seed(5)

    nconf = 60
    ats = [Atoms(['Al'] * nconf, scaled_positions=np.random.uniform(size=(nconf, 3)),
                 cell=[10, 10, 10], pbc=[True] * 3)
           for _ in range(nconf)]

    def _timed_run(n_subproc):
        # one calculate pass, returning its wall-clock duration
        start = time.time()
        generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_",
                          autopara_info=AutoparaInfo(num_python_subprocesses=n_subproc))
        return time.time() - start

    dt_1 = _timed_run(1)
    dt_2 = _timed_run(2)

    print("time ratio", dt_2 / dt_1)
    assert dt_2 < dt_1 * (2/3)
53 |
def test_outputspec_overwrite(tmp_path):
    """An OutputSpec over a pre-existing file reports all_written(), which makes
    generic.calculate skip the operation, leaving the stale (non-xyz) file
    contents in place."""
    with open(tmp_path / "ats.xyz", "w") as fout:
        fout.write("BOB")

    # renamed from `os` to avoid shadowing the os module name
    outspec = OutputSpec("ats.xyz", file_root=tmp_path)
    assert outspec.all_written()

    ats = []
    nconf = 60
    for _ in range(nconf):
        ats.append(Atoms(['Al'] * nconf, scaled_positions=np.random.uniform(size=(nconf, 3)), cell=[10, 10, 10], pbc=[True] * 3))

    # bug fix: pass the OutputSpec that points at the pre-existing file -- the
    # original passed a fresh OutputSpec(), which never touched ats.xyz and so
    # did not exercise the skip-on-existing-output logic at all
    co = generic.calculate(ConfigSet(ats), outspec, EMT(), output_prefix="_auto_", autopara_info=AutoparaInfo(num_python_subprocesses=1))

    # should skip ops, incorrectly, and ats.xyz doesn't actually contain atoms
    with pytest.raises(ase.io.extxyz.XYZError):
        _ = ase.io.read(tmp_path / "ats.xyz", ":")
71 |
--------------------------------------------------------------------------------
/tests/test_batch_gap_fit.py:
--------------------------------------------------------------------------------
1 | import os, shutil, sys
2 |
3 | import numpy as np
4 | import pytest
5 |
@pytest.mark.skipif(not shutil.which("gap_fit"), reason="gap_fit not in PATH") # skips it if gap_fit not in path
def test_batch_gap_fit():
    """Run one iteration of the examples/iterative_gap_fit batch GAP fit and
    check the expected output files appear, then clean them up.

    NOTE(review): this imports the example script as a module and writes into
    the examples directory itself; it also removes a file named 'T' from the
    current working directory -- presumably a side effect of the example run,
    confirm.
    """
    # make the example directory importable
    example_dir = os.path.join(
        os.path.dirname(__file__), '../', 'examples', 'iterative_gap_fit'
    )
    sys.path.append(example_dir)
    import batch_gap_fit

    # a single iteration keeps the test cheap
    batch_gap_fit.main(max_count=1, verbose=True)

    # outputs are written into the example directory
    assert os.path.exists(os.path.join(example_dir, 'GAP'))
    assert os.path.exists(os.path.join(example_dir, 'MD'))
    assert os.path.exists(os.path.join(example_dir, 'errors.json'))
    assert os.path.exists(os.path.join(example_dir, 'GAP/GAP_1.xml'))

    # clean up everything the run produced
    shutil.rmtree(os.path.join(example_dir, 'MD'))
    shutil.rmtree(os.path.join(example_dir, 'GAP'))
    os.remove(os.path.join(example_dir, 'errors.json'))
    os.remove('T')

if __name__ == '__main__':
    test_batch_gap_fit()
28 |
--------------------------------------------------------------------------------
/tests/test_doc_examples.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import json
4 | import pytest
5 | from expyre import config
6 |
7 | from .calculators.test_aims import aims_prerequisites
8 |
9 |
10 | def _get_coding_blocks(nb_file):
11 | """Parse ```nb_file``` for coding blocks and return as list of strings."""
12 | with open(nb_file, 'r') as fo:
13 | nb = json.load(fo)
14 | return [''.join(cell['source']) for cell in nb['cells'] if cell['cell_type'] == 'code']
15 |
16 |
@pytest.mark.parametrize(
    ('nb_file', 'idx_execute', 'needs_expyre'),
    (
        pytest.param('examples.buildcell.ipynb', 'all', False, id='buildcell',
                     marks=pytest.mark.skipif(not shutil.which("buildcell"), reason="buildcell not in PATH")),
        pytest.param('examples.dimers.ipynb', 'all', False, id='dimer structures'),
        pytest.param('examples.select_fps.ipynb', 'all', False, id='select fps'),
        pytest.param('examples.fhiaims_calculator.ipynb', 'all', False, id='fhiaims_calculator',
                     marks=aims_prerequisites),
        pytest.param("examples.daisy_chain_mlip_fitting.ipynb", "all", True, id="daisy_chain_mlip_fitting")
    )
)
def test_example(tmp_path, nb_file, idx_execute, monkeypatch, needs_expyre, expyre_systems):
    """Execute the code cells of a documentation notebook, failing with a
    contextual source snippet if any cell raises."""
    if needs_expyre and "github" not in expyre_systems:
        pytest.skip(reason=f'Notebook {nb_file} requires ExPyRe, but system "github" is not in config.json or'
                    f'"EXPYRE_PYTEST_SYSTEMS" environment variable.')
    print("running test_example", nb_file)
    basepath = os.path.join(f'{os.path.dirname(__file__)}/../docs/source')
    coding_blocks = _get_coding_blocks(f'{basepath}/{nb_file}')
    code = '\n'.join([cb_i for idx_i, cb_i in enumerate(coding_blocks) if idx_execute == 'all' or idx_i in idx_execute])
    # bug fix: was `assert code is not ''`, an identity comparison against a
    # string literal (fragile, interning-dependent) -- test for emptiness
    assert code != ''

    monkeypatch.chdir(tmp_path)
    try:
        exec(code, globals())
    except Exception as exc:
        import traceback, re
        tb_str = traceback.format_exc()
        line_nos = list(re.findall("line ([0-9]+),", tb_str))
        line_no = int(line_nos[-1])
        # show a 10-line window around the failing line, marking it with '*'
        lines = list(enumerate(code.splitlines()))[line_no - 5 : line_no + 5]
        actual_error = "\n".join([f"{li:4d}{'*' if li == line_no else ' '} {l}" for li, l in lines])

        raise RuntimeError(f"Exception raised by test_example {nb_file}, traceback:\n{actual_error}\n") from exc
51 |
--------------------------------------------------------------------------------
/tests/test_flat_histo_to_nearby.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from ase.atoms import Atoms
3 |
4 | from wfl.configset import ConfigSet, OutputSpec
5 | from wfl.select.flat_histogram import biased_select_conf
6 | from wfl.select.selection_space import val_relative_to_nearby_composition_volume_min
7 |
8 |
def c(at):
    """(volume per atom, C fraction, H fraction) coordinates of a config."""
    n = len(at)
    return (at.get_volume() / n, sum(at.numbers == 6) / n, sum(at.numbers == 1) / n)
11 |
12 |
def test_flat_histo_to_nearby(tmp_path):
    """Compute energies relative to the minimum over nearby points in
    (volume/atom, composition) space, then do a biased flat-histogram
    selection and check it is reproducible for the fixed RNG seed."""
    rng = np.random.default_rng(5)

    n_at = 30
    n3 = 0  # H count fixed at zero in the binary Si/C variant used below

    # reference point in (volume/atom, C fraction, H fraction) space
    p_center = (5.5, 0.5, 0.0)

    ats = []
    for at_i in range(1000):
        n1 = rng.integers(n_at + 1)
        ################################################################################
        # n2 = rng.integers((n_at-n1) + 1)
        # if n1+n2 < n_at:
        #     n3 = rng.integers((n_at - n1 - n2) + 1)
        # else:
        #     n3 = 0
        ################################################################################
        n2 = n_at - n1
        ################################################################################
        at = Atoms(f'Si{n1}C{n2}H{n3}', cell=(1, 1, 1), pbc=[True] * 3)
        # scale the unit cell to a random volume of 3-8 per atom
        at.cell *= (len(at) * rng.uniform(3, 8)) ** (1.0 / 3.0)

        ################################################################################
        # E_min = np.linalg.norm(np.array(c(at))-p_center)
        ################################################################################
        # lowest possible energy grows with distance from p_center
        E_min = np.linalg.norm(np.array(c(at)) - p_center)
        ################################################################################

        at.info['energy'] = rng.uniform(E_min, 5.0)
        at.info['config_i'] = at_i
        ats.append(at)

    # energy relative to the minimum among nearby configs in (vol, comp) space
    output_ats = val_relative_to_nearby_composition_volume_min(
        ConfigSet(ats),
        OutputSpec('test_flat_histo_relative.xyz', file_root=tmp_path),
        1.0, 0.25, 'energy', 'E_dist_to_nearby')

    # biased flat-histogram selection of 10 configs at kT=0.05
    selected_ats = biased_select_conf(output_ats,
                                      OutputSpec('test_flat_histo_relative.selected.xyz', file_root=tmp_path),
                                      10, 'E_dist_to_nearby', kT=0.05, rng=rng)

    print("selected_ats", [at.info['config_i'] for at in selected_ats])
    # exact indices depend on the seeded RNG call sequence above -- any change
    # to the RNG usage will change this list
    assert [at.info['config_i'] for at in selected_ats] == [22, 32, 53, 301, 307, 389, 590, 723, 895, 906]
57 |
--------------------------------------------------------------------------------
/tests/test_list_with_nested_configset_info.py:
--------------------------------------------------------------------------------
1 | from wfl.configset import ConfigSet, OutputSpec
2 | from wfl.calculators.generic import calculate
3 | from wfl.generate.md import md
4 |
5 | import ase.io
6 | from ase.atoms import Atoms
7 | from ase.calculators.emt import EMT
8 |
9 | import pytest
10 |
11 | pytestmark = pytest.mark.remote
12 |
def test_list_with_nested_configset_info(tmp_path, expyre_systems, remoteinfo_env):
    """Run the nested-ConfigSet-info check on each configured expyre system;
    names starting with '_' are skipped."""
    for sys_name in expyre_systems:
        if not sys_name.startswith('_'):
            do_test_list_with_nested_configset_info(tmp_path, sys_name, remoteinfo_env)
19 |
def do_test_list_with_nested_configset_info(tmp_path, sys_name, remoteinfo_env):
    """Run md() three times on configs carrying pre-set `_ConfigSet_loc` info
    entries: inputs from a file, from a ConfigSet of in-memory Atoms, and from
    a plain list (regression test for issue #344, linked below)."""
    # remote-execution settings for this expyre system
    ri = {'sys_name': sys_name, 'job_name': 'pytest_'+sys_name,
          'resources': {'max_time': '1h', 'num_nodes': 1},
          'num_inputs_per_queued_job': 1, 'check_interval': 10}

    remoteinfo_env(ri)

    print('RemoteInfo', ri)

    cs = [Atoms('Al', cell=[3.0] * 3, pbc=[True] * 3) for _ in range(20)]

    # plant nested _ConfigSet_loc info entries on each input config
    for at_i, at in enumerate(cs):
        at.info["_ConfigSet_loc"] = f" / {at_i} / 0 / 10000"

    ase.io.write(tmp_path / "tt.extxyz", cs)

    # case 1: inputs read from a file
    configs = ConfigSet("tt.extxyz", file_root=tmp_path)

    os = OutputSpec("t1.extxyz", file_root=tmp_path)
    cc = md(configs, os, (EMT, [], {}), steps=10, dt=1.0, autopara_info={'remote_info': ri})

    # keep only the final frame of each MD trajectory
    traj_final_configs = []
    for traj_grp in cc.groups():
        traj_final_configs.append([atoms for atoms in traj_grp][-1])

    # ConfigSet should work
    print("trying t2")
    configs = ConfigSet(traj_final_configs)

    os = OutputSpec("t2.extxyz", file_root=tmp_path)
    _ = md(configs, os, (EMT, [], {}), steps=10, dt=1.0, autopara_info={'remote_info': ri})

    # list originally failed, as in https://github.com/libAtoms/workflow/issues/344
    print("trying t3")
    configs = traj_final_configs

    os = OutputSpec("t3.extxyz", file_root=tmp_path)
    _ = md(configs, os, (EMT, [], {}), steps=10, dt=1.0, autopara_info={'remote_info': ri})
58 |
--------------------------------------------------------------------------------
/tests/test_map.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from ase.atoms import Atoms
4 |
5 | from wfl.configset import ConfigSet, OutputSpec
6 | from wfl.map import map as wfl_map
7 |
def displ(at):
    """Return a copy of `at` with every atom shifted by +0.1 along x."""
    shifted = at.copy()
    shifted.positions[:, 0] += 0.1
    return shifted
12 |
13 |
def displ_args(at, dx=None, dy=None):
    """Return a copy of `at` with positions shifted by dx and/or dy when given."""
    shifted = at.copy()
    for shift in (dx, dy):
        if shift is not None:
            shifted.positions += shift
    return shifted
22 |
def test_map(tmp_path):
    """wfl.map with a single-argument function shifts all x coordinates."""
    n_configs = 20
    inputs = ConfigSet([Atoms('H2', positions=[[0, 0, 0], [2, 0, 0]], cell=[5] * 3, pbc=[True] * 3)
                        for _ in range(n_configs)])

    mapped = wfl_map(inputs, OutputSpec(tmp_path / "pert_configs.xyz"), displ)
    assert len(list(mapped)) == n_configs

    expected = np.asarray([at.positions for at in inputs])
    expected[:, :, 0] += 0.1
    assert np.allclose(expected, [at.positions for at in mapped])
33 |
34 |
def test_map_args(tmp_path):
    """wfl.map forwards positional args to the mapped function."""
    n_configs = 20
    inputs = ConfigSet([Atoms('H2', positions=[[0, 0, 0], [2, 0, 0]], cell=[5] * 3, pbc=[True] * 3)
                        for _ in range(n_configs)])

    # one positional argument
    mapped = wfl_map(inputs, OutputSpec(tmp_path / "pert_configs.xyz"), displ_args, args=[[0.1, 0.0, 0.0]])
    assert len(list(mapped)) == n_configs

    expected = np.asarray([at.positions for at in inputs])
    expected[:, :, 0] += 0.1
    assert np.allclose(expected, [at.positions for at in mapped])
46 |
47 |
def test_map_kwargs(tmp_path):
    """wfl.map forwards keyword args to the mapped function."""
    n_configs = 20
    inputs = ConfigSet([Atoms('H2', positions=[[0, 0, 0], [2, 0, 0]], cell=[5] * 3, pbc=[True] * 3)
                        for _ in range(n_configs)])

    # one keyword argument
    mapped = wfl_map(inputs, OutputSpec(tmp_path / "pert_configs.xyz"), displ_args, kwargs={"dx": [0.1, 0.0, 0.0]})
    assert len(list(mapped)) == n_configs

    expected = np.asarray([at.positions for at in inputs])
    expected[:, :, 0] += 0.1
    assert np.allclose(expected, [at.positions for at in mapped])
59 |
def test_map_args_kwargs(tmp_path):
    """wfl.map forwards positional and keyword args together."""
    n_configs = 20
    inputs = ConfigSet([Atoms('H2', positions=[[0, 0, 0], [2, 0, 0]], cell=[5] * 3, pbc=[True] * 3)
                        for _ in range(n_configs)])

    # one positional plus one keyword argument, each half of the total shift
    mapped = wfl_map(inputs, OutputSpec(tmp_path / "pert_configs.xyz"), displ_args,
                     args=[ [0.05, 0.0, 0.0] ], kwargs={"dy": [0.05, 0.0, 0.0]})
    assert len(list(mapped)) == n_configs

    expected = np.asarray([at.positions for at in inputs])
    expected[:, :, 0] += 0.1
    assert np.allclose(expected, [at.positions for at in mapped])
71 |
--------------------------------------------------------------------------------
/tests/test_minimahopping.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 | from pathlib import Path
4 |
5 | import ase.io
6 | from ase.build import bulk
7 | from ase.calculators.emt import EMT
8 |
9 | from wfl.generate import minimahopping
10 | from wfl.configset import ConfigSet, OutputSpec
11 |
12 |
@pytest.fixture
def cu_slab():
    """Cubic fcc Cu cell, slightly rattled, tagged with a config_type."""
    cell = bulk("Cu", "fcc", a=3.8, cubic=True)
    cell.rattle(stdev=0.01, seed=159)
    cell.info['config_type'] = 'cu_slab'
    return cell
22 |
def test_return_md_traj(cu_slab, tmp_path):
    """With save_tmpdir/return_all_traj set, both minima and MD-trajectory
    configs must appear in the output."""
    outputs = minimahopping.minimahopping(ConfigSet(cu_slab), OutputSpec(), EMT(),
                                          fmax=1, totalsteps=5,
                                          save_tmpdir=True, return_all_traj=True,
                                          rng=np.random.default_rng(1), workdir=tmp_path)

    config_types = [at.info["config_type"] for at in outputs]
    assert any("minhop_min" in ct for ct in config_types)
    assert any("minhop_traj" in ct for ct in config_types)
36 |
37 |
def test_mult_files(cu_slab, tmp_path):
    """Two input files map to two output files, with per-file config counts
    preserved (2 in the first, 1 in the second)."""
    ase.io.write(tmp_path / 'f1.xyz', [cu_slab] * 2)
    ase.io.write(tmp_path / 'f2.xyz', cu_slab)
    infiles = [str(tmp_path / 'f1.xyz'), str(tmp_path / 'f2.xyz')]
    outfiles = [f.replace('.xyz', '.out.xyz') for f in infiles]

    minimahopping.minimahopping(ConfigSet(infiles), OutputSpec(outfiles), EMT(),
                                fmax=1, totalsteps=3,
                                rng=np.random.default_rng(1), workdir=tmp_path)

    n1 = len(ase.io.read(tmp_path / outfiles[0], ':'))
    n2 = len(ase.io.read(tmp_path / outfiles[1], ':'))

    assert n1 == n2 * 2
54 |
55 |
def test_relax(cu_slab, tmp_path):
    """minimahopping on two input copies yields between 1 and 2 groups, each
    with at most `totalsteps` minima, all converged below fmax."""
    calc = EMT()

    input_configs = [cu_slab, cu_slab]
    inputs = ConfigSet(input_configs)
    outputs = OutputSpec()

    fmax = 1
    totalsteps = 3
    num_input_configs = len(input_configs)

    # Although its highly unlikely, we can't fully guarantee that the situation
    # where are trajectories fail is excluded. Thus, let's give it some trials to avoid this situation.
    for _ in range(10):
        atoms_opt = minimahopping.minimahopping(inputs, outputs, calc, fmax=fmax, totalsteps=totalsteps,
                                                rng=np.random.default_rng(1), workdir=tmp_path)
        if len(list(atoms_opt)) > 0:
            break
    else:
        # all 10 attempts produced empty output
        raise RuntimeError

    assert 1 <= len(list(atoms_opt)) <= num_input_configs
    for traj in atoms_opt.groups():
        assert 1 <= len(list(traj)) <= totalsteps

    atoms_opt = list(atoms_opt)
    assert all('minhop_min' in at.info['config_type'] for at in atoms_opt)

    for at in atoms_opt:
        force_norms = np.linalg.norm(at.arrays["last_op__minhop_forces"], axis=1)
        assert all(force_norms <= fmax)
90 |
--------------------------------------------------------------------------------
/tests/test_molecules.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from ase.build import molecule
3 |
4 | import pytest
5 |
6 | # tested modules
7 | from wfl.generate import smiles
8 |
9 | # wfl.generate_configs.smiles depends on rdkit.Chem
10 | pytest.importorskip("rdkit.Chem")
11 |
12 |
def test_smi_to_atoms():
    """test for wfl.generate_configs.smiles"""
    atoms = smiles.smi_to_atoms('C')
    assert np.all(atoms.symbols == 'CH4')
20 |
21 |
def test_smiles_run_autopara_wrappable():
    """test for wfl.generate_configs.smiles"""
    input_smiles = ['C', 'C(C)C']
    extra_info = {'config_type': 'testing', 'info_int': 5}

    atoms = smiles._run_autopara_wrappable(input_smiles, extra_info=extra_info)

    for smi, at in zip(input_smiles, atoms):
        assert at.info['smiles'] == smi, f'{at.info["smiles"]} doesn\'t match {smi}'
        for key, val in extra_info.items():
            assert at.info[key] == val, f'info entry {key} ({at.info[key]}) doesn\'t match {val}'

    # a single SMILES string with no extra info should give one molecule
    atoms = smiles._run_autopara_wrappable('C', None)
    assert len(atoms) == 1
    assert atoms[0].info['smiles'] == 'C'
--------------------------------------------------------------------------------
/tests/test_ndim_neigh_list.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from wfl.utils.ndim_neighbor_list import calc_list, calc_list_cells
4 |
5 |
def compare_manual_neighbor_list(positions, nearby_ranges, ii, jj, cartesian_distance=True):
    """compare neighbor list (presumably from linear scaling routine) to manual O(N^2) enumeration"""
    found = set(zip(ii, jj))

    expected = set()
    for i, vi in enumerate(positions):
        for j, vj in enumerate(positions):
            delta = (np.array(vi) - np.array(vj)) / nearby_ranges
            if cartesian_distance:
                q = np.linalg.norm(delta)
            else:
                q = np.max(np.abs(delta))
            if q < 1:
                expected.add((i, j))

    print('lengths', len(found), len(expected))
    for pair in found:
        assert pair in expected
    for pair in expected:
        assert pair in found
25 |
26 |
def test_ndim_neighbor_list():
    """Check both neighbor-list routines against a brute-force reference, and against each other."""
    np.random.seed(5)

    vals = np.random.normal(size=(1000, 2))

    print('fake data w/cartesian_distance=False')
    ii, jj = calc_list(vals, [0.05, 0.05], cartesian_distance=False)
    # compare neighbor list to manual routine
    compare_manual_neighbor_list(vals, [0.05, 0.05], ii, jj, cartesian_distance=False)

    print('fake data w/cartesian_distance default True')
    ii, jj = calc_list(vals, [0.05, 0.05])
    # compare neighbor list to manual routine
    compare_manual_neighbor_list(vals, [0.05, 0.05], ii, jj)

    print('comparing to cell list')
    # compare cell list to default routine
    ii_cells, jj_cells = calc_list_cells(vals, [0.05, 0.05])

    print('avg neighb #', len(ii) / len(vals))
    print('cell avg neighb #', len(ii_cells) / len(vals))

    ii = np.asarray(ii)
    jj = np.asarray(jj)
    ii_cells = np.asarray(ii_cells)
    jj_cells = np.asarray(jj_cells)
    for i in range(len(vals)):
        # BUG FIX: positions of center i must be located separately in each list --
        # the two routines need not return pairs in the same order, so indexing
        # ii_cells/jj_cells with indices found in ii compared unrelated entries
        neighbors = set(jj[np.where(ii == i)[0]])
        neighbors_cells = set(jj_cells[np.where(ii_cells == i)[0]])
        assert neighbors == neighbors_cells
53 |
--------------------------------------------------------------------------------
/tests/test_point_defects.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from ase.atoms import Atoms
3 |
4 | from wfl.configset import ConfigSet, OutputSpec
5 | from wfl.generate.supercells import vacancy, interstitial, antisite
6 |
def test_vacancy_mono():
    prim = Atoms('Po', cell=np.eye(3), pbc=[True]*3)

    ci = ConfigSet([prim.copy() for _ in range(10)])
    co = OutputSpec()
    vacs = vacancy(ci, co, 64, rng=np.random.default_rng(1))

    # a single vacancy removed from each 64-atom supercell
    for at in vacs:
        assert len(at) == 63
15 |
16 |
def test_vacancy_di():
    prim = Atoms('Po', cell=np.eye(3), pbc=[True]*3)

    ci = ConfigSet([prim.copy() for _ in range(10)])
    co = OutputSpec()
    vacs = vacancy(ci, co, 64, n_vac=2, rng=np.random.default_rng(1))

    # two vacancies per 64-atom supercell, both recorded in the info entry
    for at in vacs:
        assert len(at) == 62
        assert len(at.info['vacancy_Z']) == 2
26 |
27 |
def test_vacancy_cluster_di():
    """di-vacancy where the two vacancy sites must lie within the cluster radius"""
    at = Atoms('Po', cell=2.0 * np.eye(3), pbc=[True]*3)

    ci = ConfigSet([at.copy() for _ in range(10)])
    co = OutputSpec()
    vacs = vacancy(ci, co, 64, rng=np.random.default_rng(1), n_vac=2, cluster_r=1.5)

    for at in vacs:
        assert len(at) == 62
        # BUG FIX: was len(at.info['vacancy_Z'] == 2), which asserted on the
        # truthiness of an elementwise comparison, not on the number of vacancies
        assert len(at.info['vacancy_Z']) == 2

        # minimum-image separation of the two vacancy positions must be within
        # cluster_r (1.5) times the NN distance (cell constant 2.0)
        dp = at.info['vacancy_pos'][0] - at.info['vacancy_pos'][1]
        dp_s = dp @ at.cell.reciprocal().T
        dp_s -= np.round(dp_s)
        dp = dp_s @ at.cell
        assert np.linalg.norm(dp) <= 2.0 * 1.5
44 |
45 |
def test_vacancy_not_enough():
    # two cases where clustered multi-vacancy creation cannot succeed; the configs
    # should pass through with their atom count unchanged and no 'vacancy_Z' info key
    at = Atoms('Po', cell=2.0 * np.eye(3), pbc=[True]*3)
    at *= 2

    ci = ConfigSet([at.copy() for _ in range(10)])
    co = OutputSpec()
    vacs = vacancy(ci, co, 8, rng=np.random.default_rng(1), n_vac=8, cluster_r=1.5)

    # not enough total atoms
    for at in vacs:
        assert len(at) == 8
        assert 'vacancy_Z' not in at.info

    # NOTE(review): `at` here is the last config from the loop above (8 atoms, unmodified),
    # not the original template -- presumably equivalent since it was passed through; verify
    ci = ConfigSet([at.copy() for _ in range(10)])
    co = OutputSpec()
    vacs = vacancy(ci, co, 64, rng=np.random.default_rng(1), n_vac=8, cluster_r=1.1)

    # not enough total within cutoff
    for at in vacs:
        assert len(at) == 64
        assert 'vacancy_Z' not in at.info
67 |
68 |
def test_interstitial():
    prim = Atoms('Po', cell=np.eye(3), pbc=[True]*3)

    ci = ConfigSet([prim.copy() for _ in range(10)])
    co = OutputSpec()
    interstitial_configs = interstitial(ci, co, 64, rng=np.random.default_rng(1))

    # one interstitial added to each 64-atom supercell
    for at in interstitial_configs:
        assert len(at) == 65
77 |
78 |
def test_antisite():
    prim = Atoms('PoCd', positions=[[0, 0, 0], [0.5, 0.5, 0.5]], cell=np.eye(3), pbc=[True]*3)

    ci = ConfigSet([prim.copy() for _ in range(10)])
    co = OutputSpec()
    antisite_configs = antisite(ci, co, 64, rng=np.random.default_rng(1))

    # presumably the 64-atom target on a 2-atom cell yields a 3x3x3 supercell
    # (54 atoms); an antisite swap does not change the atom count
    for at in antisite_configs:
        assert len(at) == 54
87 |
--------------------------------------------------------------------------------
/tests/test_select_simple.py:
--------------------------------------------------------------------------------
1 | from ase.atoms import Atoms
2 |
3 | import wfl.select.simple as simple
4 | from wfl.configset import ConfigSet, OutputSpec
5 |
6 |
def test_select_lambda(tmp_path):
    configs = [Atoms('Si', cell=(1, 1, 1), pbc=[True] * 3) for _ in range(40)]
    for idx, at in enumerate(configs):
        at.info['index'] = idx

    ci = ConfigSet(configs)
    co = OutputSpec('test_simple.info_in.xyz', file_root=tmp_path)
    selected_ats = simple.by_bool_func(ci, co, lambda at : at.info['index'] in list(range(10, 20)))

    # exactly the 10 configs with index in [10, 20) must be selected
    selected = list(selected_ats)
    assert len(selected) == 10
    for at in selected:
        assert at.info['index'] in range(10, 20)
19 |
20 |
21 | def _pytest_select(at):
22 | return at.info['index'] in list(range(10, 20))
23 |
24 |
def test_select_real_func(tmp_path):
    configs = [Atoms('Si', cell=(1, 1, 1), pbc=[True] * 3) for _ in range(40)]
    for idx, at in enumerate(configs):
        at.info['index'] = idx

    ci = ConfigSet(configs)
    co = OutputSpec('test_simple.info_in.xyz', file_root=tmp_path)
    selected_ats = simple.by_bool_func(ci, co, _pytest_select)

    # same selection as the lambda test, but with a real (picklable) function
    selected = list(selected_ats)
    assert len(selected) == 10
    for at in selected:
        assert at.info['index'] in range(10, 20)
37 |
38 |
def test_by_index(tmp_path):
    configs = [Atoms('Si', cell=(1, 1, 1), pbc=[True] * 3) for _ in range(40)]
    for idx, at in enumerate(configs):
        at.info['index'] = idx

    ci = ConfigSet(configs)
    co = OutputSpec('test_simple.indices.xyz', file_root=tmp_path)
    indices = [4, 0, 7, 12, 12, 25, 45, 45]
    selected_ats = simple.by_index(ci, co, indices)

    # out-of-range indices are dropped, in-range duplicates each contribute a config
    n_valid = sum(1 for i in indices if 0 <= i < len(configs))
    assert len(list(selected_ats)) == n_valid
    for at in selected_ats:
        assert at.info['index'] in indices
52 |
--------------------------------------------------------------------------------
/tests/test_version_str.py:
--------------------------------------------------------------------------------
1 | from wfl.utils.version import get_wfl_version
2 |
3 |
def test_version_str():
    version = get_wfl_version()
    print('version str', version)
    # any non-empty string counts as a valid version
    assert len(version) > 0
8 |
--------------------------------------------------------------------------------
/wfl/__init__.py:
--------------------------------------------------------------------------------
1 | from .__version__ import __version__
2 |
--------------------------------------------------------------------------------
/wfl/__version__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.3.3"
2 |
--------------------------------------------------------------------------------
/wfl/autoparallelize/__init__.py:
--------------------------------------------------------------------------------
1 | from .base import autoparallelize, autoparallelize_docstring
2 | assert autoparallelize
3 | assert autoparallelize_docstring
4 |
5 | from .autoparainfo import AutoparaInfo
6 | from .remoteinfo import RemoteInfo
7 |
8 | __all__ = ["autoparallelize", "autoparallelize_docstring", "AutoparaInfo", "RemoteInfo"]
9 |
--------------------------------------------------------------------------------
/wfl/autoparallelize/autoparainfo.py:
--------------------------------------------------------------------------------
class AutoparaInfo:
    """Object containing information required to autoparallelize a function

    Parameters
    ----------

    num_inputs_per_python_subprocess: int, default 1
        number of inputs passed to each call of the low level operation.

    iterable_arg: int / str, default 0
        index (int positional, str keyword) of input iterable argument in low level function

    skip_failed: bool, default True
        skip output for failed low level function calls

    initializer: (func, func_kwargs), default (None, [])
        initializer to be called when each python subprocess is started

    num_python_subprocesses: int, default None
        number of python subprocesses

    remote_info: RemoteInfo, default None
        information for running remotely

    remote_label: str, default None
        string label to match to keys in remote_info dict
    """

    # class-wide fallback defaults for every recognized keyword
    _kwargs = {"num_inputs_per_python_subprocess": 1,
               "iterable_arg": 0,
               "skip_failed": True,
               "initializer": (None, []),
               "num_python_subprocesses": None,
               "remote_info": None,
               "remote_label": None}


    def __init__(self, **kwargs):
        # receive all args as kwargs so that we can detect if they were passed in explicitly
        for k, v in kwargs.items():
            if k not in AutoparaInfo._kwargs:
                # BUG FIX: message previously read "Invalid keyword argument in <k>"
                raise ValueError(f"Invalid keyword argument {k}")
            setattr(self, k, v)


    def update_defaults(self, default_kwargs):
        """Starting from object passed by user at runtime, update all unspecified fields to the defaults
        specified when wrapping function, otherwise to class-wide defaults

        Parameters
        ----------
        default_kwargs: dict
            per-wrapped-function defaults; may only contain keys known to
            AutoparaInfo._kwargs, otherwise ValueError is raised
        """
        # copy defaults dict so the caller's dict is not modified by the pops below
        default_kwargs = default_kwargs.copy()

        # set missing values from default_kwargs, falling back to class-predefined defaults;
        # remove each as it is used from default_kwargs, so any remaining can be detected as invalid
        for k in AutoparaInfo._kwargs:
            if not hasattr(self, k):
                # user hasn't set this attribute, set it from wrapper or class-wide default
                setattr(self, k, default_kwargs.pop(k, AutoparaInfo._kwargs[k]))
            else:
                # user has set this, still remove from default_kwargs to facilitate
                # the invalid-key check below
                default_kwargs.pop(k, None)

        if default_kwargs:
            raise ValueError(f"default_kwargs contained unknown keywords {list(default_kwargs.keys())}")


    def __str__(self):
        # ROBUSTNESS FIX: use a getattr default so printing works even before
        # update_defaults() has filled in unset attributes (previously AttributeError)
        return " ".join(f"{k} {getattr(self, k, '<unset>')}" for k in AutoparaInfo._kwargs)
71 |
--------------------------------------------------------------------------------
/wfl/autoparallelize/mpipool_support.py:
--------------------------------------------------------------------------------
1 | import atexit
2 | import os
3 | import sys
4 | import traceback
5 |
6 | wfl_mpipool = None
7 |
8 |
def shutdown_and_barrier(pool, comm):
    """Shut down the mpipool executor, then block on an MPI barrier so ranks leave together."""
    print(comm.rank, 'MPIPOOL: shutdown_and_barrier calling barrier')
    pool.shutdown()
    comm.Barrier()
13 |
14 |
def init(verbose=1):
    """Startup code when mpipool will be used. Only master MPI task exits
    this function, others wait to do mpipool stuff. Initialises
    `mpipool_support.wfl_mpipool` with created `MPIExecutor` object.

    Parameters
    ----------
    verbose: int, default 1
        * >= 1 : minimal start/end messages
        * > 1 : print stack trace at startup, to tell where it was called from
    """
    global wfl_mpipool

    if wfl_mpipool is None and 'WFL_MPIPOOL' in os.environ:
        # wfl_mpipool is not defined, must be first time

        # check version
        import mpipool
        from packaging.version import parse
        assert parse(mpipool.__version__) >= parse('1.0.0')

        # BUG FIX: these imports were previously inside `if verbose > 0:`, but
        # MPI and MPIExecutor are used unconditionally below, so verbose=0
        # raised NameError -- import them unconditionally
        from mpi4py import MPI
        from mpipool import MPIExecutor

        if verbose > 0:
            print(MPI.COMM_WORLD.rank, "MPIPOOL: wfl creating pool")
        if verbose > 1:
            for item in traceback.format_stack():
                print(MPI.COMM_WORLD.rank, "MPIPOOL: STACK", item, end='')
        wfl_mpipool = MPIExecutor()

        if wfl_mpipool.is_worker():
            # worker ranks never return to the caller: they serve the pool, then
            # synchronize at the barrier and exit (except under pytest)
            print(MPI.COMM_WORLD.rank, 'MPIPOOL: worker calling barrier')
            MPI.COMM_WORLD.Barrier()
            if "pytest" in sys.modules:
                return
            else:
                exit()

        # master rank: ensure pool shutdown + final barrier happen at interpreter exit
        atexit.register(shutdown_and_barrier, wfl_mpipool, MPI.COMM_WORLD)

        if verbose > 0:
            print(MPI.COMM_WORLD.rank, "MPIPOOL: wfl continuing after creating pool")
58 |
--------------------------------------------------------------------------------
/wfl/calculators/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/calculators/__init__.py
--------------------------------------------------------------------------------
/wfl/calculators/committee.py:
--------------------------------------------------------------------------------
1 | """
2 | Committee of Models
3 |
4 | Calculated properties with a list of models and saves them into info/arrays.
5 | Further operations (eg. mean, variance, etc.) with these are up to the user.
6 | """
7 | from ase import Atoms
8 |
9 | from wfl.utils.save_calc_results import per_atom_properties, per_config_properties
10 | from wfl.utils.misc import atoms_to_list
11 | from wfl.utils.parallel import construct_calculator_picklesafe
12 |
13 | __default_properties = ['energy', 'forces', 'stress']
14 |
15 |
def calculate_committee(atoms, calculator_list, properties=None, output_prefix="committee_{}_"):
    """Calculate energy and forces with a committee of models

    Notes
    -----
    Supports formatter string in the output_prefix arg, but currently only with a single field literally "{}".

    Parameters
    ----------
    atoms : Atoms / list(Atoms)
        input atomic configs
    calculator_list : list(Calculator) / list[(initializer, args, kwargs)]
        list of calculators to use as a committee of models on the configs
    properties: list[str], default ['energy', 'forces', 'stress']
        properties to calculate
    output_prefix : str, default "committee_{}_"
        prefix for results coming from the committee of models.
        If includes "{}" then will use it as a format string, otherwise puts a number at the end of prefix for the
        index of the model in the committee of models

    Returns
    -------
    atoms : Atoms / list(Atoms)
        same config(s), with committee results stored in info/arrays

    """
    if properties is None:
        properties = __default_properties

    atoms_list = atoms_to_list(atoms)

    # create the general key formatter, used as key_formatter.format(i_model, prop)
    if "{}" in output_prefix:
        if output_prefix.count("{}") != 1:
            raise ValueError("Prefix with formatting is incorrect, cannot have more than one of `{}` in it")
        # prefix already contains the model-index slot; append one more slot for the property name
        key_formatter = f"{output_prefix}{{}}"
    else:
        # no explicit slot: model index then property name are appended directly after the prefix
        key_formatter = f"{output_prefix}{{}}{{}}"

    # create calculator instances (inputs may be picklesafe (initializer, args, kwargs) tuples)
    calculator_list_to_use = [construct_calculator_picklesafe(calc) for calc in calculator_list]

    for at in atoms_list:
        # calculate forces and energy with all models from the committee
        for i_model, pot in enumerate(calculator_list_to_use):
            for prop in properties:
                if prop in per_atom_properties:
                    at.arrays[key_formatter.format(i_model, prop)] = pot.get_property(name=prop, atoms=at)
                elif prop in per_config_properties:
                    at.info[key_formatter.format(i_model, prop)] = pot.get_property(name=prop, atoms=at)
                else:
                    raise ValueError("Don't know where to put property: {}".format(prop))

    # preserve input type: single Atoms in -> single Atoms out
    if isinstance(atoms, Atoms):
        return atoms_list[0]
    else:
        return atoms_list
72 |
--------------------------------------------------------------------------------
/wfl/calculators/mopac.py:
--------------------------------------------------------------------------------
1 | """
2 | MOPAC interface
3 | """
4 | from ase.calculators.mopac import MOPAC as ASE_MOPAC
5 | from ase.calculators.calculator import all_changes
6 |
7 | from .wfl_fileio_calculator import WFLFileIOCalculator
8 |
9 | _default_keep_files = ["*.out"]
10 | _default_properties = ["energy", "forces"]
11 |
class MOPAC(WFLFileIOCalculator, ASE_MOPAC):
    """Extension of ASE's MOPAC calculator so that it can be used by wfl.calculators.generic (mainly each
    calculation is run in a separate directory)
    """

    # autoparallelization default: one config per python subprocess
    wfl_generic_default_autopara_info = {"num_inputs_per_python_subprocess": 1}

    def __init__(self, keep_files="default", rundir_prefix="run_MOPAC_",
                 workdir=None, scratchdir=None,
                 calculator_exec=None, **kwargs):
        # NOTE(review): calculator_exec is accepted but neither used nor forwarded
        # to the superclass constructors here -- verify whether it should set the
        # MOPAC command as in other wfl calculators

        # WFLFileIOCalculator is a mixin, will call remaining superclass constructors for us
        super().__init__(keep_files=keep_files, rundir_prefix=rundir_prefix,
                         workdir=workdir, scratchdir=scratchdir, **kwargs)

    def calculate(self, atoms=None, properties=_default_properties, system_changes=all_changes):
        """Do the calculation. Handles the working directories in addition to regular
        ASE calculation operations (writing input, executing, reading_results)
        Reimplements & extends GenericFileIOCalculator.calculate() for the development version of ASE
        or FileIOCalculator.calculate() for the v3.22.1"""

        # from WFLFileIOCalculator
        self.setup_rundir()

        try:
            super().calculate(atoms=atoms, properties=properties, system_changes=system_changes)
            calculation_succeeded = True
            # clear any failure marker left by an earlier attempt on this config
            if 'FAILED_MOPAC' in atoms.info:
                del atoms.info['FAILED_MOPAC']
        except Exception as exc:
            # mark the config as failed so downstream processing can detect/skip it
            atoms.info['FAILED_MOPAC'] = True
            calculation_succeeded = False
            raise exc
        finally:
            # from WFLFileIOCalculator: clean run dir per keep_files policy
            self.clean_rundir(_default_keep_files, calculation_succeeded)
48 |
--------------------------------------------------------------------------------
/wfl/calculators/utils.py:
--------------------------------------------------------------------------------
1 | from wfl.utils.file_utils import clean_dir
2 |
def clean_rundir(rundir, keep_files, default_keep_files, calculation_succeeded):
    """clean up a run directory from a file-based calculator

    Parameters
    ----------
    rundir: str
        path to run dir
    keep_files: 'default' / list(str) / '*' / bool / None
        files to keep, None or False for nothing, '*' or True for all
    default_keep_files: list(str)
        files to keep if keep_files == 'default' or calculation_succeeded is False
    calculation_succeeded: bool
    """
    # decide what to retain, then clean once
    if not calculation_succeeded:
        # on failure always keep the defaults plus anything explicitly requested
        keep_spec = set(default_keep_files) | set(keep_files if keep_files else [])
    elif keep_files == 'default':
        keep_spec = default_keep_files
    elif not keep_files:
        keep_spec = False
    else:
        keep_spec = keep_files

    clean_dir(rundir, keep_spec, force=False)
24 |
25 |
26 |
--------------------------------------------------------------------------------
/wfl/cli/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/cli/__init__.py
--------------------------------------------------------------------------------
/wfl/cli/cli.py:
--------------------------------------------------------------------------------
1 | import warnings
2 |
3 | import click
4 |
5 |
@click.group("wfl")
@click.option("--verbose", "-v", is_flag=True)
@click.pass_context
def cli(ctx, verbose):
    """Workflow command line interface.
    """
    # stash verbosity in the click context object so subcommands can read it
    ctx.ensure_object(dict)
    ctx.obj["verbose"] = verbose

    # ignore calculator writing warnings
    if not verbose:
        warnings.filterwarnings("ignore", category=UserWarning, module="ase.io.extxyz")
18 |
19 |
# register the top-level "error" command (imported here, after the group exists)
from wfl.cli.commands.error import show_error
cli.add_command(show_error)
22 |
@cli.group("generate")
@click.pass_context
def subcli_generate(ctx):
    # container group for config-generation subcommands
    pass

# register config-generation subcommands
from wfl.cli.commands.generate import smiles, buildcell
subcli_generate.add_command(smiles)
subcli_generate.add_command(buildcell)
31 |
32 |
@cli.group("select")
@click.pass_context
def subcli_select(ctx):
    # container group for config-selection subcommands
    pass

# register config-selection subcommands
from wfl.cli.commands.select import cur, by_lambda
subcli_select.add_command(cur)
subcli_select.add_command(by_lambda)
41 |
42 |
@cli.group("eval")
@click.pass_context
def subcli_eval(ctx):
    # container group for potential-evaluation subcommands
    pass

# register evaluation subcommands
from wfl.cli.commands.eval import gap, ace, mace, atomization_energy
subcli_eval.add_command(gap)
subcli_eval.add_command(ace)
subcli_eval.add_command(mace)
subcli_eval.add_command(atomization_energy)
53 |
54 |
@cli.group("descriptor")
@click.pass_context
def subcli_descriptor(ctx):
    # container group for descriptor-calculation subcommands
    pass

# register descriptor subcommands
from wfl.cli.commands.descriptor import quippy
subcli_descriptor.add_command(quippy)
62 |
--------------------------------------------------------------------------------
/wfl/cli/cli_options.py:
--------------------------------------------------------------------------------
1 | import click
2 | import json
3 | from wfl.configset import ConfigSet, OutputSpec
4 | from ase.io.extxyz import key_val_str_to_dict
5 |
6 |
def _to_ConfigSet(ctx, param, value):
    # click callback: wrap the input filename(s) in a ConfigSet
    return ConfigSet(value)
9 |
def inputs(f):
    """Add a standard option for the ConfigSet inputs"""
    option = click.option("--inputs", "-i", required=True, multiple=True, callback=_to_ConfigSet,
                          help='Input xyz file(s) to create ConfigSet from')
    return option(f)
15 |
16 |
def _to_OutputSpec(ctx, param, value):
    # click callback: wrap the output filename in an OutputSpec
    return OutputSpec(value)
19 |
20 |
def outputs(f):
    """Add a standard option for the OutputSpec outputs"""
    # TYPO FIX in user-visible help text: "Ouput" -> "Output"
    f = click.option('--outputs', '-o', required=True, callback=_to_OutputSpec,
                     help="Output file to create OutputSpec from.")(f)
    return f
26 |
def _parse_extra_info(ctx, param, value):
    """Click callback: parse a key=val string into a dict (empty dict if option not given)"""
    if value is None:
        return {}
    return key_val_str_to_dict(value)
32 |
def extra_info(f):
    """Parse key=val string and return a dictionary"""
    option = click.option("--extra-info", "-ei", callback=_parse_extra_info, help="Extra key=val pairs to add to Atoms.info")
    return option(f)
37 |
def param_fname(f):
    """Add a standard option for the potential parameter file path"""
    option = click.option("--param-fname", "-pf", type=click.Path(), help="Path to the potential parameter file")
    return option(f)
41 |
42 | def _parse_kwargs(ctx, param, value):
43 | if value is not None:
44 | return json.loads(value)
45 | else:
46 | return {}
47 |
def kwargs(f):
    """Add a standard option for extra Calculator constructor kwargs (JSON)"""
    option = click.option("--kwargs", "-kw", callback=_parse_kwargs, help="JSON text with additional Calculator constructor kwargs")
    return option(f)
51 |
def prop_prefix(f):
    """Add a standard option for the evaluated-property prefix"""
    option = click.option("--prop-prefix", "-pp", help='Prefix to be pre-pended to all evaluate properties. '
                          'Defaults to "gap_"/"ace_"/"mace_" as appropriate')
    return option(f)
56 |
57 |
def num_inputs_per_python_subprocess(f):
    """Add a standard option for the autoparallelization batch size"""
    option = click.option('--num-inputs-per-python-subprocess', default=10,
                          show_default=True, type=click.INT,
                          help="Number of configs to be evaluated per each calculator initialization")
    return option(f)
63 |
--------------------------------------------------------------------------------
/wfl/cli/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/cli/commands/__init__.py
--------------------------------------------------------------------------------
/wfl/cli/commands/descriptor.py:
--------------------------------------------------------------------------------
1 | import click
2 | from wfl.cli import cli_options as opt
3 | import wfl.descriptors.quippy
4 |
@click.command("quippy")
@click.pass_context
@click.option("--local", is_flag=True, help="calculate a local (per-atom) descriptor")
@click.option("--force", is_flag=True, help="overwrite existing info or arrays item if present")
@click.option("--descriptor", type=click.STRING, required=True, help="quippy.descriptors.Descriptor arg string")
@click.option("--key", required=True, type=click.STRING, help="Atoms.info (global) or Atoms.arrays (local) for descriptor vector")
@opt.inputs
@opt.outputs
def quippy(ctx, inputs, outputs, descriptor, key, local, force):
    # thin CLI wrapper; all real work happens in calculate_descriptor
    calculate_descriptor(inputs, outputs, descriptor, key, local, force)
15 |
def calculate_descriptor(inputs, outputs, descriptor, key, local, force):
    """Compute a quippy descriptor for each input config, storing the vector under `key`."""
    calc_kwargs = {
        "inputs": inputs,
        "outputs": outputs,
        "descs": descriptor,
        "key": key,
        "per_atom": local,
        "force": force,
    }
    wfl.descriptors.quippy.calculate(**calc_kwargs)
25 |
--------------------------------------------------------------------------------
/wfl/cli/commands/generate.py:
--------------------------------------------------------------------------------
1 | import click
2 | from wfl.cli import cli_options as opt
3 |
4 |
@click.command("smiles")
@click.argument("smiles-string", nargs=-1)
@click.pass_context
@opt.outputs
@opt.extra_info
def smiles(ctx, smiles_string, extra_info, outputs):
    """Generate xyz from SMILES strings"""

    # deferred import: rdkit dependency is only needed when this command runs
    import wfl.generate.smiles

    verbose = ctx.obj["verbose"]

    if verbose:
        print(f'smiles: {smiles_string}')
        print(f'info: {extra_info}')
        print(outputs)

    wfl.generate.smiles.smiles(smiles_string, outputs=outputs, extra_info=extra_info)
23 |
24 |
@click.command("buildcell")
@click.option('--buildcell-input', required=True, help='buildcell input file')
@click.option("--buildcell-exec", required=True, help="buildcell executable including path")
@click.option("--n-configs", "-N", type=click.INT, required=True, help="number of configs to generate")
@click.option("--perturbation", type=click.FLOAT, default=0.0, help="magnitude of random perturbation to atomic positions")
@click.pass_context
@opt.outputs
@opt.extra_info
def buildcell(ctx, outputs, buildcell_input, buildcell_exec, n_configs,
              extra_info, perturbation):
    """Repeatedly runs buildcell (from Pickard's AIRSS distribution) to generate random configs with
    specified species, volumes, distances, symmetries, etc.

    Minimal contents of --buildcell-input file:

    \b
    #TARGVOL=- (NOTE: volume is volume_per_formula_unit/number_of_species)
    #SPECIES=%NUM=[,%NUM=- | { , , ... } ]
    #SYMMOPS=- (NOTE: optional)
    #SLACK=0.25
    #OVERLAP=0.1
    #COMPACT
    #MINSEP= -= [
    - ... ]
    ##EXTRA_INFO = (NOTE: optional)
    """
    # deferred import: only needed when this command actually runs
    import wfl.generate.buildcell

    # the whole template file is read in and passed as text
    with open(buildcell_input) as bc_f:
        buildcell_input_txt = bc_f.read()

    # inputs=range(n_configs) drives one buildcell run per requested config
    wfl.generate.buildcell.buildcell(
        outputs=outputs,
        inputs=range(n_configs),
        buildcell_cmd=buildcell_exec,
        buildcell_input=buildcell_input_txt,
        extra_info=extra_info,
        perturbation=perturbation,
        verbose=ctx.obj["verbose"])
65 |
--------------------------------------------------------------------------------
/wfl/cli/commands/select.py:
--------------------------------------------------------------------------------
1 | import click
2 | import numpy as np
3 | from wfl.cli import cli_options as opt
4 | import wfl.descriptors.quippy
5 | import wfl.select.by_descriptor
6 | from wfl.select.simple import by_bool_func
7 |
8 |
@click.command("cur")
@click.option("--n-configs", "-N", type=click.INT, required=True,
              help="number of configs to select")
@click.option("--keep_descriptor", is_flag=True, help="keep the descriptor value in the final config file")
@click.option("--kernel_exponent", type=click.FLOAT, help="exponent of dot-product for kernel")
@click.option("--deterministic", is_flag=True, help="use deterministic (not stochastic) CUR selection")
@click.option("--key", required=True, type=click.STRING, help="Atoms.info (global) or Atoms.arrays (local) for descriptor vector")
@click.option("--stochastic-seed", type=click.INT, help="seed for `np.random.default_rng()` in stochastic CUR.")
@click.pass_context
@opt.inputs
@opt.outputs
def cur(ctx, inputs, outputs, n_configs, key, keep_descriptor,
        kernel_exponent, deterministic, stochastic_seed):
    """Select structures by CUR"""

    # stochastic CUR unless --deterministic was given; seed may be None (fresh entropy)
    wfl.select.by_descriptor.CUR_conf_global(
        inputs=inputs,
        outputs=outputs,
        num=n_configs,
        at_descs_info_key=key, kernel_exp=kernel_exponent, stochastic=not deterministic,
        keep_descriptor_info=keep_descriptor,
        rng=np.random.default_rng(stochastic_seed))
31 |
32 |
@click.command("lambda")
@click.option("--exec-code", "-e", required=True,
              help='String to be evaluated by the lambda function. Will be substituted into `eval(\"lambda atoms: \" + exec_code)`.')
@click.pass_context
@opt.inputs
@opt.outputs
def by_lambda(ctx, inputs, outputs, exec_code):
    """selects atoms based on a lambda function"""

    # SECURITY NOTE: eval() executes arbitrary code supplied on the command line;
    # acceptable for a local CLI tool, but never feed it untrusted input
    at_filter_fun = eval("lambda atoms: " + exec_code)

    by_bool_func(
        inputs=inputs,
        outputs=outputs,
        at_filter=at_filter_fun)
48 |
--------------------------------------------------------------------------------
/wfl/descriptors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/descriptors/__init__.py
--------------------------------------------------------------------------------
/wfl/fit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/fit/__init__.py
--------------------------------------------------------------------------------
/wfl/fit/gap/__init__.py:
--------------------------------------------------------------------------------
1 | from . import multistage, simple, glue_2b
2 |
3 | __all__ = ["multistage", "simple", "glue_2b"]
4 |
--------------------------------------------------------------------------------
/wfl/fit/modify_database/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/fit/modify_database/__init__.py
--------------------------------------------------------------------------------
/wfl/fit/modify_database/scale_orig.py:
--------------------------------------------------------------------------------
def modify(configs, default_factor=1.0, property_factors=None, config_type_exclude=None):
    """Scale the per-config fitting sigma values, always relative to their original values.

    On first call the original '<prop>_sigma' value is saved under '_orig_<prop>_sigma';
    subsequent calls restore it first, so factors never compound.

    Parameters
    ----------
    configs: iterable(Atoms)
        configs whose info dicts are modified in place
    default_factor: float, default 1.0
        factor applied to any sigma without an entry in property_factors
    property_factors: dict, default None (empty)
        per-property ('energy', 'force', 'virial', 'hessian') scaling factors
    config_type_exclude: list(str), default None (empty)
        config_type values to leave untouched
    """
    # BUG FIX: mutable default arguments ({} / []) replaced with None sentinels
    if property_factors is None:
        property_factors = {}
    if config_type_exclude is None:
        config_type_exclude = []

    property_keys = ['energy', 'force', 'virial', 'hessian']

    for at in configs:
        if at.info.get('config_type', None) in config_type_exclude:
            continue
        for p in property_keys:
            psig = p + '_sigma'
            if psig in at.info:
                # save or restore original value
                if '_orig_' + psig in at.info:
                    # restore
                    at.info[psig] = at.info['_orig_' + psig]
                else:
                    # save
                    at.info['_orig_' + psig] = at.info[psig]

                # apply scale (to original value)
                at.info[psig] *= property_factors.get(p, default_factor)
20 |
--------------------------------------------------------------------------------
/wfl/generate/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/generate/__init__.py
--------------------------------------------------------------------------------
/wfl/generate/md/abort.py:
--------------------------------------------------------------------------------
1 | """Communly used and/or examples of classes that can be used to abort an MD sampling
2 | run under specified conditions
3 | """
4 |
5 | import numpy as np
6 |
7 | from .abort_base import AbortSimBase
8 | from ase.neighborlist import neighbor_list
9 |
10 |
class AbortOnCollision(AbortSimBase):
    """Abort an MD run if a collision (two atoms closer than some distance) happens
    for a number of steps in a row

    Parameters
    ----------
    collision_radius: float
        distance for atoms to be considered a collision

    n_failed_steps: int, default 3
        how many steps in a row any atom pairs have to be too close
    """

    def __init__(self, collision_radius, n_failed_steps=3):
        super().__init__(n_failed_steps)
        self.collision_radius = collision_radius


    def atoms_ok(self, atoms):
        # neighbor_list('i', ...) returns one entry per pair within collision_radius;
        # any entry at all means at least one pair is too close
        i = neighbor_list('i', atoms, self.collision_radius)

        if len(i) > 0:
            return False
        else:
            return True
36 |
37 |
class AbortOnLowEnergy(AbortSimBase):
    """Abort an MD run if the energy drops by too much

    Parameters
    ----------
    delta_E_per_atom: float
        drop in energy per atom to trigger abort
    """

    def __init__(self, delta_E_per_atom):
        # a single bad step is enough to abort
        super().__init__(1)
        # store as a negative threshold regardless of the sign passed in
        self.delta_E_per_atom = -np.abs(delta_E_per_atom)
        self.initial_E_per_atom = None

    def atoms_ok(self, atoms):
        current = atoms.get_potential_energy() / len(atoms)
        if self.initial_E_per_atom is None:
            # first step establishes the reference energy
            self.initial_E_per_atom = current
            return True
        # ok as long as the drop has not exceeded the threshold
        return (current - self.initial_E_per_atom) >= self.delta_E_per_atom
60 |
--------------------------------------------------------------------------------
/wfl/generate/md/abort_base.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | import numpy as np
3 |
4 |
class AbortSimBase(ABC):
    """Base class used for checking and aborting MD simulation of `wfl.generate.md.sample()`.
    See `stop` method docstring for its default behavior.
    """

    def __init__(self, n_failed_steps=1):
        # one atoms_ok() result per checked step
        self.ok_history = []
        self.n_failed_steps = n_failed_steps

    @abstractmethod
    def atoms_ok(self, at):
        """Method returning a boolean indicating whether this trajectory step is acceptable.
        All derived classes must implement this method.

        Parameters
        ----------

        at: Atoms
            atomic configuration

        Returns
        -------
        is_ok: bool containing status
        """
        ...

    def stop(self, at):
        """Returns a boolean indicating whether `wfl.generate.md.sample()` should stop
        the simulation. Defaults to aborting if `n_failed_steps` in a row `atoms_ok()`
        are evaluated to False. Derived classes may overwrite this."""
        self.ok_history.append(self.atoms_ok(at))
        # require a full window of history before aborting; previously a
        # shorter-than-window history could trigger a stop after fewer than
        # n_failed_steps failures (e.g. 1 failure with n_failed_steps=3)
        if len(self.ok_history) < self.n_failed_steps:
            return False
        return not any(self.ok_history[-self.n_failed_steps:])
38 |
--------------------------------------------------------------------------------
/wfl/generate/smiles.py:
--------------------------------------------------------------------------------
1 | import io
2 |
3 | from ase.io import read
4 | try:
5 | from rdkit import Chem
6 | from rdkit.Chem import AllChem
7 | except ModuleNotFoundError:
8 | Chem = None
9 |
10 | from wfl.autoparallelize import autoparallelize, autoparallelize_docstring
11 |
12 |
def smi_to_atoms(smi, useBasicKnowledge=True, useExpTorsionAnglePrefs=True, randomSeed=-1):
    """Convert a SMILES string to a 3D ASE Atoms object via RDKit embedding."""
    if Chem is None:
        raise RuntimeError("rdkit must be installed for SMILES support")

    # build molecule with explicit hydrogens and embed 3D coordinates
    mol = Chem.AddHs(Chem.MolFromSmiles(smi))
    AllChem.EmbedMolecule(mol, useBasicKnowledge=useBasicKnowledge,
                          useExpTorsionAnglePrefs=useExpTorsionAnglePrefs,
                          randomSeed=randomSeed)

    # splice an extxyz Properties line into the RDKit xyz block's empty
    # comment line so ase can parse species and positions
    insert = 'Properties=species:S:1:pos:R:3'
    parts = Chem.rdmolfiles.MolToXYZBlock(mol).split(sep='\n\n')
    xyz_buffer = io.StringIO(f'{parts[0]}\n{insert}\n{parts[1]}')

    return read(xyz_buffer, format='xyz')
32 |
33 |
34 |
def _run_autopara_wrappable(smiles, useBasicKnowledge=True, useExpTorsionAnglePrefs=True, extra_info=None,
                            randomSeed=-1):
    """Creates atomic configurations by repeatedly running smi_to_atoms, I/O with OutputSpec.

    Parameters
    ----------
    smiles: str/list(str)
        smiles string to generate structure from
    useBasicKnowledge: bool, default True
        impose basic knowledge such as flat aromatic rings
    useExpTorsionAnglePrefs: bool, default True
        impose experimental torsion angle preferences
    extra_info: dict, default {}
        extra fields to place into created atoms info dict
    randomSeed: int, default -1
        RDKit EmbedMolecule random seed for reproducibility

    Returns
    -------
    list(Atoms) generated from SMILES

    """
    extra_info = {} if extra_info is None else extra_info
    all_smiles = [smiles] if isinstance(smiles, str) else smiles

    atoms_list = []
    for smi in all_smiles:
        at = smi_to_atoms(smi=smi, useBasicKnowledge=useBasicKnowledge,
                          useExpTorsionAnglePrefs=useExpTorsionAnglePrefs,
                          randomSeed=randomSeed)
        # record provenance plus any caller-supplied metadata
        at.info['smiles'] = smi
        at.info.update(extra_info)
        atoms_list.append(at)

    return atoms_list
74 |
75 |
def smiles(*args, **kwargs):
    # guard here as well as in smi_to_atoms so the error surfaces before
    # autoparallelize dispatches any work
    if Chem is None:
        raise RuntimeError("rdkit must be installed for SMILES support")
    return autoparallelize(_run_autopara_wrappable, *args, **kwargs)
# docstring is generated from _run_autopara_wrappable's, with autoparallelize boilerplate added
autoparallelize_docstring(smiles, _run_autopara_wrappable, "SMILES string")
81 |
--------------------------------------------------------------------------------
/wfl/generate/utils.py:
--------------------------------------------------------------------------------
1 | from ase.atoms import Atoms
2 |
3 |
def save_config_type(at, action, config_type):
    """save a config type in one or more Atoms objects

    parameters:
    -----------
    at: Atoms:
        objects to store config type in
    action: "append" | "overwrite" | False
        action to perform on additional config type string
    config_type: str
        string to overwrite/append atoms.info["config_type"]
    """
    # falsy action means "do nothing"
    if not action:
        return

    if action == 'append' and 'config_type' in at.info:
        at.info['config_type'] = at.info['config_type'] + ':' + config_type
    elif action in ('append', 'overwrite'):
        # overwrite, or append with no pre-existing value
        at.info['config_type'] = config_type
    else:
        raise ValueError(f"action {action} not 'append' or 'overwrite'")
26 |
--------------------------------------------------------------------------------
/wfl/map.py:
--------------------------------------------------------------------------------
1 | from wfl.autoparallelize import autoparallelize, autoparallelize_docstring
2 |
3 | def _map_autopara_wrappable(atoms, map_func, args=[], kwargs={}):
4 | """apply an arbitrary function to a set of atomic configurations
5 |
6 | Parameters
7 | ----------
8 | atoms: list(Atoms)
9 | input configurations
10 | map_func: function(Atoms, *args, **kwargs)
11 | function to apply
12 | args: list
13 | positional arguments to function
14 | kwargs: dict
15 | keyword arguments to function
16 | """
17 | outputs = []
18 | for at in atoms:
19 | outputs.append(map_func(at, *args, **kwargs))
20 |
21 | return outputs
22 |
def map(*args, **kwargs):
    # NOTE: intentionally shadows the builtin map when imported unqualified;
    # kept for API compatibility with the rest of wfl
    return autoparallelize(_map_autopara_wrappable, *args, **kwargs)
# docstring is generated from _map_autopara_wrappable's, with autoparallelize boilerplate added
autoparallelize_docstring(map, _map_autopara_wrappable, "Atoms")
26 |
--------------------------------------------------------------------------------
/wfl/select/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/select/__init__.py
--------------------------------------------------------------------------------
/wfl/select/convex_hull.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from wfl.select.selection_space import composition_space_Zs, composition_space_coord
4 | from wfl.utils.convex_hull import find_hull
5 |
6 |
def select(inputs, outputs, info_field, Zs=None, verbose=False):
    """Select configurations that lie on the (volume, composition, info_field) convex hull.

    Parameters
    ----------
    inputs: ConfigSet
        input configurations
    outputs: OutputSpec
        where selected configurations are stored
    info_field: str
        at.info key used as the final (energy-like) hull coordinate; configs
        without it are ignored
    Zs: list(int), default None
        atomic numbers spanning composition space; computed from inputs if None
    verbose: bool, default False
        print each hull simplex

    Returns
    -------
    ConfigSet of the selected configurations
    """
    if outputs.all_written():
        # BUG FIX: was a plain string, so '{__name__}' was printed literally
        sys.stderr.write(f'Returning from {__name__} since output is done\n')
        return outputs.to_ConfigSet()

    if Zs is None:
        Zs = composition_space_Zs(inputs)

    # hull coordinates for configs that have info_field, plus a map from
    # config index to its row in positions
    positions = []
    avail_inds = {}
    for at_i, at in enumerate(inputs):
        if info_field in at.info:
            positions.append(composition_space_coord(at, ['_V', '_x', info_field], Zs))
            avail_inds[at_i] = len(positions) - 1

    # convert to set for faster checking (O(1)?) of "in" below
    _, indices, _, simplices = find_hull(positions)
    selected_indices = set(indices)
    if verbose:
        for s_i, s in enumerate(simplices):
            print('arb_polyhedra -name {} -indices {}'.format(s_i, ' '.join([str(i) for i in s])))

    for at_i, at in enumerate(inputs):
        try:
            if avail_inds[at_i] in selected_indices:
                outputs.store(at)
        except KeyError:
            # skip configs that are not in avail_inds
            pass

    outputs.close()
    return outputs.to_ConfigSet()
39 |
--------------------------------------------------------------------------------
/wfl/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/libAtoms/workflow/252f61ce8dd9c0db21e957bd03eb9fd68f51867d/wfl/utils/__init__.py
--------------------------------------------------------------------------------
/wfl/utils/convex_hull.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from scipy.spatial import ConvexHull
4 |
5 |
6 | # manually remove indices with range < 1e-10. Should be able to use qhull QbN:0BN:0,
7 | # where N is index number, but gives seg fault
def find_hull(ps, below=True):
    """find convex hull of set of points

    Parameters
    ----------
    ps: ndarray(n_ps, n_dim)
        array of positions in arbitrary dim space
    below: bool, default True
        only return parts of hull that are "below" all of the other points, i.e. lowest values of n_dim-1 component (assumed to be energy)

    Returns
    -------
    points: ndarray(n_hull_ps, n_dim)
        points on hull
    indices: list(int)
        list of indices of hull points
    equations: ConvexHull.equations
        equations representing convex hull simplices
    simplices: list(list(int))
        list of indices in each hull simplex
    """

    if not isinstance(ps, np.ndarray):
        ps = np.array(ps)
    # check for indices with zero range, not including final (energy)
    non_degenerate_inds = []
    for i in range(len(ps[0]) - 1):
        if np.max(ps[:, i]) - np.min(ps[:, i]) > 1.0e-10:
            non_degenerate_inds.append(i)
    # the final (energy) component is always kept
    non_degenerate_inds += [len(ps[0]) - 1]

    # create points with indices dropped
    if len(non_degenerate_inds) != len(ps[0]):
        ps_clean = np.array(ps)[:, non_degenerate_inds]
    else:
        ps_clean = ps

    # cheap sanity check before handing off to qhull: cannot span the
    # reduced-dimension space with fewer points than dimensions
    if ps_clean.shape[0] < ps_clean.shape[1]:
        raise RuntimeError(
            "Need at least as many points {} as non-degenerate dimensions {} to make a convex hull".format(
                ps_clean.shape[0], ps_clean.shape[1]))

    # find convex hull
    hull = ConvexHull(ps_clean)

    indices = set()
    equations = []
    simplices = []
    for (simplex, equation) in zip(hull.simplices, hull.equations):
        # select equations for faces that define only _below_ part of convex hull
        # (equation[-2] is the outward normal's component along the final/energy
        # axis; negative means the face points "down")
        if not below or equation[-2] < 0:
            indices |= set(simplex)
            equations.append(equation)
            simplices.append(simplex)

    # add indices back into equations
    # (re-embed face equations into the original dimensionality: coefficients
    # for dropped degenerate dimensions are set to zero)
    if len(non_degenerate_inds) != len(ps[0]):
        equations = np.array(equations)
        eqns_out = np.zeros((equations.shape[0], ps.shape[1] + 1))

        eqns_out[:, non_degenerate_inds] = equations[:, :-1]
        eqns_out[:, -1] = equations[:, -1]
    else:
        eqns_out = equations

    return [ps[i] for i in indices], list(indices), eqns_out, simplices
74 |
75 |
def vertical_dist_from_hull(equations, p):
    """Distance of point p from the hull faces measured along the final coordinate.

    For each face equation [v..., offset], solves v.(p - x yhat) + offset = 0
    for x, i.e. x = (offset + v.p) / v[-1], and returns the smallest x
    (None if equations is empty).
    """
    distances = []
    for eq in equations:
        normal = eq[:-1]
        offset = eq[-1]
        distances.append((offset + np.dot(normal, p)) / normal[-1])
    return min(distances) if distances else None
87 |
--------------------------------------------------------------------------------
/wfl/utils/file_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | from glob import glob
4 |
5 |
def clean_dir(directory, keep_files, force=False):
    """Clean a run directory and keep only the specified files

    Parameters
    ----------
    directory : str
        directory to be cleaned
    keep_files: bool or list(filenames) or str
        What to keep in directory when done:
        - list(filenames/glob patterns) : ONLY matching files if they exist
        - True or "*" : everything - does nothing
        - False or None (or anything falsy) : remove the whole directory
    force : bool, default = False
        raise if directory does not exist

    Raises
    ------
    FileNotFoundError
        if directory is missing and force is True
    RuntimeError
        if keep_files has an unsupported type
    """

    # if the dir is non-existent
    if not os.path.isdir(directory):
        if force:
            raise FileNotFoundError(f"No directory to be cleaned {directory}")
        return

    # normalize the keep_files argument
    if keep_files is None:
        keep_files = False
    elif keep_files == "*":
        keep_files = True
    elif isinstance(keep_files, str):
        keep_files = [keep_files]

    if isinstance(keep_files, bool) and keep_files:
        # keep everything
        return

    if not keep_files:
        # anything evaluating to False: remove the whole directory
        shutil.rmtree(directory)
    elif isinstance(keep_files, (list, tuple, set)):
        # expand glob patterns relative to directory, keep only matches
        keep = set()
        for pattern in keep_files:
            keep.update(glob(os.path.join(directory, pattern)))

        for entry in os.listdir(directory):
            abs_fn = os.path.join(directory, entry)
            if abs_fn in keep:
                continue
            if os.path.isfile(abs_fn):
                os.remove(abs_fn)
            else:
                shutil.rmtree(abs_fn)
    else:
        # BUG FIX: message previously referred to nonexistent 'keep_rundir' parameter
        raise RuntimeError('Got unknown type or value for keep_files \'{}\''.format(keep_files))
61 |
--------------------------------------------------------------------------------
/wfl/utils/find_voids.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import scipy.spatial
3 |
4 | import spglib
5 | from ase.atoms import Atom, Atoms
6 |
7 |
def find_voids(at, transl_symprec=1.0e-1, symprec=1.0e-2):
    """Find candidate interstitial (void) sites of a periodic structure.

    Voronoi vertices of the atomic positions, computed on a 3x3x3 supercell so
    periodic images are accounted for, are used as candidate void centers;
    candidates are deduplicated by translation and reduced with spglib symmetry.

    Parameters
    ----------
    at: Atoms
        periodic input structure
    transl_symprec: float, default 1.0e-1
        MIC distance below which two candidate sites are treated as duplicates
    symprec: float, default 1.0e-2
        spglib symmetry precision used for the symmetry reduction

    Returns
    -------
    voids: list(tuple(d, x, y, z))
        one tuple per surviving site: distance to the nearest real atom and the
        site position, sorted largest-d first
    """
    # save original cell
    cell_orig = at.get_cell()
    reciprocal_cell_orig = at.get_reciprocal_cell()

    # create supercell, shifted so the original cell sits in its interior
    at_sc = at * [3, 3, 3]
    at_sc.set_positions(at_sc.get_positions() - np.sum(cell_orig, axis=0))

    # calculate Voronoi tessellation
    vor = scipy.spatial.Voronoi(at_sc.get_positions())

    # list possible centers from Voronoi vertices that are close to original cell
    # (scaled coordinates within 0.5 +/- 0.6, i.e. a margin of 0.1 outside the cell)
    possible_centers_lat = np.matmul(vor.vertices, reciprocal_cell_orig.T)
    possible_indices = np.where(np.all(np.abs(possible_centers_lat - 0.5) <= 0.6, axis=1))[0]

    # create atoms object with supercell of all possible interstitial positions,
    # marked with dummy species 'X'
    vertices = vor.vertices[possible_indices]
    at_w_interst = at.copy()
    at_w_interst.extend(Atoms('X{}'.format(len(possible_indices)), positions=vertices))

    # eliminate duplicates that are equivalent by translation
    dists = at_w_interst.get_all_distances(mic=True)
    del_list = set()
    for i in range(len(at_w_interst) - 1):
        # every later atom closer than transl_symprec to atom i is a duplicate
        dups = i + 1 + np.where(dists[i][i + 1:] < transl_symprec)[0]
        del_list = del_list.union(set(dups))

    del at_w_interst[list(del_list)]

    # handle symmetry: keep one representative per set of symmetry-equivalent sites
    dataset = spglib.get_symmetry_dataset((at_w_interst.cell, at_w_interst.get_scaled_positions(), at_w_interst.numbers), symprec)
    if dataset is not None:
        # slice [len(at):] so only candidate sites (not original atoms) are considered
        equivalent_indices = set(dataset["equivalent_atoms"][len(at):])
    else:
        # no symmetry dataset: fall back to using candidate sites directly
        # NOTE(review): range starts at len(at) + 1, which skips the first
        # candidate site; len(at) looks intended -- confirm
        equivalent_indices = set(range(len(at) + 1, len(at_w_interst)))

    # for each surviving site, distance to the nearest real atom
    # (via a temporary H atom placed at the site)
    pos = at_w_interst.get_positions()
    voids = []
    for i in equivalent_indices:
        at_t = at + Atom('H')
        p = at_t.get_positions()
        p[-1] = pos[i]
        at_t.set_positions(p)
        d = min(at_t.get_distances(len(at_t) - 1, range(len(at_t) - 1), mic=True))
        voids.append((d, pos[i][0], pos[i][1], pos[i][2]))

    return sorted(voids, key=lambda x: x[0], reverse=True)
56 |
--------------------------------------------------------------------------------
/wfl/utils/gap_xml_tools.py:
--------------------------------------------------------------------------------
1 | """ Tools for GAP xml file operations"""
2 |
3 | import xml.etree.ElementTree
4 |
5 | import ase.data
6 | import ase.io
7 | import ase.io.extxyz
8 |
9 |
def extract_e0(filename='GAP.xml', include_zeros=False):
    """Extracts e0 values from a GAP xml file.

    Parameters
    ----------
    filename : path_like
        GAP xml file
    include_zeros : bool
        include zero e0 values, gives a dict complete for all elements

    Returns
    -------
    e0_data : dict
        symbol -> e0_value

    """

    root = xml.etree.ElementTree.parse(filename).getroot()

    e0_data = {}

    # for one descriptor only now
    for e0_element in root.findall('GAP_params/GAP_data/e0'):
        symbol = ase.data.chemical_symbols[int(e0_element.attrib['Z'])]
        value = float(e0_element.attrib['value'])

        # values with |e0| < 1e-5 are treated as zero
        if include_zeros or abs(value) >= 1E-5:
            e0_data[symbol] = value

    return e0_data
43 |
--------------------------------------------------------------------------------
/wfl/utils/julia.py:
--------------------------------------------------------------------------------
1 | import os
2 |
def julia_exec_path():
    """Return the Julia executable to run, honoring the WFL_JULIA_COMMAND env var."""
    try:
        return os.environ["WFL_JULIA_COMMAND"]
    except KeyError:
        return "julia"
5 |
--------------------------------------------------------------------------------
/wfl/utils/logging.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from datetime import datetime
3 |
4 |
def print_log(msg, show_time=True, logfile=sys.stdout):
    """Write msg to logfile, optionally with a timestamp appended, and flush."""
    if show_time:
        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        msg = f"{msg} {timestamp}"
    logfile.write(f"{msg}\n")
    logfile.flush()
10 |
11 |
def increment_active_iter(active_iter):
    """Advance the ACTIVE_ITER file past active_iter, if the file exists and is behind."""
    try:
        with open('ACTIVE_ITER') as fin:
            file_active_iter = int(fin.readline())
    except FileNotFoundError:
        # no file to update
        return

    # only write if we are incrementing past the stored value
    if active_iter + 1 > file_active_iter:
        with open('ACTIVE_ITER', 'w') as fout:
            fout.write(f'{active_iter + 1}\n')
22 |
23 |
def process_active_iter(active_iter):
    """Resolve the active iteration: pass through if given, else read (or initialize) ACTIVE_ITER."""
    if active_iter is not None:
        return active_iter

    try:
        with open('ACTIVE_ITER') as fin:
            return int(fin.readline())
    except FileNotFoundError:
        # initialize file to iteration 0
        with open('ACTIVE_ITER', 'w') as fout:
            fout.write('0\n')
        return 0
37 |
--------------------------------------------------------------------------------
/wfl/utils/misc.py:
--------------------------------------------------------------------------------
1 | """Miscellaneous utilities
2 |
3 | This should be temporary and reorganised when there is more, or just make one utils file if there is not much.
4 |
5 | """
6 |
7 | from ase import Atoms
8 |
9 |
def chunks(arr, n):
    """Yield successive n-sized chunks from arr

    Parameters
    ----------
    arr: list-like
    n: int
        length of chunks (the final chunk may be shorter)

    Yields
    ------
    arr_chunk: array_like
    """
    yield from (arr[start:start + n] for start in range(0, len(arr), n))
25 |
26 |
def atoms_to_list(atoms):
    """Wrap a bare Atoms object in a list; anything else is passed through unchanged."""
    return [atoms] if isinstance(atoms, Atoms) else atoms
32 |
33 |
def dict_tuple_keys_to_str(error_dict):
    """Convert tuple keys to strings so Dict is JSON serializable

    Parameters
    ----------
    error_dict: dict

    Returns
    -------
    error_dict_json_compatible: dict
    """
    def _stringify(key):
        # tuples become '(elem,elem,...)'; all other keys pass through
        if isinstance(key, tuple):
            return '(' + ','.join(str(part) for part in key) + ')'
        return key

    return {_stringify(k): v for k, v in error_dict.items()}
51 |
--------------------------------------------------------------------------------
/wfl/utils/parallel.py:
--------------------------------------------------------------------------------
1 | # https://gitlab.com/ase/ase/-/issues/1140
2 | try:
3 | from ase.calculators.calculator import BaseCalculator, Calculator
4 | _calc_types = (BaseCalculator, Calculator)
5 | except ImportError:
6 | from ase.calculators.calculator import Calculator
7 | _calc_types = Calculator
8 |
9 |
def construct_calculator_picklesafe(calculator):
    """Constructs a calculator safe with multiprocessing.Pool

    Trick: pass a recipe only and create the calculator in the thread created, instead of trying to pickle the entire
    object when creating the pool.

    Taken from optimize.py:run_autopara_wrappable

    Parameters
    ----------
    calculator: Calculator / (initializer, args, kwargs)
        ASE calculator or routine to call to create calculator

    Returns
    -------
    calculator: Calculator
        ase calculator object

    """

    # some, like GenericFileIOCalculator from which Espresso is derived, are not actually
    # derived from Calculator, but actually from BaseCalculator
    if isinstance(calculator, _calc_types):
        return calculator

    if len(calculator) != 3:
        raise RuntimeError('calculator \'{}\' must be (calc_constructor, args, kwargs)'.format(calculator))

    constructor = calculator[0]
    if not callable(constructor):
        raise RuntimeError(
            'calculator \'{}\' : first element is not callable, cannot construct a calculator'.format(calculator))

    # None placeholders mean "no positional / keyword arguments"
    c_args = calculator[1] if calculator[1] is not None else []
    c_kwargs = calculator[2] if calculator[2] is not None else {}

    return constructor(*c_args, **c_kwargs)
52 |
--------------------------------------------------------------------------------
/wfl/utils/pressure.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
class PressureRecursionError(Exception):
    """Raised when an at.info pressure specification refers back to itself.

    Derives from Exception (not BaseException) so generic ``except Exception``
    error boundaries can catch it; BaseException is reserved for exits like
    KeyboardInterrupt/SystemExit.
    """
    pass
6 |
7 |
def sample_pressure(pressure, at=None, rng=None):
    """Sample pressure for calculation with various modes

    Parameters
    ----------
    pressure: float / list / tuple
        Pressure, type and length defines mode as well
        - float: used as pressure
        - ("info", dict_key): looks for dict_key in at.info, parsed same as pressure argument here
        - ("exponential", float): exponential distribution, rate=1. and scaled by float given
        - ("normal_positive", mean, sigma): normal distribution with (mean, sigma) thrown away if negative value drawn,
          max 1000 tries
        - ("uniform", lower, upper): uniform distribution between bounds (lower, upper)

    at: ase.Atoms, default None
        atoms object, only needed or used if mode is `info`

    rng: numpy Generator object, default None
        random number generator to use. Only required if pressure will be generated randomly

    Returns
    -------
    p: float

    """
    try:
        if isinstance(pressure, (float, int)):
            p = pressure
        elif pressure[0] == 'info':
            if len(pressure) != 2:
                raise ValueError()
            pressure_v = at.info[pressure[1]]
            # guard against at.info["key"] = ("info", "key"), which would recurse forever
            # (BUG FIX: previously compared at.info[pressure_v][1] instead of pressure_v[1])
            if not isinstance(pressure_v, float) and pressure_v[0] == 'info' and pressure_v[1] == pressure[1]:
                raise PressureRecursionError('Infinite recursion in pressure {}'.format(pressure))

            # recurse on the stored value; forward rng so random modes stored
            # in at.info work (BUG FIX: rng was previously dropped)
            p = sample_pressure(pressure_v, at, rng=rng)
        elif pressure[0] == 'exponential':
            if len(pressure) != 2:
                raise ValueError()
            p = pressure[1] * rng.exponential(1.0)
        elif pressure[0] == 'normal_positive':
            if len(pressure) != 3:
                raise ValueError()
            n_try = 0
            p = -1.0
            while p < 0:
                n_try += 1
                if n_try >= 1000:
                    raise RuntimeError('Failed to get positive from normal distribution in 1000 iterations')
                p = rng.normal(pressure[1], pressure[2])
        elif pressure[0] == 'uniform':
            if len(pressure) != 3:
                raise ValueError()
            p = rng.uniform(pressure[1], pressure[2])
        else:
            raise ValueError()
    except ValueError as exc:
        raise RuntimeError('Failed to parse pressure \'{}\''.format(pressure)) from exc

    return p
70 |
--------------------------------------------------------------------------------
/wfl/utils/quip_cli_strings.py:
--------------------------------------------------------------------------------
1 | """
2 | QUIP-related string manipulations
3 | """
4 | from ase.io.extxyz import key_val_dict_to_str
5 |
6 |
def dict_to_quip_str(d, list_brackets='{}'):
    """dictionary to QUIP CLI string

    Parameters
    ----------
    d: dict
        descriptor key-value pairs
    list_brackets: str, default '{}'
        string containing open and close symbols for lists (usually '{}' or '{{}}')

    Returns
    -------
    str: descriptor string
    """

    assert len(list_brackets) % 2 == 0

    # split bracket string into its opening and closing halves
    half = len(list_brackets) // 2
    open_bracket = list_brackets[:half]
    close_bracket = list_brackets[half:]

    def _list_join(sep, v):
        if isinstance(v, str):
            # strings are iterable but need to be used as is
            return v

        try:
            # try treating as an iterable
            return sep.join([str(vv) for vv in v])
        except TypeError:
            return v

    parts = []
    for key, val in d.items():
        if isinstance(val, list):
            # special treatment for lists, normally in brackets, and can be other things like
            # double brackets
            joined = ' '.join(str(v) for v in val)
            parts.append(f'{key}=' + open_bracket + joined + close_bracket)
        elif isinstance(val, dict):
            parts.append(f'{key}=' + ':'.join(k + ':' + _list_join(':', v) for k, v in val.items()))
        else:
            # hope that key_val_dict_to_string encodes value properly
            parts.append(key_val_dict_to_str({key: val}))

    return ' '.join(parts)
53 |
--------------------------------------------------------------------------------
/wfl/utils/replace_eval_in_strs.py:
--------------------------------------------------------------------------------
1 | """
2 | Evaluation of expressions marked with _EVAL_ in strings, mainly used for interpreting config files.
3 | """
4 |
5 | import warnings
6 |
7 | from wfl.utils.round_sig_figs import round_sig_figs
8 |
9 |
def replace_eval_in_strs(obj, replacements, n_float_sig_figs=None):
    """Replace some string beginning with _EVAL_ in nested data structures
    with the result of eval() on them. Any lists, tuples, and dicts will
    be gone through recursively and replaced with substituted contents.
    Any strings starting with '_EVAL_ ' will be replaced with the result
    of an eval() call on the remainder of the string, after `replacements`
    has been used as the kwargs of a format() call.

    Parameters
    ----------
    obj: python object
        data structure to go through and replace '_EVAL_ ...' with return value of eval()
    replacements: dict
        keywords to format() call to be applied to each string before eval()
    n_float_sig_figs: int
        if not None, round float output of each eval to this many significant figures

    Returns
    -------
    obj: python object with new lists, tuples, and dicts, with _EVAL_ strings replaced by
        their eval() result
    """
    if isinstance(obj, str):
        if obj.startswith('_EVAL_ '):
            # NOTE: eval() on config-supplied strings -- only use on trusted config files
            value = eval(obj.replace('_EVAL_ ', '', 1).format(**replacements))
            if n_float_sig_figs is not None and isinstance(value, float):
                value = float(round_sig_figs(value, n_float_sig_figs))
            return value
    elif isinstance(obj, list):
        return [replace_eval_in_strs(subobj, replacements, n_float_sig_figs) for subobj in obj]
    elif isinstance(obj, tuple):
        # BUG FIX: must materialize with tuple(); a bare generator expression
        # returned a generator object instead of a tuple
        return tuple(replace_eval_in_strs(subobj, replacements, n_float_sig_figs) for subobj in obj)
    elif isinstance(obj, dict):
        return {k: replace_eval_in_strs(v, replacements, n_float_sig_figs) for k, v in obj.items()}
    elif not isinstance(obj, (bool, int, float)):
        warnings.warn('replace_in_strings got unknown type {}, skipping replacement'.format(type(obj)))

    return obj
48 |
--------------------------------------------------------------------------------
/wfl/utils/round_sig_figs.py:
--------------------------------------------------------------------------------
1 | """
2 | Rounding floats to significant figures
3 | """
4 |
5 |
def round_sig_figs(value, n_sig_figs):
    """Round to a certain number of significant figures

    based on:
    https://stackoverflow.com/questions/3410976/how-to-round-a-number-to-significant-figures-in-python

    Parameters
    ----------
    value: float
        value to round
    n_sig_figs: int
        number of significant figures

    Returns
    -------
    string representation of v, rounded to n_sig_figs significant figures
    """
    # round via %g with explicit precision, then re-format compactly
    rounded = float(f'{value:.{n_sig_figs}g}')
    return f'{rounded:g}'
24 |
--------------------------------------------------------------------------------
/wfl/utils/version.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 |
4 | import wfl
5 |
6 |
def get_wfl_version():
    """Return a version string for wfl.

    Prefers `git describe` output from the package's source directory
    (prefixed with 'git '); falls back to wfl.__version__, or 'None' if
    neither is available.
    """
    try:
        # arg list + cwd instead of a shell string built around the path:
        # avoids breakage/injection if the install path contains quotes
        gitv = subprocess.run(["git", "describe", "--always", "--tags", "--dirty"],
                              cwd=os.path.dirname(__file__),
                              stdout=subprocess.PIPE,
                              env={'PATH': os.environ['PATH']})
        version_str = gitv.stdout.strip().decode('utf-8')
    except Exception:
        version_str = ''

    if len(version_str.strip()) == 0:
        # not a git checkout (or git unavailable); use package metadata
        try:
            version_str = wfl.__version__
        except AttributeError:
            version_str = 'None'
    else:
        version_str = 'git ' + version_str

    return version_str
26 |
--------------------------------------------------------------------------------
/wfl/utils/vol_composition_space.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
def composition_space_Zs(ats):
    """Elements from composition space

    Parameters
    ----------
    ats : list(Atoms)

    Returns
    -------
    Zs : list(int)
        set of atomic numbers found, sorted
    """
    found = set()
    for at in ats:
        found.update(at.numbers)
    return sorted(found)
20 |
21 |
def composition_space_coord(at, fields, composition_Zs=None):
    """Calculate coordinates in vol-composition space

    Parameters
    ----------
    at : Atoms
    fields : list(str)
        fields of atoms objects to find:
        - "_V": volume per atom
        - "_x": compositions, n_elements-1
        - any at.info key which is then divided by the number of atoms
    composition_Zs : list(int)
        atomic numbers of elements for composition space

    Returns
    -------
    coords : list(float)
        coordinates, with volume and n_species-1 dimensions

    """
    n_atoms = len(at)
    coords = []
    for field in fields:
        if field == "_V":
            coords.append(at.get_volume() / n_atoms)
        elif field == "_x":
            # Zs[1:] because you only need n_types-1 composition fractions to fully determine composition
            numbers = at.get_atomic_numbers()
            for Zi in composition_Zs[1:]:
                coords.append(np.sum(numbers == Zi) / n_atoms)
        elif field in at.info:
            coords.append(at.info[field] / n_atoms)
        else:
            raise RuntimeError("Got select_coord field {}, not _V or _x or in at.info".format(field))
    return coords
54 |
--------------------------------------------------------------------------------