├── tests
├── __init__.py
├── czone_test_fixtures.py
├── test_transform.py
├── test_blueprints.py
├── test_generator.py
├── test_util.py
├── README.md
├── test_rng.py
├── test_scene.py
└── test_molecule.py
├── czone
├── util
│ ├── __init__.py
│ ├── eset.py
│ ├── misc.py
│ ├── voxel.py
│ └── measure.py
├── prefab
│ └── __init__.py
├── molecule
│ ├── __init__.py
│ └── molecule.py
├── scene
│ ├── __init__.py
│ └── scene.py
├── blueprint
│ ├── __init__.py
│ └── serializer.py
├── __init__.py
├── generator
│ └── __init__.py
├── surface
│ ├── __init__.py
│ ├── alpha_shape.py
│ └── adsorbate.py
├── volume
│ ├── __init__.py
│ └── algebraic.py
├── types
│ └── __init__.py
└── transform
│ ├── __init__.py
│ ├── post.py
│ └── strain.py
├── pyproject.toml
├── .gitignore
├── docs
├── source
│ ├── modules.rst
│ ├── imgs
│ │ ├── csgf_logo.png
│ │ └── toyota_research_institute.png
│ ├── examples.rst
│ ├── _templates
│ │ └── layout.html
│ ├── czone.scene.rst
│ ├── czone.viz.rst
│ ├── czone.rst
│ ├── _static
│ │ └── css
│ │ │ └── custom.css
│ ├── czone.util.rst
│ ├── czone.prefab.rst
│ ├── czone.transform.rst
│ ├── czone.generator.rst
│ ├── czone.volume.rst
│ ├── license.rst
│ ├── references.rst
│ ├── acknowledgement.rst
│ ├── installation.rst
│ ├── index.rst
│ ├── conf.py
│ ├── examples
│ │ ├── core_shell_nanoparticle.rst
│ │ └── nanoparticle_on_substrate.rst
│ └── refs.bib
├── Makefile
└── make.bat
├── .git-blame-ignore-revs
├── ruff.toml
├── requirements.txt
├── .readthedocs.yaml
├── setup.py
├── README.md
└── examples
├── demo_1.ipynb
└── presentation_examples.ipynb
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/czone/util/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/czone/prefab/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [None]
2 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.ruff]
2 | line-length = 100
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *egg*
2 | *pycache*
3 | scratch/*
4 | .ipynb*
5 | dist/*
6 | docs/build/*
--------------------------------------------------------------------------------
/czone/molecule/__init__.py:
--------------------------------------------------------------------------------
1 | from .molecule import Molecule
2 |
3 | __all__ = ["Molecule"]
4 |
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | Modules
2 | ==========
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | czone
8 |
--------------------------------------------------------------------------------
/czone/scene/__init__.py:
--------------------------------------------------------------------------------
1 | from .scene import PeriodicScene, Scene
2 |
3 | __all__ = ["PeriodicScene", "Scene"]
4 |
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # Formatting and module import refactoring
2 | 6ac146afd997dda3d2f6c624d4cc77f4cbbf3877
3 |
4 |
--------------------------------------------------------------------------------
/docs/source/imgs/csgf_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lerandc/construction_zone/HEAD/docs/source/imgs/csgf_logo.png
--------------------------------------------------------------------------------
/ruff.toml:
--------------------------------------------------------------------------------
1 | line-length = 100
2 | indent-width = 4
3 |
4 | target-version = "py310"
5 |
6 |
7 | [format]
8 | indent-style = "space"
--------------------------------------------------------------------------------
/czone/blueprint/__init__.py:
--------------------------------------------------------------------------------
1 | from .blueprint import Blueprint
2 | from .serializer import Serializer
3 |
4 | __all__ = ["Blueprint", "Serializer"]
5 |
--------------------------------------------------------------------------------
/docs/source/imgs/toyota_research_institute.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lerandc/construction_zone/HEAD/docs/source/imgs/toyota_research_institute.png
--------------------------------------------------------------------------------
/czone/__init__.py:
--------------------------------------------------------------------------------
1 | # from . import blueprint, generator, molecule, prefab, scene, surface, transform, viz, volume
2 | from . import types
3 |
4 | __all__ = ["types"]
5 |
--------------------------------------------------------------------------------
/czone/generator/__init__.py:
--------------------------------------------------------------------------------
1 | from .generator import AmorphousGenerator, Generator, NullGenerator
2 |
3 | __all__ = ["AmorphousGenerator", "Generator", "NullGenerator"]
4 |
--------------------------------------------------------------------------------
/docs/source/examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | =================================
3 |
4 | .. toctree::
5 | examples/nanoparticle_on_substrate
6 | examples/core_shell_nanoparticle
--------------------------------------------------------------------------------
/docs/source/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {% extends "!layout.html" %}
2 | {% block footer %} {{ super() }}
3 |
4 | {% endblock %}
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pymatgen >= 2022.0.0
2 | numpy >= 1.16.2
3 | scipy >= 1.3.0
4 | ase >= 3.21.0
5 | wulffpack >= 1.1.0
6 | sphinx==4.1.2
7 | sphinx_rtd_theme==0.5.1
8 | sphinxcontrib-bibtex==2.3.0
--------------------------------------------------------------------------------
/docs/source/czone.scene.rst:
--------------------------------------------------------------------------------
1 | Scene Module
2 | ===================
3 |
4 | Scenes
5 | ------------------------
6 |
7 | .. automodule:: czone.scene.scene
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/source/czone.viz.rst:
--------------------------------------------------------------------------------
1 | Visualization Module
2 | ========================
3 |
4 | Submodules
5 | ----------
6 |
7 | Visualization
8 | --------------------
9 |
10 | .. automodule:: czone.viz.viz
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/source/czone.rst:
--------------------------------------------------------------------------------
1 | Construction Zone
2 | ==============================
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | czone.generator
8 | czone.volume
9 | czone.scene
10 | czone.transform
11 | czone.prefab
12 | czone.util
13 | czone.viz
14 |
15 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # File: .readthedocs.yaml
2 |
3 | version: 2
4 |
5 | # Build from the docs/ directory with Sphinx
6 | sphinx:
7 | configuration: docs/source/conf.py
8 |
9 | # Explicitly set the version of Python and its requirements
10 | python:
11 | version: 3.7
12 | install:
13 | - requirements: requirements.txt
--------------------------------------------------------------------------------
/czone/surface/__init__.py:
--------------------------------------------------------------------------------
1 | from .adsorbate import add_adsorbate, get_nearest_neighbors
2 | from .alpha_shape import alpha_shape_alg_3D, alpha_shape_alg_3D_with_sampling
3 |
4 | __all__ = [
5 | "add_adsorbate",
6 | "get_nearest_neighbors",
7 | "alpha_shape_alg_3D",
8 | "alpha_shape_alg_3D_with_sampling",
9 | ]
10 |
--------------------------------------------------------------------------------
/docs/source/_static/css/custom.css:
--------------------------------------------------------------------------------
1 | .wy-side-nav-search,
2 | .wy-nav-top {
3 | background: #6E54C9;
4 | }
5 | /* Sidebar */
6 | .wy-nav-side {
7 | background: #2A2F37;
8 | }
9 |
10 | .wy-nav-content, .wy-nav-content-wrap{
11 | background: #F0EDEE;
12 | }
13 |
14 | .wy-menu-vertical a:hover{background-color:#626D7F;cursor:pointer}
--------------------------------------------------------------------------------
/czone/volume/__init__.py:
--------------------------------------------------------------------------------
1 | from .algebraic import Cylinder, Plane, Sphere, convex_hull_to_planes, get_bounding_box
2 | from .volume import MultiVolume, Volume, makeRectPrism
3 |
4 | __all__ = [
5 | "Cylinder",
6 | "Plane",
7 | "Sphere",
8 | "convex_hull_to_planes",
9 | "get_bounding_box",
10 | "MultiVolume",
11 | "Volume",
12 | "makeRectPrism",
13 | ]
14 |
--------------------------------------------------------------------------------
/docs/source/czone.util.rst:
--------------------------------------------------------------------------------
1 | Utilities
2 | ==================
3 |
4 | Measurements
5 | -------------------------
6 |
7 | .. automodule:: czone.util.measure
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | Miscellaneous
13 | ----------------------
14 |
15 | .. automodule:: czone.util.misc
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
--------------------------------------------------------------------------------
/docs/source/czone.prefab.rst:
--------------------------------------------------------------------------------
1 | Prefab Module
2 | ====================
3 |
4 | Prefab Objects
5 | --------------------------
6 |
7 | .. automodule:: czone.prefab.prefab
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | Wulff Constructions
13 | -------------------------
14 |
15 | .. automodule:: czone.prefab.wulff
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
--------------------------------------------------------------------------------
/docs/source/czone.transform.rst:
--------------------------------------------------------------------------------
1 | Transformation Module
2 | =======================
3 |
4 | General Transformations
5 | --------------------------------
6 |
7 | .. automodule:: czone.transform.transform
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | Strain Fields
13 | -----------------------------
14 |
15 | .. automodule:: czone.transform.strain
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
--------------------------------------------------------------------------------
/czone/types/__init__.py:
--------------------------------------------------------------------------------
1 | from .types import (
2 | BaseAlgebraic,
3 | BaseGenerator,
4 | BasePostTransform,
5 | BasePrefab,
6 | BaseScene,
7 | BaseStrain,
8 | BaseTransform,
9 | BaseVolume,
10 | )
11 |
12 | __all__ = [
13 | "BaseAlgebraic",
14 | "BaseGenerator",
15 | "BasePostTransform",
16 | "BasePrefab",
17 | "BaseScene",
18 | "BaseStrain",
19 | "BaseTransform",
20 | "BaseVolume",
21 | ]
22 |
--------------------------------------------------------------------------------
/docs/source/czone.generator.rst:
--------------------------------------------------------------------------------
1 | Generator Module
2 | =======================
3 |
4 | Generators
5 | --------------------------------
6 |
7 | .. automodule:: czone.generator.generator
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | Amorphous Algorithms
13 | --------------------------------------------
14 |
15 | .. automodule:: czone.generator.amorphous_algorithms
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 | name='czone',
5 | version='2022.09.20',
6 | description='An open source python package for generating nanoscale+ atomic scenes',
7 | url='https://github.com/lerandc/construction_zone',
8 | author='Luis Rangel DaCosta',
9 | author_email='luisrd@berkeley.edu',
10 | python_requires='>=3.7',
11 | packages=find_packages(),
12 | install_requires=[
13 | 'pymatgen',
14 | 'numpy',
15 | 'scipy',
16 | 'ase',
17 | 'wulffpack']
18 | )
19 |
--------------------------------------------------------------------------------
/docs/source/czone.volume.rst:
--------------------------------------------------------------------------------
1 | Volume Module
2 | ====================
3 |
4 | Volumes and MultiVolumes
5 | --------------------------
6 |
7 | .. automodule:: czone.volume.volume
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | Algebraic Surfaces
13 | -----------------------------
14 |
15 | .. automodule:: czone.volume.algebraic
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | Voxels
21 | -------------------------
22 |
23 | .. automodule:: czone.volume.voxel
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/source/license.rst:
--------------------------------------------------------------------------------
1 | License
2 | =================================
3 |
4 |
5 | Construction Zone is released under the GPLv3 License. For full information
6 | about the GPL licenses and for the full text, check out `these resources <https://www.gnu.org/licenses/gpl-3.0.en.html>`_.
7 | It is free to use, modify, or distribute, provided that any derivative works
8 | also provide the complete source code and freedom of use--we encourage any
9 | personal development of the software for your own use. We emphasize that
10 | Construction Zone is a research software under active development, and as such,
11 | we cannot provide any guarantees or warranties on the accuracy of any of our
12 | scientific implementations.
--------------------------------------------------------------------------------
/czone/transform/__init__.py:
--------------------------------------------------------------------------------
1 | from .post import ChemicalSubstitution, CustomPostTransform
2 | from .strain import HStrain, IStrain
3 | from .transform import (
4 | Inversion,
5 | MatrixTransform,
6 | MultiTransform,
7 | Reflection,
8 | Rotation,
9 | Translation,
10 | rot_align,
11 | rot_v,
12 | rot_vtv,
13 | rot_zxz,
14 | s2s_alignment,
15 | )
16 |
17 | __all__ = [
18 | "ChemicalSubstitution",
19 | "CustomPostTransform",
20 | "HStrain",
21 | "IStrain",
22 | "Inversion",
23 | "MatrixTransform",
24 | "MultiTransform",
25 | "Reflection",
26 | "Rotation",
27 | "Translation",
28 | "rot_align",
29 | "rot_v",
30 | "rot_vtv",
31 | "rot_zxz",
32 | "s2s_alignment",
33 | ]
34 |
--------------------------------------------------------------------------------
/docs/source/references.rst:
--------------------------------------------------------------------------------
1 | References
2 | =================================
3 |
4 | Construction Zone is an open-source project developed within a greater ecosystem
5 | of open-source scientific software.
6 |
7 | If you use Construction Zone in your own work, we kindly ask that you cite the following:
8 |
9 | .. bibliography::
10 | rangel_dacosta_luis_2021_5161161
11 |
12 | The following work and packages were crucial to the development of Construction Zone:
13 |
14 | .. bibliography::
15 |
16 | ONG2013314
17 | 2020SciPy
18 | 2020NumPy
19 | Hjorth_Larsen_2017
20 | Rahm2020
21 |
22 |
23 | For more background on Wulff constructions and the algorithms adapted from WulffPack:
24 |
25 | .. bibliography::
26 |
27 | wulff
28 | ldmarks1
29 | ldmarks2
30 | WINTERBOTTOM1967303
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/tests/czone_test_fixtures.py:
--------------------------------------------------------------------------------
1 | # ruff: noqa: F401
2 |
3 | import sys
4 | import unittest
5 |
6 | ## Import everything into the namespace so that repr's can evaluate
7 | import numpy as np
8 | from numpy import array
9 | from pymatgen.core import Lattice, Structure
10 |
11 | from czone.generator import AmorphousGenerator, Generator, NullGenerator
12 | from czone.molecule import Molecule
13 | from czone.scene import PeriodicScene, Scene
14 | from czone.transform import ChemicalSubstitution, HStrain
15 | from czone.util.voxel import Voxel
16 | from czone.volume import Cylinder, MultiVolume, Plane, Sphere, Volume
17 |
18 |
19 | class czone_TestCase(unittest.TestCase):
20 | def assertArrayEqual(self, first, second, msg=None) -> None:
21 | "Fail if the two arrays are unequal by via Numpy's array_equal method."
22 | self.assertTrue(np.array_equal(first, second), msg=msg)
23 |
24 | def assertReprEqual(
25 | self,
26 | obj,
27 | msg=None,
28 | ) -> None:
29 | "Fail if the object re-created by the __repr__ method is not equal to the original."
30 | with np.printoptions(threshold=sys.maxsize, floatmode="unique"):
31 | self.assertEqual(obj, eval(repr(obj)), msg=msg)
32 |
--------------------------------------------------------------------------------
/tests/test_transform.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from czone.transform.post import ChemicalSubstitution
4 | from czone.transform.strain import HStrain
5 |
6 | from .czone_test_fixtures import czone_TestCase
7 |
8 | seed = 871342
9 | rng = np.random.default_rng(seed=seed)
10 |
11 |
12 | def get_random_mapping(rng: np.random.Generator, N=8):
13 | Z = np.arange(1, 119, dtype=int)
14 | targets = rng.choice(Z, N, replace=False)
15 | subs = rng.choice(np.setdiff1d(Z, targets), N, replace=False)
16 | return {int(t): int(s) for t, s in zip(targets, subs)}
17 |
18 |
19 | class Test_ChemicalSubstitution(czone_TestCase):
20 | def setUp(self):
21 | self.N_trials = 128
22 |
23 | def test_init(self):
24 | for _ in range(self.N_trials):
25 | frac = rng.uniform()
26 | mapping = get_random_mapping(rng)
27 | chem_sub = ChemicalSubstitution(mapping, frac)
28 | self.assertReprEqual(chem_sub)
29 |
30 |
31 | class Test_HStrain(czone_TestCase):
32 | def setUp(self):
33 | self.N_trials = 128
34 |
35 | def test_init(self):
36 | def yield_strain_matrices():
37 | for shape in ((3,), (3, 3), (9,), (6,)):
38 | yield rng.uniform(size=shape)
39 |
40 | for _ in range(self.N_trials):
41 | for m in yield_strain_matrices():
42 | hstrain = HStrain(m, origin="generator", mode="crystal")
43 | self.assertReprEqual(hstrain)
44 |
--------------------------------------------------------------------------------
/docs/source/acknowledgement.rst:
--------------------------------------------------------------------------------
1 | Acknowledgement
2 | =================================
3 |
4 | We acknowledge support for the development of Construction Zone from the Toyota Research Institute.
5 | This material is based upon work supported by the U.S. Department of Energy, Office of Science,
6 | Office of Advanced Scientific Computing Research, Department of Energy Computational Science Graduate Fellowship
7 | under Award Number DE-SC0021110.
8 |
9 | This software was prepared as an account of work sponsored by an agency of the United
10 | States Government. Neither the United States Government nor any agency thereof, nor any of their
11 | employees, makes any warranty, express or implied, or assumes any legal liability or responsibility for the
12 | accuracy, completeness, or usefulness of any information, apparatus, product, or process disclosed, or
13 | represents that its use would not infringe privately owned rights. Reference herein to any specific
14 | commercial product, process, or service by trade name, trademark, manufacturer, or otherwise does not
15 | necessarily constitute or imply its endorsement, recommendation, or favoring by the United States
16 | Government or any agency thereof. The views and opinions of authors expressed herein do not
17 | necessarily state or reflect those of the United States Government or any agency thereof.
18 |
19 | .. image:: imgs/csgf_logo.png
20 | :height: 80px
21 | :width: 267px
22 | :target: https://www.krellinst.org/csgf/
23 |
24 | .. image:: imgs/toyota_research_institute.png
25 | :target: https://www.tri.global/
--------------------------------------------------------------------------------
/docs/source/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | =================================
3 |
4 | Construction Zone can be easily installed with pip::
5 |
6 | pip install czone
7 |
8 | We strongly recommend utilizing an environment manager such as Anaconda, and
9 | installing Construction Zone into your environment of choice.
10 |
11 | To install Construction Zone into a clean environment with Anaconda, you could
12 | do the following::
13 |
14 | conda create -n environment_name python=3.7
15 | conda activate environment_name
16 | pip install czone
17 |
18 |
19 | In this example, we manually set the target Python version to v3.7.
20 | Construction Zone has been tested only for Python 3.7 and above; earlier versions
21 | may work but are not supported.
22 |
23 | Stable versions of Construction Zone will be published to PyPI. To use the current,
24 | development version of Construction Zone, you can set up the Python package in
25 | development mode. Again, we recommend doing so with an environment manager.
26 |
27 | An example development installation could be achieved by the following::
28 |
29 | conda create -n environment_name python=3.7
30 | conda activate environment_name
31 | git clone https://github.com/lerandc/construction_zone.git
32 | cd construction_zone
33 |     python setup.py develop
34 |
35 | Development mode installations import Python packages directly from the source
36 | every time. You could freely edit the source code yourself, or just use the
37 | installation to pull fresh code from the development branch by running :code:`git pull`
38 | in the repository directory.
39 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | Construction Zone
2 | =============================================
3 |
4 | Construction Zone is an open-source Python package designed as a tool to help
5 | build and generate complex nanoscale atomic models. The package was designed with
6 | simulation workflows, like TEM and MD simulations, and machine learning pipelines in mind.
7 | Its interface is flexible and allows for easy development of programmatic constructions
8 | that are suitable for applications in materials science, and much of the bookkeeping
9 | typically required when making complex atomic objects is abstracted from the user.
10 |
11 | The basic design principle of Construction Zone is to take a sculpting approach to creating atomic objects and scenes.
12 | Construction Zone implements several core classes which help accomplish this task.
13 | Generator objects are additive elements that provide the sculpting blocks--
14 | they are classes which fill a given space with atoms, whether that is as a crystalline
15 | lattice or an amorphous collection of points. Volume objects are subtractive elements
16 | that define the boundaries of a nanoscale structure. Together, Volumes and Generators
17 | can be joined together and treated as semantic objects, like a nanoparticle or a substrate.
18 | Multiple objects in space can interact with each other in Scenes, which take care
19 | of conflict resolutions like object intersection and atom overlapping.
20 | Volumes and Generators can also be transformed with Transformation objects, which
21 | can apply arbitrary transformations to the structures at hand, like symmetry operations
22 | or strain fields.
23 |
24 | If you use Construction Zone in your own work, we kindly ask that you cite the following:
25 | Rangel DaCosta, Luis, & Scott, Mary. (2021). Construction Zone (v2021.08.04). Zenodo. https://doi.org/10.5281/zenodo.5161161
26 |
27 | .. toctree::
28 | :maxdepth: 4
29 | :caption: Contents
30 |
31 | installation
32 | examples
33 | modules
34 | references
35 | license
36 | acknowledgement
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | def setup(app):
14 | app.add_css_file('custom.css')
15 |
16 | import os
17 | import sys
18 | sys.path.insert(0,"../../")
19 |
20 | # -- Project information -----------------------------------------------------
21 |
22 | project = 'Construction Zone'
23 | copyright = '2021, Luis Rangel DaCosta'
24 | author = 'Luis Rangel DaCosta'
25 |
26 | # The full version, including alpha/beta/rc tags
27 | release = 'v0.0.1'
28 |
29 | # -- General configuration ---------------------------------------------------
30 |
31 | # Add any Sphinx extension module names here, as strings. They can be
32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
33 | # ones.
34 | extensions = ['sphinx.ext.autodoc',
35 | 'sphinx.ext.napoleon',
36 | 'sphinx.ext.imgmath',
37 | 'sphinxcontrib.bibtex']
38 |
39 | bibtex_bibfiles = ['refs.bib']
40 |
41 | autosummary_generate = True # Turn on sphinx.ext.autosummary
42 |
43 | # Add any paths that contain templates here, relative to this directory.
44 | templates_path = ['_templates']
45 |
46 | # List of patterns, relative to source directory, that match files and
47 | # directories to ignore when looking for source files.
48 | # This pattern also affects html_static_path and html_extra_path.
49 | exclude_patterns = []
50 |
51 |
52 | # -- Options for HTML output -------------------------------------------------
53 |
54 | # The theme to use for HTML and HTML Help pages. See the documentation for
55 | # a list of builtin themes.
56 | #
57 | html_theme = 'sphinx_rtd_theme'
58 |
59 | # Add any paths that contain custom static files (such as style sheets) here,
60 | # relative to this directory. They are copied after the builtin static files,
61 | # so a file named "default.css" will overwrite the builtin "default.css".
62 | html_static_path = ['_static']
63 |
64 | html_css_files = [
65 | 'css/custom.css',
66 | ]
--------------------------------------------------------------------------------
/tests/test_blueprints.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import numpy as np
4 |
5 | from czone.blueprint import Blueprint, Serializer
6 | from czone.generator import NullGenerator
7 | from czone.util.voxel import Voxel
8 |
9 | from .czone_test_fixtures import czone_TestCase
10 | from .test_generator import get_random_amorphous_generator, get_random_generator
11 | from .test_scene import get_random_object, get_random_scene
12 |
13 | seed = 9815108923
14 | rng = np.random.default_rng(seed=seed)
15 |
16 |
17 | class Test_Blueprint(czone_TestCase):
18 | """object -> blueprint -> object"""
19 |
20 | def setUp(self):
21 | self.N_trials = 32
22 |
23 | def test_generator(self):
24 | n_G = NullGenerator()
25 | n_blueprint = Blueprint(n_G)
26 | self.assertEqual(n_G, n_blueprint.to_object())
27 | self.assertReprEqual(n_G)
28 |
29 | for _ in range(self.N_trials):
30 | a_G = get_random_amorphous_generator(rng=rng)
31 | a_blueprint = Blueprint(a_G)
32 | self.assertReprEqual(a_G)
33 | self.assertEqual(a_G, a_blueprint.to_object())
34 |
35 | G = get_random_generator(rng=rng)
36 | blueprint = Blueprint(G)
37 | self.assertReprEqual(G)
38 | self.assertEqual(G, blueprint.to_object())
39 |
40 | def test_volume(self):
41 | for _ in range(self.N_trials):
42 | V = get_random_object()
43 | blueprint = Blueprint(V)
44 | self.assertEqual(V, blueprint.to_object())
45 |
46 | def test_voxel(self):
47 | for _ in range(self.N_trials):
48 | bases = rng.normal(size=(3, 3))
49 | scale = rng.uniform(0.1, 10)
50 | origin = rng.uniform(-100, 100, size=(3,))
51 |
52 | V = Voxel(bases, scale, origin)
53 | blueprint = Blueprint(V)
54 | self.assertEqual(V, blueprint.to_object())
55 |
56 | def test_scene(self):
57 | for _ in range(self.N_trials):
58 | for periodic in [False, True]:
59 | S = get_random_scene(periodic=periodic)
60 | blueprint = Blueprint(S)
61 | self.assertEqual(S, blueprint.to_object())
62 |
63 |
class Test_Serializer(czone_TestCase):
    """Round-trip checks: Blueprint -> serialized file -> Blueprint.

    Files are written into a temporary directory so that test artifacts do not
    accumulate in the working directory (previously they were left behind).
    """

    def setUp(self):
        self.N_trials = 16
        self.formats = ["json", "yaml", "toml", "h5"]
        # Strain fields and substitutions are excluded; serializer coverage for
        # them is handled elsewhere.
        self.generator_args = {"with_strain": False, "with_sub": False}

    def _assert_roundtrip(self, obj, stem):
        """Serialize obj through every supported format and assert equality after reading back."""
        blueprint = Blueprint(obj)
        with tempfile.TemporaryDirectory() as tmp_dir:
            for fmt in self.formats:
                test_path = Path(tmp_dir) / f"{stem}.{fmt}"
                Serializer.write(test_path, blueprint)
                self.assertEqual(obj, Serializer.read(test_path).to_object())

    def test_generator(self):
        for _ in range(self.N_trials):
            G = get_random_generator(**self.generator_args)
            self._assert_roundtrip(G, "generator_test_file")

    def test_volume(self):
        for _ in range(self.N_trials):
            V = get_random_object(generator_args=self.generator_args)
            self._assert_roundtrip(V, "volume_test_file")

    def test_scene(self):
        for _ in range(self.N_trials):
            for periodic in (False, True):
                S = get_random_scene(periodic=periodic, generator_args=self.generator_args)
                self._assert_roundtrip(S, "scene_test_file")
107 |
--------------------------------------------------------------------------------
/czone/transform/post.py:
--------------------------------------------------------------------------------
1 | """
2 | Short module for arbitrary post-generation, pre-volume transformations
3 |
Useful for chemical modifications, statistical defects, etc.
5 | """
6 |
7 | from __future__ import annotations
8 |
9 | from typing import List
10 |
11 | import numpy as np
12 |
13 | from czone.types import BasePostTransform
14 |
15 |
class ChemicalSubstitution(BasePostTransform):
    """Randomly substitute chemical species after generation.

    Each atom whose species matches a key of ``mapping`` is replaced by the
    corresponding value with probability ``frac``.

    Attributes:
        mapping (dict): map of target atomic number -> substitute atomic number.
        frac (float): substitution probability, in (0, 1].
        rng (np.random.Generator): random number generator used for sampling.
    """

    def __init__(self, mapping: dict, frac: float, rng=None):
        self.mapping = mapping
        self.frac = frac
        self.rng = np.random.default_rng() if rng is None else rng

    def __repr__(self):
        # rng is deliberately omitted: it does not define the transform's identity.
        return f"ChemicalSubstitution(mapping={repr(self.mapping)}, frac={self.frac})"

    def __eq__(self, other):
        if isinstance(other, ChemicalSubstitution):
            return (self.mapping == other.mapping) and (np.isclose(self.frac, other.frac))
        else:
            return False

    @property
    def mapping(self) -> dict:
        """Map of target species -> substitute species (atomic numbers)."""
        return self._mapping

    @mapping.setter
    def mapping(self, m):
        if not isinstance(m, dict):
            raise TypeError(f"mapping must be a dict of int -> int, got {type(m).__name__}")
        for k, v in m.items():
            if (not isinstance(k, int)) or (not isinstance(v, int)):
                raise TypeError("mapping keys and values must both be ints")
            if k == v:
                raise ValueError(f"mapping may not substitute species {k} for itself")
        self._mapping = m

    @property
    def target(self):
        """Species to be substituted (mapping keys)."""
        return self.mapping.keys()

    @property
    def substitute(self):
        """Replacement species (mapping values)."""
        return self.mapping.values()

    @property
    def frac(self) -> float:
        """Probability in (0, 1] that any given target atom is substituted."""
        return self._frac

    @frac.setter
    def frac(self, val: float):
        if val <= 0 or val > 1:
            raise ValueError(f"frac must be in (0, 1], got {val}")

        self._frac = val

    def _replace_species(self, species, **kwargs):
        """Return a copy of species with target species randomly substituted.

        Extra keyword arguments are accepted and ignored so apply_function can
        forward its **kwargs without raising a TypeError (previously any kwarg
        would crash here).
        """
        out_species = np.copy(species)

        for t, s in zip(self.target, self.substitute):
            t_filter = species == t
            # Independent draw per atom; atoms of species t flip with prob. frac.
            t_probs = self.rng.uniform(0, 1, size=species.shape)

            out_species[(t_filter) & (t_probs <= self.frac)] = s

        return out_species

    def apply_function(self, points: np.ndarray, species: np.ndarray, **kwargs):
        """Return (points, substituted_species); points pass through unchanged."""
        return points, self._replace_species(species, **kwargs)

    @property
    def rng(self):
        """Random number generator associated with Generator"""
        return self._rng

    @rng.setter
    def rng(self, new_rng: np.random.BitGenerator):
        if not isinstance(new_rng, np.random.Generator):
            raise TypeError("Must supply a valid Numpy Generator")

        self._rng = new_rng
91 |
92 |
class CustomPostTransform(BasePostTransform):
    """Post-generation transform that delegates to a user-supplied callable."""

    def __init__(self, fun):
        # fun should accept (points, species) and return the transformed pair.
        self.fun = fun

    def apply_function(self, points: np.ndarray, species: np.ndarray, **kwargs):
        """Apply the wrapped callable to (points, species) and return its result."""
        result = self.fun(points, species)
        return result
99 |
100 |
class PostSequence(BasePostTransform):
    """Apply a sequence of post-generation transforms in insertion order."""

    def __init__(self, transforms: List[BasePostTransform]):
        self._transforms = []
        if transforms is not None:
            self.add_transform(transforms)

    def add_transform(self, transform: BasePostTransform):
        """Add a transform, or an iterable of transforms, to the sequence.

        Args:
            transform (BasePostTransform | Iterable[BasePostTransform]):
                transform object(s) to append to the sequence.
        """
        if hasattr(transform, "__iter__"):
            # Materialize first: a one-shot iterator would otherwise be exhausted
            # by the validation loop and extend() would silently add nothing.
            transform = list(transform)
            for v in transform:
                assert isinstance(v, BasePostTransform), "transforms must be transform objects"
            self._transforms.extend(transform)
        else:
            assert isinstance(transform, BasePostTransform), "transforms must be transform objects"
            self._transforms.append(transform)

    def apply_function(self, points: np.ndarray, species: np.ndarray, **kwargs):
        """Thread (points, species) through each stored transform, in order."""
        for t in self._transforms:
            points, species = t.apply_function(points, species, **kwargs)

        return points, species
128 |
--------------------------------------------------------------------------------
/czone/util/eset.py:
--------------------------------------------------------------------------------
1 | from collections import deque
2 | from collections.abc import Iterable
3 | from functools import reduce
4 |
5 | import numpy as np
6 |
7 |
8 | class EqualSet:
9 | """Creates a set based on equality operations."""
10 |
11 | def __init__(self, x=[]):
12 | self._storage = deque([])
13 | self.update(x)
14 |
15 | def __iter__(self):
16 | yield from iter(self._storage)
17 |
18 | def __len__(self):
19 | return len(self._storage)
20 |
21 | def __contains__(self, item):
22 | return item in self._storage
23 |
24 | @staticmethod
25 | def _make_unique(x: Iterable):
26 | match x:
27 | case EqualSet():
28 | yield from x
29 | case _:
30 | N_items = len(x)
31 | add_dict = {}
32 | for i in range(N_items):
33 | add_dict[i] = True if i not in add_dict else False
34 | if add_dict[i]:
35 | for j in range(i + 1, N_items):
36 | if x[j] == x[i]:
37 | add_dict[j] = False
38 | yield from (val for k, val in zip(sorted(add_dict.keys()), x) if add_dict[k])
39 |
40 | def _check_if_new(self, other):
41 | for o in other:
42 | add = True
43 | for s in self:
44 | if o == s:
45 | add = False
46 | break
47 | if add:
48 | yield o
49 |
50 | def update(self, other: Iterable):
51 | tmp = self._make_unique(other)
52 | if len(self) == 0:
53 | self._storage.extend(tmp)
54 | else:
55 | self._storage.extend(self._check_if_new(tmp))
56 |
57 | def union(self, other: Iterable):
58 | res = EqualSet(self)
59 | res.update(other)
60 | return res
61 |
62 | def __or__(self, other):
63 | return self.union(other)
64 |
65 | def __ior__(self, other):
66 | self.update(other)
67 | return self
68 |
69 | def remove(self, other: Iterable):
70 | tmp = self._make_unique(other)
71 | for t in tmp:
72 | try:
73 | self._storage.remove(t)
74 | except ValueError:
75 | pass
76 |
77 | def difference(self, other: Iterable):
78 | res = EqualSet(self)
79 | res.remove(other)
80 | return res
81 |
82 | def __sub__(self, other):
83 | return self.difference(other)
84 |
85 | def __isub__(self, other):
86 | self.remove(other)
87 | return self
88 |
89 | def symmetric_difference(self, other: Iterable):
90 | A = EqualSet(other)
91 | return (self - A) | (A - self)
92 |
93 | def __xor__(self, other: Iterable):
94 | return self.symmetric_difference(other)
95 |
96 | def intersection(self, other: Iterable):
97 | A = self - other
98 | return self - A
99 |
100 | def __and__(self, other: Iterable):
101 | return self.intersection(other)
102 |
103 | def isdisjoint(self, other):
104 | return len(self.intersection(other)) == 0
105 |
106 | def issubset(self, other):
107 | other = EqualSet(other)
108 | if len(other) >= len(self):
109 | return reduce(lambda x, y: x and y, [i in other for i in self], True)
110 | else:
111 | return False
112 |
113 | def __le__(self, other):
114 | return self.issubset(other)
115 |
116 | def issuperset(self, other):
117 | other = EqualSet(other)
118 | if len(other) <= len(self):
119 | return reduce(lambda x, y: x and y, [i in self for i in other], True)
120 | else:
121 | return False
122 |
123 | def __ge__(self, other):
124 | return self.issuperset(other)
125 |
126 | def __eq__(self, other):
127 | other = EqualSet(other)
128 | if len(other) != len(self):
129 | return False
130 | else:
131 | return (self <= other) and (self >= other)
132 |
133 | def __lt__(self, other):
134 | return (self <= other) and (not self >= other)
135 |
136 | def __gt__(self, other):
137 | return (self >= other) and (not self <= other)
138 |
139 |
def array_set_equal(x, y, **kwargs):
    """Check whether two arrays contain the same rows, ignoring row order.

    Rows are compared after column-wise sorting; tolerance keyword arguments
    are forwarded to np.allclose.
    """
    if x.shape != y.shape:
        return False

    return np.allclose(np.sort(x, axis=0), np.sort(y, axis=0), **kwargs)
148 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Construction Zone
2 | [](https://zenodo.org/badge/latestdoi/261777347)
3 |
4 | Modules for generating nanoscale+ atomic scenes, primarily using pymatgen as generators with S/TEM image simulation in mind.
5 |
6 | Full documentation can be found here: [https://construction-zone.readthedocs.io/](https://construction-zone.readthedocs.io/).
7 |
8 | If you use Construction Zone in your own work, we kindly ask that you cite the following:
9 | Rangel DaCosta, Luis, & Scott, Mary. (2021). Construction Zone (v2021.08.04). Zenodo. https://doi.org/10.5281/zenodo.5161161
10 |
11 |
12 |
13 | ## Installation
14 |
15 | Construction Zone can be easily installed with pip:
16 | ```
17 | pip install czone
18 | ```
19 |
20 | We strongly recommend utilizing an environment manager such as Anaconda, and
21 | installing Construction Zone into your environment of choice.
22 |
23 | To install Construction Zone into a clean environment with Anaconda, you could
24 | do the following:
25 | ```
26 | conda create -n environment_name python=3.7
27 | conda activate environment_name
28 | pip install czone
29 | ```
30 |
31 | In this example, we manually set the target Python version to v3.7.
32 | Construction Zone has been tested only for Python 3.7 and above; earlier versions
33 | may work but are not supported.
34 |
35 | Stable versions of Construction Zone will be passed onto PyPi. To use the current,
36 | development version of Construction Zone, you can set up the Python package in
37 | development mode. Again, we recommend doing so with an environment manager.
38 |
39 | An example development installation could be achieved by the following:
40 | ```
41 | conda create -n environment_name python=3.7
42 | conda activate environment_name
43 | git clone https://github.com/lerandc/construction_zone.git
44 | cd construction_zone
pip install -e .
46 | ```
47 |
48 | Development mode installations import Python packages directly from the source
49 | every time. You could freely edit the source code yourself, or just use the
50 | installation to pull fresh code from the development branch by running `git pull`
51 | in the repository directory.
52 |
### Manual Coverage Summary (LRD, 2 Feb. 2024):
54 | We have begun writing unit tests for Construction Zone. A partial coverage summary can be found below.
For a more detailed coverage summary, and information about testing priorities and a testing road map,
please look towards the `tests` folder. Once unit tests for the core modules have been completed, CZ will
be updated on relevant installation channels. Minor to major refactoring can be expected in certain areas,
but we will aim to have minimal breaking API changes, where possible.
59 |
60 | (x = total, + = partial (direct), - = partial (indirect), blank = none)
61 |
62 | [ ] Generator \
63 | | [ ] Amorphous Algorithms \
64 | | [ ] Generator Core
65 |
66 | [x] Molecule
67 |
68 | [ ] Prefab \
69 | | [ ] Prefab Core \
70 | | [ ] Wulff
71 |
72 | [ ] Scene
73 |
74 | [ ] Surface \
75 | | [ ] Adsorbate \
76 | | [ ] Alpha shapes
77 |
78 | [-] Transform \
79 | | [ ] Post \
80 | | [ ] Strain \
81 | | [-] Transform Core
82 |
83 | [ ] Util \
84 | | [ ] Measure \
85 | | [ ] Misc.
86 |
87 | [ ] Viz
88 |
89 | [+] Volume \
90 | | [+] Algebraic \
91 | | [ ] Volume Core \
92 | | [ ] Voxel
93 |
94 |
95 | ## Acknowledgment
96 |
97 | We acknowledge support for the development of Construction Zone from the Toyota Research Institute.
98 | This material is based upon work supported by the U.S. Department of Energy, Office of Science,
99 | Office of Advanced Scientific Computing Research, Department of Energy Computational Science Graduate Fellowship
100 | under Award Number DE-SC0021110.
101 |
102 | This software was prepared as an account of work sponsored by an agency of the United
103 | States Government. Neither the United States Government nor any agency thereof, nor any of their
104 | employees, makes any warranty, express or implied, or assumes any legal liability or responsibility for the
105 | accuracy, completeness, or usefulness of any information, apparatus, product, or process disclosed, or
106 | represents that its use would not infringe privately owned rights. Reference herein to any specific
107 | commercial product, process, or service by trade name, trademark, manufacturer, or otherwise does not
108 | necessarily constitute or imply its endorsement, recommendation, or favoring by the United States
109 | Government or any agency thereof. The views and opinions of authors expressed herein do not
110 | necessarily state or reflect those of the United States Government or any agency thereof.
111 |
112 |
113 | [
](https://www.krellinst.org/csgf/)
114 |
115 | [](https://www.tri.global/)
--------------------------------------------------------------------------------
/examples/demo_1.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import czone as cz\n",
10 | "import numpy as np"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 2,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "box = cz.volume.makeRectPrism(5,5,10)\n",
20 | "sc_gen = cz.generator.BasicStructure()\n",
21 | "H_volume = cz.volume.Volume(points=box, generator=sc_gen)\n",
22 | "\n",
23 | "box_side = cz.volume.makeRectPrism(5,10,5)\n",
24 | "He_volume = cz.volume.Volume(points=box_side, generator=sc_gen)\n",
25 | "He_volume.generator.species = [2]"
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": 3,
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "H_volume_t_zero = cz.volume.from_volume(H_volume, translate=np.array([10,0,0]))\n",
35 | "H_volume_t_one = cz.volume.from_volume(H_volume, translate=np.array([20,0,0]))\n",
36 | "\n",
37 | "He_volume_t_zero = cz.volume.from_volume(He_volume, translate=np.array([20,0,0]))\n",
38 | "He_volume_t_one = cz.volume.from_volume(He_volume, translate=np.array([10,0,0]))\n",
39 | "\n",
40 | "H_volume_t_one.priority=1\n",
41 | "He_volume_t_one.priority=1"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 4,
47 | "metadata": {},
48 | "outputs": [],
49 | "source": [
50 | "bounds = np.array([[0.0,0.0,0.0], [30.0, 30.0, 30.0]])\n",
51 | "obj_list = [H_volume, He_volume, H_volume_t_zero, H_volume_t_one, He_volume_t_zero, He_volume_t_one]\n",
52 | "exclusion_scene = cz.scene.Scene(bounds=bounds, objects=obj_list)"
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": 5,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "exclusion_scene.populate()"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 6,
67 | "metadata": {},
68 | "outputs": [
69 | {
70 | "data": {
71 | "application/vnd.jupyter.widget-view+json": {
72 | "model_id": "7ccd725d534944c6a9f57a35c6718466",
73 | "version_major": 2,
74 | "version_minor": 0
75 | },
76 | "text/plain": [
77 | "interactive(children=(FloatSlider(value=-60.0, description='Azimuth', layout=Layout(height='30px', width='400p…"
78 | ]
79 | },
80 | "metadata": {},
81 | "output_type": "display_data"
82 | },
83 | {
84 | "data": {
85 | "text/plain": [
86 | ""
87 | ]
88 | },
89 | "execution_count": 6,
90 | "metadata": {},
91 | "output_type": "execute_result"
92 | }
93 | ],
94 | "source": [
95 | "cz.viz.simple_scene_widget(exclusion_scene)"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 15,
101 | "metadata": {},
102 | "outputs": [],
103 | "source": [
104 | "obj_list = []\n",
105 | "for i in range(5):\n",
106 | " box = cz.volume.makeRectPrism(25-3.75*i, 25-3.75*i, 5)\n",
107 | " box += np.array([3.75*i, 3.75*i, 0.0])\n",
108 | " tmp_vol = cz.volume.Volume(points=box, generator=sc_gen)\n",
109 | " tmp_vol.generator.species = [i]\n",
110 | " tmp_vol.priority = 5-i\n",
111 | " obj_list.append(tmp_vol)\n",
112 | " \n",
113 | "bounds = np.array([[0.0,0.0,0.0], [25.0, 25.0, 5.0]])\n",
114 | "stacking_scene = cz.scene.Scene(bounds=bounds, objects=obj_list)"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 16,
120 | "metadata": {},
121 | "outputs": [],
122 | "source": [
123 | "stacking_scene.populate()"
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "execution_count": 17,
129 | "metadata": {},
130 | "outputs": [
131 | {
132 | "data": {
133 | "application/vnd.jupyter.widget-view+json": {
134 | "model_id": "3cfc8d5838f64366a017b482e4388edf",
135 | "version_major": 2,
136 | "version_minor": 0
137 | },
138 | "text/plain": [
139 | "interactive(children=(FloatSlider(value=-60.0, description='Azimuth', layout=Layout(height='30px', width='400p…"
140 | ]
141 | },
142 | "metadata": {},
143 | "output_type": "display_data"
144 | },
145 | {
146 | "data": {
147 | "text/plain": [
148 | ""
149 | ]
150 | },
151 | "execution_count": 17,
152 | "metadata": {},
153 | "output_type": "execute_result"
154 | }
155 | ],
156 | "source": [
157 | "cz.viz.simple_scene_widget(stacking_scene)"
158 | ]
159 | },
160 | {
161 | "cell_type": "code",
162 | "execution_count": null,
163 | "metadata": {},
164 | "outputs": [],
165 | "source": []
166 | }
167 | ],
168 | "metadata": {
169 | "kernelspec": {
170 | "display_name": "Python [conda env:ML_work]",
171 | "language": "python",
172 | "name": "conda-env-ML_work-py"
173 | },
174 | "language_info": {
175 | "codemirror_mode": {
176 | "name": "ipython",
177 | "version": 3
178 | },
179 | "file_extension": ".py",
180 | "mimetype": "text/x-python",
181 | "name": "python",
182 | "nbconvert_exporter": "python",
183 | "pygments_lexer": "ipython3",
184 | "version": "3.7.9"
185 | }
186 | },
187 | "nbformat": 4,
188 | "nbformat_minor": 4
189 | }
190 |
--------------------------------------------------------------------------------
/czone/util/misc.py:
--------------------------------------------------------------------------------
1 | from typing import List, Tuple
2 |
3 | import numpy as np
4 |
5 | from czone.transform import HStrain
6 | from czone.volume import Plane
7 |
8 |
def round_away(x: float) -> float:
    """Round toward the nearest integer away from zero (the opposite of np.fix).

    Args:
        x (float, ArrayLike[float]): number(s) to round

    Returns:
        float, ArrayLike[float]: rounded number(s)
    """
    magnitude = np.ceil(np.abs(x))
    return np.sign(x) * magnitude
19 |
20 |
def get_N_splits(N: int, M: int, L: int, rng=None) -> List[int]:
    """Get N uniform random integers in interval [M, L-M) with separation at least M.

    Args:
        N (int): number of indices
        M (int): minimum distance between indices and ends of list
        L (int): length of initial list
        rng (np.random.Generator): random number generator; a fresh default
            generator is created when None

    Returns:
        List[int]: sorted list of random indices
    """
    if N == 0:
        return []

    if L - 2 * M < (N - 1) * M:
        raise ValueError(
            f"Minimum separation {M} is too large for {N} requested splits and length {L}"
        )

    if rng is None:
        rng = np.random.default_rng()

    # Seed an initial choice; the mask tracks which candidates remain valid.
    splits = [rng.integers(M, L - M)]
    candidates = np.array([x for x in range(M, L - M)])
    idx = np.ma.array(data=candidates, mask=np.abs(candidates - splits[-1]) < M)

    while len(splits) < N:
        while np.all(idx.mask):
            # No valid candidates remain; restart with a fresh initial choice.
            splits = [rng.integers(M, L - M)]
            idx.mask = np.abs(idx.data - splits[-1]) < M

        # Draw from the surviving candidates, then invalidate its M-neighborhood.
        splits.append(rng.choice(idx.compressed()))
        idx.mask = np.logical_or(idx.mask, np.abs(idx.data - splits[-1]) < M)

    splits.sort()
    return splits
60 |
61 |
def vangle(v1: np.ndarray, v2: np.ndarray) -> float:
    """Calculate the angle between two vectors of the same dimension in R^N.

    Args:
        v1 (np.ndarray): N-D vector
        v2 (np.ndarray): N-D vector

    Returns:
        float: angle in radians
    """
    cos_theta = np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
    return np.arccos(cos_theta)
73 |
74 |
def snap_plane_near_point(
    point: np.ndarray, generator, miller_indices: Tuple[int], mode: str = "nearest"
):
    """Determine the nearest crystallographic plane to a point in space for a given crystal coordinate system.

    Args:
        point (np.ndarray): Point in space.
        generator (Generator): Generator describing crystal coordinate system.
        miller_indices (Tuple[int]): Miller indices of desired plane.
        mode (str): "nearest" for absolute closest plane to point; "floor" for
                    next nearest valid plane towards generator origin; "ceil"
                    for next furthest valid plane from generator origin.

    Returns:
        Plane in space with orientation given by Miller indices snapped to
        nearest valid location.

    Raises:
        ValueError: if mode is not one of "nearest", "ceil", "floor", or if the
            generator carries a homogeneous strain field in an unsupported mode.
    """

    miller_indices = np.array(miller_indices)

    # Resolve the rounding rule up front so an invalid mode fails fast instead
    # of silently snapping to the origin (previously an unknown mode left
    # new_point as zeros).
    try:
        round_op = {"nearest": np.round, "ceil": round_away, "floor": np.fix}[mode]
    except KeyError:
        raise ValueError(f"mode must be 'nearest', 'ceil', or 'floor', got {mode!r}") from None

    # Express the point in fractional coordinates of the (possibly strained) cell.
    if generator.strain_field is None:
        point_fcoord = np.array(np.linalg.solve(generator.voxel.sbases, point))
    else:
        assert isinstance(
            generator.strain_field, HStrain
        ), "Finding Miller planes with inhomogenous strain fields is not supported."

        if generator.strain_field.mode == "crystal":
            H = generator.strain_field.matrix
            point_fcoord = np.array(np.linalg.solve(H @ generator.voxel.sbases, point))
        else:
            # Previously this path fell through with point_fcoord undefined,
            # raising a confusing NameError below.
            raise ValueError(
                "Finding Miller planes is only supported for strain fields in 'crystal' mode."
            )

    # Axis intercepts of the Miller plane; infs mark zero indices (no intercept).
    with np.errstate(divide="ignore"):  # check for infs directly
        target_fcoord = 1 / miller_indices

    new_point = np.zeros((3, 1))

    # TODO: if bases are not orthonormal, this procedure is not correct
    # since the following rounds towards the nearest lattice points, with equal
    # weights given to all lattice vectors
    for i in range(3):
        new_point[i, 0] = (
            round_op(point_fcoord[i] / target_fcoord[i]) * target_fcoord[i]
            if not np.isinf(target_fcoord[i])
            else point_fcoord[i]
        )

    if generator.strain_field is None:
        # scale back to real space
        new_point = generator.voxel.sbases @ new_point

        # plane normal lies along the reciprocal-lattice vector for these indices
        normal = generator.voxel.reciprocal_bases.T @ miller_indices
    else:
        H = generator.voxel.sbases
        G = generator.strain_field.matrix
        new_point = G @ H @ new_point

        # get perpendicular vector
        normal = np.linalg.inv(H @ G).T @ miller_indices

    return Plane(normal=normal, point=new_point)
155 |
--------------------------------------------------------------------------------
/czone/util/voxel.py:
--------------------------------------------------------------------------------
1 | from functools import reduce
2 |
3 | import numpy as np
4 |
5 |
class Voxel:
    """Voxel class used to span space for generators and track transformations.

    Voxels provide an alterable view of bases and orientation of crystalline
    generators and are the actual transformed object, not Generators.
    This is in contrast to applying transformations directly to the underlying
    pymatgen Structure object, for speed and ease of manipulation.
    Voxels also help determine how much of a "block" a Generator needs to
    build for the purpose of supplying atoms to a larger volume.

    Attributes:
        scale (float): Scaling factor of basis set.
        bases (np.ndarray): Basis vectors defining crystal unit cell.
        sbases (np.ndarray): Scaled basis set.
        reciprocal_bases (np.ndarray): Basis vectors defining unit cell of reciprocal lattice.
        origin (np.ndarray): Origin of Voxel grid.
    """

    def __init__(
        self,
        bases: np.ndarray = np.identity(3),
        scale: float = np.array([1]),
        origin: np.ndarray = np.array([0.0, 0.0, 0.0]),
    ):
        # Assign through the property setters so validation runs; bases must be
        # set before scale, since the scale setter broadcasts against the bases.
        self._scale = None
        self._bases = None
        self._origin = None
        self.bases = bases
        self.scale = scale
        self.origin = origin

    def __repr__(self):
        return (
            f"Voxel(bases={repr(self.bases)}, scale={repr(self.scale)}, origin={repr(self.origin)})"
        )

    def __eq__(self, other):
        """Voxels are equal when bases, scale, and origin all match numerically."""
        if not isinstance(other, Voxel):
            return False
        return all(
            np.allclose(a, b)
            for a, b in zip(
                (self.bases, self.scale, self.origin),
                (other.bases, other.scale, other.origin),
            )
        )

    @property
    def scale(self):
        """Scaling factor of basis set."""
        return self._scale

    @scale.setter
    def scale(self, scale: float):
        # Probe broadcastability against the current bases before committing.
        try:
            self.bases * np.array(scale)
        except ValueError:
            raise ValueError(
                "Bases and scale are not broadcastable. Scale array must be of shape=(1,) (3,), (1,3), or (3,1)"
            )

        self._scale = np.array(scale)

    @property
    def bases(self):
        """Basis vectors defining crystal unit cell. Vectors are rows of matrix."""
        return self._bases

    @bases.setter
    def bases(self, bases: np.ndarray):
        bases = np.array(bases)

        assert bases.shape == (
            3,
            3,
        ), """Bases must be 3x3 numpy array that defines vectors that span 3D space
                    [0,:] = [x_1, y_1, z_1]
                    [1,:] = [x_2, y_2, z_2]
                    [2,:] = [x_3, y_3, z_3]"""

        # Pairwise collinearity checks guarantee the rows span 3D space.
        assert not self._collinear(
            bases[0, :], bases[1, :]
        ), "Basis vectors must be linearly independent"
        assert not self._collinear(
            bases[0, :], bases[2, :]
        ), "Basis vectors must be linearly independent"
        assert not self._collinear(
            bases[1, :], bases[2, :]
        ), "Basis vectors must be linearly independent"

        self._bases = bases

    @property
    def origin(self):
        """Origin of Voxel grid in space."""
        return self._origin

    @origin.setter
    def origin(self, origin: np.ndarray):
        try:
            self._origin = np.array(origin).reshape((3,))
        except Exception as e:
            # Fail loudly: the previous behavior printed the error and left the
            # origin stale/None, deferring failure to a confusing location.
            raise ValueError(
                f"Origin must be interpretable as a (3,) array, got {origin!r}"
            ) from e

    @property
    def sbases(self):
        """Basis vectors defining crystal unit cell, scaled by scaling factor."""
        return self._bases * self._scale

    @property
    def reciprocal_bases(self):
        """Basis vectors defining unit cell of reciprocal lattice."""
        return np.linalg.inv(self.sbases)

    def _collinear(self, vec1: np.ndarray, vec2: np.ndarray):
        """Check if two vectors are collinear. Used for determining validity of basis set.

        Args:
            vec1 (np.ndarray): first vector
            vec2 (np.ndarray): second vector

        Returns:
            bool indicating whether vectors are collinear.
        """
        # TODO: need a better check with tolerance
        return np.abs((np.dot(vec1, vec2) / (np.linalg.norm(vec1) * np.linalg.norm(vec2)))) == 1.0

    def get_voxel_coords(self, points):
        """Convert points from Cartesian basis to voxel (fractional) basis."""
        shifted = points - self.origin
        return np.linalg.solve(self.sbases, shifted.T).T

    def get_cartesian_coords(self, points):
        """Convert points from voxel (fractional) basis to Cartesian basis."""
        res = (self.sbases @ points.T).T
        return res + self.origin

    def get_extents(self, box: np.ndarray):
        """Determine minimum contiguous block of voxels that fully covers a space.

        Args:
            box (np.ndarray): Set of points defining extremities of space.

        Returns:
            Tuple of minimum extents and maximum extents indicating how many
            voxels to tile in space, and where, to span a given region.
        """

        extents = self.get_voxel_coords(box)
        min_extent = np.floor(np.min(extents, axis=0))
        max_extent = np.ceil(np.max(extents, axis=0))

        return min_extent.astype(np.int64), max_extent.astype(np.int64)
162 |
--------------------------------------------------------------------------------
/tests/test_generator.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from pymatgen.core import Lattice, Structure
3 |
4 | from czone.generator import AmorphousGenerator, Generator, NullGenerator
5 | from czone.generator.amorphous_algorithms import _parse_min_distance_arg
6 | from czone.transform import (
7 | ChemicalSubstitution,
8 | HStrain,
9 | Inversion,
10 | Reflection,
11 | Rotation,
12 | Translation,
13 | rot_vtv,
14 | )
15 | from czone.volume import Plane, Sphere, Volume
16 |
17 | from .czone_test_fixtures import czone_TestCase
18 | from .test_transform import get_random_mapping
19 |
20 | seed = 72349
21 | rng = np.random.default_rng(seed=seed)
22 |
23 |
def get_transforms():
    """Yield (transform, description) pairs covering every transform type, with and without basis_only."""
    for basis_only in (False, True):
        label = f"with basis_only={basis_only}"

        origin = rng.uniform(-10, 10, size=(1, 3))
        yield Inversion(origin=origin, basis_only=basis_only), f"Inversion {label}"

        reflection_plane = Plane(rng.normal(size=(3,)), rng.normal(size=(3,)))
        yield Reflection(reflection_plane, basis_only=basis_only), f"Reflection {label}"

        # Rotation taking +z onto a random direction, about the same origin.
        rot_matrix = rot_vtv([0, 0, 1], rng.normal(size=(3,)))
        yield Rotation(rot_matrix, origin=origin, basis_only=basis_only), f"Rotation {label}"

        yield (
            Translation(shift=rng.uniform(-10, 10, size=(1, 3)), basis_only=basis_only),
            f"Translation {label}",
        )
49 |
50 |
def get_transformed_generators(G):
    """Yield labeled copies of G: one identity copy, then one per transform from get_transforms."""
    yield G.from_generator(), " Identity"
    for transform, label in get_transforms():
        yield G.from_generator(transformation=[transform]), label
55 |
56 |
class Test_NullGenerator(czone_TestCase):
    """NullGenerator instances should be interchangeable and supply no atoms."""

    def test_init_and_eq(self):
        A = NullGenerator()
        B = NullGenerator()
        self.assertEqual(A, B)
        self.assertReprEqual(A)

        # Transformations of a null generator remain equal to the original.
        for g, msg in get_transformed_generators(A):
            self.assertEqual(A, g, msg=f"Failed with {msg}")

    def test_supply_atoms(self):
        A = NullGenerator()
        bbox = rng.normal(size=(8, 3))
        pos, species = A.supply_atoms(bbox)

        # Always returns empty position and species arrays of the right shape.
        self.assertEqual(pos.shape, (0, 3))
        self.assertEqual(species.shape, (0,))

        for g, msg in get_transformed_generators(A):
            t_pos, t_species = g.supply_atoms(bbox)
            self.assertArrayEqual(pos, t_pos, msg=f"Failed with {msg}")
            self.assertArrayEqual(species, t_species, f"Failed with {msg}")
77 |
78 |
def get_random_generator(N_species=8, with_strain=True, with_sub=True, rng=rng):
    """Build a Generator with a random lattice and basis, optionally decorated
    with a random homogeneous strain field and/or chemical substitution."""
    # Coin-flip for each optional decoration (skipped entirely when disabled,
    # so the rng draw sequence matches the flags).
    hstrain = HStrain(rng.uniform(size=(3,))) if (with_strain and rng.uniform() < 0.5) else None

    chem_sub = (
        ChemicalSubstitution(get_random_mapping(rng), frac=rng.uniform())
        if (with_sub and rng.uniform() < 0.5)
        else None
    )

    # Slightly perturbed cubic lattice with a random small basis.
    lattice = Lattice(5 * np.eye(3) + rng.normal(size=(3, 3)))
    n_sites = rng.integers(1, N_species)
    structure = Structure(
        lattice,
        rng.integers(1, 119, size=(n_sites)),
        rng.uniform(size=(n_sites, 3)),
    )

    return Generator(
        origin=rng.uniform(-10, 10, size=(1, 3)),
        structure=structure,
        strain_field=hstrain,
        post_transform=chem_sub,
    )
106 |
107 |
class Test_Generator(czone_TestCase):
    """Unit tests for crystalline Generator construction and equality."""

    def setUp(self):
        self.N_trials = 128

    def test_init(self):
        for _ in range(self.N_trials):
            base = get_random_generator()
            for gen, msg in get_transformed_generators(base):
                self.assertReprEqual(gen, msg)

    def test_eq(self):
        for _ in range(self.N_trials):
            G = get_random_generator(with_strain=False, with_sub=False)
            H = G.from_generator()
            self.assertEqual(G, H)

            # Equal generators must supply identical atoms over the same region.
            bbox = Volume(alg_objects=Sphere(radius=10, center=np.zeros(3))).get_bounding_box()
            g_pos, g_species = G.supply_atoms(bbox)
            h_pos, h_species = H.supply_atoms(bbox)
            self.assertArrayEqual(g_pos, h_pos)
            self.assertArrayEqual(g_species, h_species)
131 |
132 |
def get_random_amorphous_generator(rng=rng):
    """Construct an AmorphousGenerator with a random origin, minimum spacing,
    density, and atomic species."""
    return AmorphousGenerator(
        rng.uniform(-10, 10, size=(1, 3)),  # origin
        rng.uniform(0.5, 10),  # minimum interatomic distance
        rng.uniform(0.05, 1.0),  # number density
        rng.integers(1, 119),  # atomic number
    )
140 |
141 |
class Test_AmorphousGenerator(czone_TestCase):
    """Unit tests for AmorphousGenerator and its min-distance argument parsing."""

    def setUp(self):
        self.N_trials = 128

    def test_init(self):
        for _ in range(self.N_trials):
            gen = get_random_amorphous_generator()
            self.assertReprEqual(gen)

    def test_multielement_mindist_parse(self):
        species = [1, 2, 3]

        # Reference: symmetric 3x3 matrix of pairwise minimum distances,
        # built from its upper triangle.
        ref_res = np.zeros((3, 3))
        ref_res[0, :] = [1, 2, 3]
        ref_res[1, 1:] = [4, 5]
        ref_res[2, 2] = 6
        ref_res += np.triu(ref_res).T - np.diag(np.diag(ref_res))

        # Nested-dict form: outer key is one species, inner key the other.
        dist_as_nested_dict = {
            1: {1: 1.0, 2: 2.0, 3: 3.0},
            2: {2: 4.0, 3: 5.0},
            3: {3: 6.0},
        }
        test_res = _parse_min_distance_arg(species, dist_as_nested_dict)
        self.assertArrayEqual(
            ref_res, test_res, f"Parsed answer:\n {test_res}. \n Should be:\n {ref_res}"
        )

        # Pair-tuple form: keys are (species_a, species_b) tuples.
        dist_as_pair_dict = {
            (1, 1): 1.0,
            (1, 2): 2.0,
            (1, 3): 3.0,
            (2, 2): 4.0,
            (2, 3): 5.0,
            (3, 3): 6.0,
        }
        test_res = _parse_min_distance_arg(species, dist_as_pair_dict)
        self.assertArrayEqual(
            ref_res, test_res, f"Parsed answer:\n {test_res}. \n Should be:\n {ref_res}"
        )
185 |
186 |
--------------------------------------------------------------------------------
/tests/test_util.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from czone.util.voxel import Voxel
4 | from czone.util.eset import EqualSet, array_set_equal
5 |
6 | from .czone_test_fixtures import czone_TestCase
7 |
8 | seed = 9817924
9 | rng = np.random.default_rng(seed=seed)
10 |
11 |
class Test_EqualSet(czone_TestCase):
    """Unit tests for EqualSet, checked against the builtin set as reference."""

    def setUp(self):
        self.N_trials = 64
        self.N_points = 256

    def _random_pair(self):
        # Two overlapping integer samples with many repeated elements.
        x = rng.integers(0, 100, size=(self.N_points))
        y = rng.integers(0, 100, size=(self.N_points))
        return x, y

    def assertSetEquivalent(self, first, second, msg=None) -> None:
        "Fail if two sets are not equal, checking"
        self.assertEqual(len(first), len(second), msg)
        self.assertEqual(first.union(second), first, msg)
        self.assertEqual(first.symmetric_difference(second), set([]), msg)

    def test_init(self):
        for _ in range(self.N_trials):
            data = rng.integers(0, 100, size=(self.N_points))
            self.assertSetEquivalent(set(data), EqualSet(data))

    def test_union(self):
        for _ in range(self.N_trials):
            x, y = self._random_pair()

            ref = set(x).union(list(y))
            test = EqualSet(x).union(list(y))
            itest = EqualSet(x)
            itest |= list(y)

            self.assertSetEquivalent(ref, test)
            self.assertSetEquivalent(ref, itest)
            self.assertSetEquivalent(ref, EqualSet(x) | list(y))

    def test_difference(self):
        for _ in range(self.N_trials):
            x, y = self._random_pair()

            ref = set(x).difference(list(y))
            test = EqualSet(x).difference(list(y))
            itest = EqualSet(x)
            itest -= list(y)

            self.assertSetEquivalent(ref, test)
            self.assertSetEquivalent(ref, itest)
            self.assertSetEquivalent(ref, EqualSet(x) - list(y))

    def test_symmetric_difference(self):
        for _ in range(self.N_trials):
            x, y = self._random_pair()

            ref = set(x).symmetric_difference(list(y))
            test = EqualSet(x).symmetric_difference(list(y))

            self.assertSetEquivalent(ref, test)
            self.assertSetEquivalent(ref, EqualSet(x) ^ list(y))

    def test_intersection(self):
        for _ in range(self.N_trials):
            x, y = self._random_pair()

            ref = set(x).intersection(list(y))
            test = EqualSet(x).intersection(list(y))

            self.assertSetEquivalent(ref, test)
            self.assertSetEquivalent(ref, EqualSet(x) & [j for j in y])

    def test_isdisjoint(self):
        for _ in range(self.N_trials):
            x, y = self._random_pair()

            ref_A = set(x)
            ref_B = set(y)
            ref_A.difference_update(ref_B)

            # With B's elements removed, A and B must be disjoint.
            true_check = EqualSet(list(ref_A)).isdisjoint(list(ref_B))
            self.assertTrue(ref_A.isdisjoint(ref_B))
            self.assertTrue(true_check)

            # With B merged back in, they must overlap.
            ref_A.update(ref_B)
            false_check = EqualSet(list(ref_A)).isdisjoint(list(ref_B))
            self.assertFalse(ref_A.isdisjoint(ref_B))
            self.assertFalse(false_check)

    def test_equalities(self):
        ## Make sure empty sets are handled correctly
        self.assertTrue(EqualSet() == EqualSet())
        self.assertTrue(EqualSet() < EqualSet([1]))
        self.assertTrue(EqualSet([1]) > EqualSet())

        for _ in range(self.N_trials):
            x, y = self._random_pair()

            sx_ref, sy_ref = set(x), set(y)
            sx_test, sy_test = EqualSet(x), EqualSet(y)

            ## subset, superset relations must match the reference sets
            self.assertEqual(sx_ref.issubset(sy_ref), sx_test.issubset(sy_test))
            self.assertEqual(sx_ref.issuperset(sy_ref), sx_test.issuperset(sy_test))
            self.assertEqual(sx_ref <= sy_ref, sx_test <= sy_test)
            self.assertEqual(sx_ref >= sy_ref, sx_test >= sy_test)

            ## equality is subset AND superset
            self.assertEqual(sx_ref == sy_ref, sx_test == sy_test)
            self.assertTrue(sx_test == EqualSet(x))

            ## unions are proper supersets of their operands
            self.assertTrue((sx_test | sy_test) > sx_test)
            self.assertTrue((sx_test | sy_test) > sy_test)
            self.assertTrue(sx_test < (sx_test | sy_test))
            self.assertTrue(sy_test < (sx_test | sy_test))
130 |
131 |
class Test_ArraySet(czone_TestCase):
    """Unit tests for order-insensitive comparison of point arrays."""

    def setUp(self):
        self.N_trials = 1024
        self.N_points = 1024

    def test_array_set_equal(self):
        for _ in range(self.N_trials):
            points = rng.uniform(size=(self.N_points, 3))
            shuffled = rng.permutation(points, axis=0)
            # A row permutation represents the same set of points.
            self.assertTrue(array_set_equal(points, shuffled))

            # Replace one row with a point outside [0, 1): sets now differ.
            shuffled[23, :] = rng.uniform(size=(1, 3)) + 1.0
            self.assertFalse(array_set_equal(points, shuffled))
145 |
146 |
#### Tests for Voxel class
class Test_Voxel(czone_TestCase):
    """Unit tests for Voxel construction and repr round-tripping."""

    def setUp(self):
        self.N_trials = 32

    def test_init(self):
        for _ in range(self.N_trials):
            cell = Voxel(
                rng.normal(size=(3, 3)),  # bases
                rng.uniform(0.1, 10),  # scale
                rng.uniform(-100, 100, size=(3,)),  # origin
            )
            self.assertReprEqual(cell)
160 |
--------------------------------------------------------------------------------
/czone/transform/strain.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import copy
4 | from typing import Callable
5 |
6 | import numpy as np
7 |
8 | from czone.types import BaseStrain
9 |
10 |
11 | class HStrain(BaseStrain):
12 | """Strain class for applying homogeneous strain fields to generators.
13 |
14 | HStrain objects can be attached to generators, and transform the coordinates
15 | of the atoms post-generation of the supercell via simple strain tensor.
16 | HStrain fields apply strain in crystal coordinate system by default.
17 |
18 | Attributes:
19 | matrix (np.ndarray): Matrix representing homogeneous strain tensor.
20 | Can be set with 3 (x,y,z), 6 (Voigt notation), or
21 | 9 values (as list or 3x3 array).
22 | """
23 |
24 | def __init__(self, matrix=None, origin="generator", mode="crystal"):
25 | super().__init__()
26 | if matrix is not None:
27 | self.matrix = matrix
28 | else:
29 | # apply no strain
30 | self._matrix = np.eye(3)
31 |
32 | self.mode = mode
33 |
34 | if origin != "generator":
35 | self.origin = origin
36 | self.origin_type = "global"
37 | else:
38 | self.origin_type = "generator"
39 |
40 | self._bases = None
41 |
42 | def __repr__(self):
43 | if self.origin_type == "generator":
44 | return f"HStrain(matrix={repr(self.matrix)}, origin='generator', mode='{self.mode}')"
45 | else:
46 | return f"HStrain(matrix={repr(self.matrix)}, origin={self.origin}, mode='{self.mode}')"
47 |
48 | def __eq__(self, other):
49 | if isinstance(other, HStrain):
50 | base_check = np.allclose(self.matrix, other.matrix) and self.mode == other.mode
51 | if self.origin_type == "generator":
52 | return base_check and self.origin_type == other.origin_type
53 | else:
54 | return base_check and self.origin == other.origin
55 | else:
56 | return False
57 |
58 | ##############
59 | # Properties #
60 | ##############
61 |
62 | @property
63 | def matrix(self):
64 | """Homogeneous strain tensor."""
65 | return self._matrix
66 |
67 | @matrix.setter
68 | def matrix(self, vals):
69 | vals = np.squeeze(np.array(vals))
70 | match vals.shape:
71 | case (3,):
72 | self._matrix = np.eye(3) * vals
73 | case (3, 3):
74 | self._matrix = vals
75 | case (9,):
76 | self._matrix = np.reshape(vals, (3, 3))
77 | case (6,):
78 | # voigt notation
79 | v = vals
80 | self._matrix = np.array(
81 | [[v[0], v[5], v[4]], [v[5], v[1], v[3]], [v[4], v[3], v[2]]]
82 | )
83 | case _:
84 | raise ValueError("Input shape must be either 3,6, or 9 elements")
85 |
86 | ##############
87 | ### Methods ##
88 | ##############
89 | def apply_strain(self, points: np.ndarray) -> np.ndarray:
90 | # get points relative to origin
91 | sp = np.copy(points) - self.origin
92 |
93 | if self.mode == "crystal":
94 | # project onto crystal coordinates, strain, project back into real space
95 | sp = sp @ np.linalg.inv(self.bases).T @ self.matrix @ self.bases.T
96 | else:
97 | # strain
98 | sp = sp @ self.matrix
99 |
100 | # shift back w.r.t. origin
101 | sp += self.origin
102 |
103 | return sp
104 |
105 |
class IStrain(BaseStrain):
    """Strain class for applying inhomogenous strain fields to generators.

    IStrain objects can be attached to generators, and transform the coordinates
    of the atoms post-generation of the supercell via arbitrary strain functions.
    IStrain fields apply strain in crystal coordinate system by default.

    User must input a custom strain function; strain functions by default should
    accept only points as positional arguments and can take any kwargs.

    Attributes:
        fun_kwargs (dict): kwargs to pass to custom strain function
        strain_fun (Callable): strain function F: R3 -> R3 for
                               np.arrays of shape (N,3)->(N,3)
    """

    def __init__(self, fun=None, origin="generator", mode="crystal", **kwargs):
        """
        Args:
            fun: strain function mapping (N,3) arrays to (N,3) arrays; identity
                (no strain) if None.
            origin: 'generator' to inherit the origin of the attached
                generator, or an explicit origin in global coordinates.
            mode: 'crystal' to apply strain in crystal coordinates; any other
                value applies the function directly in cartesian coordinates.
            **kwargs: extra keyword arguments forwarded to the strain function.
        """
        self.mode = mode

        if origin != "generator":
            self.origin = origin
        else:
            super().__init__()

        self._bases = None

        # fun_kwargs must be assigned before strain_fun: the strain_fun setter
        # validates the candidate function by calling it with self.fun_kwargs.
        self.fun_kwargs = kwargs

        if fun is not None:
            self.strain_fun = fun
        else:
            # apply no strain; accepts arbitrary kwargs so the 'basis' argument
            # supplied by apply_strain in crystal mode is tolerated
            self.strain_fun = lambda x, **kwargs: x

    ##############
    # Properties #
    ##############

    @property
    def fun_kwargs(self):
        """kwargs passed to custom strain function upon application of strain."""
        return self._fun_kwargs

    @fun_kwargs.setter
    def fun_kwargs(self, kwargs_dict: dict):
        assert isinstance(kwargs_dict, dict), "Must supply dictionary for arbirtrary extra kwargs"
        self._fun_kwargs = kwargs_dict

    @property
    def strain_fun(self):
        """Inhomogenous strain function to apply to coordinates."""
        return self._strain_fun

    @strain_fun.setter
    def strain_fun(self, fun: Callable[[np.ndarray], np.ndarray]):
        try:
            # Validate the shape contract on a small random batch of points.
            # np.random.rand takes sizes as separate args, not a tuple.
            ref_arr = np.random.rand(100, 3)
            test_arr = fun(ref_arr, **self.fun_kwargs)
            assert test_arr.shape == (100, 3)
        except (AssertionError, AttributeError) as e:
            # AttributeError: the returned object has no .shape (not an ndarray).
            # NOTE(review): validation does not pass a 'basis' kwarg, so functions
            # requiring one will fail here even though crystal-mode apply_strain
            # supplies it — confirm intended contract.
            raise ValueError(
                "Strain function must return numpy arrays with shape (N,3) for input arrays of shape (N,3)"
            ) from e

        self._strain_fun = copy.deepcopy(fun)

    ##############
    ### Methods ##
    ##############
    def apply_strain(self, points: np.ndarray) -> np.ndarray:
        """Apply the inhomogeneous strain function to an (N,3) array of points.

        Args:
            points: cartesian coordinates, shape (N, 3).

        Returns:
            Strained coordinates, shape (N, 3); the input array is not modified.
        """
        # get points relative to origin
        sp = np.copy(points) - self.origin

        if self.mode == "crystal":
            # project onto crystal coordinates
            sp = sp @ np.linalg.inv(self.bases)

            # strain; basis is forwarded so the function can use lattice info
            sp = self.strain_fun(sp, basis=self.bases, **self.fun_kwargs)

            # project back into real space
            sp = sp @ self.bases
        else:
            # strain
            sp = self.strain_fun(sp, **self.fun_kwargs)

        # shift back w.r.t. origin
        sp += self.origin

        return sp
195 |
--------------------------------------------------------------------------------
/czone/scene/scene.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from functools import reduce
4 | from itertools import product
5 |
6 | import numpy as np
7 | from ase import Atoms
8 |
9 | from czone.transform import Translation
10 | from czone.types import BaseScene
11 | from czone.util.eset import EqualSet
12 | from czone.util.voxel import Voxel
13 |
14 |
class Scene(BaseScene):
    """Scene classes manage multiple objects interacting in space with cell boundaries.

    Attributes:
        bounds (np.ndarray): 2x3 array defining rectangular bounds of scene.
        objects (List[BaseVolume]): List of all objects currently in scene.
        all_atoms (np.ndarray): Coordinates of all atoms in scene after precedence checks.
        all_species (np.ndarray): Atomic numbers of all atoms in scene after precedence checks.
        ase_atoms (Atoms): Collection of atoms in scene as ASE Atoms object.
    """

    def __init__(self, domain: Voxel, objects=None):
        super().__init__(domain, objects)

    def __repr__(self) -> str:
        return f"Scene(domain={repr(self.domain)}, objects={repr(self.objects)})"

    def __eq__(self, other: Scene) -> bool:
        if not isinstance(other, Scene):
            return False
        # Objects are compared as unordered sets; insertion order is irrelevant.
        return (self.domain == other.domain) and (EqualSet(self.objects) == EqualSet(other.objects))

    @property
    def ase_atoms(self):
        """Collection of atoms in scene as ASE Atoms object."""
        return Atoms(
            symbols=self.all_species,
            positions=self.all_atoms,
            cell=self.domain.sbases.T,
            celldisp=self.domain.origin,
        )

    def check_against_object(self, atoms, idx):
        # Atoms survive the check when they are NOT interior to the object at idx.
        return np.logical_not(self.objects[idx].checkIfInterior(atoms))

    def _prepare_for_population(self):
        # Non-periodic scenes need no setup before population.
        pass
54 |
55 |
class PeriodicScene(BaseScene):
    """Scene with periodic boundary conditions along selected cell vectors.

    Objects whose bounding boxes cross a periodic boundary are replicated as
    translated periodic images before population, and final atom positions are
    folded back into the domain cell along periodic axes.

    Attributes:
        domain (Voxel): cell defining the boundaries of the scene.
        pbc (tuple): three bools, periodicity flag per cell vector.
        periodic_images (dict): per-object lists of translated image volumes,
            keyed by id() of the object; built by _prepare_for_population.
    """

    def __init__(self, domain: Voxel, objects=None, pbc=(True, True, True)):
        super().__init__(domain, objects)
        self.pbc = pbc

    def __repr__(self) -> str:
        return f"PeriodicScene(domain={repr(self.domain)}, objects={repr(self.objects)}, pbc={self.pbc})"

    def __eq__(self, other: PeriodicScene) -> bool:
        # TODO: a more expansive equality check should check on the folded periodic images of domain and pbc are equal
        if isinstance(other, PeriodicScene):
            domain_check = self.domain == other.domain
            pbc_check = self.pbc == other.pbc
            # Objects are compared as unordered sets; insertion order is irrelevant.
            object_check = EqualSet(self.objects) == EqualSet(other.objects)
            return domain_check and object_check and pbc_check
        else:
            return False

    @property
    def pbc(self):
        """Tuple of three bools; True where the scene is periodic along that cell vector."""
        return self._pbc

    @pbc.setter
    def pbc(self, val):
        if len(val) == 3:
            # Every entry must be a (builtin or numpy) boolean, else TypeError.
            if reduce(lambda x, y: x and y, [np.issubdtype(type(v), bool) for v in val]):
                self._pbc = tuple(val)
            else:
                raise TypeError
        else:
            raise ValueError

    def _get_periodic_indices(self, bbox):
        """Get set of translation vectors, in units of the domain cell, for all
        relevant periodic images to generate."""

        # Bounding-box corners in fractional (cell) coordinates of the domain.
        cell_coords = self.domain.get_voxel_coords(bbox)

        pos_shifts = cell_coords < 0  # Volume needs to be shifted in positive directions
        neg_shifts = cell_coords >= 1  # Volume needs to be shifted in negative directions

        # Per-axis flags: does any corner fall outside the cell on that side?
        ps = [np.any(pos_shifts[:, i]) for i in range(cell_coords.shape[1])]
        ns = [np.any(neg_shifts[:, i]) for i in range(cell_coords.shape[1])]

        # Candidate integer shifts per axis; 0 means "no translation".
        indices = [[0] for _ in range(cell_coords.shape[1])]
        for i, (p, n) in enumerate(zip(ps, ns)):
            if self.pbc[i]:
                if p and n:
                    # Object sticks out on both sides of a periodic axis; folding is ambiguous.
                    raise AssertionError("Points extend through periodic domain")
                if p:
                    # Shift by enough whole cells to bring the lowest corner inside.
                    N_cells = -np.min(np.floor(cell_coords[:, i]))
                    indices[i] = [N_cells]
                    # If the object straddles the boundary, or spans more than
                    # one cell, the next-smaller shift is also needed.
                    if (not np.all(pos_shifts[:, i])) or (N_cells > 1):
                        indices[i].append(N_cells - 1)
                if n:
                    # Mirror case: shift downward to bring the highest corner inside.
                    N_cells = -np.max(np.floor(cell_coords[:, i]))
                    indices[i] = [N_cells]
                    if (not np.all(neg_shifts[:, i])) or (N_cells < -1):
                        indices[i].append(N_cells + 1)

        # All combinations of per-axis shifts, minus the identity translation.
        periodic_indices = set(product(*indices)).difference([(0, 0, 0)])
        return periodic_indices

    def _get_periodic_images(self):
        """Get periodic images of all objects."""
        self._periodic_images = {}
        for ob in self.objects:
            self._periodic_images[id(ob)] = []

            ## Determine which periodic images need to be generated
            bbox = ob.get_bounding_box()
            periodic_indices = self._get_periodic_indices(bbox)

            for pidx in periodic_indices:
                ## For each image, get a copy of volume translated to its periodic image
                pvec = np.array(pidx, dtype=int).reshape((3, -1))
                tvec = (self.domain.sbases @ pvec).reshape((3))
                transformation = [Translation(tvec)]
                new_vol = ob.from_volume(transformation=transformation)
                self._periodic_images[id(ob)].append(new_vol)

    def _get_folded_positions(self, points):
        """Fold cartesian points back into the domain cell along periodic axes only."""
        domain_coords = self.domain.get_voxel_coords(points)

        # Mask selecting which coordinates get folded; non-periodic axes pass through.
        fold_boundary = np.ones_like(domain_coords, dtype=bool)
        for i, p in enumerate(self.pbc):
            if not p:
                fold_boundary[:, i] = False

        # Wrap fractional coordinates into [0, 1) in place where periodic.
        folded_coords = np.mod(domain_coords, 1.0, out=domain_coords, where=fold_boundary)
        return self.domain.get_cartesian_coords(folded_coords)

    @property
    def periodic_images(self):
        """Dict mapping id(object) -> list of translated periodic-image volumes."""
        return self._periodic_images

    def check_against_object(self, atoms, idx):
        # Atoms survive only if they are interior to neither the object at idx
        # nor any of its periodic images (interiority masks OR-reduced together).
        pkey = id(self.objects[idx])
        return np.logical_not(
            reduce(
                lambda x, y: np.logical_or(x, y),
                [po.checkIfInterior(atoms) for po in self.periodic_images[pkey]],
                self.objects[idx].checkIfInterior(atoms),
            )
        )

    def _prepare_for_population(self):
        # Periodic images must be rebuilt before each population pass.
        self._get_periodic_images()

    @property
    def all_atoms(self):
        # Positions from the base scene, folded into the periodic cell.
        return self._get_folded_positions(super().all_atoms)

    @property
    def ase_atoms(self):
        """Collection of atoms in scene as ASE Atoms object."""
        return Atoms(
            symbols=self.all_species,
            positions=self.all_atoms,
            cell=self.domain.sbases.T,
            pbc=self.pbc,
        )
178 |
--------------------------------------------------------------------------------
/docs/source/examples/core_shell_nanoparticle.rst:
--------------------------------------------------------------------------------
1 | Core-Shell Mn3O4/Co3O4 Nanoparticle
2 | =========================================
3 |
4 | In this example, we'll be making a core-shell oxide nanoparticle similar to those
5 | studied in :footcite:t:`oh_design_2020`. This nanoparticle has a cubic Co3O4
6 | grain in the center with strained Mn3O4 grains on all faces of the inner grain.
7 |
8 | First, let's import the routines we need.
9 |
10 | .. code-block::
11 |
12 | import numpy as np
13 | import czone as cz
14 | from pymatgen.core import Structure
15 | from czone.volume import Volume, MultiVolume, Plane, snap_plane_near_point
16 | from czone.generator import Generator, AmorphousGenerator
17 | from czone.transform import Rotation, Reflection, Translation, rot_v, rot_vtv
18 | from czone.scene import Scene
19 |
20 | We downloaded `cif` files from the [Materials Project](https://materialsproject.org/) database
21 | for the unit cell data for the two grain types. Here, we load the files into
22 | [pymatgen](https://pymatgen.org/) Structure objects which serve as the core drivers of our crystalline Generators.
23 |
24 | .. code-block::
25 |
26 | mn_crystal = Structure.from_file("Mn3O4_mp-18759_conventional_standard.cif")
27 | co_crystal = Structure.from_file("Co3O4_mp-18748_conventional_standard.cif")
28 |
29 | In this nanoparticle, the Mn grains grow with their basal planes on the faces of (100)
30 | planes of the Co core. The Mn lattice is rotated and strained such that Mn sites
31 | along the <110> direction are coincident with the Co sites. Here, we calculate
32 | the appropriate strain for the Mn unit cell and apply it to the structure.
33 |
34 | .. code-block::
35 |
36 | # correct lattice mismatch
37 | co_100 = np.linalg.norm(co_crystal.lattice.matrix @ np.array([1,0,0]))
38 | mn_110 = np.linalg.norm(mn_crystal.lattice.matrix @ np.array([1,1,0]))
39 | mn_crystal.apply_strain([co_100/mn_110-1, co_100/mn_110-1, 0])
40 |
41 | We start the nanoparticle by making the Co core. We create a generator using the
42 | Co structure above, and then find the 6 planes in the (100) family that are
43 | 6 unit cells away from the lattice origin to use as boundaries for the cube.
44 | We add the planes and the generator to a Volume object and are done with the core.
45 |
46 | .. code-block::
47 |
48 | # Make Co3O4 core
49 | co_gen = Generator(structure=co_crystal)
50 |
51 | N_uc = 6 # stretch N unit cells from center
52 | co_a = co_crystal.lattice.a
53 | vecs_100 = np.array([[1,0,0],[-1,0,0],[0,1,0],[0,-1,0],[0,0,1],[0,0,-1]])
54 | planes_100 = []
55 | for v in vecs_100:
56 | planes_100.append(snap_plane_near_point(N_uc*co_a*v, co_gen, tuple(v)))
57 |
58 | co_core = Volume(alg_objects=planes_100, generator=co_gen)
59 |
60 |
61 | For the Mn grains, we will start with a single unstrained grain and then replicate
62 | that grain about the Co core. We create the Generator from the Structure object,
63 | and then rotate it about its c-axis 45 degrees and then translate it to the top of the
64 | Co core. Since by default Generators have a local origin of (0,0,0), we do not need
65 | to do anything more to properly align the Mn and Co lattices.
66 |
67 | .. code-block::
68 |
69 | # Make unstrained Mn3O4 grain
70 | mn_gen = Generator(structure=mn_crystal)
71 |
72 | # rotate 45 degrees and place on top of Co core
73 | r = Rotation(matrix=rot_v(np.array([0,0,1]), np.pi/4))
74 | t = Translation(np.array([0,0,N_uc*co_a]))
75 | mn_gen.transform(r)
76 | mn_gen.transform(t)
77 |
78 | The Mn grain has many facets. Keeping in mind that we are working in the grain
79 | on the +Z side of the core, we have (100) facets on the bottom and top of the grain;
80 | (112) facets at Mn-Mn interfaces; and (101) facets facing free space.
81 |
82 | For the bottom facet, we grab a point at the Co surface, and for the top facet,
83 | we grab a point 6 unit cells above said point and snap (001) planes in the Mn
84 | crystal coordinate system to these points. For the (112) facets, which meet the
85 | edges of the Co core, we grab (112) planes running through the edge by choosing
86 | points that lie on the edge and snapping to them. For the (101) facets, we
87 | just choose points near the top surface and translate outwards--- this was chosen
88 | heuristically to look nice.
89 |
90 | .. code-block::
91 |
92 | # define top and bottom 100 surfaces
93 | surface_point = np.array([0,0,N_uc*co_a])
94 | mn_c = mn_crystal.lattice.c
95 | mn_bot = snap_plane_near_point(surface_point, mn_gen, (0,0,-1))
96 | mn_top = snap_plane_near_point(surface_point+6*mn_c*np.array([0,0,1]), mn_gen, (0,0,1))
97 |
98 | # define 112 facets
99 | side_vs_112 = [(1,1,-2),(1,-1,-2), (-1,1,-2), (-1,-1,-2)]
100 | co_vector = [(0,1,0), (1,0,0), (-1,0,0), (0,-1,0)]
101 | sides_112 = []
102 | for s, c in zip(side_vs_112, co_vector):
103 | tmp_point = N_uc * np.array(c) * co_a + surface_point
104 | tmp_plane = snap_plane_near_point(tmp_point, mn_gen, s)
105 | sides_112.append(tmp_plane)
106 |
107 | # define 101 facets
108 | mn_a = mn_crystal.lattice.a
109 | side_vs_101 = [(1,0,1),(0,1,1),(-1,0,1),(0,-1,1)]
110 | sides_101 = []
111 | for s in side_vs_101:
112 | tmp_point = np.array([1,1,0]) * np.sign(s) * 12*mn_a + mn_top.point
113 | tmp_plane = snap_plane_near_point(tmp_point, mn_gen, s)
114 | sides_101.append(tmp_plane)
115 |
116 | Now that we have the facets of our grain defined, we create a Volume for the grain
117 | with all of the planes we defined along with the Mn lattice generator that
118 | we have previously rotated and translated.
119 |
120 | .. code-block::
121 |
122 | # create volume representing grain
123 | mn_vols = [mn_bot, mn_top] + sides_112+sides_101
124 | mn_grain = Volume(alg_objects=mn_vols, generator=mn_gen)
125 | mn_grain.priority=1
126 |
127 | For the five other grains, we can rotate the original grain. We first rotate about
128 | the global y-axis and then the global x-axis to flip the grain around appropriately.
129 | By default, the origin of a rotation is set to the global origin, but any origin can be chosen.
130 |
131 | .. code-block::
132 |
133 | # rotate to make 5 other grains from +z shell grain
134 | mn_grains = [mn_grain]
135 |
136 | # get +x, -z, -x
137 | for theta in [np.pi/2, np.pi, -np.pi/2]:
138 | rot = Rotation(rot_v(np.array([0,1,0]),theta))
139 | tmp_grain = mn_grain.from_volume(transformation=[rot])
140 | mn_grains.append(tmp_grain)
141 |
142 | # get +-y
143 | for theta in [np.pi/2, -np.pi/2]:
144 | rot = Rotation(rot_v(np.array([1,0,0]),theta))
145 | tmp_grain = mn_grain.from_volume(transformation=[rot])
146 | mn_grains.append(tmp_grain)
147 |
148 | We finally add all the volumes together to a MultiVolume and write out the nanoparticle
149 | to a structure file for visualization.
150 |
151 | .. code-block::
152 |
153 | # make final core-shell NP as multivolume and save to file
154 | core_shell_NP = MultiVolume([co_core] + mn_grains)
155 | core_shell_NP.populate_atoms()
156 | core_shell_NP.to_file("core_shell_NP.xyz")
157 |
158 | We now have this complex oxide nanoparticle structure! However, there is one glaringly un-physical
159 | feature-- between the Mn grains, there is a gap between the (112) facets. As grown,
160 | these nanoparticles are continuous in the Mn grains. The exact mechanism by which the nanoparticles
161 | accommodate for this gap is still an object of research. However, for now, we can conceive of
162 | accommodating this gap by a simple homogeneous strain that compresses the Mn grains along their c-axes
163 | until the gap is closed. In Construction Zone, this is also easy to accomplish.
164 |
165 | TO COME: Applying a geometrically necessary strain field to the particle.
166 |
--------------------------------------------------------------------------------
/tests/README.md:
--------------------------------------------------------------------------------
1 | ## Testing roadmap for Construction Zone:
2 |
3 | Test goals below are listed in relative priority/dependence order, where relevant.
4 |
5 | High Priority:
6 | - ~~Finish unit tests for algebraic volumes, excluding Miller plane utility~~
7 | - Unit tests for Volumes and Multivolumes
8 | - Unit tests for core Generator objects
9 | - Write direct unit tests for Transform core
10 | - Unit tests for Scenes
11 | - Integration tests for Volumes-Generators-Transforms, comparing against Molecules to ensure structure generation consistency
12 | - Unit tests for Amorphous Generators and associated algorithms, utils
13 | - Partial unit tests for Prefab core (not including example defect classes)
14 |
15 | Medium priority:
16 | - Unit tests for Prefab core defect classes
17 | - Unit tests for Wulff particles
18 | - Unit tests for Alpha shapes
19 | - Unit tests for Adsorbate algorithms and utilities
20 | - Unit tests for Post transformations and Strain utilities
21 |
22 | Low priority:
23 | - Unit tests for Misc. Utils (where not tested directly elsewhere)
24 | - Unit tests for Voxel module (or, deprecation and refactoring into Generator)
25 | - Unit tests and/or deprecation of Viz utilities
26 |
27 | Other goals (to be included as part of a feature roadmap):
28 | - Create Github actions/runners and/or environment testing scripts to rapidly check dependency compatibility
29 | - Explore Docker/Shifter/Podman containerization and evaluate utility, ease of generation
30 | - Expose and unify handling of RNGs
31 | - Write doc strings where possible
32 | - Implement __repr__ methods where missing
33 |
34 | As tests are completed, we can expect significant refactoring and clarification of API, where possible, as well as implementation of
35 | better error handling.
36 |
37 | ### Manual Coverage Summary (LRD, 5 Feb. 2024):
38 | (x = total, + = partial (direct), - = partial (indirect), blank = none)
39 |
40 | [ ] Generator \
41 | | [ ] Amorphous Algorithms \
42 | | [ ] Generator Core
43 |
44 | [x] Molecule
45 |
46 | [ ] Prefab \
47 | | [ ] Prefab Core \
48 | | [ ] Wulff
49 |
50 | [ ] Scene
51 |
52 | [ ] Surface \
53 | | [ ] Adsorbate \
54 | | [ ] Alpha shapes
55 |
56 | [-] Transform \
57 | | [ ] Post \
58 | | [ ] Strain \
59 | | [-] Transform Core
60 |
61 | [ ] Util \
62 | | [ ] Measure \
63 | | [ ] Misc.
64 |
65 | [ ] Viz
66 |
67 | [+] Volume \
68 | | [+] Algebraic \
69 | | [ ] Volume Core \
70 | | [ ] Voxel
71 |
72 |
73 | ### Detailed coverage summary, via Coverage Report + pytest (LRD, 5 Feb. 2024):
74 |
75 | ```
76 | Name Stmts Miss Cover Missing
77 | ----------------------------------------------------------------------------------------------------------
78 | czone/__init__.py 8 0 100%
79 | czone/generator/__init__.py 2 0 100%
80 | czone/generator/amorphous_algorithms.py 137 127 7% 15-20, 23-29, 33-35, 45-69, 107-158, 168-245
81 | czone/generator/generator.py 185 105 43% 44, 54-60, 91-104, 113, 117-126, 132, 137, 142, 147, 152,
82 | 156-160, 165, 169-172, 177, 181-183, 188, 192-194, -236,
83 | 244-252, 280-297, 316-321, 348-360, 369, 373-375, 380,
84 | 384, 389, 393-394, 399, 403-404, 409, 416-422, 436, 439
85 | czone/molecule/__init__.py 1 0 100%
86 | czone/molecule/molecule.py 156 1 99% 229
87 | czone/prefab/__init__.py 2 0 100%
88 | czone/prefab/prefab.py 262 178 32% 32, 65-81, 86, 90, 95, 99-102, 107, 111, 116, 120-122,
89 | 127, 131-133, 138, 142-145, 151-218, 244, 252, 256, 281,
90 | 289, 293, 325-337, 342, 346, 351, 355-358, 363, 368,
91 | 372-374, 379, 383-386, 393-467, 489-495, 499, 503-507,
92 | 511, 515-522, 526-540
93 | czone/prefab/wulff.py 170 106 38% 44, 53, 57-61, 66, 71, 75-78, 83, 87-91, 107, 122, 135,
94 | 144-147, 160-162, 165-169, 172, 191-195, 208-210, 223-225,
95 | 238-240, 253-255, 317-320, 325, 329-330, 333-369, 398-399,
96 | 413-417, 420-459
97 | czone/scene/__init__.py 1 0 100%
98 | czone/scene/scene.py 85 55 35% 25-43, 48, 52-54, 59, 68-72, 77, 81, 86, 92, 98-99, 109,
99 | 122-133, 147-164, 177-187
100 | czone/surface/__init__.py 2 0 100%
101 | czone/surface/adsorbate.py 118 106 10% 39-49, 70-110, 129-145, 181-292
102 | czone/surface/alpha_shape.py 75 69 8% 21-49, 66-110, 127-185
103 | czone/transform/__init__.py 3 0 100%
104 | czone/transform/post.py 47 28 40% 20, 34, 39-42, 46-55, 58, 63, 66, 74-76, 84-92, 96-99
105 | czone/transform/strain.py 113 71 37% 26, 38, 46-50, 55, 59-60, 64, 68, 73, 77-78, 85, 102-117,
106 | 126, 130-144, 152-164, 184-198, 207, 211-214, 219,
107 | 223-232, 240-258
108 | czone/transform/transform.py 228 104 54% 44, 56, 69, 75, 80, 91, 111-118, 122, 126-129, 133, 136,
109 | 139, 142-148, 204, 216, 221-226, 234, 251, 297-303, 307,
110 | 312, 316-321, 325, 328-337, 352-355, 360, 364, 369-370,
111 | 375-381, 389-398, 401-403, 406-408, 411-413, 431-434,
112 | 447-472, 486, 512-519, 546-560
113 | czone/util/__init__.py 2 0 100%
114 | czone/util/measure.py 118 112 5% 33-156, 174-193, 211-253
115 | czone/util/misc.py 25 18 28% 16, 31-54, 67
116 | czone/viz/__init__.py 1 0 100%
117 | czone/viz/viz.py 171 159 7% 5, 14-66, 70-149, 153-232, 236-254, 261-279
118 | czone/volume/__init__.py 3 0 100%
119 | czone/volume/algebraic.py 225 30 87% 42, 47, 366, 443-494
120 | czone/volume/volume.py 246 175 29% 41, 46, 51, 56, 61, 65-68, 77, 82, 91, 100, 138-165, 174,
121 | 178-182, 187, 195-198, 203, 208, 213, 218, 222-223,
122 | 226-235, 244-247, 255-274, 278-291, 294-312, 320-344,
123 | 348-353, 366-378, 404-410, 415, 423-431, 444-455, 458-462,
124 | 465-477, 481-502, 517-521, 543-554
125 | czone/volume/voxel.py 55 34 38% 26-31, 36, 40-47, 52, 56-73, 78, 82-83, 88, 93, 105,
126 | 118-128
127 | ----------------------------------------------------------------------------------------------------------
128 | TOTAL 2748 1520 45%
129 | ```
--------------------------------------------------------------------------------
/czone/surface/alpha_shape.py:
--------------------------------------------------------------------------------
1 | """
2 | Primitives and algorithms for determining alpha-shape of collection of points.
3 | """
4 |
5 | import numpy as np
6 | from scipy.spatial import Delaunay
7 |
8 |
def tetrahedron_circumradii(points):
    """Compute the circumradii of a batch of tetrahedra. Vectorized code.

    Vectorized adaptation of code from https://github.com/python-adaptive/adaptive;
    uses the determinant formulation of the circumsphere,
    ref: https://mathworld.wolfram.com/Circumsphere.html

    Args:
        points (np.ndarray): Nx4x3 array of points, representing vertices of N tetrahedra.

    Returns:
        np.ndarray of circumradii of the N tetrahedra.
    """
    verts = np.array(
        verts if (verts := points) is not None else points
    )  # + np.random.rand(...)*1e-8 jitter (disabled) was once used to avoid divide by zero

    # Translate each tetrahedron so its first vertex sits at the origin;
    # the circumradius is then the distance from the origin to the circumcenter.
    rel = verts[:, 1:] - verts[:, 0, None]

    (x1, y1, z1), (x2, y2, z2), (x3, y3, z3) = (
        (rel[:, k, 0], rel[:, k, 1], rel[:, k, 2]) for k in range(3)
    )

    # Squared lengths of the three edge vectors from the first vertex.
    sq1 = x1 * x1 + y1 * y1 + z1 * z1
    sq2 = x2 * x2 + y2 * y2 + z2 * z2
    sq3 = x3 * x3 + y3 * y3 + z3 * z3

    # Cofactor determinants of the circumsphere matrix (MathWorld notation).
    dx = sq1 * (y2 * z3 - z2 * y3) - sq2 * (y1 * z3 - z1 * y3) + sq3 * (y1 * z2 - z1 * y2)
    dy = sq1 * (x2 * z3 - z2 * x3) - sq2 * (x1 * z3 - z1 * x3) + sq3 * (x1 * z2 - z1 * x2)
    dz = sq1 * (x2 * y3 - y2 * x3) - sq2 * (x1 * y3 - y1 * x3) + sq3 * (x1 * y2 - y1 * x2)
    vol_det = x1 * (y2 * z3 - z2 * y3) - x2 * (y1 * z3 - z1 * y3) + x3 * (y1 * z2 - z1 * y2)
    denom = 2 * vol_det

    # Circumcenter relative to the first vertex; its norm is the circumradius.
    center = np.vstack([dx / denom, -dy / denom, dz / denom])
    return np.linalg.norm(center, axis=0)
52 |
53 |
def alpha_shape_alg_3D(points, probe_radius, return_alpha_shape=False, rng=None):
    """Use alpha shape algorithm to determine points on exterior of collection of points.

    Performs alpha-shape algorithm ##TODO: cite a source here

    Small random jitter is added to the points before triangulating, to break
    degeneracies that would otherwise cause divide-by-zero errors in the
    circumradius calculation.

    Args:
        points (np.ndarray): Nx3 array representing coordinates of points in object
        probe_radius (float): radius of test probe
        return_alpha_shape (bool): return dictionary of alpha shape arrays, default False.
        rng (np.random.Generator): optional RNG used for the jitter; a fresh
            np.random.default_rng() is used if not given. Pass a seeded
            generator for reproducible results.

    Returns:
        List of indices of points on exterior of surface for given alpha-shape.
        If return_alpha_shape is True, additionally returns a dictionary with
        the Delaunay triangulation ('tri'), the boolean mask of surface
        simplices ('surface_tris'), and the indices of all simplices in the
        alpha shape ('a_tris').
    """
    rng = np.random.default_rng() if rng is None else rng

    ## Get alpha-shape
    # get delaunay triangulation of points
    points = points + 1e-4 * rng.random(points.shape)
    tri = Delaunay(points)

    # get circumradii of all tetrahedra in triangulation
    circumradii = tetrahedron_circumradii(
        (points + 1e-10 * rng.random(points.shape))[tri.simplices, :]
    )

    # check which tetrahedra in triangulation are part of alpha-shape for given probe radius
    probe_test = circumradii <= probe_radius

    ## Get outer elements of alpha-shape
    # check which tetrahedra are on outside of triangulation
    outside = np.sum(tri.neighbors >= 0, axis=1) < 4

    # check which tetrahedra are neighbors to those not in alpha-shape;
    # however, only want to check neighbors that are on the outside of the triangulation
    # so that we can avoid counting tetrahedra that fail the probe test on large internal voids
    neighbor_check_0 = np.logical_not(probe_test[tri.neighbors])  # neighbors that fail probe test
    neighbor_check_1 = outside[tri.neighbors]  # neighbors on the outside
    neighbor_check = np.any(np.logical_and(neighbor_check_0, neighbor_check_1), axis=1)

    # get tetrahedra that are on the surface of alpha-shape
    surface_tris = np.logical_and(probe_test, np.logical_or(outside, neighbor_check))

    ## Get outer points of outer elements of alpha-shape
    # determine which points are along the outer edges of tetrahedra/alpha-shape
    sub_points = tri.simplices[surface_tris, :]
    sub_neighbors = tri.neighbors[surface_tris, :]
    out_points = sub_points[
        np.logical_not(
            np.logical_or(sub_neighbors == -1, np.logical_not(probe_test[sub_neighbors]))
        )
    ]

    if return_alpha_shape:
        shape_dict = {}

        # return the full triangulation and corresponding surface tris
        shape_dict["tri"] = tri
        shape_dict["surface_tris"] = surface_tris

        # also return list of simplices indices for full alpha shape
        a_tris = np.logical_not(np.logical_and(outside, np.logical_not(probe_test)))
        shape_dict["a_tris"] = np.nonzero(a_tris)

        return list(set(out_points)), shape_dict
    else:
        return list(set(out_points))
120 |
121 |
def alpha_shape_alg_3D_with_sampling(
    points, probe_radius, N_samples, std=1e-4, return_alpha_shape=False, rng=None
):
    """Use alpha shape algorithm to determine points on exterior of collection of points.

    Performs alpha-shape algorithm ##TODO: cite a source here

    The Delaunay triangulation is computed once; the probe test is then
    repeated N_samples times under small Gaussian perturbations of the points,
    and a simplex belongs to the alpha-shape if it passes in any sample.

    Args:
        points (np.ndarray): Nx3 array representing coordinates of points in object
        probe_radius (float): radius of test probe
        N_samples (int): number of perturbed repetitions of the probe test.
        std (float): standard deviation of the Gaussian perturbations, default 1e-4.
        return_alpha_shape (bool): return dictionary of alpha shape arrays, default False.
        rng (np.random.Generator): optional RNG for the perturbations; a fresh
            np.random.default_rng() is used if not given.
    Returns:
        List of indices of points on exterior of surface for given alpha-shape.
    """
    rng = np.random.default_rng() if rng is None else rng

    # Triangulate the unperturbed points once.
    tri = Delaunay(points)

    # Accumulate the probe test over all noise samples: a simplex is in the
    # alpha-shape if its circumradius passes in at least one sample.
    in_alpha = np.zeros(tri.simplices.shape[0], dtype=bool)
    for _ in range(N_samples):
        jitter = std * rng.standard_normal(size=points.shape)
        radii = tetrahedron_circumradii((points + jitter)[tri.simplices, :])
        in_alpha |= radii <= probe_radius

    # Simplices with fewer than 4 valid neighbors lie on the hull of the triangulation.
    on_hull = np.sum(tri.neighbors >= 0, axis=1) < 4

    # A simplex also counts as surface if it touches a neighbor that fails the
    # probe test AND lies on the outside of the triangulation; restricting to
    # outside neighbors avoids counting simplices bordering large internal voids.
    fails_probe = np.logical_not(in_alpha[tri.neighbors])
    outside_neighbor = on_hull[tri.neighbors]
    touches_failing_outside = np.any(fails_probe & outside_neighbor, axis=1)

    # Surface simplices: in the alpha-shape, and either on the hull or
    # adjacent to a failing outside neighbor.
    surface_tris = in_alpha & (on_hull | touches_failing_outside)

    # From the surface simplices, keep vertices whose opposite neighbor exists
    # and is itself part of the alpha-shape.
    surf_simplices = tri.simplices[surface_tris, :]
    surf_neighbors = tri.neighbors[surface_tris, :]
    keep = np.logical_not(
        (surf_neighbors == -1) | np.logical_not(in_alpha[surf_neighbors])
    )
    out_points = surf_simplices[keep]

    if return_alpha_shape:
        shape_dict = {
            # full triangulation and corresponding surface simplices
            "tri": tri,
            "surface_tris": surface_tris,
            # indices of all simplices belonging to the full alpha shape
            "a_tris": np.nonzero(np.logical_not(on_hull & np.logical_not(in_alpha))),
        }
        return list(set(out_points)), shape_dict

    return list(set(out_points))
195 |
--------------------------------------------------------------------------------
/tests/test_rng.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pytest
3 | from .czone_test_fixtures import czone_TestCase
4 |
5 | from czone.generator.amorphous_algorithms import gen_p_substrate, gen_p_substrate_batched
6 | from czone.generator.generator import AmorphousGenerator, Generator
7 | from czone.molecule.molecule import Molecule
8 | from czone.prefab.prefab import fccMixedTwinSF, wurtziteStackingFault
9 | from czone.surface.adsorbate import add_adsorbate, find_approximate_normal
10 | from czone.surface.alpha_shape import alpha_shape_alg_3D_with_sampling
11 | from czone.transform.post import ChemicalSubstitution
12 | from czone.util.misc import get_N_splits
13 | from czone.volume.algebraic import Sphere
14 | from czone.volume.volume import Volume
15 |
16 | """
17 | These unit tests are not meant to measure code functionality/correctness.
18 | Instead, these are meant to check that any method using RNGs can be
19 | completely reproduced by passing the RNG in as a property/argument.
20 | """
21 |
22 | seed = 9871492
23 | base_rng = np.random.default_rng(seed=seed)
24 |
25 |
class Test_Functions(czone_TestCase):
    """Reproducibility checks for free functions that accept an ``rng`` argument."""

    def setUp(self):
        self.N_trials = 32

    def assertConsistent(self, F, args, seed):
        """Assert F(*args, rng=rng) is fully determined by the RNG state."""
        rng = np.random.default_rng(seed)
        saved_state = rng.bit_generator.state  # cache state so the call can be replayed
        first = F(*args, rng=rng)

        # Rewind the RNG to the cached state and repeat the call.
        rng.bit_generator.state = saved_state
        second = F(*args, rng=rng)

        if isinstance(second, np.ndarray):
            self.assertArrayEqual(first, second)
        elif (
            isinstance(second, (tuple, list))
            and len(second) == 2
            and isinstance(second[0], Molecule)
            and isinstance(second[1], bool)
        ):
            self.assertArrayEqual(first[0].atoms, second[0].atoms)
            self.assertArrayEqual(first[0].species, second[0].species)
            self.assertEqual(first[1], second[1])
        else:
            self.assertEqual(first, second)

    def test_get_N_splits(self):
        length, n_splits, min_dist = 32, 4, 2
        for _ in range(self.N_trials):
            trial_seed = base_rng.integers(0, int(1e6))
            self.assertConsistent(get_N_splits, (n_splits, min_dist, length), trial_seed)

    def test_find_approximate_normal(self):
        # Use only the lower half-space of a random cloud as the test surface.
        cloud = base_rng.normal(size=(100, 3))
        lower_half = cloud[cloud[:, 2] <= 0, :]
        for _ in range(self.N_trials):
            trial_seed = base_rng.integers(0, int(1e6))
            self.assertConsistent(find_approximate_normal, (lower_half,), trial_seed)

    @pytest.mark.filterwarnings(
        "ignore:Requested to transform molecule", "ignore:Input Volume has not pop"
    )
    def test_add_adsorbate(self):
        ## Build a spherical test volume decorated with random surface atoms
        positions = 10 * base_rng.normal(size=(256, 3))
        species = np.ones(positions.shape[0])

        volume = Volume(
            alg_objects=Sphere(10, np.zeros(3)), generator=Molecule(species, positions)
        )

        adsorbate = Molecule([1], np.zeros((1, 3)))
        call_args = (adsorbate, 0, 1.0, volume)

        for _ in range(self.N_trials // 2):
            trial_seed = base_rng.integers(0, int(1e6))
            self.assertConsistent(add_adsorbate, call_args, trial_seed)

    def test_alpha_shape_alg_3D_with_sampling(self):
        cloud = 10 * base_rng.normal(size=(256, 3))
        probe_radius, n_samples = 1.0, 4

        for _ in range(self.N_trials):
            trial_seed = base_rng.integers(0, int(1e6))
            self.assertConsistent(
                alpha_shape_alg_3D_with_sampling, (cloud, probe_radius, n_samples), trial_seed
            )

    def test_gen_p_substrate(self):
        dims = (15, 15, 15)
        for _ in range(self.N_trials):
            trial_seed = base_rng.integers(0, int(1e6))
            self.assertConsistent(gen_p_substrate, (dims,), trial_seed)

    def test_gen_p_substrate_batched(self):
        dims = (15, 15, 15)
        for _ in range(self.N_trials):
            trial_seed = base_rng.integers(0, int(1e6))
            self.assertConsistent(gen_p_substrate_batched, (dims,), trial_seed)
110 |
111 |
class Test_Classes(czone_TestCase):
    """Reproducibility checks for classes that hold an injected RNG."""

    def test_AmorphousGenerator(self):
        N_trials = 10
        for _ in range(N_trials):
            seed = base_rng.integers(0, int(1e6))
            rng = np.random.default_rng(seed=seed)
            ref_volume = Volume(
                alg_objects=Sphere(10, np.zeros(3)), generator=AmorphousGenerator(rng=rng)
            )

            test_volume = ref_volume.from_volume()

            # Copying a volume should deep-copy the generator's RNG:
            # distinct objects, identical state.
            self.assertNotEqual(id(ref_volume.generator.rng), id(test_volume.generator.rng))
            self.assertEqual(
                ref_volume.generator.rng.bit_generator.state,
                test_volume.generator.rng.bit_generator.state,
            )

            ref_volume.populate_atoms()
            test_volume.populate_atoms()

            self.assertArrayEqual(ref_volume.atoms, test_volume.atoms)
            self.assertArrayEqual(ref_volume.species, test_volume.species)

    def test_ChemicalSubstitution(self):
        test_generator = Generator.from_spacegroup(
            [6], np.zeros((1, 3)), [1, 1, 1], [90, 90, 90], sgn=225
        )
        ref_volume = Volume(alg_objects=Sphere(10, np.zeros(3)), generator=test_generator)
        test_volume = ref_volume.from_volume()

        N_trials = 32
        for _ in range(N_trials):
            # Identically-seeded RNGs must yield identical substitutions.
            seed = base_rng.integers(0, int(1e6))
            ref_rng = np.random.default_rng(seed=seed)
            test_rng = np.random.default_rng(seed=seed)
            ref_volume.generator.post_transform = ChemicalSubstitution({6: 8}, 0.1, rng=ref_rng)
            test_volume.generator.post_transform = ChemicalSubstitution({6: 8}, 0.1, rng=test_rng)

            ref_volume.populate_atoms()
            test_volume.populate_atoms()
            self.assertArrayEqual(ref_volume.species, test_volume.species)

    def _assert_prefab_reproducible(self, prefab_cls, generator, volume, N_trials=32):
        """Assert prefab_cls builds identical objects from identically-seeded RNGs."""
        for _ in range(N_trials):
            seed = base_rng.integers(0, int(1e6))
            ref_rng = np.random.default_rng(seed=seed)
            test_rng = np.random.default_rng(seed=seed)

            ref_fab = prefab_cls(generator, volume, rng=ref_rng)
            test_fab = prefab_cls(generator, volume, rng=test_rng)

            ref_obj = ref_fab.build_object()
            ref_obj.populate_atoms()

            test_obj = test_fab.build_object()
            test_obj.populate_atoms()

            self.assertArrayEqual(ref_obj.atoms, test_obj.atoms)
            self.assertArrayEqual(ref_obj.species, test_obj.species)

    def test_fccMixedTwinSF(self):
        generator = Generator.from_spacegroup(
            [6], np.zeros((1, 3)), [2, 2, 2], [90, 90, 90], sgn=225
        )
        volume = Volume(alg_objects=Sphere(10, np.zeros(3)))
        self._assert_prefab_reproducible(fccMixedTwinSF, generator, volume)

    def test_wurtziteStackingFault(self):
        generator = Generator.from_spacegroup(
            [6], np.zeros((1, 3)), [2, 2, 3], [90, 90, 120], sgn=186
        )
        volume = Volume(alg_objects=Sphere(10, np.zeros(3)))
        self._assert_prefab_reproducible(wurtziteStackingFault, generator, volume)
202 |
--------------------------------------------------------------------------------
/docs/source/refs.bib:
--------------------------------------------------------------------------------
1 | @ARTICLE{2020SciPy,
2 | author = {Virtanen, Pauli and Gommers, Ralf and Oliphant, Travis E. and
3 | Haberland, Matt and Reddy, Tyler and Cournapeau, David and
4 | Burovski, Evgeni and Peterson, Pearu and Weckesser, Warren and
5 | Bright, Jonathan and {van der Walt}, St{\'e}fan J. and
6 | Brett, Matthew and Wilson, Joshua and Millman, K. Jarrod and
7 | Mayorov, Nikolay and Nelson, Andrew R. J. and Jones, Eric and
8 | Kern, Robert and Larson, Eric and Carey, C J and
9 | Polat, {\.I}lhan and Feng, Yu and Moore, Eric W. and
10 | {VanderPlas}, Jake and Laxalde, Denis and Perktold, Josef and
11 | Cimrman, Robert and Henriksen, Ian and Quintero, E. A. and
12 | Harris, Charles R. and Archibald, Anne M. and
13 | Ribeiro, Ant{\^o}nio H. and Pedregosa, Fabian and
14 | {van Mulbregt}, Paul and {SciPy 1.0 Contributors}},
15 | title = {{{SciPy} 1.0: Fundamental Algorithms for Scientific
16 | Computing in Python}},
17 | journal = {Nature Methods},
18 | year = {2020},
19 | volume = {17},
20 | pages = {261--272},
21 | adsurl = {https://rdcu.be/b08Wh},
22 | doi = {10.1038/s41592-019-0686-2},
23 | }
24 |
25 | @ARTICLE{2020NumPy,
26 | author = {Harris, Charles R. and Millman, K. Jarrod and
27 | van der Walt, Stéfan J and Gommers, Ralf and
28 | Virtanen, Pauli and Cournapeau, David and
29 | Wieser, Eric and Taylor, Julian and Berg, Sebastian and
30 | Smith, Nathaniel J. and Kern, Robert and Picus, Matti and
31 | Hoyer, Stephan and van Kerkwijk, Marten H. and
32 | Brett, Matthew and Haldane, Allan and
33 | Fernández del Río, Jaime and Wiebe, Mark and
34 | Peterson, Pearu and Gérard-Marchant, Pierre and
35 | Sheppard, Kevin and Reddy, Tyler and Weckesser, Warren and
36 | Abbasi, Hameer and Gohlke, Christoph and
37 | Oliphant, Travis E.},
38 | title = {Array programming with {NumPy}},
39 | journal = {Nature},
40 | year = {2020},
41 | volume = {585},
42 | pages = {357–362},
43 | doi = {10.1038/s41586-020-2649-2}
44 | }
45 |
46 | @article{ONG2013314,
47 | title = {Python Materials Genomics (pymatgen): A robust, open-source python library for materials analysis},
48 | journal = {Computational Materials Science},
49 | volume = {68},
50 | pages = {314-319},
51 | year = {2013},
52 | issn = {0927-0256},
53 | doi = {https://doi.org/10.1016/j.commatsci.2012.10.028},
54 | author = {Shyue Ping Ong and William Davidson Richards and Anubhav Jain and Geoffroy Hautier and Michael Kocher and Shreyas Cholia and Dan Gunter and Vincent L. Chevrier and Kristin A. Persson and Gerbrand Ceder},
55 | keywords = {Materials, Project, Design, Thermodynamics, High-throughput},
56 | }
57 |
58 | @article{Hjorth_Larsen_2017,
59 | doi = {10.1088/1361-648x/aa680e},
60 | year = 2017,
61 | month = {jun},
62 | publisher = {{IOP} Publishing},
63 | volume = {29},
64 | number = {27},
65 | pages = {273002},
66 | author = {Ask Hjorth Larsen and Jens J{\o}rgen Mortensen and Jakob Blomqvist and Ivano E Castelli and Rune Christensen and Marcin Du{\l}ak and Jesper Friis and Michael N Groves and Bj{\o}rk Hammer and Cory Hargus and Eric D Hermes and Paul C Jennings and Peter Bjerre Jensen and James Kermode and John R Kitchin and Esben Leonhard Kolsbjerg and Joseph Kubal and Kristen Kaasbjerg and Steen Lysgaard and J{\'{o}}n Bergmann Maronsson and Tristan Maxson and Thomas Olsen and Lars Pastewka and Andrew Peterson and Carsten Rostgaard and Jakob Schi{\o}tz and Ole Schütt and Mikkel Strange and Kristian S Thygesen and Tejs Vegge and Lasse Vilhelmsen and Michael Walter and Zhenhua Zeng and Karsten W Jacobsen},
67 | title = {The atomic simulation environment{\textemdash}a Python library for working with atoms},
68 | journal = {Journal of Physics: Condensed Matter},
69 | }
70 |
71 | @article{Rahm2020,
72 | doi = {10.21105/joss.01944},
73 | year = {2020},
74 | publisher = {The Open Journal},
75 | volume = {5},
76 | number = {45},
77 | pages = {1944},
78 | author = {J. Magnus Rahm and Paul Erhart},
79 | title = {WulffPack: A Python package for Wulff constructions},
80 | journal = {Journal of Open Source Software}
81 | }
82 |
83 |
84 | @article{wulff,
85 | author = {G. Wulff},
86 |   doi = {10.1524/zkri.1901.34.1.449},
87 | title = {XXV. Zur Frage der Geschwindigkeit des Wachsthums und der Auflösung der Krystallflächen},
88 | journal = {Zeitschrift für Kristallographie - Crystalline Materials},
89 | number = {1-6},
90 | volume = {34},
91 | year = {1901},
92 | pages = {449--530}
93 | }
94 |
95 |
96 | @article{ldmarks1,
97 | author = { A. Howie and L. D. Marks },
98 | title = {Elastic strains and the energy balance for multiply twinned particles},
99 | journal = {Philosophical Magazine A},
100 | volume = {49},
101 | number = {1},
102 | pages = {95-109},
103 | year = {1984},
104 | publisher = {Taylor & Francis},
105 | doi = {10.1080/01418618408233432},
106 | }
107 |
108 | @article{ldmarks2,
109 | title = {Modified Wulff constructions for twinned particles},
110 | journal = {Journal of Crystal Growth},
111 | volume = {61},
112 | number = {3},
113 | pages = {556-566},
114 | year = {1983},
115 | issn = {0022-0248},
116 | doi = {https://doi.org/10.1016/0022-0248(83)90184-7},
117 | author = {L.D. Marks},
118 | }
119 |
120 | @article{WINTERBOTTOM1967303,
121 | title = {Equilibrium shape of a small particle in contact with a foreign substrate},
122 | journal = {Acta Metallurgica},
123 | volume = {15},
124 | number = {2},
125 | pages = {303-310},
126 | year = {1967},
127 | issn = {0001-6160},
128 | doi = {https://doi.org/10.1016/0001-6160(67)90206-4},
129 | author = {W.L Winterbottom},
130 | }
131 |
132 | @software{rangel_dacosta_luis_2021_5161161,
133 | author = {Rangel DaCosta, Luis and
134 | Scott, Mary},
135 | title = {Construction Zone},
136 | month = aug,
137 | year = 2021,
138 | publisher = {Zenodo},
139 | version = {v2021.08.04},
140 | doi = {10.5281/zenodo.5161161},
141 | url = {https://doi.org/10.5281/zenodo.5161161}
142 | }
143 |
144 | @article{oh_design_2020,
145 | title = {Design and synthesis of multigrain nanocrystals via geometric misfit strain},
146 | volume = {577},
147 | issn = {1476-4687},
148 | url = {https://doi.org/10.1038/s41586-019-1899-3},
149 | doi = {10.1038/s41586-019-1899-3},
150 | abstract = {The impact of topological defects associated with grain boundaries (GB defects) on the electrical, optical, magnetic, mechanical and chemical properties of nanocrystalline materials1,2 is well known. However, elucidating this influence experimentally is difficult because grains typically exhibit a large range of sizes, shapes and random relative orientations3–5. Here we demonstrate that precise control of the heteroepitaxy of colloidal polyhedral nanocrystals enables ordered grain growth and can thereby produce material samples with uniform GB defects. We illustrate our approach with a multigrain nanocrystal comprising a Co3O4 nanocube core that carries a Mn3O4 shell on each facet. The individual shells are symmetry-related interconnected grains6, and the large geometric misfit between adjacent tetragonal Mn3O4 grains results in tilt boundaries at the sharp edges of the Co3O4 nanocube core that join via disclinations. We identify four design principles that govern the production of these highly ordered multigrain nanostructures. First, the shape of the substrate nanocrystal must guide the crystallographic orientation of the overgrowth phase7. Second, the size of the substrate must be smaller than the characteristic distance between the dislocations. Third, the incompatible symmetry between the overgrowth phase and the substrate increases the geometric misfit strain between the grains. Fourth, for GB formation under near-equilibrium conditions, the surface energy of the shell needs to be balanced by the increasing elastic energy through ligand passivation8–10. With these principles, we can produce a range of multigrain nanocrystals containing distinct GB defects.},
151 | number = {7790},
152 | journal = {Nature},
153 | author = {Oh, Myoung Hwan and Cho, Min Gee and Chung, Dong Young and Park, Inchul and Kwon, Youngwook Paul and Ophus, Colin and Kim, Dokyoon and Kim, Min Gyu and Jeong, Beomgyun and Gu, X. Wendy and Jo, Jinwoung and Yoo, Ji Mun and Hong, Jaeyoung and McMains, Sara and Kang, Kisuk and Sung, Yung-Eun and Alivisatos, A. Paul and Hyeon, Taeghwan},
154 | month = jan,
155 | year = {2020},
156 | pages = {359--363},
157 | }
158 |
--------------------------------------------------------------------------------
/tests/test_scene.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from czone.molecule import Molecule
4 | from czone.scene import PeriodicScene, Scene
5 | from czone.volume import MultiVolume, Sphere, Volume
6 | from czone.util.voxel import Voxel
7 |
8 | from .czone_test_fixtures import czone_TestCase
9 | from .test_generator import get_random_generator
10 | from .test_volume import get_random_volume
11 |
12 | seed = 709123
13 | rng = np.random.default_rng(seed=seed)
14 |
15 |
16 | def get_random_object(rng=rng, depth=0, generator_args={}):
17 | if depth < 2:
18 | vol_type = rng.choice(["Volume", "MultiVolume"])
19 | else:
20 | vol_type = "Volume"
21 |
22 | match vol_type:
23 | case "Volume":
24 | G = get_random_generator(rng=rng, **generator_args)
25 | V = get_random_volume(G, N_points=8, rng=rng)
26 | case "MultiVolume":
27 | N_vols = rng.integers(2, 8 - 3 * depth)
28 | Vs = [
29 | get_random_object(rng, depth=depth + 1, generator_args=generator_args)
30 | for _ in range(N_vols)
31 | ]
32 | V = MultiVolume(Vs, priority=rng.integers(-10, 10))
33 |
34 | return V
35 |
36 |
def get_random_domain(rng=rng):
    """Return a Voxel with random basis vectors, scale, and origin."""
    basis_vectors = rng.normal(size=(3, 3))
    voxel_scale = rng.uniform(0.1, 10)
    voxel_origin = rng.uniform(-100, 100, size=(3,))
    return Voxel(basis_vectors, voxel_scale, voxel_origin)
43 |
44 |
def get_random_scene(periodic=False, N_max_objects=8, rng=rng, generator_args=None):
    """Build a random Scene or PeriodicScene for testing.

    Args:
        periodic (bool): if True, build a PeriodicScene with random PBC flags.
        N_max_objects (int): exclusive upper bound on the number of objects.
        rng (np.random.Generator): RNG driving all random choices.
        generator_args (dict | None): keyword arguments forwarded to
            get_random_generator; defaults to no extra arguments.

    Returns:
        Scene or PeriodicScene with a randomized domain and objects.
    """
    # Use a None sentinel instead of a shared mutable default argument.
    generator_args = {} if generator_args is None else generator_args

    N_objects = rng.integers(1, N_max_objects)
    domain = get_random_domain(rng=rng)
    objects = [get_random_object(rng=rng, generator_args=generator_args) for _ in range(N_objects)]

    if periodic:
        pbc = tuple((bool(rng.choice([True, False])) for _ in range(3)))
        return PeriodicScene(domain, objects, pbc=pbc)
    else:
        return Scene(domain, objects)
55 |
56 |
57 | class Test_Scene(czone_TestCase):
58 | def setUp(self):
59 | self.rng = rng
60 | self.N_trials = 32
61 |
62 | def test_init(self):
63 | for _ in range(self.N_trials):
64 | scene = get_random_scene()
65 | self.assertReprEqual(scene)
66 |
67 | domain = Voxel()
68 | ## init with one sphere
69 | sphere = Sphere(1, np.zeros((3, 1)))
70 | self.assertRaises(TypeError, lambda: Scene(domain, objects=sphere))
71 |
72 | ## init with two spheres
73 | spheres = [Volume(alg_objects=[Sphere(1, np.zeros((3, 1)))]), Sphere(5, np.zeros((3, 1)))]
74 | self.assertRaises(TypeError, lambda: Scene(domain, objects=spheres))
75 |
76 | spheres = [
77 | Volume(alg_objects=[Sphere(1, np.zeros((3, 1)))]),
78 | Volume(alg_objects=[Sphere(5, np.zeros((3, 1)))]),
79 | ]
80 | scene = Scene(domain, objects=spheres)
81 |
82 | # Check to see that references are carried around and not copies
83 | ref_ids = [id(x) for x in spheres]
84 | test_ids = [id(x) for x in scene.objects]
85 | self.assertEqual(set(ref_ids), set(test_ids))
86 |
87 | def test_eq(self):
88 | for _ in range(self.N_trials):
89 | ref = get_random_scene()
90 | test = Scene(ref.domain, objects=rng.permutation(ref.objects))
91 | self.assertEqual(ref, test)
92 | self.assertEqual(set([id(o) for o in ref.objects]), set([id(o) for o in test.objects]))
93 |
94 | def test_get_priorities(self):
95 | def get_objects(N_objects, min_priority=-10, max_priority=10):
96 | for _ in range(N_objects):
97 | sphere = Sphere(1, np.zeros((3, 1)))
98 | vol = Volume(
99 | alg_objects=[sphere],
100 | priority=int(rng.integers(min_priority, max_priority)),
101 | )
102 | yield vol
103 |
104 | for _ in range(self.N_trials):
105 | ## Get test objects and store in buckets by priority
106 | objs = list(get_objects(64))
107 | ref_dict = {}
108 | for o in objs:
109 | if o.priority in ref_dict:
110 | ref_dict[o.priority].append(id(o))
111 | else:
112 | ref_dict[o.priority] = [id(o)]
113 |
114 | ## Create scene and get priority array
115 | scene = Scene(domain=Voxel(), objects=objs)
116 | rel_plevels, offsets = scene._get_priorities()
117 |
118 | orig_priorities = [o.priority for o in objs]
119 | uniq_priorities = np.unique(orig_priorities)
120 | N_priorities = len(np.unique(orig_priorities))
121 |
122 | ## Check that priority array has been compressed correctly
123 | self.assertTrue(np.all(rel_plevels >= 0))
124 | self.assertTrue(len(offsets) == N_priorities + 1)
125 |
126 | ## Check that all objects are accounted for and counted uniquely
127 | for rel_p, up in enumerate(uniq_priorities):
128 | test_ids = [id(scene.objects[i]) for i in range(offsets[rel_p], offsets[rel_p + 1])]
129 | self.assertEqual(set(test_ids), set(ref_dict[up]))
130 | self.assertTrue(len(test_ids) == len(ref_dict[up]))
131 |
    def test_populate(self):
        """Check Scene.populate against brute-force references.

        populate_no_collisions() is compared against the plain concatenation of
        every object's atoms; populate() is compared against an O(n^2)
        brute-force collision resolution based on object priorities.
        """

        def get_objects(N_objects, min_priority=-10, max_priority=10):
            # Yield volumes bounded by random spheres, each with a random
            # molecule generator and a random priority.
            # NOTE(review): priorities use the module-level rng while all other
            # draws use self.rng — presumably intentional; confirm.
            for _ in range(N_objects):
                N = 32
                species = self.rng.integers(1, 119, (N, 1))
                positions = self.rng.normal(size=(N, 3))
                mol = Molecule(species, positions)
                sphere = Sphere(self.rng.uniform(2, 10), self.rng.uniform(-10, 10, (3,)))
                vol = Volume(
                    alg_objects=[sphere],
                    generator=mol,
                    priority=int(rng.integers(min_priority, max_priority)),
                )
                yield vol

        def sort_atom_arrays(atoms, species):
            # Sort both arrays by x coordinate so comparisons are
            # order-independent; ties are vanishingly unlikely for random
            # normal draws.
            order = np.argsort(atoms[:, 0])
            atoms = atoms[order]
            species = species[order]
            return atoms, species

        def get_atoms_from_scene(s):
            # Sorted (atoms, species) for everything the scene generated.
            test_atoms, test_species = s.all_atoms, s.all_species
            return sort_atom_arrays(test_atoms, test_species)

        def get_atoms_from_objects(objs):
            # Sorted (atoms, species) of all objects, ignoring collisions.
            ref_atoms = np.vstack([o.atoms for o in objs])
            ref_species = np.concatenate([o.species for o in objs])
            return sort_atom_arrays(ref_atoms, ref_species)

        def brute_force_collision(objs):
            # Reference collision handling: an atom of object i survives only
            # if no other object with equal-or-lower (i.e. precedent) priority
            # contains it.
            atoms = []
            species = []
            for i, iobj in enumerate(objs):
                current_atoms = iobj.atoms
                current_species = iobj.species
                check = np.ones(current_atoms.shape[0], dtype=bool)
                for j, jobj in enumerate(objs):
                    if i == j:
                        continue

                    if jobj.priority <= iobj.priority:
                        check = np.logical_and(
                            check, np.logical_not(jobj.checkIfInterior(current_atoms))
                        )

                atoms.append(current_atoms[check, :])
                species.append(current_species[check])

            ref_atoms = np.vstack(atoms)
            ref_species = np.concatenate(species)
            return sort_atom_arrays(ref_atoms, ref_species)

        for _ in range(self.N_trials):
            objs = list(get_objects(32))
            scene = Scene(domain=Voxel(), objects=objs)

            ## Populate scene and sort atoms by position
            scene.populate_no_collisions()
            test_atoms, test_species = get_atoms_from_scene(scene)

            ref_atoms, ref_species = get_atoms_from_objects(objs)

            self.assertTrue(np.array_equal(test_atoms, ref_atoms))
            self.assertTrue(np.array_equal(test_species, ref_species))

            ## Check collision handling
            scene.populate()
            test_atoms, test_species = get_atoms_from_scene(scene)
            ref_atoms, ref_species = brute_force_collision(objs)
            self.assertTrue(np.array_equal(test_atoms, ref_atoms))
            self.assertTrue(np.array_equal(test_species, ref_species))
204 |
205 | # def test_add_object(self):
206 | # ## init with two spheres
207 | # spheres = [Volume(alg_objects=[Sphere(1, np.zeros((3,1)))])]
208 | # scene = Scene(objects=spheres)
209 | # scene.add_object(Volume(alg_objects=[Sphere(1, np.zeros((3,1)))]))
210 |
211 |
class Test_PeriodicScene(czone_TestCase):
    """Smoke tests for periodic scenes."""

    def setUp(self):
        self.rng = rng
        self.N_trials = 32

    def test_init(self):
        """Random periodic scenes should round-trip through their repr."""
        for _ in range(self.N_trials):
            self.assertReprEqual(get_random_scene(periodic=True))
221 |
--------------------------------------------------------------------------------
/examples/presentation_examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import czone as cz\n",
10 | "import numpy as np\n",
11 | "from ase import Atoms\n",
12 | "from ase.io import write as ase_write"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": 2,
18 | "metadata": {},
19 | "outputs": [],
20 | "source": [
21 | "bbox = cz.volume.makeRectPrism(50,50,50)\n",
22 | "Au_gen = cz.generator.from_spacegroup(Z=[79], coords=np.array([[0,0,0]]),\\\n",
23 | " cellDims=4.07825*np.ones(3), cellAngs=[90,90,90], sgn=225)\n",
24 | "pos, species = Au_gen.supply_atoms(bbox)"
25 | ]
26 | },
27 | {
28 | "cell_type": "code",
29 | "execution_count": 3,
30 | "metadata": {},
31 | "outputs": [],
32 | "source": [
33 | "ase_write(\"Au_block.xyz\", Atoms(symbols=species, positions=pos))"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": 4,
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "sphere = cz.volume.algebraic.Sphere(center=np.mean(bbox,axis=0), radius=25.0)\n",
43 | "Au_sphere = cz.volume.Volume(alg_objects=[sphere], generator=Au_gen, gen_origin=np.array([0,0,0]))\n",
44 | "Au_sphere.populate_atoms()"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": 5,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "ase_write(\"Au_sphere.xyz\", Atoms(symbols=Au_sphere.species, positions=Au_sphere.atoms))"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": 6,
59 | "metadata": {},
60 | "outputs": [],
61 | "source": [
62 | "Cu_gen = cz.generator.from_spacegroup(Z=[29], coords=np.array([[0,0,0]]),\\\n",
63 | " cellDims=4.07825*np.ones(3), cellAngs=[90,90,90], sgn=225)\n",
64 | "small_sphere = cz.volume.algebraic.Sphere(center=np.mean(bbox,axis=0), radius=12.5)\n",
65 | "Cu_core = cz.volume.Volume(alg_objects=[small_sphere], generator=Cu_gen, gen_origin=np.array([0,0,0]))"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": 7,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "# cutoff_plane = cz.volume.algebraic.Plane(point=sphere.center, normal=np.array([1,0,0]))\n",
75 | "# Au_sphere.add_alg_object(cutoff_plane)\n",
76 | "# Cu_core.add_alg_object(cutoff_plane)"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": 8,
82 | "metadata": {},
83 | "outputs": [],
84 | "source": [
85 | "bounds = np.array([[0.0,0.0,0.0], [30.0, 30.0, 30.0]])\n",
86 | "Cu_core.priority = 0\n",
87 | "Au_sphere.priority = 1\n",
88 | "example_scene = cz.scene.Scene(bounds=bounds, objects=[Au_sphere, Cu_core])"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 9,
94 | "metadata": {},
95 | "outputs": [],
96 | "source": [
97 | "example_scene.populate()"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": 10,
103 | "metadata": {},
104 | "outputs": [],
105 | "source": [
106 | "cz.io.write_scene(\"Au_sphere_Cu_core.xyz\", example_scene)"
107 | ]
108 | },
109 | {
110 | "cell_type": "code",
111 | "execution_count": 11,
112 | "metadata": {},
113 | "outputs": [],
114 | "source": [
115 | "refl_111_plane = cz.volume.algebraic.snap_plane_near_point(sphere.center, Au_gen, (1,1,1))\n",
116 | "refl_111_transform = cz.transform.Reflection(refl_111_plane)\n",
117 | "vector_112 = np.array([1.0,1.0,-2.0])\n",
118 | "vector_121 = np.array([-1.0,2.0,-1.0])\n",
119 | "vector_112 *= (4.07825/6)\n",
120 | "vector_121 *= (4.07825/6)\n",
121 | "translate_112 = cz.transform.Translation(shift=2*vector_112.T)\n",
122 | "translate_121 = cz.transform.Translation(shift=2*vector_121.T)"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": 12,
128 | "metadata": {},
129 | "outputs": [],
130 | "source": [
131 | "Au_gen_twin = cz.generator.from_generator(Au_gen, transformation=[refl_111_transform])\n",
132 | "Au_gen_shift_a = cz.generator.from_generator(Au_gen, transformation=[translate_112])\n",
133 | "Au_gen_shift_b = cz.generator.from_generator(Au_gen, transformation=[translate_121])\n",
134 | "\n",
135 | "cutoff_111_a = cz.volume.algebraic.snap_plane_near_point(np.array([32.62, 2.03, 22.43]), Au_gen, (1,1,1))\n",
136 | "cutoff_111_b = cz.volume.algebraic.snap_plane_near_point(np.array([20.39, 8.156, 16.313]), Au_gen, (1,1,1))\n",
137 | "cutoff_111_b.point += cutoff_111_b.normal"
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": 13,
143 | "metadata": {},
144 | "outputs": [],
145 | "source": [
146 | "Au_sphere.add_alg_object(refl_111_plane)\n",
147 | "Au_sphere_twin = cz.volume.Volume(alg_objects=[sphere], generator=Au_gen_twin, gen_origin=Au_gen_twin.voxel.origin)\n",
148 | "Au_sphere_shift_a = cz.volume.Volume(alg_objects=[sphere, cutoff_111_a], generator=Au_gen_shift_a, gen_origin=Au_gen_shift_a.voxel.origin)\n",
149 | "Au_sphere_shift_b = cz.volume.Volume(alg_objects=[sphere, cutoff_111_b], generator=Au_gen_shift_b, gen_origin=Au_gen_shift_b.voxel.origin)\n",
150 | "vol_list = [Au_sphere, Au_sphere_twin, Au_sphere_shift_a, Au_sphere_shift_b]"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 14,
156 | "metadata": {},
157 | "outputs": [],
158 | "source": [
159 | "Au_sphere.priority = 4\n",
160 | "Au_sphere_twin.priority = 5\n",
161 | "Au_sphere_shift_a.priority = 3\n",
162 | "Au_sphere_shift_b.priority = 2\n",
163 | "twin_scene = cz.scene.Scene(bounds=bounds, objects=vol_list)"
164 | ]
165 | },
166 | {
167 | "cell_type": "code",
168 | "execution_count": 15,
169 | "metadata": {},
170 | "outputs": [],
171 | "source": [
172 | "twin_scene.populate()"
173 | ]
174 | },
175 | {
176 | "cell_type": "code",
177 | "execution_count": 16,
178 | "metadata": {},
179 | "outputs": [],
180 | "source": [
181 | "cz.io.write_scene(\"Au_twin.xyz\", twin_scene)"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": 17,
187 | "metadata": {},
188 | "outputs": [
189 | {
190 | "data": {
191 | "text/plain": [
192 | "(array([[0.24520321],\n",
193 | " [0.24520321],\n",
194 | " [0.24520321]]),\n",
195 | " array([[24.4695],\n",
196 | " [24.4695],\n",
197 | " [24.4695]]))"
198 | ]
199 | },
200 | "execution_count": 17,
201 | "metadata": {},
202 | "output_type": "execute_result"
203 | }
204 | ],
205 | "source": [
206 | "refl_111_plane.params"
207 | ]
208 | },
209 | {
210 | "cell_type": "code",
211 | "execution_count": 18,
212 | "metadata": {},
213 | "outputs": [],
214 | "source": [
215 | "translate_up = cz.transform.Translation(shift=np.array([12.5, 12.5, 50-sphere.center[2]]), locked=True) #shift from sides and above future substrate\n",
216 | "for vol in vol_list:\n",
217 | " vol.transform(translate_up)"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": 19,
223 | "metadata": {},
224 | "outputs": [],
225 | "source": [
226 | "C_substrate_gen = cz.generator.AmorphousGenerator(min_dist=1.2)"
227 | ]
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 20,
232 | "metadata": {},
233 | "outputs": [],
234 | "source": [
235 | "substrate_vol = cz.volume.Volume(points=cz.volume.makeRectPrism(70,70,30), generator=C_substrate_gen)"
236 | ]
237 | },
238 | {
239 | "cell_type": "code",
240 | "execution_count": 21,
241 | "metadata": {},
242 | "outputs": [],
243 | "source": [
244 | "sub_bounds = np.array([[0.0,0.0,0.0], [70.0, 70.0, 85.0]])\n",
245 | "# vol_list.append(substrate_vol)\n",
246 | "twin_on_substrate_scene = cz.scene.Scene(bounds=sub_bounds, objects=vol_list)"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": 22,
252 | "metadata": {},
253 | "outputs": [],
254 | "source": [
255 | "twin_on_substrate_scene.populate()"
256 | ]
257 | },
258 | {
259 | "cell_type": "code",
260 | "execution_count": 23,
261 | "metadata": {},
262 | "outputs": [],
263 | "source": [
264 | "cz.io.write_scene(\"Au_twin_on_substrate.xyz\", twin_on_substrate_scene)"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": null,
270 | "metadata": {},
271 | "outputs": [],
272 | "source": []
273 | }
274 | ],
275 | "metadata": {
276 | "kernelspec": {
277 | "display_name": "Python [conda env:ML_work]",
278 | "language": "python",
279 | "name": "conda-env-ML_work-py"
280 | },
281 | "language_info": {
282 | "codemirror_mode": {
283 | "name": "ipython",
284 | "version": 3
285 | },
286 | "file_extension": ".py",
287 | "mimetype": "text/x-python",
288 | "name": "python",
289 | "nbconvert_exporter": "python",
290 | "pygments_lexer": "ipython3",
291 | "version": "3.7.9"
292 | }
293 | },
294 | "nbformat": 4,
295 | "nbformat_minor": 4
296 | }
297 |
--------------------------------------------------------------------------------
/czone/util/measure.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple
2 |
3 | import numpy as np
4 |
5 |
def get_voxel_grid(dim: Tuple[int], px: bool = True, py: bool = True, pz: bool = True):
    """Generate list of voxel neighbors for 3D voxel grid with periodic boundary conditions.

    Utility function which returns a representation of a connected 3D voxel grid
    for arbitrary periodic boundary conditions. Voxels are ordered on a 1D list
    by X, then Y, then Z. For example, a 2x2x2 voxel grid will be ordered as
    [(0,0,0), (1,0,0), (0,1,0), (1,1,0), (0,0,1), (1,0,1), (0,1,1), (1,1,1)]. An ordered
    list is returned which contains, for each voxel i, a list of all of its
    neighbors on the 3D grid, as ordered in the 1D indexing scheme.

    For fully periodic boundary conditions, each voxel i will have 27 neighbors,
    including the voxel index itself.

    Args:
        dim (Tuple[int]): size of grid in x, y, and z
        px (bool): periodicity in x
        py (bool): periodicity in y
        pz (bool): periodicity in z

    Returns:
        If px, py, and pz are all True, an (N, 27) integer np.ndarray of
        neighbor indices (N = prod(dim)). Otherwise, a list of N sets of
        integer neighbor indices, where neighbors across non-periodic
        boundaries have been removed (so sets may have fewer than 27 entries).
        Note the two cases have different container types.
    """
    # get relative coordinates as 3D grid: the 27 offsets of a voxel's
    # neighborhood, each component in {-1, 0, 1}, enumerated x-fastest
    nn = [x for x in range(27)]
    nn_x = np.array([(x % 3) - 1 for x in nn])
    nn_y = np.array([((x // 3) % 3) - 1 for x in nn])
    nn_z = np.array([(x // 9) - 1 for x in nn])

    N = np.prod(dim)
    # float array (not int) so np.nan can later mark invalid neighbors
    neighbors = np.ones((N, 27)) * np.arange(N)[:, None]
    # get relative indices as 1D list (x-fastest linearization)
    shifts = (1, dim[0], dim[0] * dim[1])
    for i, t in enumerate(zip(nn_x, nn_y, nn_z)):
        neighbors[:, i] += np.dot(t, shifts)

    ind = np.arange(N)
    # correct x_min edge: wrap -x neighbors around to the far side of the row
    le = (ind % dim[0]) == 0
    for j in range(0, 27, 3):  # nn_x == -1
        neighbors[le, j] += dim[0]

    # correct x max edge
    re = (ind % dim[0]) == (dim[0] - 1)
    for j in range(2, 27, 3):  # nn_x == 1
        neighbors[re, j] -= dim[0]

    # correct y min edge
    te = ((ind // dim[0]) % dim[1]) == 0
    for j in np.where(nn_y == -1)[0]:
        neighbors[te, j] += dim[0] * dim[1]

    # correct y max edge
    be = ((ind // dim[0]) % dim[1]) == (dim[1] - 1)
    for j in np.where(nn_y == 1)[0]:
        neighbors[be, j] -= dim[0] * dim[1]

    # correct list for total size of grid (wraps the z direction implicitly)
    neighbors = neighbors % N

    # if fully periodic, no further corrections needed
    if px and py and pz:
        return neighbors.astype(int)

    # get full list of indices as array
    idx = np.array([x for x in range(N)]).astype(int)

    # get logical arrays for voxels on faces of grid (column vectors so they
    # can matrix-multiply with the row masks below)
    xi_face = (idx % dim[0] == 0)[:, None]
    xf_face = (idx % dim[0] == dim[0] - 1)[:, None]

    yi_face = ((idx // dim[0]) % dim[1] == 0)[:, None]
    yf_face = ((idx // dim[0]) % dim[1] == dim[1] - 1)[:, None]

    zi_face = (idx // (dim[0] * dim[1]) == 0)[:, None]
    zf_face = (idx // (dim[0] * dim[1]) == dim[2] - 1)[:, None]

    # get local neighbors on faces as arrays so that we can use matrix
    # multiplication for logical indexing (boolean outer product selects the
    # (voxel, neighbor-slot) pairs to invalidate)
    nn_xi = np.array([(x % 3) == 0 for x in nn])[None, :]
    nn_xf = np.array([(x % 3) == 2 for x in nn])[None, :]

    nn_yi = np.array([((x // 3) % 3) == 0 for x in nn])[None, :]
    nn_yf = np.array([((x // 3) % 3) == 2 for x in nn])[None, :]

    nn_zi = np.array([(x // 9) == 0 for x in nn])[None, :]
    nn_zf = np.array([(x // 9) == 2 for x in nn])[None, :]

    # change neighbor indices to nans if boundary is not periodic
    fx = not px
    fy = not py
    fz = not pz

    # correct just x faces
    if fx:
        neighbors[xi_face @ nn_xi] = np.nan
        neighbors[xf_face @ nn_xf] = np.nan

    # correct just y faces
    if fy:
        neighbors[yi_face @ nn_yi] = np.nan
        neighbors[yf_face @ nn_yf] = np.nan

    # correct just z faces
    if fz:
        neighbors[zi_face @ nn_zi] = np.nan
        neighbors[zf_face @ nn_zf] = np.nan

    # correct x/y edges (both directions non-periodic)
    if fx and fy:
        for xc, nn_xc in zip([xi_face, xf_face], [nn_xi, nn_xf]):
            for yc, nn_yc in zip([yi_face, yf_face], [nn_yi, nn_yf]):
                idx_check = np.logical_and(xc, yc)
                nn_check = np.logical_and(nn_xc, nn_yc)
                neighbors[idx_check @ nn_check] = np.nan

    # correct x/z edges
    if fx and fz:
        for xc, nn_xc in zip([xi_face, xf_face], [nn_xi, nn_xf]):
            for zc, nn_zc in zip([zi_face, zf_face], [nn_zi, nn_zf]):
                idx_check = np.logical_and(xc, zc)
                nn_check = np.logical_and(nn_xc, nn_zc)
                neighbors[idx_check @ nn_check] = np.nan

    # correct y/z edges
    if fy and fz:
        for yc, nn_yc in zip([yi_face, yf_face], [nn_yi, nn_yf]):
            for zc, nn_zc in zip([zi_face, zf_face], [nn_zi, nn_zf]):
                idx_check = np.logical_and(yc, zc)
                nn_check = np.logical_and(nn_yc, nn_zc)
                neighbors[idx_check @ nn_check] = np.nan

    # correct x/y/z corners (all three directions non-periodic)
    if fx and fy and fz:
        for xc, nn_xc in zip([xi_face, xf_face], [nn_xi, nn_xf]):
            for yc, nn_yc in zip([yi_face, yf_face], [nn_yi, nn_yf]):
                for zc, nn_zc in zip([zi_face, zf_face], [nn_zi, nn_zf]):
                    idx_check = np.logical_and(np.logical_and(yc, zc), xc)
                    nn_check = np.logical_and(np.logical_and(nn_yc, nn_zc), nn_xc)
                    neighbors[idx_check @ nn_check] = np.nan

    # used masked arrays to get compact lists of neighboring voxels
    mask = np.isnan(neighbors)
    neighbors_ma = np.ma.masked_array(neighbors, mask=mask).astype(int)
    neighbor_lists = [set(list(np.ma.compressed(x))) for x in neighbors_ma]

    return neighbor_lists
152 |
153 |
def get_sdist_fun(dims=None, px=False, py=False, pz=False):
    """Return a squared distance function in 3D space for any PBC.

    Args:
        dims (Array[float]): length 3 array, list, or tuple for periodic
                             dimension sizes (x,y,z); must be passed if using
                             periodic boundaries
        px (bool): whether or not periodic in x
        py (bool): whether or not periodic in y
        pz (bool): whether or not periodic in z

    Returns:
        Callable[float] : squared distance function obeying periodic boundaries

    """
    # return standard distance function if no periodic boundaries.
    # Return immediately: previously execution fell through to the periodic
    # setup below, which crashed when dims was None and always shadowed this
    # definition with the periodic closure.
    if not np.any((px, py, pz)):

        def sdist(A, B):
            return np.sum((A - B) ** 2.0, axis=1)

        return sdist

    # grab relevant dimensions (only the periodic axes need minimum-image)
    cols = [x for x, y in zip([0, 1, 2], [px, py, pz]) if y]
    sdims = np.array(dims)[cols]
    sdims = sdims[None, :, None]

    def sdist(A, B):
        # minimum-image convention: along each periodic axis, take the
        # shorter of the direct and wrapped separations
        dist_0 = np.abs(A - B)
        dist_1 = sdims - dist_0[:, cols, :]
        dist_0[:, cols, :] = np.min(np.stack([dist_0[:, cols, :], dist_1], axis=-1), axis=-1)

        return np.sum(dist_0 * dist_0, axis=1)

    return sdist
188 |
189 |
def calc_rdf(coords, cutoff=20.0, px=True, py=True, pz=True):
    """Calculate the 3D periodic radial distribution function of array of points.

    Args:
        coords (np.ndarray): Nx3 numpy array representing points in 3D
        cutoff (float): cutoff distance for neighbor consideration
        px (bool): whether or not coordinates are periodic in x
        py (bool): whether or not coordinates are periodic in y
        pz (bool): whether or not coordinates are periodic in z

    Returns:
        np.ndarray: radial distribution function, returned in steps of 0.1

    """
    # shift outside of negative octants; work on a copy so the caller's
    # array is not mutated (previously this modified coords in place)
    coords = coords - np.min(coords, axis=0)

    # get box size and number of voxels in each direction
    dims = np.max(coords, axis=0)
    N = np.ceil(dims / cutoff).astype(int)

    Nt = np.array([1, N[0], N[0] * N[1]])

    # get voxel neighbor list and 1D voxel idx for each obj
    nn = get_voxel_grid(N, px, py, pz)
    box_idx = (np.floor(coords / cutoff) @ Nt).astype(int)

    # get periodic distance calculation
    f_sdist = get_sdist_fun(dims, px, py, pz)

    # assign coords to voxels
    parts = []
    part_ids = np.arange(coords.shape[0])
    for i in range(np.prod(N)):
        parts.append([int(x) for x in part_ids[box_idx == i]])

    # do 3D arrays so that distances are broadcasted/batched
    counts = np.zeros(int(cutoff / 0.1))

    # for each voxel, calculate distance of owned particles to all possible
    # neighbors within cutoff distance
    for i in range(np.prod(N)):
        cur_parts = coords[parts[i], :][:, :, None]

        # nn is indexed by voxel, so look up the neighbors of voxel i directly.
        # (BUGFIX: this previously read nn[box_idx[i]], i.e. the neighbors of
        # the voxel containing *particle* i, mixing particle and voxel index
        # spaces and raising IndexError when there were fewer particles than
        # voxels.)
        for n in nn[i]:
            neighbor_parts = (coords[parts[n], :].T)[None, :, :]
            dist = np.sqrt(f_sdist(cur_parts, neighbor_parts))

            # use histogram to get counts for RDF
            tmp_counts, _ = np.histogram(dist, bins=counts.shape[0], range=(0.0, cutoff))
            counts += tmp_counts

    # zero the first bin to remove self-interaction (zero-distance) counts
    counts[0] = 0.0

    return counts
246 |
--------------------------------------------------------------------------------
/docs/source/examples/nanoparticle_on_substrate.rst:
--------------------------------------------------------------------------------
1 | Defected FCC Nanoparticle on Carbon Substrate
2 | =======================================================
3 |
4 | In this example, we'll be creating a spherical gold nanoparticle with a couple of
5 | planar defects, which will be then be placed onto an amorphous carbon substrate.
6 | In this example, we'll be going through most of the core functionality of the package.
7 |
8 | First, let's import the classes we'll need.
9 |
10 | .. code-block::
11 |
12 | import czone as cz
13 | import numpy as np
    from czone.volume import MultiVolume, Volume, Sphere, Plane, snap_plane_near_point, makeRectPrism
    from czone.generator import Generator, AmorphousGenerator
    from czone.transform import Rotation, Reflection, Translation, rot_vtv
    from czone.scene import Scene
18 |
19 | Let's start with making the substrate. We first will create an AmorphousGenerator object,
20 | which by default generates blocks of amorphous carbon which are periodic in x and y.
21 | We want a substrate that is 12 nm x 12 nm x 5 nm thick, so we use a utility to create
22 | the points of a rectangular prism with those dimensions. We then create a Volume object,
23 | to which we attach our amorphous carbon generator and the points defining the boundaries of the substrate.
24 |
25 | .. code-block::
26 |
27 | c_generator = AmorphousGenerator()
28 | substrate_prism = makeRectPrism(120,120,50)
29 | substrate = Volume(points=substrate_prism, generator=c_generator)
30 |
For the gold nanoparticle, we will be working with a crystalline generator.
32 | Gold is an FCC metal with a lattice parameter about 4 angstroms. Here, we can
33 | request a Generator with the correct unit cell and symmetry by providing the space group
34 | information. Only symmetric sites in the unit cell need to be passed in.
35 |
36 | .. code-block::
37 |
38 | base_Au_gen = Generator.from_spacegroup(Z=[79],
39 | coords=np.array([[0,0,0]]),
40 | cellDims=4.07825*np.ones(3),
41 | cellAngs=[90,90,90],
42 | sgn=225)
43 |
44 | Now, we can work on making the spherical nanoparticle itself. We first create
45 | a sphere to represent the outer boundary. The sphere is right now centered at the global origin.
46 | For the defects, we'll put a twin defect in the center of the nanoparticle and two
47 | stacking faults further toward the side. We use the snap_plane_near_point utility
48 | to grab several (111) planes for the defect placement.
49 |
50 | .. code-block::
51 |
52 | d_111 = 4.07825/np.sqrt(3)
53 |
54 | sphere = Sphere(center=np.array([0,0,0]), radius=32.5)
55 | refl_111_plane = snap_plane_near_point(sphere.center, base_Au_gen, (1,1,1))
56 |
57 | b_111 = base_Au_gen.voxel.sbases @ np.array([1,1,1])
58 | b_111 *= d_111 / np.linalg.norm(b_111)
59 | cutoff_111_a = snap_plane_near_point(-3*b_111, base_Au_gen, (1,1,1))
60 | cutoff_111_b = snap_plane_near_point(-8*b_111, base_Au_gen, (1,1,1))
61 |
62 | In order to have regions of the nanoparticle have defects, we essentially need to
63 | have new rules for how atoms are supplied to those regions. We can use a series
64 | of derived generators for that very purpose. For the twin grain, we reflect the
65 | original generator over the twin plane. For the two stacking faults, we create
66 | generators with shifted local origins.
67 |
68 | .. code-block::
69 |
70 | b_112 = base_Au_gen.voxel.sbases @ np.array([1,1,-2]).T / 3
71 | b_121 = base_Au_gen.voxel.sbases @ np.array([-1,2,-1]).T / 3
72 |
73 | refl_111 = Reflection(refl_111_plane)
74 | translate_112 = Translation(shift=b_112)
75 | translate_121 = Translation(shift=b_121)
76 |
77 | twin_gen = base_Au_gen.from_generator(transformation=[refl_111])
78 | shift_a_gen = base_Au_gen.from_generator(transformation=[translate_112])
79 | shift_b_gen = base_Au_gen.from_generator(transformation=[translate_121])
80 |
81 | Now that we have all the generators for the sub-grains of the nanoparticle, we
82 | can make the nanoparticle by combining all of the volumes together. Each grain
83 | will get its own volume, which is represented by the intersection of the interiors
84 | of the sphere and their respective defect planes. To make sure the grains don't
generate atoms on top of each other where their volumes intersect, we assign the grains
86 | different priorities. A lower priority means that volume has precedence over other
87 | volumes with higher priority levels. Two volumes with the same precedence will
remove atoms in their intersecting region. The volumes are added to a MultiVolume object,
which lets us manipulate all the grains simultaneously as one large semantic object.
90 |
91 | .. code-block::
92 |
93 | Au_main_grain = Volume(alg_objects=[sphere, refl_111_plane], generator=base_Au_gen)
94 | Au_sf_a_grain = Volume(alg_objects=[sphere, cutoff_111_a], generator=shift_a_gen)
95 | Au_sf_b_grain = Volume(alg_objects=[sphere, cutoff_111_b], generator=shift_b_gen)
96 | Au_twin_grain = Volume(alg_objects=[sphere], generator=twin_gen)
97 |
98 | Au_twin_grain.priority = 4
99 | Au_main_grain.priority = 3
100 | Au_sf_a_grain.priority = 2
101 | Au_sf_b_grain.priority = 1
102 |
103 | defected_NP = MultiVolume(volumes=[Au_twin_grain, Au_main_grain, Au_sf_a_grain, Au_sf_b_grain])
104 |
105 | We now rotate the nanoparticle to a random zone axis with a rotation transformation.
106 |
107 | .. code-block::
108 |
109 | # rotate to a random zone axis
110 | zone_axis = np.random.randn(3)
111 | zone_axis /= np.linalg.norm(zone_axis)
112 | rot = Rotation(matrix=rot_vtv(zone_axis, [0,0,1]))
113 |
114 | defected_NP.transform(rot)
115 |
116 |
117 | We use a surface alignment routine to help place the particle in the desired location
118 | on the substrate. We take one plane, near the bottom of the nanoparticle, and align it
119 | to the surface of the substrate. We also want to align the center of the sphere (which is
120 | currently the origin) with the center of substrate in X and Y. The surface alignment routine
121 | returns a MultiTransform object, which contains a sequence of transformations (in this case,
122 | a rotation followed by a translation).
123 |
124 | .. code-block::
125 |
126 | moving_plane = Plane(point=[0,0,-0.8*sphere.radius], normal=[0,0,-1]) # not quite the bottom of the NP
127 | target_plane = Plane(point=[0,0,50], normal=[0,0,1]) # the surface of the substrate
128 | alignment_transform = s2s_alignment(moving_plane,
129 | target_plane,
130 | np.array([0,0,0]),
131 | np.array([60,60,0]))
132 |
133 | defected_NP.transform(alignment_transform)
134 |
135 | Finally, we add the substrate and the nanoparticle to a scene. We use the populate method of the
136 | scene to actually generate the atoms, and once that is done (it may take 10-30 seconds for the
carbon generation), write the structure to an output file for visualization with our favorite
138 | visualization software.
139 |
140 | .. code-block::
141 |
142 | # remove substrate where NP exists
143 | defected_NP.priority = 0
144 | substrate.priority = 1
145 | defected_NP_scene = cz.scene.Scene(bounds=np.array([[0,0,0],[120,120,120]]),
146 | objects=[defected_NP, substrate])
147 | defected_NP_scene.populate()
148 | defected_NP_scene.to_file("defected_NP.xyz")
149 |
150 |
151 | While the code above is pretty compact, and hopefully, straightforward and readable, it can still be a little cumbersome.
152 | Imagine that we want to not sample a specific planar defect location, but many nanoparticles with random
153 | placement of defects. The above procedure has a couple of key steps where we define lattice relationships
154 | that make up our planar defects---this can certainly be reduced to a generalized algorithm.
155 | Construction Zone is designed to take algorithms and make repeatable programmatic workflows
156 | that can be sampled many times for large scale structure generation. Some such routines are
157 | already developed in the Prefab module. FCC planar defects is one such prefab routine
158 | currently available.
159 |
160 | In the following code, we create the defected nanoparticle itself in all of four lines.
We then rotate and place the particle onto the substrate as before, and create two structure files---
162 | one without the substrate, and one with the substrate.
163 |
.. code-block::

    from czone.prefab import fccMixedTwinSF

    small_sphere = Sphere(center=np.array([0,0,0]), radius=radius)
    vol = Volume(alg_objects=[small_sphere])
169 | sf_object_prefab = fccMixedTwinSF(generator=base_Au_gen, volume=vol, ratio=0.75, N=3)
170 | current_vol = sf_object_prefab.build_object() #sample a defected nanoparticle
171 |
172 | # apply random rotation
173 | zone_axis = np.random.randn(3)
174 | zone_axis /= np.linalg.norm(zone_axis)
175 | rot = Rotation(matrix=rot_vtv(zone_axis, [0,0,1]))
176 | current_vol.transform(rot)
177 |
178 | # put on substrate and apply random shift about center of FOV
179 | moving_plane = Plane(point=[0,0,-0.8*small_sphere.radius], normal=[0,0,-1]) # not quite the bottom of the NP
180 | target_plane = Plane(point=[0,0,50], normal=[0,0,1]) # the surface of the substrate
181 | final_center = np.array([60,60,0]) + 10*np.random.randn(3)*np.array([1,1,0])
182 | alignment_transform = s2s_alignment(moving_plane,
183 | target_plane,
184 | small_sphere.center,
185 | final_center)
186 |
187 | current_vol.transform(alignment_transform)
188 |
189 | scene = cz.scene.Scene(bounds=np.array([[0,0,0],[120,120,125]]), objects=[current_vol])
190 | scene.populate()
191 | scene.to_file("particle.xyz")
192 | scene.add_object(substrate)
193 | scene.populate()
194 | scene.to_file("particle_on_substrate.xyz")
195 |
--------------------------------------------------------------------------------
/tests/test_molecule.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from .czone_test_fixtures import czone_TestCase
3 | from pymatgen.core.structure import Molecule as pmg_molecule
4 |
5 | from czone.molecule import Molecule
6 | from czone.transform import MatrixTransform
7 |
8 | seed = 8907190823
9 | rng = np.random.default_rng(seed=seed)
10 |
11 |
12 | class Test_Molecule(czone_TestCase):
    def setUp(self):
        # Number of randomized trials each test runs.
        self.N_trials = 100
15 |
16 | def test_init(self):
17 | N = 1024
18 |
19 | ## Check basic initialization
20 | for _ in range(self.N_trials):
21 | species = rng.integers(1, 119, (N, 1))
22 | positions = rng.normal(size=(N, 3))
23 | mol = Molecule(species, positions)
24 |
25 | self.assertTrue(np.allclose(mol.species, species.ravel()))
26 | self.assertTrue(np.allclose(mol.atoms, positions))
27 | self.assertReprEqual(mol)
28 |
29 | ## Check input errors with wrong shaped arrays
30 | def init_molecule(*args):
31 | return Molecule(*args)
32 |
33 | species = rng.integers(1, 119, (N - 1, 1))
34 | positions = rng.normal(size=(N, 3))
35 | self.assertRaises(ValueError, init_molecule, species, positions)
36 |
37 | species = rng.integers(1, 119, (N, 2))
38 | positions = rng.normal(size=(N, 3))
39 | self.assertRaises(ValueError, init_molecule, species, positions)
40 |
41 | # Numpy should raise an error here, for the reshape of positions
42 | species = rng.integers(1, 119, (N, 1))
43 | positions = rng.normal(size=(N, 4))
44 | self.assertRaises(ValueError, init_molecule, species, positions)
45 |
46 | # Reshape is valid, but the sizes are now incompatible
47 | N = 30
48 | species = rng.integers(1, 119, (N, 1))
49 | positions = rng.normal(size=(N, 4))
50 | self.assertRaises(ValueError, init_molecule, species, positions)
51 |
52 | def test_eq(self):
53 | N = 256
54 |
55 | ## Check set equality
56 | for _ in range(self.N_trials):
57 | species = rng.integers(1, 119, (N, 1))
58 | positions = rng.normal(size=(N, 3))
59 | ref_mol = Molecule(species, positions)
60 |
61 | new_order = rng.permutation(np.arange(N))
62 | test_mol = Molecule(species[new_order], positions[new_order])
63 |
64 | self.assertEqual(ref_mol, test_mol)
65 |
66 | def test_copy_constructors(self):
67 | N = 512
68 | for _ in range(self.N_trials // 8):
69 | species = rng.integers(1, 119, (N, 1))
70 | positions = rng.normal(size=(N, 3))
71 | mol = Molecule(species, positions)
72 | new_mol = Molecule.from_molecule(mol)
73 | self.assertTrue(np.allclose(mol.species, new_mol.species))
74 | self.assertTrue(np.allclose(mol.atoms, new_mol.atoms))
75 |
76 | ase_mol = mol.ase_atoms
77 | self.assertTrue(np.allclose(mol.species, ase_mol.get_atomic_numbers()))
78 | self.assertTrue(np.allclose(mol.atoms, ase_mol.get_positions()))
79 |
80 | ase_mol = Molecule.from_ase_atoms(ase_mol)
81 | self.assertTrue(np.allclose(mol.species, ase_mol.species))
82 | self.assertTrue(np.allclose(mol.atoms, ase_mol.atoms))
83 |
84 | pmg_mol = pmg_molecule(list(species[:, 0]), positions)
85 | test_mol = Molecule.from_pmg_molecule(pmg_mol)
86 |
87 | ref_species = np.array([s.number for s in pmg_mol.species])
88 | self.assertTrue(np.allclose(test_mol.species, ref_species))
89 | self.assertTrue(np.allclose(test_mol.atoms, pmg_mol.cart_coords))
90 |
91 | self.assertRaises(
92 | TypeError, Molecule.from_ase_atoms, pmg_molecule(list(species[:, 0]), positions)
93 | )
94 | self.assertRaises(TypeError, Molecule.from_pmg_molecule, mol.ase_atoms)
95 |
96 | def test_updates(self):
97 | N = 1024
98 | for _ in range(self.N_trials):
99 | species = rng.integers(1, 119, (N, 1))
100 | positions = rng.normal(size=(N, 3))
101 | mol = Molecule(species, positions)
102 |
103 | new_species = rng.integers(1, 119, (N, 1))
104 | new_positions = rng.normal(size=(N, 3))
105 |
106 | mol.update_species(new_species)
107 | mol.update_positions(new_positions)
108 |
109 | self.assertTrue(np.allclose(mol.species, new_species.ravel()))
110 | self.assertTrue(np.allclose(mol.atoms, new_positions))
111 |
112 | species = rng.integers(1, 119, (N, 1))
113 | positions = rng.normal(size=(N, 3))
114 | mol = Molecule(species, positions)
115 |
116 | bad_species = rng.integers(1, 119, (N - 1, 1))
117 | self.assertRaises(ValueError, lambda s: mol.update_species(s), bad_species)
118 |
119 | bad_positions = rng.normal(size=(N - 1, 3))
120 | self.assertRaises(ValueError, lambda p: mol.update_positions(p), bad_positions)
121 |
122 | def test_removes(self):
123 | N = 1024
124 | for _ in range(self.N_trials):
125 | species = rng.integers(1, 119, (N, 1))
126 | positions = rng.normal(size=(N, 3))
127 | mol = Molecule(species, positions)
128 |
129 | rem_ind = rng.choice(np.arange(N), 128, replace=False)
130 | mol.remove_atoms(rem_ind)
131 |
132 | keep_ind = set(np.arange(N)).difference(rem_ind)
133 | ref_species = np.asarray([species[i, 0] for i in keep_ind])
134 | ref_pos = np.vstack([positions[i, :] for i in keep_ind])
135 |
136 | self.assertTrue(np.allclose(mol.species, ref_species))
137 | self.assertTrue(np.allclose(mol.atoms, ref_pos))
138 |
139 | species = rng.integers(1, 119, (N, 1))
140 | positions = rng.normal(size=(N, 3))
141 | mol = Molecule(species, positions)
142 |
143 | # Test for bad removal indices
144 | bad_ind = [0, 1, 2, 3, 1025]
145 | self.assertRaises(IndexError, mol.remove_atoms, bad_ind)
146 |
147 | bad_ind = [0, 1, 2, 3, -1025]
148 | self.assertRaises(IndexError, mol.remove_atoms, bad_ind)
149 |
150 | # Test for bad choices of new origins
151 | bad_ind = [0, 1, 2, 3]
152 | self.assertRaises(TypeError, mol.remove_atoms, *(bad_ind, 5.2))
153 | self.assertRaises(IndexError, mol.remove_atoms, *(bad_ind, 1025))
154 | self.assertRaises(IndexError, mol.remove_atoms, *(bad_ind, -1025))
155 | self.assertRaises(IndexError, mol.remove_atoms, *(bad_ind, 2))
156 |
157 | # Test for origin tracking removal
158 | bad_ind = [0, 1, 2, 3]
159 | mol.set_origin(idx=2)
160 | self.assertRaises(NotImplementedError, mol.remove_atoms, bad_ind)
161 |
162 | def test_orientation(self):
163 | N = 4
164 | for _ in range(self.N_trials):
165 | species = rng.integers(1, 119, (N, 1))
166 | positions = rng.normal(size=(N, 3))
167 |
168 | scaled_orientation = rng.normal(
169 | 0,
170 | 1,
171 | (
172 | 3,
173 | 3,
174 | ),
175 | )
176 | orientation, _ = np.linalg.qr(scaled_orientation)
177 |
178 | mol = Molecule(species, positions, orientation=orientation)
179 | self.assertTrue(np.allclose(mol.orientation, orientation))
180 |
181 | def init_molecule(orientation):
182 | return Molecule(species, positions, orientation=orientation)
183 |
184 | self.assertRaises(ValueError, init_molecule, np.eye(4))
185 |
186 | bad_eigenvals = np.eye(3) * 2
187 | self.assertRaises(ValueError, init_molecule, bad_eigenvals)
188 |
189 | s = np.cbrt(1 / 2.0)
190 | bad_ortho = np.array([[s, s, 0], [0, s, s], [s, 0, s]])
191 | self.assertRaises(ValueError, init_molecule, bad_ortho)
192 |
193 | def test_origin(self):
194 | N = 1024
195 | for _ in range(self.N_trials):
196 | species = rng.integers(1, 119, (N, 1))
197 | positions = rng.normal(size=(N, 3))
198 |
199 | # Default origin at grid origin
200 | mol_0 = Molecule(species, positions)
201 | self.assertTrue(np.allclose(mol_0.origin, np.zeros((3, 1))))
202 |
203 | # Check consistency between origin index and manual specification
204 | origin = rng.choice(1024, 1)[0]
205 | mol_1 = Molecule(species, positions, origin=origin)
206 | mol_2 = Molecule(species, positions, origin=positions[origin, :])
207 | self.assertTrue(np.allclose(mol_1.origin, mol_2.origin))
208 |
209 | # Check tracking of origin
210 | new_positions = np.copy(positions)
211 | new_positions[origin, :] = 0.0
212 | mol_1.update_positions(new_positions)
213 | self.assertTrue(np.allclose(mol_1.origin, np.zeros((3, 1))))
214 |
215 | def init_molecule(origin_idx):
216 | return Molecule(species, positions, origin=origin_idx)
217 |
218 | self.assertRaises(IndexError, init_molecule, 1025)
219 | self.assertRaises(IndexError, init_molecule, -1025)
220 |
221 | self.assertRaises(TypeError, lambda x: mol_0.set_origin(idx=x), 73.103)
222 |
223 | def test_transform(self):
224 | N = 1024
225 | for _ in range(self.N_trials):
226 | mat = rng.normal(size=(3, 3))
227 | T = MatrixTransform(mat)
228 |
229 | species = rng.integers(1, 119, (N, 1))
230 | positions = rng.normal(size=(N, 3))
231 | mol = Molecule(species, positions)
232 |
233 | test_mol = Molecule.from_molecule(mol, transformation=[T])
234 | test_mol.set_origin(idx=4)
235 | mol.set_origin(positions[4, :])
236 | mol.transform(T)
237 |
238 | self.assertTrue(np.allclose(mol.atoms, (mat @ positions.T).T))
239 | self.assertTrue(np.allclose(mol.atoms, test_mol.atoms))
240 | self.assertTrue(np.allclose(mol.origin, test_mol.origin))
241 |
242 | mol.set_origin(idx=4)
243 | mol.print_warnings = True
244 | self.assertWarns(UserWarning, mol.transform, T)
245 |
246 | # Since the warnings are only used here, test to make sure warning setting catches errors
247 | def set_warnings(val):
248 | mol.print_warnings = val
249 |
250 | self.assertRaises(TypeError, set_warnings, 1.2)
251 |
--------------------------------------------------------------------------------
/czone/blueprint/serializer.py:
--------------------------------------------------------------------------------
1 | import json
2 | from abc import ABC, abstractmethod
3 | from pathlib import Path
4 |
5 | import numpy as np
6 |
7 | from .blueprint import (
8 | BaseNode,
9 | Blueprint,
10 | NodeMap,
11 | )
12 |
# Optional serialization backends: record availability at import time so the
# dispatching Serializer can raise a helpful error instead of an ImportError.
try:
    import yaml

    YAML_AVAILABLE = True
except ImportError:
    YAML_AVAILABLE = False

try:
    import tomlkit

    TOML_AVAILABLE = True
except ImportError:
    TOML_AVAILABLE = False

try:
    import h5py

    # Fix: the historical flag name is misspelled; keep it because other code in
    # this module references it, and expose a correctly spelled alias as well.
    H5PY_AVAIALBLE = True
    H5PY_AVAILABLE = H5PY_AVAIALBLE
except ImportError:
    H5PY_AVAIALBLE = False
    H5PY_AVAILABLE = False
33 |
34 |
class BaseSerializer(ABC):
    """Abstract interface for reading and writing Blueprints to disk.

    Concrete subclasses implement the static serialize/deserialize pair for a
    particular file format; write/read are thin aliases for convenience.
    """

    def __init__(self):
        # Fix: the original signature omitted `self`, so instantiating any
        # concrete serializer raised a TypeError.
        pass

    @staticmethod
    @abstractmethod
    def serialize(filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
        """Take a Blueprint and serialize it to disk."""

    @classmethod
    def write(cls, filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
        """Alias for serialize."""
        cls.serialize(filepath, blueprint, **kwargs)

    @staticmethod
    @abstractmethod
    def deserialize(filepath: Path | str, **kwargs) -> Blueprint:
        """Take a file and try to return a Blueprint."""

    @classmethod
    def read(cls, filepath: Path | str, **kwargs) -> Blueprint:
        """Alias for deserialize."""
        return cls.deserialize(filepath, **kwargs)
60 |
61 |
class json_Serializer(BaseSerializer):
    """Serialize Blueprints to/from JSON via nested dictionaries."""

    @staticmethod
    def to_dict(node: BaseNode) -> dict:
        """Recursively convert a node (and its children) to a JSON-safe dict.

        None-valued parameters are dropped and numpy arrays are converted to
        nested lists so the result is directly json-serializable.
        """
        res = {
            k: (v.tolist() if isinstance(v, np.ndarray) else v)
            for k, v in node.items()
            if v is not None
        }

        res["_class_type"] = node.class_type.__name__  # force to be first in sort order

        children = res.pop("children", [])
        if len(children) > 0:
            res["children"] = [json_Serializer.to_dict(n) for n in children]

        return res

    @staticmethod
    def from_dict(bdict: dict) -> BaseNode:
        """Rebuild a node tree from a dict produced by `to_dict`."""
        # Fix: work on a shallow copy so the caller's dict is not mutated by the
        # pops below (each recursive call copies its own level).
        bdict = dict(bdict)
        children = bdict.pop("children", [])

        res = NodeMap[bdict.pop("_class_type")](**bdict)
        for n in children:
            res.add_node(json_Serializer.from_dict(n))

        return res

    @staticmethod
    def serialize(filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
        """Write `blueprint` to `filepath` as JSON.

        `indent` (default 4) and `sort_keys` (default True) may be overridden
        via kwargs; all remaining kwargs are forwarded to json.dump.
        """
        bdict = json_Serializer.to_dict(blueprint.mapping)

        indent = kwargs.pop("indent", 4)
        sort_keys = kwargs.pop("sort_keys", True)

        with open(filepath, "w") as f:
            json.dump(bdict, f, sort_keys=sort_keys, indent=indent, **kwargs)

    @staticmethod
    def deserialize(filepath: Path | str, **kwargs) -> Blueprint:
        """Read a JSON file from `filepath` and rebuild its Blueprint."""
        with open(filepath, "r") as f:
            bdict = json.load(f)

        node = json_Serializer.from_dict(bdict)
        return Blueprint(node)
118 |
119 |
class yaml_Serializer(BaseSerializer):
    """Serialize Blueprints to/from YAML, reusing the json dictionary layout."""

    # TODO: prettier formatting

    @staticmethod
    def serialize(filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
        """Write `blueprint` to `filepath` as a YAML document."""
        tree = json_Serializer.to_dict(blueprint.mapping)
        with open(filepath, "w") as stream:
            yaml.dump(tree, stream, **kwargs)

    @staticmethod
    def deserialize(filepath: Path | str, **kwargs) -> Blueprint:
        """Read a YAML document from `filepath` and rebuild its Blueprint."""
        with open(filepath, "r") as stream:
            tree = yaml.full_load(stream)

        return Blueprint(json_Serializer.from_dict(tree))
137 |
138 |
class toml_Serializer(BaseSerializer):
    """Serialize Blueprints to/from TOML, reusing the json dictionary layout."""

    # TODO: prettier formatting

    @staticmethod
    def serialize(filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
        """Write `blueprint` to `filepath` as a TOML document."""
        tree = json_Serializer.to_dict(blueprint.mapping)
        with open(filepath, "w") as stream:
            tomlkit.dump(tree, stream)

    @staticmethod
    def deserialize(filepath: Path | str, **kwargs) -> Blueprint:
        """Read a TOML document from `filepath` and rebuild its Blueprint."""
        with open(filepath, "r") as stream:
            tree = tomlkit.load(stream).unwrap()

        return Blueprint(json_Serializer.from_dict(tree))
155 |
156 |
157 | class h5_Serializer(BaseSerializer):
158 | # TODO: For now, adopting basic dictionary unfolding stategy, as in json, yaml, and toml
159 | # In future, for slightly more efficient packing, could adopt class-packing approach
160 | # e.g., if volume owns many planes can pack all plane params into one large array
161 | @staticmethod
162 | def write_node_to_group(node: BaseNode, group: h5py.Group, **kwargs) -> None:
163 | params = {**node}
164 | children = params.pop("children")
165 |
166 | group_name = kwargs.get("name", node.class_type.__name__)
167 | G = group.create_group(group_name)
168 | for k, v in params.items():
169 | match v:
170 | case None:
171 | continue
172 | case np.ndarray():
173 | G.create_dataset(k, data=v)
174 | case _:
175 | G.attrs[k] = v
176 |
177 | if len(children) > 0:
178 | # Get counters for children by type
179 | child_types = set([n.class_type for n in children])
180 | counters = {t: 0 for t in child_types}
181 |
182 | for n in children:
183 | t = n.class_type
184 | name = f"{t.__name__}_{counters[t]}"
185 | h5_Serializer.write_node_to_group(n, G, name=name)
186 | counters[t] += 1
187 |
188 | @staticmethod
189 | def read_node_from_group(group: h5py.Group) -> BaseNode:
190 | class_name = group.name.rsplit("/", 1)[-1].split("_")[0]
191 |
192 | params = dict(group.attrs)
193 |
194 | children = []
195 | for k in group.keys():
196 | if isinstance(group[k], h5py.Dataset):
197 | params[k] = np.array(group[k])
198 | else:
199 | children.append(k)
200 |
201 | node = NodeMap[class_name](**params)
202 | for cg in children:
203 | node.add_node(h5_Serializer.read_node_from_group(group[cg]))
204 |
205 | return node
206 |
207 | @staticmethod
208 | def serialize(filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
209 | head_node = blueprint.mapping
210 | with h5py.File(filepath, mode="w") as f:
211 | h5_Serializer.write_node_to_group(head_node, f)
212 |
213 | @staticmethod
214 | def deserialize(filepath: Path | str, **kwargs) -> Blueprint:
215 | with h5py.File(filepath, mode="r") as f:
216 | root_groups = list(f.keys())
217 | if len(root_groups) > 1:
218 | raise ValueError
219 |
220 | root_group = f[root_groups[0]]
221 | head_node = h5_Serializer.read_node_from_group(root_group)
222 |
223 | return Blueprint(head_node)
224 |
225 |
226 | class Serializer(BaseSerializer):
227 | """Dispatch class."""
228 |
229 | @staticmethod
230 | def serialize(filepath: Path | str, blueprint: Blueprint, **kwargs) -> None:
231 | ## Get format from **kwargs if passed in; otherwise, infer from filepath
232 | output_format = kwargs.get("format", str(filepath).rsplit(".")[-1])
233 |
234 | match output_format:
235 | case "json":
236 | return json_Serializer.serialize(filepath, blueprint, **kwargs)
237 | case "h5" | "H5" | "hdf5":
238 | if H5PY_AVAIALBLE:
239 | return h5_Serializer.serialize(filepath, blueprint, **kwargs)
240 | else:
241 | raise ValueError(
242 | "hdf5 support not available. Please install h5py: https://docs.h5py.org/"
243 | )
244 | case "yaml":
245 | if YAML_AVAILABLE:
246 | return yaml_Serializer.serialize(filepath, blueprint, **kwargs)
247 | else:
248 | raise ValueError(
249 | "yaml support not available. Please insall pyyaml: https://pyyaml.org"
250 | )
251 | case "toml":
252 | if TOML_AVAILABLE:
253 | return toml_Serializer.serialize(filepath, blueprint, **kwargs)
254 | else:
255 | raise ValueError(
256 | "toml support not available. Please insall tomlkit: https://tomlkit.readthedocs.io/en"
257 | )
258 | case _:
259 | raise ValueError(f"Unsupported format {output_format} detected or passed in.")
260 |
261 | @staticmethod
262 | def deserialize(filepath: Path | str, **kwargs) -> Blueprint:
263 | ## Get format from **kwargs if passed in; otherwise, infer from filepath
264 | input_format = kwargs.get("format", str(filepath).rsplit(".")[-1])
265 |
266 | match input_format:
267 | case "json":
268 | return json_Serializer.deserialize(filepath, **kwargs)
269 | case "h5" | "H5" | "hdf5":
270 | if H5PY_AVAIALBLE:
271 | return h5_Serializer.deserialize(filepath, **kwargs)
272 | else:
273 | raise ValueError(
274 | "hdf5 support not available. Please install h5py: https://docs.h5py.org/"
275 | )
276 | case "yaml":
277 | if YAML_AVAILABLE:
278 | return yaml_Serializer.deserialize(filepath, **kwargs)
279 | else:
280 | raise ValueError(
281 | "yaml support not available. Please insall pyyaml: https://pyyaml.org"
282 | )
283 | case "toml":
284 | if TOML_AVAILABLE:
285 | return toml_Serializer.deserialize(filepath, **kwargs)
286 | else:
287 | raise ValueError(
288 | "toml support not available. Please insall tomlkit: https://tomlkit.readthedocs.io/en"
289 | )
290 | case _:
291 | raise ValueError(f"Unsupported format {input_format} detected or passed in.")
292 |
--------------------------------------------------------------------------------
/czone/molecule/molecule.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import copy
4 | import warnings
5 |
6 | import numpy as np
7 | from ase import Atoms
8 | from pymatgen.core.structure import IMolecule
9 |
10 | from czone.types import BaseGenerator, BaseTransform
11 | from czone.util.eset import array_set_equal
12 |
13 |
class Molecule(BaseGenerator):
    """Generator class for Molecule objects.

    Molecule objects are intended to facilitate molecular atomic structures,
    which are not easily generated in the Generation-Volume pair scheme. They
    are also intended to facilitate applications, for example, in surface
    chemistry studies. The molecule class mostly interfaces with other packages
    more suited for molecular generation.

    Attributes:
        atoms (np.ndarray): Nx3 array of atom positions of atoms in molecule
        species (np.ndarray): (N,) array of atomic numbers of atoms in molecule
        origin (np.ndarray): Reference origin of molecule.
        orientation (np.ndarray): Reference orientation of molecule.
        ase_atoms (Atoms): Collection of atoms in molecule as ASE Atoms object
    """

    def __init__(self, species, positions, origin=None, **kwargs) -> None:
        """
        Args:
            species: N atomic numbers, in any shape reshapeable to (N,).
            positions: N Cartesian coordinates, reshapeable to (N, 3).
            origin: 3-vector for a fixed origin, or an integer index of the
                atom the origin should track. Defaults to (0, 0, 0).
            **kwargs: optionally, "orientation", a 3x3 rotation matrix.
        """
        self._atoms = None
        self._species = None
        self.reset_orientation()
        self.print_warnings = True
        self.set_atoms(species, positions)

        if origin is None:
            self.set_origin(point=np.array([0.0, 0.0, 0.0]))
        elif np.issubdtype(type(origin), np.integer):
            # An integer origin means "track this atom", not a coordinate.
            self.set_origin(idx=origin)
        else:
            self.set_origin(point=origin)

        if "orientation" in kwargs.keys():
            self.orientation = kwargs["orientation"]

    def __repr__(self) -> str:
        return f"Molecule(species={repr(self.species)}, positions={repr(self.atoms)})"

    def __eq__(self, other: object) -> bool:
        """Set-wise equality: same atoms (position and species), in any order."""
        if not isinstance(other, Molecule):
            # Fix: previously fell through and implicitly returned None;
            # NotImplemented lets Python defer to the other operand.
            return NotImplemented

        if not array_set_equal(self.atoms, other.atoms):
            return False

        # Order atoms along each coordinate axis and compare species in that
        # common ordering, so atom order does not affect the comparison.
        x_ind = np.argsort(self.atoms, axis=0)
        y_ind = np.argsort(other.atoms, axis=0)
        return np.array_equal(self.species[x_ind], other.species[y_ind])

    @property
    def print_warnings(self):
        """Whether transform() warns when the origin is tracking an atom."""
        return self._print_warnings

    @print_warnings.setter
    def print_warnings(self, val):
        if not isinstance(val, bool):
            raise TypeError

        self._print_warnings = val

    @property
    def atoms(self):
        """Array of atomic positions of atoms lying within molecule."""
        return self._atoms

    @property
    def species(self):
        """Array of atomic numbers of atoms lying within molecule."""
        return self._species

    def set_atoms(self, species, positions):
        """Validate, cast, and store a matched set of species and positions.

        Raises:
            ValueError: if the number of species and positions differ, or if
                either input cannot be reshaped to (N,) / (N, 3).
        """
        species = np.array(species)
        species = np.reshape(species, (-1,)).astype(int)
        positions = np.array(positions)
        positions = np.reshape(positions, (-1, 3))

        if positions.shape[0] != species.shape[0]:
            raise ValueError(
                f"Number of positions ({positions.shape[0]}) provided does not match number of species ({species.shape[0]}) provided"
            )

        self._species = species
        self._atoms = positions

    def update_positions(self, positions):
        """Replace positions; input must be reshapeable to the current shape."""
        positions = np.array(positions)
        positions = np.reshape(positions, self.atoms.shape)
        self._atoms = positions

    def update_species(self, species):
        """Replace species; input must be reshapeable to the current shape."""
        species = np.array(species)
        species = np.reshape(species, self.species.shape).astype(int)
        self._species = species

    def remove_atoms(self, indices, new_origin_idx=None):
        """Remove atoms from the molecule by index.

        Args:
            indices: iterable(int), set of indices to remove
            new_origin_idx: int, original index number of atom to set as new origin

        Raises:
            TypeError: if new_origin_idx is not an integer.
            IndexError: if an index is out of bounds, or new_origin_idx is
                among the atoms being removed.
            NotImplementedError: if the tracked origin atom would be removed.
        """
        if new_origin_idx is not None:
            if not np.issubdtype(type(new_origin_idx), np.integer):
                raise TypeError("new_origin_idx must be an int")

            if np.abs(new_origin_idx) >= self.atoms.shape[0]:
                raise IndexError(
                    f"Supplied new_origin_idx {new_origin_idx} is out of bounds for {self.atoms.shape[0]} atom molecule"
                )

            if new_origin_idx in indices:
                raise IndexError(
                    f"Supplied new_origin_idx {new_origin_idx} in set of indices of atoms to be removed."
                )

        # NOTE(review): if the origin tracks an atom that survives the removal,
        # its index is not remapped after deletion — confirm intended behavior.
        if self._origin_tracking and self._origin_idx in indices:
            raise NotImplementedError  # TODO: Implement origin resetting behavior and warn user if origin is reset to a new index
            # self._origin_idx = new_origin_idx # TEST

        # np.delete raises IndexError for out-of-bounds removal indices
        self._species = np.delete(self.species, indices, axis=0)
        self._atoms = np.delete(self.atoms, indices, axis=0)

    @property
    def ase_atoms(self):
        """Collection of atoms in molecule as ASE Atoms object."""
        return Atoms(symbols=self.species, positions=self.atoms)

    @property
    def origin(self):
        """Current origin: the tracked atom's position, or the stored point."""
        if self._origin_tracking:
            return self.atoms[self._origin_idx, :]
        else:
            return self._origin

    @property
    def _origin_tracking(self) -> bool:
        # True when the origin follows an atom instead of a fixed point.
        return self.__origin_tracking

    @_origin_tracking.setter
    def _origin_tracking(self, val: bool):
        assert isinstance(val, bool)

        self.__origin_tracking = val

    @property
    def _origin_idx(self) -> int:
        # Index of the atom currently tracked as the origin.
        return self.__origin_idx

    @_origin_idx.setter
    def _origin_idx(self, val: int):
        # Negative indices are allowed, following numpy indexing semantics.
        if np.issubdtype(type(val), np.integer):
            if np.abs(val) < self.atoms.shape[0]:
                self.__origin_idx = val
            else:
                raise IndexError(
                    f"Supplied origin index {val} is out of bounds for {self.atoms.shape[0]} atom molecule"
                )
        else:
            raise TypeError(f"Supplied origin index is a {type(val)} and must be an integer")

    def transform(self, transformation: BaseTransform, transform_origin=True):
        """Transform molecule with given transformation.

        Args:
            transformation (BaseTransform): transformation to apply to molecule.
            transform_origin (bool): whether to also transform the stored
                origin; ignored (with an optional warning) while the origin
                tracks an atom, since a tracked origin moves with its atom.
        """
        assert isinstance(
            transformation, BaseTransform
        ), "Supplied transformation not transformation object."

        self.set_atoms(self.species, transformation.applyTransformation(self.atoms))

        if transform_origin:
            if self._origin_tracking:
                if self.print_warnings:
                    warnings.warn(
                        f"Requested to transform molecule, but currently origin is set to track an atom. \n Origin will not be transformed. Molecule is currently tracking origin against atom {self._origin_idx}"
                    )
                return
            self.set_origin(point=transformation.applyTransformation(self.origin))

    def set_origin(self, point=None, idx=None) -> None:
        """Set the reference origin to a fixed point or to track a specific atom.

        If both arguments are supplied, `point` wins; if neither is supplied,
        the call is a silent no-op.

        Args:
            point (np.ndarray): fixed origin as a length-3 coordinate.
            idx (int): index of the atom the origin should track.
        """
        if point is not None:
            point = np.array(point).ravel()
            assert point.shape == (3,)
            self._origin_tracking = False
            self._origin = point

        elif idx is not None:
            self._origin_tracking = True
            self._origin_idx = idx

    @property
    def orientation(self):
        """3x3 rotation matrix describing the molecule's reference orientation."""
        return self._orientation

    @orientation.setter
    def orientation(self, mat):
        # check for valid rotation matrix
        # rotation matrix transforms zone axes to global coordinate system
        mat = np.asarray(mat)  # accept nested lists as well as arrays
        if mat.shape != (3, 3):
            raise ValueError(f"Input matrix has shape {mat.shape} but must have shape {(3,3)}.")

        if np.abs(np.linalg.det(mat) - 1.0) > 1e-6:
            raise ValueError("Input (rotation) matrix must have determinant of 1.")

        if np.sum(np.abs(mat @ mat.T - np.eye(3))) > 1e-6:
            raise ValueError(
                "Input (rotation) matrix must be orthogonal."
            )  # TODO: provide info on non-orthogonal vectors

        self._orientation = mat

    def reset_orientation(self):
        """Reset orientation to align with global XYZ. Does not transform molecule."""
        self.orientation = np.eye(3)

    def supply_atoms(self, *args, **kwargs):
        """Return the molecule's atoms and species; generator interface."""
        return self.atoms, self.species

    # def checkIfInterior(self, testPoints: np.ndarray):
    #     ## TODO
    #     # have a minimum bond distance
    #     # perhaps set heuristically to maximum atomic radius for any of the constiuent atoms?
    #     warnings.warn("WARNING: Default behavior for interiority check for molecules not yet implemented. No atoms will be removed from Volume or Scene due to collisions with a higher priority Molecule.")
    #     return np.zeros(testPoints.shape[0], dtype=bool)

    @classmethod
    def from_ase_atoms(cls, atoms):
        """Construct a Molecule from an ASE Atoms object."""
        if isinstance(atoms, Atoms):
            return cls(atoms.get_atomic_numbers(), atoms.get_positions())
        else:
            raise TypeError(f"Supplied atoms are {type(atoms)} and should be an ASE Atoms object")

    @classmethod
    def from_pmg_molecule(cls, atoms):
        """Construct a Molecule from a pymatgen (I)Molecule object."""
        if isinstance(atoms, IMolecule):
            species = [s.number for s in atoms.species]
            return cls(species, atoms.cart_coords)
        else:
            raise TypeError(
                f"Supplied atoms are {type(atoms)} and should be a Pymatgen IMolecule or Molecule object"
            )

    def from_molecule(self, **kwargs):
        """Constructor for new Molecules from existing Molecule object.

        Can be invoked either as mol.from_molecule() or
        Molecule.from_molecule(mol).

        Args:
            **kwargs: "transformation"=List[BaseTransformation] to apply a
                series of transformations to the copied molecule.
        """

        new_molecule = copy.deepcopy(self)

        if "transformation" in kwargs.keys():
            for t in kwargs["transformation"]:
                new_molecule.transform(t)

        return new_molecule
281 |
--------------------------------------------------------------------------------
/czone/surface/adsorbate.py:
--------------------------------------------------------------------------------
1 | """
2 | Loose sketch of algorithm:
3 | 1) Find all atoms on surface with alpha shape
4 | 2) Filter atoms out, e.g., via chemistry or spatial filters
5 | 3) Choose an atom and find approximate surface normal
6 | 4) Rotate target vector in molecule coordinates to align with surface normal.
7 | Target vector is relative to the bonding atom. Default: +Z
8 | 4a) Offer ability to sample
9 | 5) Rotate molecule about surface normal axis
10 | 5a) Offer ability to sample
11 | 6) Set molecule outside of surface with specified bond length
12 | 7) Check for collisions/make sure none of the molecule is in the surface
13 | 8) Accept adsorbate
14 | """
15 |
16 | import warnings
17 | from functools import reduce
18 |
19 | import numpy as np
20 | from pymatgen.core import Element
21 | from scipy.sparse import csr_matrix
22 |
23 | from czone.molecule import Molecule
24 | from czone.transform import Rotation, Translation, rot_v, rot_vtv
25 | from czone.types import BaseVolume
26 |
27 | from .alpha_shape import alpha_shape_alg_3D, alpha_shape_alg_3D_with_sampling
28 |
29 |
def sparse_matrix_from_tri(simplices):
    """Convert array of triangulation simplices into sparse matrix (graph).

    Assumes triangulation is for tetrahedra (4 vertices per simplex).

    Args:
        simplices (np.ndarray): Array (potentially reduced) of vertices
                            representing simplices in Delaunay triangulation,
                            e.g., as returned by scipy.spatial.Delaunay().simplices

    Returns:
        NxN symmetric 0/1 adjacency matrix as a sparse CSR matrix, where N is
        the max index in the triangulation plus one.
    """
    # Build the adjacency directly from COO-style triplets instead of a dense
    # NxN array (fix: the previous dense intermediate was O(N^2) memory).
    rows = []
    cols = []
    for s in simplices:
        for i in range(4):
            for j in range(i + 1, 4):
                # record each undirected edge in both directions
                rows.extend((s[i], s[j]))
                cols.extend((s[j], s[i]))

    N = np.max(simplices) + 1
    mat = csr_matrix((np.ones(len(rows)), (rows, cols)), shape=(N, N))
    # The CSR constructor sums duplicate entries (edges shared between
    # tetrahedra); clamp all stored values back to 1 to match adjacency.
    mat.data[:] = 1.0
    return mat
54 |
55 |
def find_approximate_normal(
    points, decay=0.99, tol=1e-5, margin=0, max_steps=1000, rng=None, **kwargs
):
    """Use modified perceptron algorithm to find approximate surface normal to set of points.

    Assumes points come from local section of alpha-shape, i.e, they are bounded
    by a convex surface and thus all lie within a common half-space.

    Args:
        points (np.ndarray): Nx3 array of points representing local surface
        decay (float): "gradient" weight decay, used to stabilize algorithm
        tol (float): stopping criterion for difference between successive dot
            products of surface normal
        margin (float): half-space margin; a sampled point x updates the guess
            only when w @ x >= margin
        max_steps (int): maximum number of steps to run
        rng (np.random.Generator): RNG used to determine the sequence of points
            chosen to update the guess; a fresh default_rng() if None

    Returns:
        np.ndarray: (3,) normalized vector representing orientation of surface normal
    """

    rng = np.random.default_rng() if rng is None else rng

    A = points  # use matrix notation

    # initialize guess to (nearly) zero; exact zero would make normalization ill-defined
    w = np.ones(3) * 1e-10

    # set sequence of iterates
    sequence = rng.integers(0, A.shape[0], max_steps)

    converging = True
    i = 0
    j = 0
    w_list = []
    while converging:
        # choose random point from set of points
        x = A[sequence[i], :]

        # get new surface normal guess
        if w @ x >= margin:
            w_new = w - (decay**j) * x

            # store dot product against previous guess
            w_list.append((w_new @ w) / (np.linalg.norm(w_new) * np.linalg.norm(w)))

            # update and normalize guess
            w = w_new
            w = w / np.linalg.norm(w)
            j += 1

        i += 1
        if len(w_list) > 5:
            # if the last five updates all changed the direction by less than tol, stop
            if all(c > (1 - tol) for c in w_list[-5:]):
                converging = False

        if i == max_steps:
            break

    return w
117 |
118 |
def get_nearest_neighbors(target_idx, shape_dict, N_shells=3, surface_only=True, **kwargs):
    """Traverse alpha-shape triangulation to get nearest neighbors to surface atom.

    Args:
        target_idx (int): index of atom on surface to which adsorbate is attached
        shape_dict (dict): dictionary of information regarding computed alpha-shape
        N_shells (int): number of nearest neighbor shells to traverse
        surface_only (bool): whether or not to limit search to surface atoms
    Returns:
        List of indices of nearest neighbors of atom.

    """
    # When restricting to the surface, keep atoms belonging to surface
    # tetrahedra rather than solely surface atoms, since roughness is
    # unstable with the alpha shape.
    key = "surface_tris" if surface_only else "a_tris"
    reduced_tri = shape_dict["tri"].simplices[shape_dict[key], :]

    adjacency = sparse_matrix_from_tri(reduced_tri)

    # Seed an indicator vector at the target atom and propagate it through
    # the adjacency graph once per neighbor shell.
    indicator = np.zeros((adjacency.shape[0], 1))
    indicator[target_idx] = 1
    for _ in range(N_shells):
        indicator = adjacency @ indicator

    # Exclude the target atom itself from its own neighbor list.
    indicator[target_idx] = 0
    return np.nonzero(indicator)[0]
149 |
150 |
def add_adsorbate(
    mol: Molecule,
    adsorbate_idx,
    bond_length,
    volume: BaseVolume,
    mol_vector=None,
    mol_rotation: float = 0.0,
    probe_radius=2.5,
    filters=None,
    debug=False,
    use_sampling=True,
    rng=None,
    **kwargs,
):
    """Add adsorbate onto surface of a given volume.

    For filters, accept dictionaries where key:value pairs are:
        element: list of element names or atomic numbers
        mask: boolean mask
        indices: list of indices to include
        spatial: function: (Mx3) float array -> (M,) bool array
    Keys are matched by substring, so several filters of the same type can
    be passed with distinct keys (e.g., "element_0", "element_1").

    Args:
        mol (Molecule): molecule to add as adsorbate
        adsorbate_idx (int): index of atom in molecule which bonds to surface
        bond_length (float): length of bond between molecule and surface
        volume (BaseVolume): volume on whose surface the adsorbate is placed
        mol_vector (np.ndarray): (3,) array representing direction in molecule frame
            to orient in direction of surface normal. Defaults to +z.
        mol_rotation (float): value in radians to rotate molecule about surface normal
        probe_radius (float): probe radius for the alpha-shape surface search
        filters (dict): filters restricting candidate surface atoms; see above
        debug (bool): if True, return placement diagnostics instead of a status flag
        use_sampling (bool): if True, use the sampling variant of the alpha-shape
            algorithm; otherwise use the deterministic variant
        rng (np.random.Generator): random generator; a fresh default_rng() if None
        **kwargs: forwarded to get_nearest_neighbors (e.g., N_shells, surface_only)

    Returns:
        (Molecule, bool): placed molecule and success flag. With debug=True,
        returns (Molecule, target_idx, neighbor indices, neighbor positions).
    """
    # Resolve sentinels so no mutable/shared objects live in the signature
    # (a dict or array default would be shared across all calls).
    mol_vector = np.array([0.0, 0.0, 1.0]) if mol_vector is None else mol_vector
    filters = {} if filters is None else filters
    rng = np.random.default_rng() if rng is None else rng

    mol_out = mol.from_molecule()

    if volume.atoms is None:
        warnings.warn("Input Volume has not populated its atoms. Populating now.")
        volume.populate_atoms()

    ## Find all atoms on surface with alpha shape
    ## TODO: test default probe radius from RDF measurement
    if use_sampling:
        surface_ind, shape_dict = alpha_shape_alg_3D_with_sampling(
            points=volume.atoms,
            probe_radius=probe_radius,
            N_samples=20,
            rng=rng,
            return_alpha_shape=True,
        )
    else:
        surface_ind, shape_dict = alpha_shape_alg_3D(
            points=volume.atoms, probe_radius=probe_radius, return_alpha_shape=True
        )

    valid_indices = np.zeros(volume.atoms.shape[0], dtype=bool)
    valid_indices[surface_ind] = True

    ## Filter atoms out, e.g., via chemistry or spatial filters
    for key, val in filters.items():
        if "element" in key:
            # Accept element symbols or atomic numbers; match any listed species.
            elements = [Element(x).Z if isinstance(x, str) else x for x in val]
            tmp_mask = reduce(
                lambda x, y: np.logical_or(x, y), [volume.species == x for x in elements]
            )
            valid_indices = np.logical_and(valid_indices, tmp_mask)
        elif "mask" in key:
            valid_indices = np.logical_and(valid_indices, val)
        elif "indices" in key:
            tmp_mask = np.zeros(volume.atoms.shape[0], dtype=bool)
            tmp_mask[val] = True
            valid_indices = np.logical_and(valid_indices, tmp_mask)
        elif "spatial" in key:
            tmp_mask = val(volume.atoms)
            valid_indices = np.logical_and(valid_indices, tmp_mask)
        else:
            # Unrecognized filters are skipped; warn so callers notice the bad key.
            warnings.warn(
                "Invalid filter key provided. Must be one of {elementXX, maskXX, indicesXX, spatialXX}."
                f" Key provided: {key}"
            )

    valid_indices = np.nonzero(valid_indices)[0]

    ## Choose target surface atom and find approximate surface normal
    target_idx = rng.choice(valid_indices)

    # grab orientation vectors
    nn_ind = get_nearest_neighbors(target_idx, shape_dict, **kwargs)
    nn_pos = volume.atoms[nn_ind, :] - volume.atoms[target_idx, :]

    # normalize so that all vectors are within the unit sphere
    # and such that closer neighbors have the largest dot products
    nn_pos_norms = np.linalg.norm(nn_pos, axis=1)[:, None]
    nn_pos_min = np.min(nn_pos_norms)
    nn_pos = nn_pos / nn_pos_min / (nn_pos_norms / nn_pos_min) ** 2.0

    # Optimize the margin on the surface normal: restart whenever a strictly
    # better (more negative) margin is found; stop after 10 attempts with no
    # improvement.
    best_margin = 0
    j = 0
    while j < 10:
        w = find_approximate_normal(nn_pos, decay=0.95, tol=1e-4, margin=best_margin, rng=rng)

        margins = [w @ b for b in nn_pos]

        j += 1
        if np.max(margins) < best_margin:
            best_margin = np.max(margins)
            j = 0

    normal = w

    ## Rotate target vector in molecule coordinates to align with surface normal.
    # set origin of molecule to the adsorbing atom
    mol_out.set_origin(idx=adsorbate_idx)
    m_vec = mol_out.orientation @ mol_vector

    # calculate rotation matrix and transform
    rot = Rotation(matrix=rot_vtv(m_vec, normal), origin=mol_out.origin)
    mol_out.transform(rot)

    ## Rotate molecule about surface normal axis
    rot = Rotation(matrix=rot_v(normal, mol_rotation), origin=mol_out.origin)
    mol_out.transform(rot)

    ## Set molecule outside of surface with specified bond length
    new_origin = volume.atoms[target_idx, :] + normal * bond_length
    mol_out.transform(Translation(new_origin - mol_out.origin))

    ## Check for collisions/make sure none of the molecule is in the surface
    if np.all(np.logical_not(volume.checkIfInterior(mol_out.atoms))):
        return (mol_out, target_idx, nn_ind, nn_pos) if debug else (mol_out, True)
    else:
        warnings.warn(
            "Adsorbate placement failed. Molecule landed inside volume. Check input parameters."
        )
        return (mol_out, target_idx, nn_ind, nn_pos) if debug else (mol_out, False)
301 |
--------------------------------------------------------------------------------
/czone/volume/algebraic.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from functools import reduce
4 | from typing import List
5 |
6 | import numpy as np
7 | from scipy.optimize import linprog
8 | from scipy.spatial import Delaunay, HalfspaceIntersection, QhullError
9 |
10 | from czone.types import BaseAlgebraic
11 |
12 | #####################################
13 | ##### Geometric Surface Classes #####
14 | #####################################
15 |
16 |
class Sphere(BaseAlgebraic):
    """Algebraic surface for spheres.

    Interior points are points with distance from center smaller than the radius.

    Attributes:
        radius (float): Radius of sphere.
        center (np.ndarray): (3,) array representing center of sphere in space.
        tol (float): Tolerance value for interiority check. Default is 1e-5.
    """

    def __init__(self, radius: float, center: np.ndarray, tol: float = 1e-5):
        self.radius = radius
        self.center = center

        super().__init__(tol=tol)

    def __repr__(self):
        return (
            f"Sphere(radius={repr(self.radius)}, center={repr(self.center)}, tol={repr(self.tol)})"
        )

    def __eq__(self, other):
        if isinstance(other, Sphere):
            checks = [
                np.allclose(x, y)
                for x, y in zip(
                    [self.radius, self.center, self.tol], [other.radius, other.center, other.tol]
                )
            ]
            return reduce(lambda x, y: x and y, checks)
        else:
            return False

    def checkIfInterior(self, testPoints: np.ndarray) -> np.ndarray:
        """Return a boolean array marking which (N, 3) test points lie inside
        the sphere, within tolerance."""
        return np.sum((testPoints - self.center) ** 2.0, axis=1) < (self.radius + self.tol) ** 2.0

    @property
    def params(self):
        """Return radius, center of Sphere."""
        return self.radius, self.center

    @property
    def radius(self):
        """Radius of sphere."""
        return self._radius

    @radius.setter
    def radius(self, radius: float):
        if radius > 0.0:
            self._radius = float(radius)
        else:
            raise ValueError("Radius needs to be positive valued.")

    @property
    def center(self):
        """Center of sphere in space, stored as a (3,) array."""
        return self._center

    @center.setter
    def center(self, center: np.ndarray):
        center = np.array(center)  # cast to np array if not already
        if center.size != 3:
            raise ValueError("Center must be an array with 3 elements")
        # Store as a flat (3,) vector (consistent with Plane.point) so that
        # broadcasting against (N, 3) point arrays in checkIfInterior is well
        # defined; a (3, 1) center would broadcast incorrectly there.
        self._center = center.reshape((3,))
82 |
83 |
class Plane(BaseAlgebraic):
    """Algebraic surface for planes in R3.

    Interior points lie opposite in direction of plane normal,
    e.g., the point (0, 0, -1) is interior to Plane((0,0,1), (0,0,0))

    Attributes:
        normal (np.ndarray): normal vector describing orientation of plane.
        point (np.ndarray): point lying on plane.
        tol (float): Tolerance value for interiority check. Default is 1e-5.

    """

    def __init__(self, normal: np.ndarray, point: np.ndarray, tol: float = 1e-5):
        self.normal = normal
        self.point = point

        super().__init__(tol=tol)

    def __repr__(self):
        return f"Plane(normal={repr(self.normal)}, point={repr(self.point)}, tol={repr(self.tol)})"

    def __eq__(self, other):
        if isinstance(other, Plane):
            # check collinearity of normals; orientation matters, so
            # antiparallel normals do not compare equal
            check_collinearity = np.isclose(np.dot(self.normal, other.normal), 1.0)

            # check to see if they define the same plane
            check_origin_dist = np.isclose(
                np.dot(self.normal, self.point), np.dot(other.normal, other.point)
            )

            # property check
            check_tol = self.tol == other.tol

            checks = [check_collinearity, check_origin_dist, check_tol]

            return reduce(lambda x, y: x and y, checks)
        else:
            return False

    @property
    def params(self):
        """Return normal vector, point on plane of Plane."""
        return self.normal, self.point

    @property
    def point(self):
        """Point lying on surface of Plane."""
        return self._point

    @point.setter
    def point(self, point: np.ndarray):
        point = np.squeeze(np.array(point))  # cast to np array if not already
        assert point.shape[0] == 3, "Point must be a point in 3D space"
        self._point = point

    @property
    def normal(self):
        """Normal vector defining orientation of Plane in space.

        Stored normalized to unit length.
        """
        return self._normal

    @normal.setter
    def normal(self, normal: np.ndarray):
        normal = np.array(normal)  # cast to np array if not already
        try:
            normal = normal.reshape((3,))
        except ValueError as e:
            # Chain the original reshape failure for easier debugging.
            raise ValueError(f"Normal must be a 3D vector, but has shape {normal.shape}") from e
        if np.linalg.norm(normal) > np.finfo(float).eps:
            self._normal = normal / np.linalg.norm(normal)
        else:
            raise ValueError(
                f"Input normal vector length {np.linalg.norm(normal)} is below machine precision."
            )

    def checkIfInterior(self, testPoints: np.ndarray):
        """Return a boolean array marking which test points lie on the
        interior side of the plane, within tolerance."""
        return self.sdist_from_plane(testPoints) < self.tol

    def flip_orientation(self):
        """Flip the orientation of the plane (in place) and return self."""
        self.normal = -self.normal
        return self

    def sdist_from_plane(self, point: np.ndarray):
        """Calculate the signed distance from a point or series of points to the Plane.

        Arg:
            point (np.ndarray): Point in space to calculate distance.

        Returns:
            Array of signed distances to plane (positive on the normal side).

        """
        # separate into two dot products to avoid an array subtraction against testPoints
        point = point.reshape(-1, 3)
        return np.dot(point, self.normal) - np.dot(self.point, self.normal)

    def dist_from_plane(self, point: np.ndarray):
        """Calculate the distance from a point or series of points to the Plane.

        Arg:
            point (np.ndarray): Point in space to calculate distance.

        Returns:
            Array of distances to plane.

        """
        return np.abs(self.sdist_from_plane(point))

    def project_point(self, point: np.ndarray):
        """Project a point in space onto Plane.

        Arg:
            point (np.ndarray): Point in space to project onto Plane.

        Returns:
            Projected point lying on surface of Plane.
        """
        return point - self.sdist_from_plane(point)[:, None] * self.normal[None, :]
204 |
205 |
class Cylinder(BaseAlgebraic):
    """Algebraic surface for circular cylinders in R3.

    Cylinders are defined with vectors, pointing parallel to central axis;
    points, lying along central axis; and radii, defining size of cylinder.

    Attributes:
        axis (np.ndarray): vector parallel to central axis of cylinder.
        point (np.ndarray): point which lies at the center of the cylinder
        radius (float): radius of cylinder.
        length (float): length of cylinder
        tol (float): Tolerance value for interiority check. Default is 1e-5.
    """

    def __init__(
        self, axis: np.ndarray, point: np.ndarray, radius: float, length: float, tol: float = 1e-5
    ):
        self.axis = axis
        self.point = point
        self.radius = radius
        self.length = length
        super().__init__(tol=tol)

    def __repr__(self):
        return f"Cylinder(axis={repr(self.axis)}, point={repr(self.point)}, radius={repr(self.radius)}, length={repr(self.length)}, tol={repr(self.tol)})"

    def __eq__(self, other):
        if isinstance(other, Cylinder):
            # Axes are stored normalized, and the cylinder is symmetric about
            # its center point, so axis and -axis describe the same surface.
            check_axis = np.allclose(self.axis, other.axis) or np.allclose(
                self.axis, -other.axis
            )
            # Fix: previously compared self.length against self.length, so
            # cylinders of different lengths (or axes) compared equal.
            checks = [
                np.allclose(x, y)
                for x, y in zip(
                    [self.radius, self.length, self.point, self.tol],
                    [other.radius, other.length, other.point, other.tol],
                )
            ]
            return check_axis and reduce(lambda x, y: x and y, checks)
        else:
            return False

    def params(self):
        """Return axis, point, radius, and length of cylinder.

        NOTE(review): unlike Sphere.params and Plane.params, this is a plain
        method rather than a property; kept as a method for backward
        compatibility with callers that invoke it as ``cylinder.params()``.
        """
        return self.axis, self.point, self.radius, self.length

    @property
    def axis(self):
        """Vector lying parallel to central axis, stored normalized."""
        return self._axis

    @axis.setter
    def axis(self, arr):
        arr = np.array(arr).reshape((3,))
        norm = np.linalg.norm(arr)
        if norm <= np.finfo(float).eps:
            raise ValueError(f"Input axis {arr} has norm {norm}, which is below machine precision.")
        self._axis = arr / norm

    @property
    def point(self):
        """Point lying along central axis, at the center of the cylinder."""
        return self._point

    @point.setter
    def point(self, arr):
        self._point = np.array(arr).reshape((3,))

    @property
    def radius(self):
        """Radius of cylinder."""
        return self._radius

    @radius.setter
    def radius(self, val):
        val = float(val)
        if val < np.finfo(float).eps:  # negative or subnormal
            raise ValueError("Radius must be positive but is close to zero or negative.")
        self._radius = val

    @property
    def length(self):
        """Length of cylinder."""
        return self._length

    @length.setter
    def length(self, val):
        val = float(val)
        if val < np.finfo(float).eps:  # negative or subnormal
            raise ValueError("Length must be positive but is close to zero or negative.")
        self._length = val

    def checkIfInterior(self, testPoints):
        """Return a boolean array marking which (N, 3) test points lie inside
        the cylinder, within tolerance."""
        # Radial distance via cross product against the (unit) axis.
        rad_dists = np.linalg.norm(np.cross(testPoints - self.point, self.axis[None, :]), axis=1)

        rad_check = rad_dists < self.radius + self.tol

        # Axial distance from center; cylinder extends length/2 in each direction.
        length_dists = np.abs(np.dot(testPoints - self.point, self.axis))
        length_check = length_dists < self.length / 2.0 + self.tol
        return np.logical_and(rad_check, length_check)

    def get_bounding_box(self):
        """Return 8 vertices of a rectangular prism bounding the cylinder."""
        # make a square inscribing cylinder at center disk
        # any rotation is valid

        # need vz to be normalized to project out vs_0
        vz = np.copy(self.axis.T)[:, None]

        # get vectors perpendicular to axis
        vs_0 = np.array([[1, 1, 1]]).T  # any vector works; fix to make generation stable
        vs_0 = vs_0 / np.linalg.norm(vs_0)
        vs_0 = vs_0 - (vs_0.T @ vz) * vz
        vs_0 = vs_0 / np.linalg.norm(vs_0)

        vs_1 = np.cross(vs_0, vz, axis=0)
        vs_1 = vs_1 / np.linalg.norm(vs_1)

        vs_0 = np.squeeze(vs_0)
        vs_1 = np.squeeze(vs_1)

        # factors of two cancel
        square = self.point + (self.radius) * np.array(
            [
                vs_0 + vs_1,
                vs_0 - vs_1,
                -vs_0 + vs_1,
                -vs_0 - vs_1,
            ]
        )

        # extend square halfway in both directions into rectangular prism
        vz = self.length * vz / 2.0
        return np.vstack([square + vz.T, square - vz.T])
336 |
337 |
338 | #####################################
339 | ######### Utility routines ##########
340 | #####################################
341 |
342 |
def convex_hull_to_planes(points, **kwargs):
    """Convert the convex hull of a set of points into a set of Planes.

    Args:
        points (np.ndarray): (N, 3) array of points.

    Returns:
        List[Plane]: planes bounding the convex hull, oriented so that the
        interior of the hull is interior to every plane.

    Raises:
        AssertionError: if the centroid of the points does not lie within
            the triangulation (hull construction failed).
    """
    tri = Delaunay(points)
    facets = tri.convex_hull

    def facet_to_plane(facet):
        # Normal from two edge vectors of the triangular facet.
        v0 = points[facet[1], :] - points[facet[0], :]
        v1 = points[facet[2], :] - points[facet[0], :]
        n = np.cross(v0, v1)
        return Plane(n, points[facet[0]])

    planes = [facet_to_plane(f) for f in facets]

    # The centroid is guaranteed interior to the hull; use it to orient the
    # planes consistently.
    ipoint = np.mean(points, axis=0)
    if tri.find_simplex(ipoint) < 0:
        raise AssertionError("Centroid of points lies outside the triangulation.")

    for plane in planes:
        if not plane.checkIfInterior(ipoint):
            # flip_orientation mutates the plane in place.
            plane.flip_orientation()

    return planes
366 |
367 |
def get_bounding_box(planes: List[Plane]):
    """Get convex region interior to set of Planes, if one exists.

    Determines if set of planes forms a valid interior convex region. If so,
    returns vertices of convex region. Uses scipy half space intersection and
    linear progamming routines to determine boundaries of convex region and
    valid interior points.

    Args:
        planes (List[Plane]): set of planes to check mutual intersection of.

    Returns:
        np.ndarray: Nx3 array of vertices of convex region.
        status: 0 if successful, 2 if no valid intersection, 3 if intersection is unbounded.
    """

    # Numerical issues were observed when every plane sat in negative
    # coordinate space, so translate the problem so all points lie in
    # positive coordinates; the shift is undone before returning.
    shift = np.zeros_like(planes[0].point)
    for plane in planes:
        shift = np.min([shift, plane.point], axis=0)

    shift = -1.5 * shift

    # Express each (shifted) plane as the half space n.x + d <= 0, tracking
    # normal magnitudes for the feasibility LP below.
    normals, offsets, scales = [], [], []
    for plane in planes:
        n, p = plane.params
        n = n.squeeze()
        normals.append(n)
        offsets.append(-1.0 * np.dot(n, (p + shift).squeeze()))
        scales.append(np.linalg.norm(n))

    A = np.array(normals)
    d = np.array(offsets)[:, None]
    norms = np.array(scales)[:, None]

    # Feasibility check via a Chebyshev-center style LP: maximize the slack
    # appended as a fourth variable; a positive optimum yields a strictly
    # interior point.
    c = np.zeros(4)
    c[-1] = -1

    res = linprog(c, A_ub=np.hstack([A, norms]), b_ub=-1.0 * d)
    if res.status != 0:
        return np.empty((0, 3)), res.status

    try:
        hs = HalfspaceIntersection(np.hstack([A, d]), res.x[:-1])
    except QhullError:
        return np.empty((0, 3)), 2

    return hs.intersections - shift, res.status
416 |
--------------------------------------------------------------------------------