The thermo package is a set of Python tools to interface with the molecular dynamics (MD) simulator GPUMD (and LAMMPS for comparison) for the purpose of thermodynamic simulations (i.e. thermal conductivity).
154 |
Currently, the functionality is limited as it serves specific research purposes at this time; however, the long-term plan is to make this package to primarily serve GPUMD with only minor supporting functions for LAMMPS for the purpose of checking force-consistency between the simulators.
155 |
The documentation is produced and maintained by Alex Gabourie at Stanford University. It outlines the structure and usage of the thermo package.
Loads the forces from either GPUMD or LAMMPS output to facilitate a
183 | comparison between techniques.
184 |
185 |
Args:
186 |
force_file (str):
    Filename with forces
187 |
188 |
sim (str):
    If type == 'LAMMPS':
189 | The file path should be for the LAMMPS output forces
190 | LAMMPS file should be in the format given by the following LAMMPS input command:
191 | force all custom 1 <file> id fx fy fz
192 | If type == 'GPUMD':
193 | the force output file (f.out) path when GPUMD is compiled with the force flag
194 |
195 |
196 |
197 |
Returns:
dict: dictionary containing sorted force vectors
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

REM Default to the sphinx-build found on PATH unless the caller has set
REM SPHINXBUILD to a specific executable.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build

REM No build target supplied: fall through to the Sphinx help listing.
if "%1" == "" goto help

REM Probe that sphinx-build is runnable; errorlevel 9009 means the
REM command was not found on this system.
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

REM Delegate the requested target (html, latex, ...) to Sphinx's -M mode.
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config

# Mock heavy/optional dependencies so autodoc can import the package on
# machines (e.g. documentation builders) where they are not installed.
# NOTE: the coding declaration above must be on the first or second line of
# the file to take effect (PEP 263); it previously appeared after this
# assignment, where it was ignored.
autodoc_mock_imports = ['ase.io', 'numpy', 'scipy.integrate', 'os', 'math',
    'ase', 'pyfftw', 'multiprocessing', 'traceback', 'scipy', 'scipy.io',
    're', 'atomman', 'pandas']
11 |
12 | # -- Path setup --------------------------------------------------------------
13 |
14 | # If extensions (or modules to document with autodoc) are in another directory,
15 | # add these directories to sys.path here. If the directory is relative to the
16 | # documentation root, use os.path.abspath to make it absolute, like shown here.
17 | #
18 | import os
19 | import sys
20 | sys.path.insert(0, os.path.abspath('../..'))
21 | sys.path.insert(0, os.path.abspath('../../thermo/'))
22 |
23 |
24 | # -- Project information -----------------------------------------------------
25 |
26 | project = u'thermo'
27 | copyright = u'2020, Alexander Gabourie'
28 | author = u'Alexander Gabourie'
29 |
30 | # The short X.Y version
31 | version = u''
32 | # The full version, including alpha/beta/rc tags
33 | release = u''
34 |
35 |
36 | # -- General configuration ---------------------------------------------------
37 |
38 | # If your documentation needs a minimal Sphinx version, state it here.
39 | #
40 | # needs_sphinx = '1.0'
41 |
42 | # Add any Sphinx extension module names here, as strings. They can be
43 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
44 | # ones.
45 | extensions = [
46 | 'sphinx.ext.autodoc',
47 | 'sphinx.ext.viewcode',
48 | ]
49 |
50 | # Add any paths that contain templates here, relative to this directory.
51 | templates_path = ['_templates']
52 |
53 | # The suffix(es) of source filenames.
54 | # You can specify multiple suffix as a list of string:
55 | #
56 | # source_suffix = ['.rst', '.md']
57 | source_suffix = '.rst'
58 |
59 | # The master toctree document.
60 | master_doc = 'index'
61 |
62 | # The language for content autogenerated by Sphinx. Refer to documentation
63 | # for a list of supported languages.
64 | #
65 | # This is also used if you do content translation via gettext catalogs.
66 | # Usually you set "language" from the command line for these cases.
67 | language = None
68 |
69 | # List of patterns, relative to source directory, that match files and
70 | # directories to ignore when looking for source files.
71 | # This pattern also affects html_static_path and html_extra_path.
72 | exclude_patterns = []
73 |
74 | # The name of the Pygments (syntax highlighting) style to use.
75 | pygments_style = None
76 |
77 |
78 | # -- Options for HTML output -------------------------------------------------
79 |
80 | # The theme to use for HTML and HTML Help pages. See the documentation for
81 | # a list of builtin themes.
82 | #
83 | html_theme = 'sphinx_rtd_theme'
84 |
85 | # Theme options are theme-specific and customize the look and feel of a theme
86 | # further. For a list of options available for each theme, see the
87 | # documentation.
88 | #
89 | html_theme_options = {
90 | 'navigation_depth' : 4
91 | }
92 |
93 | # Add any paths that contain custom static files (such as style sheets) here,
94 | # relative to this directory. They are copied after the builtin static files,
95 | # so a file named "default.css" will overwrite the builtin "default.css".
96 | html_static_path = []
97 |
98 | # Custom sidebar templates, must be a dictionary that maps document names
99 | # to template names.
100 | #
101 | # The default sidebars (for documents that don't match any pattern) are
102 | # defined by theme itself. Builtin themes are using these templates by
103 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
104 | # 'searchbox.html']``.
105 | #
106 | # html_sidebars = {}
107 |
108 |
109 | # -- Options for HTMLHelp output ---------------------------------------------
110 |
111 | # Output file base name for HTML help builder.
112 | htmlhelp_basename = 'thermodoc'
113 |
114 |
115 | # -- Options for LaTeX output ------------------------------------------------
116 |
117 | latex_elements = {
118 | # The paper size ('letterpaper' or 'a4paper').
119 | #
120 | # 'papersize': 'letterpaper',
121 |
122 | # The font size ('10pt', '11pt' or '12pt').
123 | #
124 | # 'pointsize': '10pt',
125 |
126 | # Additional stuff for the LaTeX preamble.
127 | #
128 | # 'preamble': '',
129 |
130 | # Latex figure (float) alignment
131 | #
132 | # 'figure_align': 'htbp',
133 | }
134 |
135 | # Grouping the document tree into LaTeX files. List of tuples
136 | # (source start file, target name, title,
137 | # author, documentclass [howto, manual, or own class]).
138 | latex_documents = [
139 | (master_doc, 'thermo.tex', u'thermo Documentation',
140 | u'Alexander Gabourie', 'manual'),
141 | ]
142 |
143 |
144 | # -- Options for manual page output ------------------------------------------
145 |
146 | # One entry per manual page. List of tuples
147 | # (source start file, name, description, authors, manual section).
148 | man_pages = [
149 | (master_doc, 'thermo', u'thermo Documentation',
150 | [author], 1)
151 | ]
152 |
153 |
154 | # -- Options for Texinfo output ----------------------------------------------
155 |
156 | # Grouping the document tree into Texinfo files. List of tuples
157 | # (source start file, target name, title, author,
158 | # dir menu entry, description, category)
159 | texinfo_documents = [
160 | (master_doc, 'thermo', u'thermo Documentation',
161 | author, 'thermo', 'One line description of project.',
162 | 'Miscellaneous'),
163 | ]
164 |
165 |
166 | # -- Options for Epub output -------------------------------------------------
167 |
168 | # Bibliographic Dublin Core info.
169 | epub_title = project
170 |
171 | # The unique identifier of the text. This can be a ISBN number
172 | # or the project homepage.
173 | #
174 | # epub_identifier = ''
175 |
176 | # A unique identification for the text.
177 | #
178 | # epub_uid = ''
179 |
180 | # A list of files that should not be packed into the epub file.
181 | epub_exclude_files = ['search.html']
182 |
183 |
184 | # -- Extension configuration -------------------------------------------------
185 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | thermo - A GPUMD Helper Package
2 | ===========================================
3 |
4 | The thermo, or `thermo`_, package is a set of `Python`_ tools to interface with the molecular dynamics (MD) simulator `GPUMD`_ (and `LAMMPS`_ for comparison) for the purpose of thermodynamic simulations (i.e. thermal conductivity).
5 |
6 | Currently, the functionality is limited as it serves specific research purposes at this time; however, the long-term plan is to make this package to primarily serve `GPUMD`_ with only minor supporting functions for `LAMMPS`_ for the purpose of checking force-consistency between the simulators.
7 |
8 | The documentation is produced and maintained by `Alex Gabourie `_ at Stanford University. It outlines the structure and usage of the `thermo`_ package.
9 |
10 | Documentation
11 | -------------
12 |
13 | | This package contains four subpackages:
14 | | 1. **gpumd** : Python interface specific to `GPUMD`_.
15 | | 2. **lammps** : Python interface specific to `LAMMPS`_.
16 | | 3. **shared** : Used strictly to compare `GPUMD`_ and `LAMMPS`_.
17 | | 4. **tools** : Extra support for more general MD related content.
18 |
19 | ..
20 | Subpackages
21 | -----------
22 |
23 | .. toctree::
24 |
25 | thermo.gpumd
26 | thermo.lammps
27 | thermo.shared
28 | thermo.tools
29 |
30 | ..
31 | Module contents
32 | ---------------
33 |
34 | .. automodule:: thermo
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | .. _thermo: https://github.com/AlexGabourie/thermo
40 | .. _GPUMD: https://github.com/brucefan1983/GPUMD
41 | .. _LAMMPS: https://lammps.sandia.gov/
42 | .. _Python: https://www.python.org/
43 |
44 |
45 | * :ref:`genindex`
46 |
47 | .. * :ref:`modindex`
48 | .. * :ref:`search`
49 |
--------------------------------------------------------------------------------
/docs/source/thermo.gpumd.rst:
--------------------------------------------------------------------------------
1 | gpumd
2 | ====================
3 |
4 | ..
5 | Submodules
6 | ----------
7 |
8 | Calculations
9 | ------------------------
10 |
11 | .. automodule:: thermo.gpumd.calc
12 | :members:
13 | :undoc-members:
14 | :show-inheritance:
15 |
16 | Data Loaders
17 | ------------------------
18 |
19 | .. automodule:: thermo.gpumd.data
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
24 | Input/Output
25 | ----------------------
26 |
27 | .. automodule:: thermo.gpumd.io
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
32 | Preprocessing
33 | ---------------------------
34 |
35 | .. automodule:: thermo.gpumd.preproc
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
40 |
41 | ..
42 | Module contents
43 | ---------------
44 |
45 | .. automodule:: thermo.gpumd
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
--------------------------------------------------------------------------------
/docs/source/thermo.lammps.rst:
--------------------------------------------------------------------------------
1 | lammps
2 | =====================
3 |
4 | ..
5 | Submodules
6 | ----------
7 |
8 | thermo.lammps.calc module
9 | -------------------------
10 |
11 | Calculations
12 | -------------------------
13 |
14 | .. automodule:: thermo.lammps.calc
15 | :members:
16 | :undoc-members:
17 | :show-inheritance:
18 |
19 | Data Loaders
20 | -------------------------
21 |
22 | .. automodule:: thermo.lammps.data
23 | :members:
24 | :undoc-members:
25 | :show-inheritance:
26 |
27 | Input/Output
28 | -----------------------
29 |
30 | .. automodule:: thermo.lammps.io
31 | :members:
32 | :undoc-members:
33 | :show-inheritance:
34 |
35 | ..
36 | Module contents
37 | ---------------
38 |
39 | .. automodule:: thermo.lammps
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 |
--------------------------------------------------------------------------------
/docs/source/thermo.shared.rst:
--------------------------------------------------------------------------------
1 | shared
2 | =====================
3 |
4 | ..
5 | Submodules
6 | ----------
7 |
8 | Force Comparison
9 | --------------------------
10 |
11 | .. automodule:: thermo.shared.force
12 | :members:
13 | :undoc-members:
14 | :show-inheritance:
15 |
16 | ..
17 | Module contents
18 | ---------------
19 |
20 | .. automodule:: thermo.shared
21 | :members:
22 | :undoc-members:
23 | :show-inheritance:
24 |
--------------------------------------------------------------------------------
/docs/source/thermo.tools.rst:
--------------------------------------------------------------------------------
1 | tools
2 | =====================
3 |
4 | ..
5 | Submodules
6 | ----------
7 |
8 | Lennard Jones
9 | --------------------------
10 |
11 | .. automodule:: thermo.tools.lj
12 | :members:
13 | :undoc-members:
14 | :show-inheritance:
15 |
16 | ..
17 | Module contents
18 | ---------------
19 |
20 | .. automodule:: thermo.tools
21 | :members:
22 | :undoc-members:
23 | :show-inheritance:
24 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | matplotlib
2 | pyfftw
3 | scipy
4 | numpy
5 | ase>=3.20.1
6 | pandas
7 | atomman==1.2.3
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Install script for the thermo package."""

from distutils.core import setup
import setuptools

setup(name='thermo',
      version='0.3',
      description='MD thermal properties functions',
      author='Alexander Gabourie',
      author_email='gabourie@stanford.edu',
      packages=setuptools.find_packages(),
      include_package_data=True,
      # Keep in sync with requirements.txt: numpy and pandas are required
      # by the package (and listed in requirements.txt) but were missing
      # from install_requires.
      install_requires=['matplotlib',
                        'pyfftw',
                        'scipy',
                        'numpy',
                        'pandas',
                        'ase>=3.20.1',
                        'atomman==1.2.3'],
      )
--------------------------------------------------------------------------------
/thermo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hityingph/thermo/08faef51ad4333e816cadf838a224773152421fc/thermo/__init__.py
--------------------------------------------------------------------------------
/thermo/data/UFF.params:
--------------------------------------------------------------------------------
1 | (dp0
2 | S'Ru'
3 | p1
4 | (F0.0024283898184332205
5 | F2.6397329018498255
6 | tp2
7 | sS'Re'
8 | p3
9 | (F0.002862030857439153
10 | F2.631714813386562
11 | tp4
12 | sS'Ra'
13 | p5
14 | (F0.017519097975839663
15 | F3.2758345866020275
16 | tp6
17 | sS'Rb'
18 | p7
19 | (F0.001734564156023729
20 | F3.6651573264293558
21 | tp8
22 | sS'Rn'
23 | p9
24 | (F0.01075429776734712
25 | F4.245132391938716
26 | tp10
27 | sS'Rh'
28 | p11
29 | (F0.0022982975067314406
30 | F2.6094423454330538
31 | tp12
32 | sS'Be'
33 | p13
34 | (F0.003685948831550424
35 | F2.4455169812952313
36 | tp14
37 | sS'Ba'
38 | p15
39 | (F0.015784533819815934
40 | F3.2989979532736764
41 | tp16
42 | sS'Bi'
43 | p17
44 | (F0.02246260582050729
45 | F3.893227398273283
46 | tp18
47 | sS'Bk'
48 | p19
49 | (F0.0005637333507077119
50 | F2.9747108198705927
51 | tp20
52 | sS'Br'
53 | p21
54 | (F0.010884390079048898
55 | F3.731974730289881
56 | tp22
57 | sS'H'
58 | p23
59 | (F0.0019080205716261018
60 | F2.5711337005530193
61 | tp24
62 | sS'P'
63 | p25
64 | (F0.013226051689680933
65 | F3.694556984127987
66 | tp26
67 | sS'Os'
68 | p27
69 | (F0.0016044718443219493
70 | F2.7796040005978586
71 | tp28
72 | sS'Es'
73 | p29
74 | (F0.0005203692468071187
75 | F2.939074871144979
76 | tp30
77 | sS'Hg'
78 | p31
79 | (F0.016695180001728392
80 | F2.4098810325696176
81 | tp32
82 | sS'Ge'
83 | p33
84 | (F0.01643499537832483
85 | F3.813046513640652
86 | tp34
87 | sS'Gd'
88 | p35
89 | (F0.000390276935105339
90 | F3.0005468826966624
91 | tp36
92 | sS'Ga'
93 | p37
94 | (F0.017996103118746186
95 | F3.9048090816091072
96 | tp38
97 | sS'Pr'
98 | p39
99 | (F0.00043364103900593224
100 | F3.2125807776140634
101 | tp40
102 | sS'Pt'
103 | p41
104 | (F0.003469128312047458
105 | F2.4535350697584946
106 | tp42
107 | sS'Pu'
108 | p43
109 | (F0.0006938256624094916
110 | F3.0504372109125217
111 | tp44
112 | sS'C'
113 | p45
114 | (F0.0045532309095622885
115 | F3.4308509635584463
116 | tp46
117 | sS'Pb'
118 | p47
119 | (F0.02875040088609331
120 | F3.828191791849038
121 | tp48
122 | sS'Pa'
123 | p49
124 | (F0.0009540102858130509
125 | F3.0504372109125217
126 | tp50
127 | sS'Pd'
128 | p51
129 | (F0.002081476987228475
130 | F2.5827153838888437
131 | tp52
132 | sS'Cd'
133 | p53
134 | (F0.009887015689335255
135 | F2.537279549263686
136 | tp54
137 | sS'Po'
138 | p55
139 | (F0.014093333767692798
140 | F4.195242063722858
141 | tp56
142 | sS'Pm'
143 | p57
144 | (F0.000390276935105339
145 | F3.1600177532437836
146 | tp58
147 | sS'Ho'
148 | p59
149 | (F0.00030354872730415256
150 | F3.0370737301404165
151 | tp60
152 | sS'Hf'
153 | p61
154 | (F0.003122215480842712
155 | F2.798312873678806
156 | tp62
157 | sS'K'
158 | p63
159 | (F0.001517743636520763
160 | F3.396105913550973
161 | tp64
162 | sS'He'
163 | p65
164 | (F0.0024283898184332205
165 | F2.1043027722474816
166 | tp66
167 | sS'Md'
168 | p67
169 | (F0.00047700514290652544
170 | F2.916802403191471
171 | tp68
172 | sS'Mg'
173 | p69
174 | (F0.004813415532965848
175 | F2.6914050275019648
176 | tp70
177 | sS'Mo'
178 | p71
179 | (F0.0024283898184332205
180 | F2.7190228877643157
181 | tp72
182 | sS'Mn'
183 | p73
184 | (F0.0005637333507077119
185 | F2.6379511044135446
186 | tp74
187 | sS'O'
188 | p75
189 | (F0.0026018462340355935
190 | F3.1181455134911875
191 | tp76
192 | sS'S'
193 | p77
194 | (F0.011881764468762544
195 | F3.594776327696269
196 | tp78
197 | sS'W'
198 | p79
199 | (F0.002905394961339746
200 | F2.7341681659727013
201 | tp80
202 | sS'Zn'
203 | p81
204 | (F0.00537714888367356
205 | F2.4615531582217574
206 | tp82
207 | sS'Eu'
208 | p83
209 | (F0.0003469128312047458
210 | F3.111909222464205
211 | tp84
212 | sS'Zr'
213 | p85
214 | (F0.0029921231691409328
215 | F2.78316759547042
216 | tp86
217 | sS'Er'
218 | p87
219 | (F0.00030354872730415256
220 | F3.0210375532138904
221 | tp88
222 | sS'Ni'
223 | p89
224 | (F0.0006504615585088984
225 | F2.5248069672097215
226 | tp90
227 | sS'No'
228 | p91
229 | (F0.00047700514290652544
230 | F2.893639036519822
231 | tp92
232 | sS'Na'
233 | p93
234 | (F0.0013009231170177968
235 | F2.6575508762126323
236 | tp94
237 | sS'Nb'
238 | p95
239 | (F0.002558482130135
240 | F2.819694442914174
241 | tp96
242 | sS'Nd'
243 | p97
244 | (F0.00043364103900593224
245 | F3.184962917351713
246 | tp98
247 | sS'Ne'
248 | p99
249 | (F0.0018212923638249155
250 | F2.88918454292912
251 | tp100
252 | sS'Np'
253 | p101
254 | (F0.0008239179741112712
255 | F3.0504372109125217
256 | tp102
257 | sS'Fr'
258 | p103
259 | (F0.002168205195029661
260 | F4.365403718887663
261 | tp104
262 | sS'Fe'
263 | p105
264 | (F0.0005637333507077119
265 | F2.5942970672246677
266 | tp106
267 | sS'Fm'
268 | p107
269 | (F0.0005203692468071187
270 | F2.927493187809155
271 | tp108
272 | sS'B'
273 | p109
274 | (F0.00780553870210678
275 | F3.6375394661670053
276 | tp110
277 | sS'F'
278 | p111
279 | (F0.002168205195029661
280 | F2.996983287824101
281 | tp112
282 | sS'Sr'
283 | p113
284 | (F0.010190564416639406
285 | F3.2437622327489755
286 | tp114
287 | sS'N'
288 | p115
289 | (F0.0029921231691409328
290 | F3.260689308393642
291 | tp116
292 | sS'Kr'
293 | p117
294 | (F0.00954010285813051
295 | F3.689211591819145
296 | tp118
297 | sS'Si'
298 | p119
299 | (F0.017432369768038476
300 | F3.826409994412757
301 | tp120
302 | sS'Sn'
303 | p121
304 | (F0.024587446911636356
305 | F3.9128271700723705
306 | tp122
307 | sS'Sm'
308 | p123
309 | (F0.0003469128312047458
310 | F3.1359634878539944
311 | tp124
312 | sS'V'
313 | p125
314 | (F0.0006938256624094916
315 | F2.8009855698332267
316 | tp126
317 | sS'Sc'
318 | p127
319 | (F0.0008239179741112712
320 | F2.935511276272418
321 | tp128
322 | sS'Sb'
323 | p129
324 | (F0.019470482651366357
325 | F3.9377723341802997
326 | tp130
327 | sS'Se'
328 | p131
329 | (F0.012618954235072628
330 | F3.746229109780127
331 | tp132
332 | sS'Co'
333 | p133
334 | (F0.0006070974546083051
335 | F2.5586611184990544
336 | tp134
337 | sS'Cm'
338 | p135
339 | (F0.0005637333507077119
340 | F2.9631291365347683
341 | tp136
342 | sS'Cl'
343 | p137
344 | (F0.009843651585434663
345 | F3.5163772404999194
346 | tp138
347 | sS'Ca'
348 | p139
349 | (F0.010320656728341187
350 | F3.0281647429590133
351 | tp140
352 | sS'Cf'
353 | p141
354 | (F0.0005637333507077119
355 | F2.9515474531989443
356 | tp142
357 | sS'Ce'
358 | p143
359 | (F0.0005637333507077119
360 | F3.1680358417070464
361 | tp144
362 | sS'Xe'
363 | p145
364 | (F0.014396882494996951
365 | F3.923517954690054
366 | tp146
367 | sS'Lu'
368 | p147
369 | (F0.0017779282599243223
370 | F3.242871334030835
371 | tp148
372 | sS'Cs'
373 | p149
374 | (F0.001951384675526695
375 | F4.024189509839913
376 | tp150
377 | sS'Cr'
378 | p151
379 | (F0.0006504615585088984
380 | F2.6931868249382456
381 | tp152
382 | sS'Cu'
383 | p153
384 | (F0.00021682051950296612
385 | F3.113691019900486
386 | tp154
387 | sS'La'
388 | p155
389 | (F0.0007371897663100849
390 | F3.1377452852902747
391 | tp156
392 | sS'Li'
393 | p157
394 | (F0.0010841025975148306
395 | F2.183592758161972
396 | tp158
397 | sS'Lw'
398 | p159
399 | (F0.00047700514290652544
400 | F2.882948251902138
401 | tp160
402 | sS'Tl'
403 | p161
404 | (F0.029487590652403393
405 | F3.872736727756055
406 | tp162
407 | sS'Tm'
408 | p163
409 | (F0.00026018462340355935
410 | F3.005892275005505
411 | tp164
412 | sS'Th'
413 | p165
414 | (F0.0011274667014154237
415 | F3.025492046804592
416 | tp166
417 | sS'Ti'
418 | p167
419 | (F0.0007371897663100849
420 | F2.828603430095577
421 | tp168
422 | sS'Te'
423 | p169
424 | (F0.017258913352436105
425 | F3.982317270087316
426 | tp170
427 | sS'Tb'
428 | p171
429 | (F0.00030354872730415256
430 | F3.074491476302311
431 | tp172
432 | sS'Tc'
433 | p173
434 | (F0.002081476987228475
435 | F2.6709143569847376
436 | tp174
437 | sS'Ta'
438 | p175
439 | (F0.003512492415948051
440 | F2.8241489365048755
441 | tp176
442 | sS'Yb'
443 | p177
444 | (F0.009887015689335255
445 | F2.9889651993608384
446 | tp178
447 | sS'Dy'
448 | p179
449 | (F0.00030354872730415256
450 | F3.054000805785083
451 | tp180
452 | sS'I'
453 | p181
454 | (F0.014700431222301105
455 | F4.009044231631527
456 | tp182
457 | sS'U'
458 | p183
459 | (F0.0009540102858130509
460 | F3.0246011480864516
461 | tp184
462 | sS'Y'
463 | p185
464 | (F0.003122215480842712
465 | F2.980056212179435
466 | tp186
467 | sS'Ac'
468 | p187
469 | (F0.0014310154287195764
470 | F3.0985457416921003
471 | tp188
472 | sS'Ag'
473 | p189
474 | (F0.001561107740421356
475 | F2.8045491647057883
476 | tp190
477 | sS'Ir'
478 | p191
479 | (F0.003165579584743305
480 | F2.5301523595185635
481 | tp192
482 | sS'Am'
483 | p193
484 | (F0.0006070974546083051
485 | F3.012128566032487
486 | tp194
487 | sS'Al'
488 | p195
489 | (F0.021898872469799577
490 | F4.008153332913386
491 | tp196
492 | sS'As'
493 | p197
494 | (F0.013399508105283306
495 | F3.7685015777336357
496 | tp198
497 | sS'Ar'
498 | p199
499 | (F0.008022359221609746
500 | F3.4459962417668324
501 | tp200
502 | sS'Au'
503 | p201
504 | (F0.0016912000521231358
505 | F2.9337294788361374
506 | tp202
507 | sS'At'
508 | p203
509 | (F0.012315405507768475
510 | F4.231768911166611
511 | tp204
512 | sS'In'
513 | p205
514 | (F0.02597509823645534
515 | F3.976080979060334
516 | tp206
517 | s.
--------------------------------------------------------------------------------
/thermo/data/readme.md:
--------------------------------------------------------------------------------
1 | # Data Used With Thermo Package
2 | * UFF.params: Stores all LJ information from [Rappe's UFF paper](https://pubs.acs.org/doi/abs/10.1021/ja00051a040). Can be loaded with gpumd.data subpackage (location subject to change later).
--------------------------------------------------------------------------------
/thermo/gpumd/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['io', 'preproc', 'data', 'calc']
2 |
--------------------------------------------------------------------------------
/thermo/gpumd/calc.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from scipy.integrate import cumtrapz
3 | from .common import __get_direction, __get_path
4 | from thermo.math.correlate import corr
5 | from scipy import integrate
6 |
7 | __author__ = "Alexander Gabourie"
8 | __email__ = "gabourie@stanford.edu"
9 |
10 |
def __scale_gpumd_tc(vol, T):
    """
    Conversion factor for turning GPUMD heat-flux correlation integrals
    into thermal conductivity.

    Args:
        vol (float):
            Volume in angstroms^3

        T (float):
            Temperature in K

    Returns:
        float: Converted value
    """
    # eV^3/amu -> J m^2/s^2 * eV
    energy_scale = 1.602176634e-19 * 9.651599e7
    # fs -> s
    time_scale = 1. / 1.e15
    # K/(eV*Ang^3) -> K/(eV*m^3), folding in Boltzmann's constant
    volume_temp_scale = 1.e30 / 8.617333262145e-5
    return energy_scale * time_scale * volume_temp_scale / (T * T * vol)
31 |
32 |
def get_gkma_kappa(data, nbins, nsamples, dt, sample_interval, T=300, vol=1, max_tau=None, directions='xyz',
                   outputfile='heatmode.npy', save=False, directory=None, return_data=True):
    """
    Calculate the Green-Kubo thermal conductivity from modal heat current data from 'load_heatmode'

    Args:
        data (dict):
            Dictionary with heat currents loaded by 'load_heatmode'

        nbins (int):
            Number of bins used during the GPUMD simulation

        nsamples (int):
            Number of times heat flux was sampled with GKMA during GPUMD simulation

        dt (float):
            Time step during data collection in fs

        sample_interval (int):
            Number of time steps per sample of modal heat flux

        T (float):
            Temperature of system during data collection

        vol (float):
            Volume of system in angstroms^3

        max_tau (float):
            Correlation time to calculate up to. Units of ns

        directions (str):
            Directions to gather data from. Any order of 'xyz' is accepted. Excluding directions also allowed (i.e. 'xz'
            is accepted)

        outputfile (str):
            File name to save read data to. Output file is a binary dictionary. Loading from a binary file is much
            faster than re-reading data files and saving is recommended

        save (bool):
            Toggle saving data to binary dictionary. Loading from save file is much faster and recommended

        directory (str):
            Name of directory storing the input file to read

        return_data (bool):
            Toggle returning the loaded modal heat flux data. If this is False, the user should ensure that
            save is True

    Raises:
        ValueError: If a requested direction's modal heat-current entries are missing from `data`.

    Returns:
        dict: Input data dict but with correlation, thermal conductivity, and lag time data included

    .. csv-table:: Output dictionary (new entries)
       :stub-columns: 1

       **key**,tau,kmxi,kmxo,kmyi,kmyo,kmz,jmxijx,jmxojx,jmyijy,jmyojy,jmzjz
       **units**,ns,|gk1|,|gk1|,|gk1|,|gk1|,|gk1|,|gk2|,|gk2|,|gk2|,|gk2|,|gk2|

    .. |gk1| replace:: Wm\ :sup:`-1` K\ :sup:`-1` *x*\ :sup:`-1`
    .. |gk2| replace:: eV\ :sup:`3` amu\ :sup:`-1` *x*\ :sup:`-1`

    Here *x* is the size of the bins in THz. For example, if there are 4 bins per THz, *x* = 0.25 THz.
    """
    out_path = __get_path(directory, outputfile)
    scale = __scale_gpumd_tc(vol, T)
    # set the heat flux sampling time: rate * timestep * scaling
    srate = sample_interval * dt  # [fs]

    # Calculate total time
    tot_time = srate * (nsamples - 1)  # [fs]

    # set the integration limit (i.e. tau)
    if max_tau is None:
        max_tau = tot_time  # [fs]
    else:
        max_tau = max_tau * 1e6  # [ns] -> [fs]

    max_lag = int(np.floor(max_tau / srate))
    size = max_lag + 1
    # lag-time axis; in [fs] here, converted to [ns] just before returning
    data['tau'] = np.squeeze(np.linspace(0, max_lag * srate, max_lag + 1))

    ### AUTOCORRELATION ###
    directions = __get_direction(directions)
    cplx = np.complex128
    # Note: loops necessary due to memory constraints
    # (can easily max out cluster mem.)
    if 'x' in directions:
        if 'jmxi' not in data.keys() or 'jmxo' not in data.keys():
            raise ValueError("x direction data is missing")

        # total x heat flux: sum of in-plane and out-of-plane modal currents over all bins
        jx = np.sum(data['jmxi']+data['jmxo'], axis=0)
        data['jmxijx'] = np.zeros((nbins, size))
        data['jmxojx'] = np.zeros((nbins, size))
        data['kmxi'] = np.zeros((nbins, size))
        data['kmxo'] = np.zeros((nbins, size))
        for m in range(nbins):
            # per-bin modal current correlated against total flux, then
            # integrated (Green-Kubo) and scaled to W/m/K units
            data['jmxijx'][m, :] = corr(data['jmxi'][m, :].astype(cplx), jx.astype(cplx), max_lag)
            data['kmxi'][m, :] = integrate.cumtrapz(data['jmxijx'][m, :], data['tau'], initial=0) * scale

            data['jmxojx'][m, :] = corr(data['jmxo'][m, :].astype(cplx), jx.astype(cplx), max_lag)
            data['kmxo'][m, :] = integrate.cumtrapz(data['jmxojx'][m, :], data['tau'], initial=0) * scale
        del jx

    if 'y' in directions:
        if 'jmyi' not in data.keys() or 'jmyo' not in data.keys():
            raise ValueError("y direction data is missing")

        jy = np.sum(data['jmyi']+data['jmyo'], axis=0)
        data['jmyijy'] = np.zeros((nbins, size))
        data['jmyojy'] = np.zeros((nbins, size))
        data['kmyi'] = np.zeros((nbins, size))
        data['kmyo'] = np.zeros((nbins, size))
        for m in range(nbins):
            data['jmyijy'][m, :] = corr(data['jmyi'][m, :].astype(cplx), jy.astype(cplx), max_lag)
            data['kmyi'][m, :] = integrate.cumtrapz(data['jmyijy'][m, :], data['tau'], initial=0) * scale

            data['jmyojy'][m, :] = corr(data['jmyo'][m, :].astype(cplx), jy.astype(cplx), max_lag)
            data['kmyo'][m, :] = integrate.cumtrapz(data['jmyojy'][m, :], data['tau'], initial=0) * scale
        del jy

    if 'z' in directions:
        if 'jmz' not in data.keys():
            raise ValueError("z direction data is missing")

        # z has no in/out split, so only one correlation per bin
        jz = np.sum(data['jmz'], axis=0)
        data['jmzjz'] = np.zeros((nbins, size))
        data['kmz'] = np.zeros((nbins, size))
        for m in range(nbins):
            data['jmzjz'][m, :] = corr(data['jmz'][m, :].astype(cplx), jz.astype(cplx), max_lag)
            data['kmz'][m, :] = integrate.cumtrapz(data['jmzjz'][m, :], data['tau'], initial=0) * scale
        del jz

    data['tau'] = data['tau'] / 1.e6  # [fs] -> [ns]

    if save:
        np.save(out_path, data)

    if return_data:
        return data
    return
172 |
173 |
def running_ave(kappa, time):
    """
    Compute the running time-average of a sampled thermal conductivity.

    Integrates kappa over time (cumulative trapezoid rule) and divides by the
    elapsed time, yielding the running average at each sample point.

    Note:
        If time[0] == 0 the first element is 0/0, which produces nan with a
        numpy warning (unchanged from the original behavior).

    Args:
        kappa (ndarray): Raw thermal conductivity
        time (ndarray): Time vector that kappa was sampled at

    Returns:
        ndarray: Running average of kappa input
    """
    # scipy renamed cumtrapz -> cumulative_trapezoid and removed the old
    # name in scipy >= 1.14; prefer the new name, fall back for old scipy.
    try:
        from scipy.integrate import cumulative_trapezoid as _cumtrapz
    except ImportError:  # scipy < 1.6
        from scipy.integrate import cumtrapz as _cumtrapz
    return _cumtrapz(kappa, time, initial=0) / time
186 |
187 |
def hnemd_spectral_kappa(shc, Fe, T, V):
    """
    Spectral thermal conductivity calculation from an SHC run

    Args:
        shc (dict):
            The data from a single SHC run as output by thermo.gpumd.data.load_shc

        Fe (float):
            HNEMD force in (1/A)

        T (float):
            HNEMD run temperature (K)

        V (float):
            Volume (A^3) during HNEMD run

    Returns:
        dict: Same as shc argument, but with spectral thermal conductivity included

    .. csv-table:: Output dictionary (new entries)
        :stub-columns: 1

        **key**,kwi,kwo
        **units**,|sh3|,|sh3|

    .. |sh3| replace:: Wm\ :sup:`-1` K\ :sup:`-1` THz\ :sup:`-1`
    """
    if 'jwi' not in shc.keys() or 'jwo' not in shc.keys():
        raise ValueError("shc argument must be from load_shc and contain in/out heat currents.")

    # ev*A/ps/THz * 1/A^3 *1/K * A ==> W/m/K/THz
    convert = 1602.17662
    shc['kwi'] = shc['jwi'] * convert / (Fe * T * V)
    shc['kwo'] = shc['jwo'] * convert / (Fe * T * V)
    # shc is updated in place; also return it so the documented
    # "Returns: dict" contract actually holds (previously returned None)
    return shc
223 |
--------------------------------------------------------------------------------
/thermo/gpumd/common.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 |
4 | __author__ = "Alexander Gabourie"
5 | __email__ = "gabourie@stanford.edu"
6 |
7 |
def __get_direction(directions):
    """
    Creates a sorted list showing which directions the user asked for. Ex: 'xyz' -> ['x', 'y', 'z']

    Args:
        directions (str):
            A string containing the directions the user wants to process (Ex: 'xyz', 'zy', 'x')

    Returns:
        list(str): An ordered list that simplifies the user input for future processing

    Raises:
        ValueError: If directions is empty, longer than 3 characters, or
            contains characters other than 'x', 'y', 'z'.
    """
    # The original negated the OR of all three conditions, which silently
    # accepted empty strings; each invalid condition must raise on its own.
    if (len(directions) == 0 or len(directions) > 3
            or not re.match('^[xyz]+$', directions)):
        raise ValueError('Invalid directions used.')
    return sorted(list(set(directions)))
25 |
26 |
def __get_path(directory, filename):
    """Join *filename* onto *directory*, defaulting to the current working directory."""
    base = directory if directory else os.getcwd()
    return os.path.join(base, filename)
31 |
32 |
def __check_list(data, varname=None, dtype=None):
    """
    Checks if data is a list of dtype or turns a variable of dtype into a list

    Args:
        data:
            Data to check

        varname (str):
            Name of variable to check (used in error messages)

        dtype (type):
            Data type to check data against

    Returns:
        list(dtype)

    Raises:
        ValueError: If data is neither a dtype instance nor a list of dtype.
    """
    # exact type check (not isinstance) kept so e.g. bool is not accepted
    # where int is required
    if type(data) == dtype:
        return [data]

    if type(data) == list:
        for elem in data:
            if not type(elem) == dtype:
                # Previously this raise was skipped entirely when varname was
                # None, silently returning an invalid list; always raise.
                raise ValueError('All entries for {} must be {}.'.format(str(varname), str(dtype)))
        return data

    raise ValueError('{} is not the correct type.'.format(str(varname)))
61 |
62 |
def __check_range(npoints, maxpoints):
    """
    Checks if requested points are valid

    Args:
        npoints (list(int)):
            Points to check

        maxpoints (int):
            Maximum number of points to read

    Returns:
        None
    """
    # total request cannot exceed what the file holds
    if sum(npoints) > maxpoints:
        raise ValueError("More data requested than exists.")
    # every individual request must be at least one point
    if any(points < 1 for points in npoints):
        raise ValueError("Only strictly positive numbers are allowed.")
--------------------------------------------------------------------------------
/thermo/gpumd/preproc.py:
--------------------------------------------------------------------------------
1 | from .common import __check_list, __check_range
2 | from numpy import prod
3 |
4 | __author__ = "Alexander Gabourie"
5 | __email__ = "gabourie@stanford.edu"
6 |
7 |
8 | #########################################
9 | # Structure preprocessing
10 | #########################################
11 |
def __get_group(split, pos, direction):
    """
    Gets the group that an atom belongs to based on its position. Only works in
    one direction as it is used for NEMD.

    Args:
        split (list(float)):
            List of boundaries. First element should be lower boundary of
            sim. box in specified direction and the last the upper.

        pos (list(float)):
            3-component position of the atom

        direction (str):
            Which direction the split will work ('x', 'y', anything else -> z)

    Returns:
        int: Group of atom, or -1 (with a printed error message) when the
        position falls outside the boundaries.
    """
    axis = 0 if direction == 'x' else (1 if direction == 'y' else 2)
    coord = pos[axis]
    errmsg = 'Out of bounds error: {}'.format(coord)
    # below the lowest boundary -> out of bounds
    if split and coord < split[0]:
        print(errmsg)
        return -1
    # locate the half-open interval [split[i], split[i+1]) containing coord
    for i in range(len(split) - 1):
        if split[i] <= coord < split[i + 1]:
            return i
    # at or above the upper boundary -> out of bounds
    print(errmsg)
    return -1
47 |
48 |
def __init_index(index, info, num_atoms):
    """
    Initializes the index key for the info dict.

    The last atom is tracked under the sentinel key -1 until
    __handle_end duplicates it to num_atoms-1.

    Args:
        index (int):
            Index of atom in the Atoms object.

        info (dict):
            Dictionary that stores the velocity, and groups.

        num_atoms (int):
            Number of atoms in the Atoms object.

    Returns:
        int: The (possibly remapped) index used as the info key.

    """
    key = -1 if index == num_atoms - 1 else index
    # create an empty entry only when the key is new
    info.setdefault(key, dict())
    return key
72 |
73 |
def __handle_end(info, num_atoms):
    """
    Duplicates the index -1 entry for key that's num_atoms-1. Works in-place.

    Args:
        info (dict):
            Dictionary that stores the velocity, and groups.

        num_atoms (int):
            Number of atoms in the Atoms object.

    """
    # alias (not copy) the sentinel entry under the real final index
    last = num_atoms - 1
    info[last] = info[-1]
87 |
88 |
def add_group_by_position(split, atoms, direction):
    """
    Assigns groups to all atoms based on its position. Only works in
    one direction as it is used for NEMD.
    Returns a bookkeeping parameter, but atoms will be updated in-place.

    Args:
        split (list(float)):
            List of boundaries. First element should be lower boundary of sim.
            box in specified direction and the last the upper.

        atoms (ase.Atoms):
            Atoms to group

        direction (str):
            Which direction the split will work.

    Returns:
        list(int): A list of number of atoms in each group.

    """
    info = atoms.info
    counts = [0] * (len(split) - 1)
    num_atoms = len(atoms)
    for index, atom in enumerate(atoms):
        # the final atom is keyed as -1 until __handle_end duplicates it
        index = __init_index(index, info, num_atoms)
        i = __get_group(split, atom.position, direction)
        if 'groups' in info[index]:
            info[index]['groups'].append(i)
        else:
            info[index]['groups'] = [i]
        # NOTE(review): __get_group returns -1 for out-of-bounds atoms, so
        # counts[-1] silently increments the LAST group's tally here —
        # confirm whether out-of-bounds atoms should instead be an error.
        counts[i] += 1
    __handle_end(info, num_atoms)
    atoms.info = info
    return counts
124 |
125 |
def add_group_by_type(atoms, types):
    """
    Assigns groups to all atoms based on atom types. Returns a
    bookkeeping parameter, but atoms will be updated in-place.

    Args:
        atoms (ase.Atoms):
            Atoms to group

        types (dict):
            Dictionary with types for keys and group as a value.
            Only one group allowed per atom. Assumed groups are integers
            starting at 0 and increasing in steps of 1. Ex. range(0,10).

    Returns:
        list(int): A list of number of atoms in each group.

    """
    # atom symbol checking
    all_symbols = list(types)
    # check that symbol set matches symbol set of atoms
    if set(atoms.get_chemical_symbols()) - set(all_symbols):
        raise ValueError('Group symbols do not match atoms symbols.')
    # NOTE(review): dict keys are always unique, so this check can never
    # fail — it presumably intended to validate something else; confirm.
    if not len(set(all_symbols)) == len(all_symbols):
        raise ValueError('Group not assigned to all atom types.')

    # number of distinct group labels across all types
    num_groups = len(set([types[sym] for sym in set(all_symbols)]))
    num_atoms = len(atoms)
    info = atoms.info
    counts = [0] * num_groups
    for index, atom in enumerate(atoms):
        # the final atom is keyed as -1 until __handle_end duplicates it
        index = __init_index(index, info, num_atoms)
        group = types[atom.symbol]
        counts[group] += 1
        if 'groups' in info[index]:
            info[index]['groups'].append(group)
        else:
            info[index]['groups'] = [group]
    __handle_end(info, num_atoms)
    atoms.info = info
    return counts
167 |
168 |
def set_velocities(atoms, custom=None):
    """
    Sets the 'velocity' part of the atoms to be used in GPUMD.
    Custom velocities must be provided. They must also be in
    the units of eV^(1/2) amu^(-1/2).

    Args:
        atoms (ase.Atoms):
            Atoms to assign velocities to.

        custom (list(list)):
            list of len(atoms) with each element made from
            a 3-element list for [vx, vy, vz]

    Raises:
        ValueError: If no velocities are given, the count does not match
            the number of atoms, or any velocity is not 3 components.
    """
    if not custom:
        raise ValueError("No velocities provided.")

    num_atoms = len(atoms)
    info = atoms.info
    if not len(custom) == num_atoms:
        # was `return ValueError(...)`, which handed callers an exception
        # object instead of raising it, silently skipping the assignment
        raise ValueError('Incorrect number of velocities for number of atoms.')
    for index, velocity in enumerate(custom):
        if not len(velocity) == 3:
            # was also `return ValueError(...)` — must raise
            raise ValueError('Three components of velocity not provided.')
        index = __init_index(index, info, num_atoms)
        info[index]['velocity'] = velocity
    __handle_end(info, num_atoms)
    atoms.info = info
198 |
199 |
def __init_index2(index, info):  # TODO merge this with other __init_index function
    """Ensure *index* has an entry dict in *info*, creating an empty one if absent."""
    info.setdefault(index, dict())
203 |
204 |
def add_basis(atoms, index=None, mapping=None):
    """
    Assigns a basis index for each atom in atoms. Updates atoms.

    Args:
        atoms (ase.Atoms):
            Atoms to assign basis to.

        index (list(int)):
            Atom indices of those in the unit cell. Order is important.

        mapping (list(int)):
            Mapping of all atoms to the relevant basis positions

    """
    num = atoms.get_global_number_of_atoms()
    info = atoms.info
    info['unitcell'] = list()
    if index:
        # explicit unit cell: a full atom -> basis mapping must accompany it
        if (mapping is None) or (len(mapping) != num):
            raise ValueError("Full atom mapping required if index is provided.")
        info['unitcell'].extend(index)
        for atom_idx in range(num):
            __init_index2(atom_idx, info)
            info[atom_idx]['basis'] = mapping[atom_idx]
    else:
        # if no index provided, assume atoms is the unit cell itself
        for atom_idx in range(num):
            info['unitcell'].append(atom_idx)
            __init_index2(atom_idx, info)
            info[atom_idx]['basis'] = atom_idx
238 |
239 |
def repeat(atoms, rep):
    """
    A wrapper of ase.Atoms.repeat that is aware of GPUMD's basis information.

    Args:
        atoms (ase.Atoms):
            Atoms to repeat.

        rep (int | list(3 ints)):
            List of three positive integers or a single integer

    Returns:
        ase.Atoms: The repeated supercell with per-atom basis info copied.
    """
    rep = __check_list(rep, varname='rep', dtype=int)
    if len(rep) == 1:
        # a single factor applies to all three dimensions
        rep = rep * 3
    elif len(rep) != 3:
        raise ValueError("rep must be a sequence of 1 or 3 integers.")
    __check_range(rep, 2 ** 64)
    supercell = atoms.repeat(rep)
    sinfo = supercell.info
    ainfo = atoms.info
    num = atoms.get_global_number_of_atoms()
    total_images = prod(rep, dtype=int)
    # image 0 keeps the original per-atom entries; copy basis tags to the rest
    for image in range(1, total_images):
        for atom_idx in range(num):
            sinfo[image * num + atom_idx] = {'basis': ainfo[atom_idx]['basis']}

    return supercell
268 |
--------------------------------------------------------------------------------
/thermo/lammps/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['calc', 'data', 'io']
2 |
--------------------------------------------------------------------------------
/thermo/lammps/calc.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os
3 | from scipy import integrate
4 | from math import floor
5 | import scipy.io as sio
6 | from thermo.math.correlate import autocorr
7 |
8 | __author__ = "Alexander Gabourie"
9 | __email__ = "gabourie@stanford.edu"
10 |
11 |
def __metal_to_SI(vol, T):
    """
    Converts LAMMPS metal units to SI units for thermal conductivity calculations.

    Args:
        vol (float):
            Volume in angstroms^3

        T (float):
            Temperature in K

    Returns:
        float: Converted value
    """
    kb = 1.38064852e-23  # Boltzmann constant, m3*kg/(s2*K)
    vol_m3 = vol / (1.0e10) ** 3  # A^3 -> m^3
    # eV2*ns/(ps2*A4) to J2/(s*m4)
    to_SI = (1.602e-19) ** 2. * 1.0e12 * (1.0e10) ** 4.0 * 1000.
    return vol_m3 * to_SI / (kb * T ** 2)
31 |
32 |
def get_heat_flux(directory='.', heatflux_file='heat_out.heatflux', mat_file='heat_flux.mat'):
    """
    Gets the heat flux from a LAMMPS EMD simulation. Creates a compressed .mat
    file if only in text form. Loads .mat form if exists.

    Args:
        directory (str):
            Directory of simulation results

        heatflux_file (str):
            Filename of heatflux output

        mat_file (str):
            MATLAB file to load, if exists, or save to, if does not exist.
            Default save name of 'heat_flux.mat'

    Returns:
        dict: Dictionary with heat flux data

    .. csv-table:: Output dictionary (metal units)
        :stub-columns: 1

        **key**,jx,jy,jz,rate
        **units**,|j1|,|j1|,|j1|,timestep

    .. |j1| replace:: eV ps\ :sup:`-1` A\ :sup:`-2`
    """
    heatflux_file = os.path.join(directory, heatflux_file)
    mat_file = os.path.join(directory, mat_file)

    # Check that directory exists
    if not os.path.isdir(directory):
        raise IOError('The path: {} is not a directory.'.format(directory))

    # Return the cached .mat data if the text file was already converted
    if os.path.isfile(mat_file) and mat_file.endswith('.mat'):
        return sio.loadmat(mat_file)

    # Continue with the import since .mat file does not exist
    if not os.path.isfile(heatflux_file):
        # heatflux_file already includes the directory; the old message
        # prepended the directory a second time, garbling the path
        raise IOError('The file: \'{}\' is not found.'.format(heatflux_file))

    # Read the file, skipping the two header lines
    with open(heatflux_file, 'r') as hf_file:
        lines = hf_file.readlines()[2:]

    num_elem = len(lines)

    # Sampling interval in timesteps, taken from the first record's step
    # number (assumes output starts at step == interval — TODO confirm)
    rate = int(lines[0].split()[0])

    # read all data; columns are: step, jx, jy, jz
    jx = np.zeros(num_elem)
    jy = np.zeros(num_elem)
    jz = np.zeros(num_elem)
    for i, line in enumerate(lines):
        vals = line.split()
        jx[i] = float(vals[1])
        jy[i] = float(vals[2])
        jz[i] = float(vals[3])

    output = {'jx': jx, 'jy': jy, 'jz': jz, 'rate': rate}
    # cache the parsed data for fast reloads
    sio.savemat(mat_file, output)
    return output
97 |
98 |
def get_GKTC(directory='.', T=300, vol=1, dt=None, rate=None, tau=None,
             heatflux_file='heat_out.heatflux', mat_file='heat_flux.mat'):
    """
    Calculates the thermal conductivity (TC) using the Green-Kubo (GK) formalism.
    The 'metal' units in LAMMPS must be used.

    Args:
        directory (string):
            Directory of simulation

        T (float):
            Temperature of simulation. Units of K

        vol (float):
            Volume of the simulation cell. Units of A^3

        dt (float):
            Timestep of the of simulation. Units are fs. Default of 1 fs.

        rate (int):
            Rate at which the heat flux is sampled in number of timesteps.
            Default of rate=1 (sampled every timestep).

        tau (int):
            max lag time to integrate over. Units of ns and default of tau=total time

        heatflux_file (str):
            Heatflux output filename.

        mat_file (str):
            MATLAB file to load, if exists, or save to, if does not exist.
            Default save name of 'heat_flux.mat'

    Returns:
        dict: Dictionary with Green-Kubo thermal conductivity data

    .. csv-table:: Output dictionary
        :stub-columns: 1

        **key**,kx,ky,kz,t,dt,T,V,jxjx,jyjy,jzjz,tot_time,tau,srate,directory
        **units**,|gk1|,|gk1|,|gk1|,ns,fs,K,|gk2|,|gk3|,|gk3|,|gk3|,ns,ns,ns,N/A

    .. |gk1| replace:: Wm\ :sup:`-1` K\ :sup:`-1`
    .. |gk2| replace:: A\ :sup:`3`
    .. |gk3| replace:: (eV ps\ :sup:`-1` A\ :sup:`-2`)\ :sup:`2`
    """
    # Check that directory exists
    if not os.path.isdir(directory):
        raise IOError('The path: {} is not a directory.'.format(directory))

    # get heat flux, pass args
    hf = get_heat_flux(directory, heatflux_file, mat_file)
    jx = np.squeeze(hf['jx'])
    jy = np.squeeze(hf['jy'])
    jz = np.squeeze(hf['jz'])

    scale = __metal_to_SI(vol, T)

    # Set timestep if not set
    if dt is None:
        dt = 1.0e-6  # [ns]
    else:
        dt = dt * 1.0e-6  # [fs] -> [ns]

    # Default to sampling every timestep; previously a missing rate raised
    # a TypeError on the multiplication below
    if rate is None:
        rate = 1

    # set the heat flux sampling rate: rate*timestep*scaling
    srate = rate * dt  # [ns]

    # Calculate total time
    tot_time = srate * (len(jx) - 1)  # [ns]

    # set the integration limit (i.e. tau)
    if tau is None:
        tau = tot_time  # [ns]

    max_lag = int(floor(tau / srate))
    t = np.squeeze(np.linspace(0, max_lag * srate, max_lag + 1))  # [ns]

    jxjx = autocorr(np.squeeze(jx).astype(np.complex128), max_lag)
    jyjy = autocorr(np.squeeze(jy).astype(np.complex128), max_lag)
    jzjz = autocorr(np.squeeze(jz).astype(np.complex128), max_lag)

    kx = integrate.cumtrapz(jxjx, t, initial=0) * scale
    ky = integrate.cumtrapz(jyjy, t, initial=0) * scale
    kz = integrate.cumtrapz(jzjz, t, initial=0) * scale

    # convert the returned timestep back to fs; the original `dt /= 1e6`
    # went the wrong direction for the documented [fs] units
    dt *= 1e6  # [ns] -> [fs]

    return {'kx': kx, 'ky': ky, 'kz': kz, 't': t, 'directory': directory,
            'dt': dt, 'tot_time': tot_time, 'tau': tau, 'T': T,
            'V': vol, 'srate': srate, 'jxjx': jxjx, 'jyjy': jyjy, 'jzjz': jzjz}
188 |
--------------------------------------------------------------------------------
/thermo/lammps/data.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 |
4 | __author__ = "Alexander Gabourie"
5 | __email__ = "gabourie@stanford.edu"
6 |
7 |
def __get_path(directory, filename):
    """Build the path to *filename*, using the current directory when none is given."""
    if directory:
        return os.path.join(directory, filename)
    return os.path.join(os.getcwd(), filename)
12 |
13 |
def __process_box(out, filehandle):
    """
    Processes the lines of a trajectory file dedicated to the box size and shape

    Args:
        out (dict):
            Stores all relevant box information

        filehandle (TextIOWrapper):
            The trajectory file handle
    """
    # Each bounds line is "lo hi [tilt]"; the tilt value is present only
    # for triclinic boxes
    for pair in [('x', 'xy'), ('y', 'xz'), ('z', 'yz')]:
        data = [float(i) for i in filehandle.readline().split()]
        out[pair[0]].append(data[1] - data[0])
        triclinic = len(data) == 3
        out[pair[1]].append(data[2] if triclinic else None)

    if triclinic:
        # the cross/dot products need the latest scalar entries; the
        # original passed whole history lists for y, z, and yz
        a = np.array([out['x'][-1], 0, 0])
        b = np.array([out['xy'][-1], out['y'][-1], 0])
        c = np.array([out['xz'][-1], out['yz'][-1], out['z'][-1]])
        Avec = np.cross(a, b)
        out['A'].append(np.linalg.norm(Avec))
        out['V'].append(np.abs(np.dot(Avec, c)))
    else:
        out['A'].append(out['x'][-1] * out['y'][-1])
        out['V'].append(out['A'][-1] * out['z'][-1])
42 |
43 |
def get_dimensions(filename, directory=None):
    """
    Gets the dimensions of a 3D simulation from a LAMMPS trajectory.

    Args:
        filename (str):
            LAMMPS trajectory file to extract dimensions from

        directory (str):
            The directory the trajectory file is found in

    Returns:
        dict: Dictionary with keys given in the table below

    .. csv-table:: Output dictionary
        :stub-columns: 1

        **key**, x, y, z, A, V, xy, xz, yz
        **units**,|d1|,|d1|,|d1|,|d2|,|d3|,|d1|,|d1|,|d1|

    .. |d1| replace:: distance
    .. |d2| replace:: distance\ :sup:`2`
    .. |d3| replace:: distance\ :sup:`3`

    """
    trjpath = __get_path(directory, filename)
    out = {label: [] for label in ('x', 'y', 'z', 'A', 'V', 'xy', 'xz', 'yz')}

    with open(trjpath) as f:
        line = f.readline()
        while line:
            # a 'BOX BOUNDS' header precedes each trio of bounds lines
            if 'BOX' in line:
                __process_box(out, f)
            line = f.readline()
    return out
82 |
83 |
def extract_dt(log_file):
    """
    Finds all time steps given in the lammps output log

    Args:
        log_file (str):
            LAMMPS log file to examine

    Returns:
        list(float): The timesteps found in log_file in units of time
    """
    timesteps = list()
    if not os.path.isfile(log_file):
        print(log_file, 'not found')
        return timesteps

    with open(log_file, 'r') as log:
        lines = log.readlines()

    for line in lines:
        elements = line.split()
        # matching lines look like "Time step     : <value>"
        if len(elements) > 0 and ' '.join(elements[0:2]) == 'Time step':
            timesteps.append(float(elements[3]))
    if len(timesteps) == 0:
        print('No timesteps found in', log_file)

    return timesteps
110 |
--------------------------------------------------------------------------------
/thermo/lammps/io.py:
--------------------------------------------------------------------------------
1 | import atomman
2 | from ase import Atom
3 |
4 | __author__ = "Alexander Gabourie"
5 | __email__ = "gabourie@stanford.edu"
6 |
7 |
def ase_atoms_to_lammps(atoms, out_file='atoms.data', add_masses=True):
    """
    Converts ASE atoms to a lammps data file.

    Args:
        atoms (ase.Atoms):
            Atoms to write to lammps data file

        out_file (str):
            File to save the structure data to

        add_masses (Bool):
            Determines if atom masses are written to data file

    """
    sys = atomman.load_ase_Atoms(atoms)
    elem = sys.symbols
    # write data file
    atomman.dump_atom_data(sys, out_file)

    if add_masses:
        # Write block of string for mass inclusion
        mass_str = 'Masses\n\n'
        for i, element in enumerate(elem):
            mass_str += '{} {}\n'.format(str(i + 1), str(Atom(element).mass))

        mass_str += '\n'

        # add the mass string to the correct part of the datafile
        with open(out_file, 'r') as f:
            lines = f.readlines()

        for i, line in enumerate(lines):
            if 'Atoms' in line:
                break

        # insert the Masses section just before the 'Atoms' header; the
        # original inserted at len(lines)-1, ignoring the index found above
        # and dumping the section into the middle of the atom data
        lines.insert(i, mass_str)

        with open(out_file, 'w') as f:
            f.write(''.join(lines))
48 |
--------------------------------------------------------------------------------
/thermo/math/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/thermo/math/correlate.py:
--------------------------------------------------------------------------------
1 | import pyfftw
2 | import multiprocessing
3 | import numpy as np
4 |
5 |
def autocorr(f, max_lag):
    """
    Computes a fast autocorrelation function and returns up to max_lag.

    FFT-based (Wiener-Khinchin) approach: FFT the zero-padded signal,
    multiply by its own conjugate, and inverse-FFT the product.

    Args:
        f (ndarray):
            Vector for autocorrelation (expected to be complex-typed;
            callers cast before passing)

        max_lag (int):
            Lag at which to calculate up to

    Returns:
        ndarray: Autocorrelation vector

    """
    N = len(f)
    # number of overlapping samples at each lag (unbiased normalization)
    d = N - np.arange(N)
    # zero-pad to 2N to avoid circular-correlation artifacts
    # https://dsp.stackexchange.com/questions/741/why-should-i-zero-pad-a-signal-before-taking-the-fourier-transform
    f = np.pad(f, (0, N), 'constant', constant_values=(0, 0))
    # output buffer dtype mirrors the input element type
    fvi = np.zeros(2*N, dtype=type(f[0]))
    fwd = pyfftw.FFTW(f, fvi, flags=('FFTW_ESTIMATE',), threads=multiprocessing.cpu_count())
    fwd()
    # power spectrum: conj(F) * F
    inv_arg = np.conjugate(fvi)*fvi
    acf = np.zeros_like(inv_arg)
    # NOTE(review): assumes pyfftw's backward transform applies the 1/(2N)
    # normalization (pyfftw's default) — confirm if results look scaled
    rev = pyfftw.FFTW(inv_arg, acf, direction='FFTW_BACKWARD',
                      flags=('FFTW_ESTIMATE', ), threads=multiprocessing.cpu_count())
    rev()
    # keep the first N lags and apply the 1/(N-lag) normalization
    acf = acf[:N]/d
    return np.real(acf[:max_lag+1])
35 |
36 |
def corr(f, g, max_lag):
    """
    Computes fast correlation function and returns up to max_lag. Assumes f and g are same length.

    FFT-based cross-correlation: FFT both zero-padded inputs, multiply
    conj(F) by G, and inverse-FFT the product.

    Args:
        f (ndarray):
            Vector for correlation (expected to be complex-typed;
            callers cast before passing)

        g (ndarray):
            Vector for correlation (same length and typing as f)

        max_lag (int):
            Lag at which to calculate up to

    Returns:
        ndarray: Correlation vector

    Raises:
        ValueError: If f and g have different lengths.
    """
    if not len(f) == len(g):
        raise ValueError('corr arguments must be the same length.')

    N = len(f)
    # number of overlapping samples at each lag (unbiased normalization)
    d = N - np.arange(N)
    # zero-pad to 2N to avoid circular-correlation artifacts
    f = np.pad(f, (0, N), 'constant', constant_values=(0, 0))
    g = np.pad(g, (0, N), 'constant', constant_values=(0, 0))

    # output buffers; dtype mirrors the input element type
    fvi = np.zeros(2*N, dtype=type(f[0]))
    gvi = np.zeros(2*N, dtype=type(g[0]))

    fwd_f = pyfftw.FFTW(f, fvi, flags=('FFTW_ESTIMATE',), threads=multiprocessing.cpu_count())
    fwd_f()

    fwd_g = pyfftw.FFTW(g, gvi, flags=('FFTW_ESTIMATE',), threads=multiprocessing.cpu_count())
    fwd_g()

    # cross-spectrum: conj(F) * G
    inv_arg = np.conjugate(fvi)*gvi
    cf = np.zeros_like(inv_arg)
    # NOTE(review): assumes pyfftw's backward transform applies the 1/(2N)
    # normalization (pyfftw's default) — confirm if results look scaled
    rev = pyfftw.FFTW(inv_arg, cf, direction='FFTW_BACKWARD',
                      flags=('FFTW_ESTIMATE', ), threads=multiprocessing.cpu_count())
    rev()
    # keep the first N lags and apply the 1/(N-lag) normalization
    cf = cf[:N]/d
    return np.real(cf[:max_lag+1])
79 |
--------------------------------------------------------------------------------
/thermo/shared/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['force']
--------------------------------------------------------------------------------
/thermo/shared/force.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | __author__ = "Alexander Gabourie"
4 | __email__ = "gabourie@stanford.edu"
5 |
def load_forces(force_file, sim):
    '''
    Loads the forces from either GPUMD or LAMMPS output to facilitate a
    comparison between techniques.

    Args:
        force_file (str):
            Filename with forces

        sim (str):
            If sim == 'LAMMPS':
                The file path should be for the LAMMPS output forces.
                LAMMPS file should be in the format given by the following LAMMPS input command:
                force all custom 1 <file> id fx fy fz
            If sim == 'GPUMD':
                the force output file (f.out) path when GPUMD is compiled with the force flag

    Returns:
        dict: dictionary containing sorted force vectors (keys 'xf', 'yf', 'zf')

    Raises:
        ValueError: If sim is neither 'LAMMPS' nor 'GPUMD'.
    '''

    # Load force outputs
    if sim == 'LAMMPS':
        # LAMMPS
        with open(force_file, 'r') as f:
            llines = f.readlines()

        # remove header info (9 lines in a LAMMPS dump)
        llines = llines[9:]

        # process atomic forces, sorting rows by the 1-based atom id in
        # column 0 so the output order matches GPUMD's atom order
        n = len(llines)
        xf, yf, zf = np.zeros(n), np.zeros(n), np.zeros(n)
        for line in llines:
            num = line.split()
            ID = int(num[0]) - 1
            xf[ID] = float(num[1])
            yf[ID] = float(num[2])
            zf[ID] = float(num[3])

    elif sim == 'GPUMD':
        # GPUMD: one "fx fy fz" row per atom, already in atom order
        with open(force_file, 'r') as f:
            glines = f.readlines()

        # process atomic forces
        xf, yf, zf = list(), list(), list()
        for line in glines:
            num = line.split()
            xf.append(float(num[0]))
            yf.append(float(num[1]))
            zf.append(float(num[2]))

        xf = np.array(xf)
        yf = np.array(yf)
        zf = np.array(zf)
    else:
        raise ValueError('Invalid simulation type passed. Forces not extracted.')

    # fill return dictionary
    out = dict()
    out['xf'] = xf
    out['yf'] = yf
    out['zf'] = zf
    return out
72 |
def compare_forces(f1_dict, f2_dict):
    '''
    Compares the LAMMPS and GPUMD forces and returns dictionary of comparison
    Forces are dict2 - dict1 values.

    Args:
        f1_dict (dict):
            dictionary containing extracted forces from a GPUMD or
            LAMMPS simulation

        f2_dict (dict):
            dictionary containing extracted forces from a GPUMD or
            LAMMPS simulation

    Returns:
        dict: comparison dictionary with per-axis difference vectors
        ('xdiff', 'ydiff', 'zdiff') and their norms ('xnorm', 'ynorm', 'znorm')

    '''
    out = dict()
    # per-axis difference vectors first, then their Euclidean norms,
    # preserving the original key insertion order
    for axis in ('x', 'y', 'z'):
        out[axis + 'diff'] = f2_dict[axis + 'f'] - f1_dict[axis + 'f']
    for axis in ('x', 'y', 'z'):
        out[axis + 'norm'] = np.linalg.norm(out[axis + 'diff'])
    return out
100 |
--------------------------------------------------------------------------------
/thermo/tools/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['lj']
2 |
--------------------------------------------------------------------------------
/thermo/tools/lj.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import os
3 |
4 | __author__ = "Alexander Gabourie"
5 | __email__ = "gabourie@stanford.edu"
6 |
7 | ###################################
8 | # UFF
9 | ###################################
10 |
def load_UFF():
    """
    Loads dictionary that stores relevant LJ from UFF.

    Returns:
        dict:
            Dictionary with atom symbols as the key and a tuple of epsilon and
            sigma in units of eV and Angstroms, respectively.
    """
    # params file ships with the package, two levels up from this module
    path = os.path.abspath(os.path.join(__file__, '../../data/UFF.params'))
    # use a context manager so the file handle is closed deterministically
    # (the original left it open until garbage collection)
    with open(path, 'rb') as f:
        return pickle.load(f)
22 |
23 | #################################
24 | # Lorentz-Berthelot Mixing
25 | #################################
26 |
def lb_mixing(a1, a2):
    """
    Applies Lorentz-Berthelot mixing rules on two atoms.

    Args:
        a1 (tuple):
            Tuple of (epsilon, sigma)

        a2 (tuple):
            Tuple of (epsilon, sigma)

    Returns:
        tuple: The mixed (epsilon, sigma) pair.
    """
    eps1, sigma1 = a1[0], a1[1]
    eps2, sigma2 = a2[0], a2[1]
    # geometric mean for epsilon, arithmetic mean for sigma
    return (eps1 * eps2) ** 0.5, (sigma1 + sigma2) / 2.
41 |
42 | #################################
43 | # LJ Object
44 | #################################
45 |
46 | class LJ(object):
47 | """ Stores all atoms for a simulation with their LJ parameters.
48 |
49 | A special dictionary with atom symbols for keys and the epsilon and
50 | sigma LJ parameters for the values. This object interfaces with the UFF
51 | LJ potential parameters but can also accept arbitrary parameters.
52 |
53 | Args:
54 | symbols (str or list(str)):
55 | Optional input. A single symbol or a list of symbols to add to
56 | the initial LJ list.
57 |
58 | ignore_pairs (list(sets)):
59 | List of sets where each set has two elements. Each element
60 | is a string for the symbol of the atom to ignore in that pair.
61 | Order in set is not important.
62 |
63 | cut_scale (float):
64 | Specifies the multiplicative factor to use on the sigma parameter
65 | to define the cutoffs. Default is 2.5.
66 | """
67 |
    def __init__(self, symbols=None, ignore_pairs=None, cut_scale=2.5):
        # symbol -> (epsilon, sigma) parameter store
        self.ljdict = dict()
        # pairs (two-element sets of symbols) excluded from output
        self.ignore = list()
        # cutoff bookkeeping (populated by other methods of this class)
        self.cutoff = dict()
        self.global_cutoff = None
        # multiplier on sigma used to derive default cutoffs
        self.cut_scale = cut_scale
        if symbols:
            self.add_UFF_params(symbols)

        if ignore_pairs:
            # NOTE(review): this calls self.ignore_pairs(...), but only an
            # ignore_pair method is visible in this file — confirm that a
            # plural ignore_pairs method exists further down the class.
            self.ignore_pairs(ignore_pairs)
79 |
80 | def add_UFF_params(self, symbols, replace=False):
81 | """
82 | Adds UFF parameters to the LJ object. Will replace existing parameters
83 | if 'replace' is set to True. UFF parameters are loaded from the package.
84 |
85 | Args:
86 | symbols (str or list(str)):
87 | A single symbol or a list of symbols to add to the initial LJ list.
88 |
89 | replace (bool):
90 | Whether or not to replace existing symbols
91 | """
92 | if type(symbols) == str:
93 | symbols = [symbols] # convert to list if one string
94 | UFF = load_UFF()
95 | for symbol in symbols:
96 | if not replace and symbol in self.ljdict:
97 | print("Warning: {} is already in LJ list and".format(symbol) +\
98 | " will not be included.\nTo include, use " + \
99 | "replace_UFF_params or toggle 'replace' boolean.\n")
100 | else:
101 | self.ljdict[symbol] = UFF[symbol]
102 |
103 | def replace_UFF_params(self, symbols, add=False):
104 | """
105 | Replaces current LJ parameters with UFF values. Will add new entries if
106 | 'add' is set to True. UFF parameters are loaded from the package.
107 |
108 | Args:
109 | symbols (str or list(str)):
110 | A single symbol or a list of symbols to add to the initial LJ list.
111 |
112 | add (bool):
113 | Whether or not to replace existing symbols
114 | """
115 | if type(symbols) == str:
116 | symbols = [symbols] # convert to list if one string
117 | UFF = load_UFF()
118 | for symbol in symbols:
119 | if symbol in self.ljdict or add:
120 | self.ljdict[symbol] = UFF[symbol]
121 | else:
122 | print("Warning: {} is not in LJ list and".format(symbol) +\
123 | " cannot be replaced.\nTo include, use " + \
124 | "add_UFF_params or toggle 'add' boolean.\n")
125 |
126 |
127 | def add_param(self, symbol, data, replace=True):
128 | """
129 | Adds a custom parameter to the LJ object.
130 |
131 | Args:
132 | symbol (str):
133 | Symbol of atom type to add.
134 |
135 | data (tuple(float)):
136 | A two-element tuple of numbers to represent the epsilon and
137 | sigma LJ values.
138 |
139 | replace (bool):
140 | Whether or not to replace the item.
141 | """
142 |
143 | # check params
144 | good = (tuple == type(data) and len(data) == 2 and \
145 | all([isinstance(item, (int, float)) for item in data]) and \
146 | type(symbol) == str)
147 | if good:
148 | if symbol in self.ljdict:
149 | if replace:
150 | self.ljdict[symbol] = data
151 | else:
152 | print("Warning: {} exists and cannot be added.\n".format(symbol))
153 | else:
154 | self.ljdict[symbol] = data
155 |
156 | else:
157 | raise ValueError("Invalid data parameter.")
158 |
159 | def remove_param(self, symbol):
160 | """
161 | Removes an element from the LJ object. If item does not exist, nothing
162 | happens.
163 |
164 | Args:
165 | symbol (str):
166 | Symbol of atom type to remove.
167 | """
168 | # remove symbol from object
169 | self.ljdict.pop(symbol, None)
170 | # remove any ignore statements with symbol
171 | remove_list = list()
172 | for i, pair in enumerate(self.ignore):
173 | if symbol in pair:
174 | remove_list.append(i)
175 |
176 | for i in sorted(remove_list, reverse=True):
177 | del self.ignore[i]
178 |
179 | def ignore_pair(self, pair):
180 | """
181 | Adds a pair to the list of pairs that will be ignored when output to file.
182 |
183 | Args:
184 | pair (set):
185 | A two-element set where each entry is a string of the symbol in
186 | the pair to ignore.
187 | """
188 | self.__check_pair(pair)
189 |
190 | # check if pair exists already
191 | exists = False
192 | for curr_pair in self.ignore:
193 | if curr_pair == pair:
194 | return
195 |
196 | self.ignore.append(pair)
197 |
198 | def ignore_pairs(self, pairs):
199 | """
200 | Adds a list of pairs that will be ignored when output to file.
201 |
202 | Args:
203 | pairs (list(set)):
204 | A list of two-element sets where each entry of each set is a
205 | string of the symbol in the pair to ignore.
206 | """
207 | for pair in pairs:
208 | self.ignore_pair(pair)
209 |
210 | def acknowledge_pair(self, pair):
211 | """
212 | Removes the pair from the ignore list and acknowledges it during the output.
213 |
214 | Args:
215 | pair (set):
216 | A two-element set where each entry is a string of the symbol in the
217 | pair to un-ignore.
218 | """
219 | self.__check_pair(pair)
220 |
221 | # check if pair exists already
222 | exists = False
223 | for i, curr_pair in enumerate(self.ignore):
224 | if curr_pair == pair:
225 | del self.ignore[i]
226 | return
227 |
228 | raise ValueError('Pair not found.')
229 |
230 | def acknowledge_pairs(self, pairs):
231 | """
232 | Removes pairs from the ignore list.
233 |
234 | Args:
235 | pairs (list(set)):
236 | A list of two-elements sets where each entry in each set is a string of
237 | the symbol in the pair to un-ignore.
238 | """
239 | for pair in pairs:
240 | self.acknowledge_pair(pair)
241 |
242 | def custom_cutoff(self, pair, cutoff):
243 | """
244 | Sets a custom cutoff for a specific pair of atoms.
245 |
246 | Args:
247 | pair (set):
248 | A two-element set where each entry is a string of the symbol in the
249 | pair.
250 |
251 | cutoff (float):
252 | Custom cutoff to use. In Angstroms.
253 | """
254 | self.__check_pair(pair)
255 | self.__check_cutoff(cutoff)
256 | key = self.__get_cutkey(pair)
257 | self.cutoff[key] = cutoff
258 |
259 | def remove_custom_cutoff(self, pair):
260 | """
261 | Removes a custom cutoff for a pair of atoms.
262 |
263 | Args:
264 | pair (set):
265 | A two-element set where each entry is a string of the symbol in the
266 | pair.
267 | """
268 | self.__check_pair(pair)
269 | key = self.__get_cutkey(pair)
270 | self.cutoff.pop(key, None)
271 |
272 | def set_global_cutoff(self, cutoff):
273 | """
274 | Sets a global cutoff for all pairs.
275 | Warning: setting this will remove all other cutoff parameters.
276 |
277 | Args:
278 | cutoff (float):
279 | Custom cutoff to use. In Angstroms.
280 | """
281 | self.__check_cutoff(cutoff)
282 | self.global_cutoff = cutoff
283 | self.cutoff = dict()
284 | self.cut_scale = None
285 |
286 | def set_cut_scale(self, cut_scale):
287 | """
288 | Sets the amount to scale the sigma values of each pair by to set the cutoff.
289 | Warning: setting this will remove any global cutoff, but leave custom cutoffs.
290 |
291 | Args:
292 | cut_scale (float):
293 | Scaling factor to be used on sigma
294 | """
295 | self.__check_cutoff(cut_scale)
296 | self.global_cutoff = None
297 | self.cut_scale = cut_scale
298 |
299 | def create_file(self, filename='ljparams.txt', atom_order=None):
300 | """
301 | Outputs a GPUMD style LJ parameters file using the atoms defined in atom_order
302 | in the order defined in atom_order.
303 |
304 | Args:
305 | filename (str):
306 | The filename or full path with filename of the output.
307 |
308 | atom_order (list(str)):
309 | List of atom symbols to include LJ params output file. The order will
310 | determine the order in the output file. *Required*
311 | Ex. ['a', 'b', 'c'] = pairs => 'aa', 'ab', 'ac', 'ba', 'bb', 'bc', 'ca',
312 | 'cb' 'cc' in this order.
313 | """
314 | if not atom_order:
315 | raise ValueError('atom_order is required.')
316 |
317 | # check if atoms in atom_order exist in LJ
318 | for symbol in atom_order:
319 | if symbol not in self.ljdict:
320 | raise ValueError('{} atom does not exist in LJ'.format(symbol))
321 | out_txt = 'lj {}\n'.format(len(atom_order))
322 | for i, sym1 in enumerate(atom_order):
323 | for j, sym2 in enumerate(atom_order):
324 | pair = {sym1, sym2}
325 | if pair in self.ignore:
326 | if i+1==len(atom_order) and j+1==len(atom_order):
327 | out_txt += '0 0 0'
328 | else:
329 | out_txt += '0 0 0\n'
330 | continue
331 |
332 | a1 = self.ljdict[sym1]
333 | a2 = self.ljdict[sym2]
334 |
335 | eps, sig = lb_mixing(a1, a2)
336 |
337 | cutkey = self.__get_cutkey(pair)
338 | if self.global_cutoff:
339 | cutoff = self.global_cutoff
340 | elif cutkey in self.cutoff:
341 | cutoff = self.cutoff[cutkey]
342 | else:
343 | cutoff = self.cut_scale*sig
344 |
345 | if i+1==len(atom_order) and j+1==len(atom_order):
346 | out_txt += '{} {} {}'.format(eps, sig, cutoff)
347 | else:
348 | out_txt += '{} {} {}\n'.format(eps, sig, cutoff)
349 |
350 | with open(filename, 'w') as f:
351 | f.writelines(out_txt)
352 |
353 | def __get_cutkey(self, pair):
354 | keylist = sorted(list(pair))
355 | if len(keylist) == 1:
356 | keylist.append(keylist[0])
357 | key = ' '.join(keylist)
358 | return key
359 |
360 | def __check_pair(self, pair):
361 | # check params
362 | if not (type(pair) == set and (len(pair) == 2 or len(pair) == 1)):
363 | raise ValueError('Invalid pair.')
364 |
365 | # check pair
366 | good = True
367 | for item in list(pair):
368 | good = good and item in self.ljdict
369 |
370 | if not good:
371 | raise ValueError('Elements in pair not found in LJ object.')
372 |
373 | def __check_cutoff(self, cutoff):
374 | if not (isinstance(cutoff, (int, float)) and cutoff > 0):
375 | raise ValueError('Invalid cutoff.')
376 |
377 | def __str__(self):
378 | out_str = 'Symbol: Epsilon (eV), Sigma (Angs.)\n'
379 | for key in self.ljdict:
380 | cur = self.ljdict[key]
381 | out_str += "{}: {}, {}\n".format(key, cur[0], cur[1])
382 |
383 | if self.cut_scale:
384 | out_str += "\nCutoff scaling factor = {}\n".format(self.cut_scale)
385 | else: # Global cutoff
386 | out_str += "\nGlobal cutoff = {} Angstroms\n".format(self.global_cutoff)
387 |
388 |
389 | if len(self.cutoff) > 0:
390 | out_str += "\nCustom Cutoffs\n"
391 | for pair in self.cutoff:
392 | lpair = pair.split()
393 | out_str += "[{}, {}] : {}\n".format(lpair[0], lpair[1],
394 | self.cutoff[pair])
395 |
396 | if len(self.ignore) > 0:
397 | out_str += "\nIgnored Pairs (Order not important)\n"
398 | for pair in self.ignore:
399 | pair = list(pair)
400 | if len(pair) == 1:
401 | pair.append(pair[0])
402 | out_str += "[{}, {}]\n".format(pair[0], pair[1])
403 |
404 | return out_str
405 |
--------------------------------------------------------------------------------