├── .flake8 ├── .gitignore ├── LICENSE ├── README.md ├── exodusii ├── .flake8 ├── __init__.py ├── allclose.py ├── config.py ├── copy.py ├── element.py ├── ex_params.py ├── exodus_h.py ├── exoread.py ├── extension.py ├── file.py ├── find_in_region.py ├── lineout.py ├── nc.py ├── netcdf.py ├── parallel_file.py ├── put_solution.py ├── region.py ├── similar.py └── util.py ├── pyproject.toml └── test ├── conftest.py ├── data ├── edges.base.exo ├── edges.exo.4.0 ├── edges.exo.4.1 ├── edges.exo.4.2 ├── edges.exo.4.3 ├── mkmesh.gen ├── mkmesh.par.2.0 ├── mkmesh.par.2.1 ├── noh.exo ├── noh.exo.3.0 ├── noh.exo.3.1 └── noh.exo.3.2 ├── parallel_read.py ├── parallel_write.py ├── pytest.ini ├── region.py ├── serial_read.py ├── serial_write.py └── test.sh /.flake8: -------------------------------------------------------------------------------- 1 | # -*- conf -*- 2 | # flake8 settings for Nevada core files. 3 | # 4 | # E1: Indentation 5 | # - E129: visually indented line with same indent as next logical line 6 | # 7 | # E2: Whitespace 8 | # - E203: space before : 9 | # - E221: multiple spaces before operator 10 | # - E241: multiple spaces after ',' 11 | # - E272: multiple spaces before keyword 12 | # 13 | # E7: Statement 14 | # - E731: do not assign a lambda expression, use a def 15 | # 16 | # W5: Line break warning 17 | # - W503: line break before binary operator 18 | # - W504: line break after binary operator 19 | # 20 | # These are required to get the package.py files to test clean: 21 | # - F999: syntax error in doctest 22 | # 23 | # N8: PEP8-naming 24 | # - N801: class names should use CapWords convention 25 | # - N813: camelcase imported as lowercase 26 | # - N814: camelcase imported as constant 27 | # 28 | [flake8] 29 | ignore = E129,E221,E241,E272,E731,W503,W504,F999,N801,N813,N814,E203,W605 30 | max-line-length = 88 31 | exclude = lib/nevada/external,test,__init__.py,.cache,.git,opt,third_party,lib/nevada/snl,var/nevada/py-packages,var/nevada/spack/repo/packages,docs/nevada,bin/hisread.py,bin/exoread.py,var/nevada/ci,var/nevada/distro,var/nevada/tmp,var/nevada/sandboxes,var/nevada/spack/experimental-repo,TestResults.* 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | __pycache__ 3 | .cache 4 | .coverage 5 | htmlcov 6 | *.bak 7 | 8 | # Ignore personal settings 9 | .vscode 10 | .vscode/settings.json 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2022 National Technology & Engineering Solutions of Sandia, LLC 2 | (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. 3 | Government retains certain rights in this software. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 2. Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 
13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 15 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 16 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 18 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 19 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 20 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 21 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 23 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | 25 | The views and conclusions contained in the software and documentation are those 26 | of the authors and should not be interpreted as representing official policies, 27 | either expressed or implied, of Sandia Corporation. 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ExodusII 2 | 3 | A pure python implementation of the [Exodus finite element database 4 | model](https://gsjaardema.github.io/seacas-docs/sphinx/html/index.html). The 5 | API strives to be compatible with the API provided by 6 | [`exodus.py`](https://gsjaardema.github.io/seacas-docs/exodus.html) built as 7 | part of SEACAS. The main advantage is that this implementation does not require 8 | building SEACAS. 9 | 10 | ## Dependencies 11 | 12 | Exodus files are written in the netCDF file format. netCDF files are read in 13 | directly using a netCDF reader copied from `scipy.io`. Exodus files can 14 | optionally be written using netCDF file format version 4. These files require 15 | the [`netcdf4`](https://unidata.github.io/netcdf4-python/) python module be 16 | installed. 17 | 18 | ## Install 19 | 20 | ``` 21 | python -m pip install . 22 | ``` 23 | 24 | ## Copyright 25 | 26 | Copyright 2022 National Technology & Engineering Solutions of Sandia, LLC 27 | (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. 28 | Government retains certain rights in this software. 29 | 30 | SCR# 2748 31 | -------------------------------------------------------------------------------- /exodusii/.flake8: -------------------------------------------------------------------------------- 1 | # -*- conf -*- 2 | # flake8 settings for Nevada core files. 
3 | # 4 | # E1: Indentation 5 | # - E129: visually indented line with same indent as next logical line 6 | # 7 | # E2: Whitespace 8 | # - E203: space before : 9 | # - E221: multiple spaces before operator 10 | # - E241: multiple spaces after ',' 11 | # - E272: multiple spaces before keyword 12 | # 13 | # E7: Statement 14 | # - E731: do not assign a lambda expression, use a def 15 | # 16 | # W5: Line break warning 17 | # - W503: line break before binary operator 18 | # - W504: line break after binary operator 19 | # 20 | # These are required to get the package.py files to test clean: 21 | # - F999: syntax error in doctest 22 | # 23 | # N8: PEP8-naming 24 | # - N801: class names should use CapWords convention 25 | # - N813: camelcase imported as lowercase 26 | # - N814: camelcase imported as constant 27 | # 28 | [flake8] 29 | ignore = E129,E221,E241,E272,E731,W503,W504,F999,N801,N813,N814,E203,W605 30 | max-line-length = 88 31 | exclude = lib/nevada/external,test,__init__.py,.cache,.git,opt,lib/nevada/snl,var/nevada/py-packages,var/nevada/spack/repo/packages,docs/guide,bin/hisread.py,bin/exoread.py,var/nevada/ci,var/nevada/distro,var/nevada/tmp,var/nevada/sandboxes,var/nevada/spack/experimental-repo 32 | -------------------------------------------------------------------------------- /exodusii/__init__.py: -------------------------------------------------------------------------------- 1 | from .file import exodusii_file, ExodusIIFile, write_globals 2 | from .parallel_file import parallel_exodusii_file, MFExodusIIFile 3 | from .allclose import allclose 4 | from .similar import similar 5 | from .extension import * # noqa: F403 6 | from .lineout import lineout 7 | from .find_in_region import find_element_data_in_region, find_node_data_in_region 8 | from .exoread import main as exoread 9 | 10 | 11 | def File(filename, *files, mode="r"): 12 | 13 | if mode not in "rw": 14 | raise ValueError(f"Invalid Exodus file mode {mode!r}") 15 | 16 | if mode == "r": 17 | files = _find_files(filename, *files) 18 | if len(files) > 1: 19 | f = parallel_exodusii_file(*files) 20 | elif len(files) == 1: 21 | f = exodusii_file(files[0], mode="r") 22 | else: 23 | raise ValueError("No files to open") 24 | elif mode == "w": 25 | if files: 26 | raise TypeError(f"Exodus writer takes 1 file but {len(files)+1} were given") 27 | f = exodusii_file(filename, mode="w") 28 | 29 | return f 30 | 31 | 32 | exo_file = File 33 | 34 | 35 | def _find_files(*files): 36 | import glob 37 | 38 | found = [] 39 | for file in files: 40 | globbed_files = glob.glob(file) 41 | if not globbed_files: 42 | raise FileNotFoundError(file) 43 | found.extend(globbed_files) 44 | return found 45 | -------------------------------------------------------------------------------- /exodusii/allclose.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import numpy as np 3 | from io import StringIO 4 | from .util import string_kinds 5 | 6 | 7 | def allclose( 8 | file1, 9 | file2, 10 | atol=1.0e-12, 11 | rtol=1.0e-12, 12 | dimensions=True, 13 | variables=True, 14 | verbose=False, 15 | ): 16 | """Returns True if two files are data-wise equal within a tolerance.""" 17 | 18 | from .file import ExodusIIFile 19 | 20 | if not isinstance(file1, ExodusIIFile): 21 | file1 = ExodusIIFile(file1) 22 | if not isinstance(file2, ExodusIIFile): 23 | file2 = ExodusIIFile(file2) 24 | 25 | ach = allclose_helper(file1, file2, atol, rtol, verbose=verbose) 26 | 27 | ach.set_dimensions(dimensions) 28 | ach.set_variables(variables) 
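    # Note on the `dimensions` and `variables` arguments used above: each may
    # be True (compare everything), False or None (compare nothing), a single
    # name or a list of names, or a string of the form "~name1|name2" to
    # compare everything except the '|'-separated names (see set_dimensions
    # and set_variables below).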
29 | 30 | ach.compare_dimensions() 31 | ach.compare_variables() 32 | 33 | return True if not ach.errors else False 34 | 35 | 36 | def _allclose(a, b, atol=1.0e-08, rtol=1.0e-05): 37 | """Returns True if two arrays are element - wise equal within a tolerance.""" 38 | 39 | def compatible_dtypes(x, y): 40 | if x.dtype.kind in string_kinds and y.dtype.kind not in string_kinds: 41 | return False 42 | if y.dtype.kind in string_kinds and x.dtype.kind not in string_kinds: 43 | return False 44 | return True 45 | 46 | def compatible_shape(x, y): 47 | return x.shape == y.shape 48 | 49 | if a is None or b is None: 50 | if a != b: 51 | raise ValueError( 52 | f"cannot compare type {type(a).__name__} to type {type(b).__name__}" 53 | ) 54 | return True 55 | 56 | x = np.asanyarray(a) 57 | y = np.asanyarray(b) 58 | 59 | if not compatible_dtypes(x, y): 60 | raise TypeError( 61 | f"adiff not supported for input dtypes {x.dtype.kind} and {y.dtype.kind}" 62 | ) 63 | elif not compatible_shape(x, y): 64 | raise ValueError("input arguments must have same shape") 65 | 66 | if x.dtype.kind in string_kinds: 67 | x = sorted(x.flatten()) 68 | y = sorted(y.flatten()) 69 | return all([x[i] == y[i] for i in range(len(x))]) 70 | 71 | return np.allclose(x, y, atol=atol, rtol=rtol) 72 | 73 | 74 | class allclose_helper: 75 | def __init__(self, file1, file2, atol, rtol, print_threshold=10, verbose=False): 76 | self.file1 = file1 77 | self.file2 = file2 78 | self.atol = atol 79 | self.rtol = rtol 80 | self.print_threshold = print_threshold 81 | self.verbose = verbose 82 | 83 | self._dims_to_compare = None 84 | self._vars_to_compare = None 85 | 86 | self.errors = 0 87 | 88 | def log_error(self, message, end="\n"): 89 | if self.verbose: 90 | sys.stderr.write(f"==> Error: {message}{end}") 91 | self.errors += 1 92 | 93 | def all_dimensions(self): 94 | dims1 = self.file1.dimension_names() 95 | dims2 = self.file2.dimension_names() 96 | return sorted(set(dims1 + dims2)) 97 | 98 | def all_variables(self): 99 | vars1 = self.file1.variable_names() 100 | vars2 = self.file2.variable_names() 101 | return sorted(set(vars1 + vars2)) 102 | 103 | def set_dimensions(self, dimensions): 104 | if dimensions is True: 105 | dimensions = self.all_dimensions() 106 | elif dimensions is None or dimensions is False: 107 | dimensions = [] 108 | elif isinstance(dimensions, str): 109 | dimensions = [dimensions] 110 | if dimensions[0].startswith("~"): 111 | # Compare all - except those being negated 112 | skip = dimensions[0][1:].split("|") 113 | dimensions = [_ for _ in self.all_dimensions() if _ not in skip] 114 | if not isinstance(dimensions, (list, tuple)): 115 | raise ValueError("Expected list of dimensions to compare") 116 | self.validate_dimensions(dimensions) 117 | self._dims_to_compare = tuple(dimensions) 118 | 119 | def set_variables(self, variables): 120 | if variables is True: 121 | variables = self.all_variables() 122 | elif variables is None or variables is False: 123 | variables = [] 124 | elif isinstance(variables, str): 125 | variables = [variables] 126 | if variables[0].startswith("~"): 127 | # Compare all - except those being negated 128 | skip = variables[0][1:].split("|") 129 | variables = [_ for _ in self.all_variables() if _ not in skip] 130 | if not isinstance(variables, (list, tuple)): 131 | raise ValueError("Expected list of variables to compare") 132 | self.validate_variables(variables) 133 | self._vars_to_compare = tuple(variables) 134 | 135 | def validate_dimensions(self, dimensions): 136 | invalid = 0 137 | valid_dimensions = 
self.all_dimensions()
138 |         for dimension in dimensions:
139 |             if dimension not in valid_dimensions:
140 |                 self.log_error(f"{dimension} is not a valid dimension")
141 |                 invalid += 1
142 |         if invalid:
143 |             raise ValueError("One or more invalid dimensions")
144 | 
145 |     def validate_variables(self, variables):
146 |         invalid = 0
147 |         valid_variables = self.all_variables()
148 |         for variable in variables:
149 |             if variable not in valid_variables:
150 |                 self.log_error(f"{variable} is not a valid variable")
151 |                 invalid += 1
152 |         if invalid:
153 |             raise ValueError("One or more invalid variables")
154 | 
155 |     def compare_dimensions(self):
156 |         if self._dims_to_compare is None:
157 |             raise ValueError("Dimensions to compare must first be set")
158 |         for dimension in self._dims_to_compare:
159 |             self.compare_dimension(dimension)
160 |         self._dims_to_compare = None
161 | 
162 |     def compare_dimension(self, dim):
163 |         if dim not in self.file1.fh.dimensions:
164 |             self.log_error(f"dimension {dim} not found in {self.file1.filename}")
165 |             return
166 |         elif dim not in self.file2.fh.dimensions:
167 |             self.log_error(f"dimension {dim} not found in {self.file2.filename}")
168 |             return
169 |         elif dim in ("time_step",):
170 |             return
171 | 
172 |         dim1 = self.file1.get_dimension(dim)
173 |         dim2 = self.file2.get_dimension(dim)
174 |         if not _allclose(dim1, dim2, atol=self.atol, rtol=self.rtol):
175 |             err = StringIO()
176 |             err.write(
177 |                 f"{self.file1.filename}::{dim} != "
178 |                 f"{self.file2.filename}::{dim} ({dim1} != {dim2})"
179 |             )
180 |             self.log_error(err.getvalue())
181 | 
182 |     def compare_variables(self):
183 |         if self._vars_to_compare is None:
184 |             raise ValueError("Variables to compare must first be set")
185 |         for variable in self._vars_to_compare:
186 |             self.compare_variable(variable)
187 |         self._vars_to_compare = None
188 | 
189 |     def compare_variable(self, var):
190 |         if var not in self.file1.fh.variables:
191 |             self.log_error(f"variable {var} not found in {self.file1.filename}")
192 |             return
193 |         elif var not in self.file2.fh.variables:
194 |             self.log_error(f"variable {var} not found in {self.file2.filename}")
195 |             return
196 | 
197 |         var1 = self.file1.get_variable(var)
198 |         var2 = self.file2.get_variable(var)
199 |         if var1.shape != var2.shape:
200 |             err = StringIO()
201 |             err.write(
202 |                 f"{self.file1.filename}::{var}.shape != "
203 |                 f"{self.file2.filename}::{var}.shape "
204 |                 f"({var1.shape} != {var2.shape})"
205 |             )
206 |             self.log_error(err.getvalue())
207 |             return
208 | 
209 |         if not _allclose(var1, var2, atol=self.atol, rtol=self.rtol):
210 |             err = StringIO()
211 |             s1 = np.array2string(var1, threshold=self.print_threshold)
212 |             s2 = np.array2string(var2, threshold=self.print_threshold)
213 |             err.write(
214 |                 f"{self.file1.filename}::{var} != "
215 |                 f"{self.file2.filename}::{var} ({s1} != {s2})"
216 |             )
217 |             self.log_error(err.getvalue())
218 | 
--------------------------------------------------------------------------------
/exodusii/config.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | from types import SimpleNamespace
 3 | 
 4 | 
 5 | def env_boolean(var, default=None):
 6 |     value = os.getenv(var, default)
 7 |     if value is None:
 8 |         return default
 9 |     if value.lower() in ("false", "0", "off", ""):
10 |         return False
11 |     return True
12 | 
13 | 
14 | def initialize_config():
15 |     cfg = SimpleNamespace()
16 |     cfg.use_netcdf4_if_possible = env_boolean("EXODUSII_USE_NETCDF4", default="on")
17 |     cfg.debug = env_boolean("EXODUSII_DEBUG",
default="off") 18 | return cfg 19 | 20 | 21 | config = initialize_config() 22 | -------------------------------------------------------------------------------- /exodusii/copy.py: -------------------------------------------------------------------------------- 1 | from .exodus_h import types 2 | 3 | 4 | copy_extra_set_info = False 5 | 6 | 7 | def copy(source, target): 8 | """Copy the source ExodusII file to the target""" 9 | target.put_init( 10 | source.title(), 11 | source.num_dimensions(), 12 | source.num_nodes(), 13 | source.num_elems(), 14 | source.num_blks(), 15 | source.num_node_sets(), 16 | source.num_side_sets(), 17 | num_edge=source.num_edges(), 18 | num_edge_blk=source.num_edge_blk(), 19 | num_face=source.num_faces(), 20 | num_face_blk=source.num_face_blk(), 21 | ) 22 | copy_mesh(source, target) 23 | copy_variable_params(source, target) 24 | copy_variable_histories(source, target) 25 | 26 | 27 | def copy_mesh(source, target): 28 | """Copies ExodusII mesh information from source to target""" 29 | 30 | target.put_coord_names(source.get_coord_names()) 31 | target.put_coords(source.get_coords()) 32 | 33 | for block in source.elem_blocks(): 34 | target.put_element_block( 35 | block.id, 36 | block.elem_type, 37 | block.num_block_elems, 38 | block.num_elem_nodes, 39 | num_faces_per_elem=block.num_elem_faces, 40 | num_edges_per_elem=block.num_elem_edges, 41 | num_attr=block.num_elem_attrs, 42 | ) 43 | for type in (types.node, types.edge, types.face): 44 | conn = source.get_element_conn(block.id, type=type) 45 | if conn is not None: 46 | target.put_element_conn(block.id, conn, type=type) 47 | 48 | if target.num_faces(): 49 | for block in source.face_blocks(): 50 | target.put_face_block( 51 | block.id, 52 | block.elem_type, 53 | block.num_block_faces, 54 | block.num_face_nodes, 55 | block.num_face_attrs, 56 | ) 57 | conn = source.get_face_block_conn(block.id) 58 | target.put_face_conn(block.id, conn) 59 | 60 | if target.num_edges(): 61 | for block in source.edge_blocks(): 62 | target.put_edge_block( 63 | block.id, 64 | block.elem_type, 65 | block.num_block_edges, 66 | block.num_edge_nodes, 67 | block.num_edge_attrs, 68 | ) 69 | conn = source.get_edge_block_conn(block.id) 70 | target.put_edge_conn(block.id, conn) 71 | 72 | for ns in source.node_sets(): 73 | target.put_node_set_param(ns.id, ns.num_nodes, ns.num_dist_facts) 74 | target.put_node_set_name(ns.id, ns.name) 75 | target.put_node_set_nodes(ns.id, ns.nodes) 76 | if ns.num_dist_facts: 77 | target.put_node_set_dist_fact(ns.id, ns.dist_facts) 78 | 79 | for es in source.edge_sets(): 80 | target.put_edge_set_param( 81 | es.id, es.num_edges, es.num_nodes_per_edge, es.num_dist_facts 82 | ) 83 | 84 | for fs in source.face_sets(): 85 | target.put_face_set_param( 86 | fs.id, fs.num_faces, fs.num_nodes_per_face, fs.num_dist_facts 87 | ) 88 | 89 | for es in source.elem_sets(): 90 | target.put_element_set_param(es.id, es.num_elems, es.num_dist_facts) 91 | 92 | for ss in source.side_sets(): 93 | target.put_side_set_param(ss.id, ss.num_sides, ss.num_dist_facts) 94 | target.put_side_set_name(ss.id, ss.name) 95 | target.put_side_set_sides(ss.id, ss.elems, ss.sides) 96 | if ss.num_dist_facts: 97 | target.put_side_set_dist_fact(ss.id, ss.dist_facts) 98 | 99 | node_id_map = source.get_node_id_map() 100 | target.put_node_id_map(node_id_map) 101 | 102 | elem_id_map = source.get_element_id_map() 103 | target.put_element_id_map(elem_id_map) 104 | 105 | if source.num_edges(): 106 | edge_id_map = source.get_edge_id_map() 107 | 
target.put_edge_id_map(edge_id_map) 108 | 109 | if source.num_faces(): 110 | face_id_map = source.get_face_id_map() 111 | target.put_face_id_map(face_id_map) 112 | 113 | 114 | def copy_variable_params(source, target): 115 | 116 | target.put_global_variable_params(source.get_global_variable_number()) 117 | target.put_global_variable_names(source.get_global_variable_names()) 118 | 119 | target.put_node_variable_params(source.get_node_variable_number()) 120 | target.put_node_variable_names(source.get_node_variable_names()) 121 | 122 | if source.get_element_variable_number() is not None: 123 | target.put_element_variable_params(source.get_element_variable_number()) 124 | target.put_element_variable_names(source.get_element_variable_names()) 125 | table = source.get_element_variable_truth_table() 126 | if table is not None: 127 | target.put_element_variable_truth_table(table) 128 | 129 | if source.get_face_variable_number() is not None: 130 | target.put_face_variable_params(source.get_face_variable_number()) 131 | target.put_face_variable_names(source.get_face_variable_names()) 132 | table = source.get_face_variable_truth_table() 133 | if table is not None: 134 | target.put_face_variable_truth_table(table) 135 | 136 | if source.get_edge_variable_number() is not None: 137 | target.put_edge_variable_params(source.get_edge_variable_number()) 138 | target.put_edge_variable_names(source.get_edge_variable_names()) 139 | table = source.get_edge_variable_truth_table() 140 | if table is not None: 141 | target.put_edge_variable_truth_table(table) 142 | 143 | target.put_node_set_variable_params(source.get_node_set_variable_number()) 144 | target.put_node_set_variable_names(source.get_node_set_variable_names()) 145 | 146 | if copy_extra_set_info: 147 | 148 | target.put_edge_set_variable_params(source.get_edge_set_variable_number()) 149 | target.put_edge_set_variable_names(source.get_edge_set_variable_names()) 150 | 151 | target.put_face_set_variable_params(source.get_face_set_variable_number()) 152 | target.put_face_set_variable_names(source.get_face_set_variable_names()) 153 | 154 | target.put_element_set_variable_params(source.get_element_set_variable_number()) 155 | target.put_element_set_variable_names(source.get_element_set_variable_names()) 156 | 157 | target.put_side_set_variable_params(source.get_side_set_variable_number()) 158 | target.put_side_set_variable_names(source.get_side_set_variable_names()) 159 | 160 | 161 | def copy_variable_histories(source, target): 162 | 163 | for (time_step, time) in enumerate(source.get_times(), start=1): 164 | target.put_time(time_step, time) 165 | 166 | values = source.get_all_global_variable_values() 167 | target.put_global_variable_values(None, values) 168 | 169 | for name in source.get_node_variable_names(): 170 | values = source.get_node_variable_values(name) 171 | target.put_node_variable_values(None, name, values) 172 | 173 | for block_id in source.get_element_block_ids(): 174 | for name in source.get_element_variable_names(): 175 | values = source.get_element_variable_values(block_id, name) 176 | target.put_element_variable_values(None, block_id, name, values) 177 | 178 | for block_id in source.get_edge_block_ids(): 179 | for name in source.get_edge_variable_names(): 180 | values = source.get_edge_variable_values(block_id, name) 181 | target.put_edge_variable_values(None, block_id, name, values) 182 | 183 | for block_id in source.get_face_block_ids(): 184 | for name in source.get_face_variable_names(): 185 | values = 
source.get_face_variable_values(block_id, name) 186 | target.put_face_variable_values(None, block_id, name, values) 187 | 188 | if copy_extra_set_info: 189 | for set_id in source.get_node_set_ids(): 190 | for name in source.get_node_set_variable_names(): 191 | values = source.get_node_set_variable_values(block_id, name) 192 | target.put_node_set_variable_values(None, block_id, name, values) 193 | 194 | for set_id in source.get_side_set_ids(): 195 | for name in source.get_side_set_variable_names(): 196 | values = source.get_side_set_variable_values(block_id, name) 197 | target.put_side_set_variable_values(None, block_id, name, values) 198 | -------------------------------------------------------------------------------- /exodusii/element.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def factory(elem_type, elem_coord): 5 | """Determine the element type and return a dictionary containing 6 | functions for that element. 7 | 8 | """ 9 | if isinstance(elem_type, bytes): 10 | elem_type = elem_type.decode("ascii") 11 | etype = elem_type.lower() 12 | if etype in ("quad", "quad4"): 13 | return Quad4(elem_coord) 14 | elif etype in ("hex", "hex8"): 15 | return Hex8(elem_coord) 16 | elif etype in ("tri3", "triangle", "triangle3"): 17 | return Tri3(elem_coord) 18 | elif etype in ("tet", "tet4", "tetra", "tetra4"): 19 | return Tet4(elem_coord) 20 | elif etype in ("wedge", "wedge6"): 21 | return Wedge6(elem_coord) 22 | raise ValueError(f"==> Error: unknown element type {elem_type!r}") 23 | 24 | 25 | class Quad4: 26 | """A QUAD4 exodus element object 27 | 28 | Parameters 29 | ---------- 30 | coord : array_like 31 | coordinates of element's nodes, assuming exodus node order convention - 32 | (counter clockwise around the element) 33 | 34 | """ 35 | 36 | dim = 2 37 | name = "QUAD4" 38 | nnode = 4 39 | 40 | def __init__(self, coord): 41 | self.coord = np.array(coord) 42 | 43 | @property 44 | def volume(self): 45 | x, y = self.coord[:, 0], self.coord[:, 1] 46 | return 0.5 * ((x[0] - x[2]) * (y[1] - y[3]) + (x[1] - x[3]) * (y[2] - y[0])) 47 | 48 | @property 49 | def center(self): 50 | """Compute the coordinates of the center of the element. 51 | 52 | Note 53 | ---- 54 | - Simple average in physical space. 55 | - The result is the same as for subdiv with intervals=1. 56 | 57 | """ 58 | xc = np.average(self.coord[:, 0]) / 4.0 59 | return np.append(xc, 0.0) 60 | 61 | def subdiv(self, intervals): 62 | """Compute an equispaced subdivision of a quad4 element. 63 | 64 | Parameters 65 | ---------- 66 | intervals : int 67 | The element will be subdivided into nx*ny equispaced subelements, where 68 | nx = ny = intervals 69 | 70 | Note 71 | ---- 72 | Quadrature points are equispaced, rather than Gaussian; improved 73 | order of accuracy of Gaussian quadrature is not achieved for 74 | discontinuous data. 75 | 76 | """ 77 | 78 | x, y = self.coord[:, 0], self.coord[:, 1] 79 | coord = [] 80 | for jj in range(intervals): 81 | j = (0.5 + jj) / intervals 82 | for ii in range(intervals): 83 | i = (0.5 + ii) / intervals 84 | xp = ( 85 | (1 - j) * (1 - i) * x[0] 86 | + (1 - j) * i * x[1] 87 | + j * i * x[2] 88 | + j * (1 - i) * x[3] 89 | ) 90 | yp = ( 91 | (1 - j) * (1 - i) * y[0] 92 | + (1 - j) * i * y[1] 93 | + j * i * y[2] 94 | + j * (1 - i) * y[3] 95 | ) 96 | coord.append([xp, yp, 0.0]) 97 | 98 | return np.array(coord) 99 | 100 | def subcoord(self, intervals): 101 | """Compute an equispaced subdivision of a quad4 element. 
102 | 103 | Parameters 104 | ---------- 105 | intervals : int 106 | The element will be subdivided into nx*ny equispaced subelements, where 107 | nx = ny = intervals 108 | 109 | Returns 110 | ------- 111 | coord : ndarray 112 | the nodal coordinates of the subelements. 113 | 114 | """ 115 | if intervals <= 0: 116 | return np.array(self.coord) 117 | 118 | x, y = self.coord[:, 0], self.coord[:, 1] 119 | coord = [] 120 | for jj in range(intervals + 1): 121 | j = float(jj) / intervals 122 | for ii in range(intervals + 1): 123 | i = float(ii) / intervals 124 | xp = ( 125 | (1 - j) * (1 - i) * x[0] 126 | + (1 - j) * i * x[1] 127 | + j * i * x[2] 128 | + j * (1 - i) * x[3] 129 | ) 130 | yp = ( 131 | (1 - j) * (1 - i) * y[0] 132 | + (1 - j) * i * y[1] 133 | + j * i * y[2] 134 | + j * (1 - i) * y[3] 135 | ) 136 | coord.append([xp, yp]) 137 | 138 | return np.array(coord) 139 | 140 | def subconn(self, intervals): 141 | """Compute the connectivity matrix relating the subelements to the 142 | nodes produced by subcoord. 143 | 144 | Parameters 145 | ---------- 146 | intervals : int 147 | The element will be subdivided into nx*ny equispaced subelements, where 148 | nx = ny = intervals 149 | 150 | Returns 151 | ------- 152 | conn : ndarray 153 | Element connectivity 154 | 155 | Note 156 | ---- 157 | Node indices are 0-based. 158 | 159 | """ 160 | conn = [] 161 | n = intervals + 1 162 | for j in range(intervals): 163 | jn = j * n 164 | j1n = (j + 1) * n 165 | for i in range(intervals): 166 | i1 = i + 1 167 | # 0-based element number is j*intervals + i 168 | # local node numbers n1, n2, n3, n4 169 | n1, n2, n3, n4 = i + jn, i1 + jn, i1 + j1n, i + j1n 170 | conn.append([n1, n2, n3, n4]) 171 | 172 | return np.array(conn, dtype=int) 173 | 174 | def subvols(self, intervals): 175 | """Compute the subelement volumes of an equispaced subdivision of 176 | a quad4 element. 
177 | 178 | Parameters 179 | ---------- 180 | intervals : int 181 | The element will be subdivided into nx*ny equispaced subelements, where 182 | nx = ny = intervals 183 | 184 | Returns 185 | ------- 186 | vols : ndarray 187 | Volumes of the subelements 188 | 189 | """ 190 | 191 | coord = self.subcoord(intervals) 192 | conn = self.subconn(intervals) 193 | 194 | m = intervals * intervals 195 | vols = np.zeros(m) 196 | for subel in range(m): 197 | loc_coords = coord[conn[subel]] 198 | vols[subel] = Quad4(loc_coords).volume 199 | 200 | return vols 201 | 202 | 203 | class Hex8: 204 | dim = 3 205 | name = "HEX8" 206 | nnode = 8 207 | 208 | def __init__(self, coord): 209 | self.coord = np.array(coord) 210 | 211 | @property 212 | def volume(self): 213 | x1, x2, x3, x4, x5, x6, x7, x8 = self.coord[:, 0] 214 | y1, y2, y3, y4, y5, y6, y7, y8 = self.coord[:, 1] 215 | z1, z2, z3, z4, z5, z6, z7, z8 = self.coord[:, 2] 216 | 217 | rx0 = ( 218 | y2 * ((z6 - z3) - (z4 - z5)) 219 | + y3 * (z2 - z4) 220 | + y4 * ((z3 - z8) - (z5 - z2)) 221 | + y5 * ((z8 - z6) - (z2 - z4)) 222 | + y6 * (z5 - z2) 223 | + y8 * (z4 - z5) 224 | ) 225 | rx1 = ( 226 | y3 * ((z7 - z4) - (z1 - z6)) 227 | + y4 * (z3 - z1) 228 | + y1 * ((z4 - z5) - (z6 - z3)) 229 | + y6 * ((z5 - z7) - (z3 - z1)) 230 | + y7 * (z6 - z3) 231 | + y5 * (z1 - z6) 232 | ) 233 | rx2 = ( 234 | y4 * ((z8 - z1) - (z2 - z7)) 235 | + y1 * (z4 - z2) 236 | + y2 * ((z1 - z6) - (z7 - z4)) 237 | + y7 * ((z6 - z8) - (z4 - z2)) 238 | + y8 * (z7 - z4) 239 | + y6 * (z2 - z7) 240 | ) 241 | rx3 = ( 242 | y1 * ((z5 - z2) - (z3 - z8)) 243 | + y2 * (z1 - z3) 244 | + y3 * ((z2 - z7) - (z8 - z1)) 245 | + y8 * ((z7 - z5) - (z1 - z3)) 246 | + y5 * (z8 - z1) 247 | + y7 * (z3 - z8) 248 | ) 249 | rx4 = ( 250 | y8 * ((z4 - z7) - (z6 - z1)) 251 | + y7 * (z8 - z6) 252 | + y6 * ((z7 - z2) - (z1 - z8)) 253 | + y1 * ((z2 - z4) - (z8 - z6)) 254 | + y4 * (z1 - z8) 255 | + y2 * (z6 - z1) 256 | ) 257 | rx5 = ( 258 | y5 * ((z1 - z8) - (z7 - z2)) 259 | + y8 * (z5 - z7) 260 | + y7 * ((z8 - z3) - (z2 - z5)) 261 | + y2 * ((z3 - z1) - (z5 - z7)) 262 | + y1 * (z2 - z5) 263 | + y3 * (z7 - z2) 264 | ) 265 | rx6 = ( 266 | y6 * ((z2 - z5) - (z8 - z3)) 267 | + y5 * (z6 - z8) 268 | + y8 * ((z5 - z4) - (z3 - z6)) 269 | + y3 * ((z4 - z2) - (z6 - z8)) 270 | + y2 * (z3 - z6) 271 | + y4 * (z8 - z3) 272 | ) 273 | rx7 = ( 274 | y7 * ((z3 - z6) - (z5 - z4)) 275 | + y6 * (z7 - z5) 276 | + y5 * ((z6 - z1) - (z4 - z7)) 277 | + y4 * ((z1 - z3) - (z7 - z5)) 278 | + y3 * (z4 - z7) 279 | + y1 * (z5 - z4) 280 | ) 281 | 282 | vol = ( 283 | x1 * rx0 284 | + x2 * rx1 285 | + x3 * rx2 286 | + x4 * rx3 287 | + x5 * rx4 288 | + x6 * rx5 289 | + x7 * rx6 290 | + x8 * rx7 291 | ) / 12.0 292 | 293 | return vol 294 | 295 | @property 296 | def center(self): 297 | """Compute the coordinates of the center of a hex8 element. 298 | 299 | Note 300 | ---- 301 | - Simple average in physical space. 302 | - The result is the same as for hex8_subdiv with intervals=1. 303 | 304 | """ 305 | return np.average(self.coord, axis=0) 306 | 307 | def subdiv(self, intervals): 308 | """Compute an equispaced subdivision of a hex8 element. 309 | 310 | Quadrature points are equispaced, rather than Gaussian; improved 311 | order of accuracy of Gaussian quadrature is not achieved for 312 | discontinuous data. 
313 | 314 | Parameters 315 | ---------- 316 | intervals : int 317 | The element will be subdivided into nx*ny*nz equispaced subelements, 318 | where nx = ny = nz = intervals 319 | 320 | """ 321 | coord = [] 322 | x, y, z = self.coord[:, 0], self.coord[:, 1], self.coord[:, 2] 323 | for kk in range(intervals): 324 | k = (0.5 + kk) / intervals 325 | for jj in range(intervals): 326 | j = (0.5 + jj) / intervals 327 | for ii in range(intervals): 328 | i = (0.5 + ii) / intervals 329 | xp = ( 330 | (1 - k) * (1 - j) * (1 - i) * x[0] 331 | + (1 - k) * (1 - j) * i * x[1] 332 | + (1 - k) * j * i * x[2] 333 | + (1 - k) * j * (1 - i) * x[3] 334 | + k * (1 - j) * (1 - i) * x[4] 335 | + k * (1 - j) * i * x[5] 336 | + k * j * i * x[6] 337 | + k * j * (1 - i) * x[7] 338 | ) 339 | yp = ( 340 | (1 - k) * (1 - j) * (1 - i) * y[0] 341 | + (1 - k) * (1 - j) * i * y[1] 342 | + (1 - k) * j * i * y[2] 343 | + (1 - k) * j * (1 - i) * y[3] 344 | + k * (1 - j) * (1 - i) * y[4] 345 | + k * (1 - j) * i * y[5] 346 | + k * j * i * y[6] 347 | + k * j * (1 - i) * y[7] 348 | ) 349 | zp = ( 350 | (1 - k) * (1 - j) * (1 - i) * z[0] 351 | + (1 - k) * (1 - j) * i * z[1] 352 | + (1 - k) * j * i * z[2] 353 | + (1 - k) * j * (1 - i) * z[3] 354 | + k * (1 - j) * (1 - i) * z[4] 355 | + k * (1 - j) * i * z[5] 356 | + k * j * i * z[6] 357 | + k * j * (1 - i) * z[7] 358 | ) 359 | coord.append([xp, yp, zp]) 360 | 361 | return coord 362 | 363 | def subcoord(self, intervals): 364 | """Compute an equispaced subdivision of a hex8 element. Return 365 | the nodal coordinates of the subelements. 366 | 367 | Parameters 368 | ---------- 369 | intervals : int 370 | The element will be subdivided into nx*ny equispaced subelements, where 371 | nx = ny = intervals 372 | 373 | Returns 374 | ------- 375 | coord : ndarray 376 | the nodal coordinates of the subelements. 377 | 378 | """ 379 | if intervals <= 0: 380 | return np.array(self.coord) 381 | 382 | coord = [] 383 | x, y, z = self.coord[:, 0], self.coord[:, 1], self.coord[:, 2] 384 | for kk in range(intervals + 1): 385 | k = float(kk) / intervals 386 | for jj in range(intervals + 1): 387 | j = float(jj) / intervals 388 | for ii in range(intervals + 1): 389 | i = float(ii) / intervals 390 | xp = ( 391 | (1 - k) * (1 - j) * (1 - i) * x[0] 392 | + (1 - k) * (1 - j) * i * x[1] 393 | + (1 - k) * j * i * x[2] 394 | + (1 - k) * j * (1 - i) * x[3] 395 | + k * (1 - j) * (1 - i) * x[4] 396 | + k * (1 - j) * i * x[5] 397 | + k * j * i * x[6] 398 | + k * j * (1 - i) * x[7] 399 | ) 400 | yp = ( 401 | (1 - k) * (1 - j) * (1 - i) * y[0] 402 | + (1 - k) * (1 - j) * i * y[1] 403 | + (1 - k) * j * i * y[2] 404 | + (1 - k) * j * (1 - i) * y[3] 405 | + k * (1 - j) * (1 - i) * y[4] 406 | + k * (1 - j) * i * y[5] 407 | + k * j * i * y[6] 408 | + k * j * (1 - i) * y[7] 409 | ) 410 | zp = ( 411 | (1 - k) * (1 - j) * (1 - i) * z[0] 412 | + (1 - k) * (1 - j) * i * z[1] 413 | + (1 - k) * j * i * z[2] 414 | + (1 - k) * j * (1 - i) * z[3] 415 | + k * (1 - j) * (1 - i) * z[4] 416 | + k * (1 - j) * i * z[5] 417 | + k * j * i * z[6] 418 | + k * j * (1 - i) * z[7] 419 | ) 420 | coord.append([xp, yp, zp]) 421 | 422 | return np.array(coord) 423 | 424 | def subconn(self, intervals): 425 | """Compute the connectivity matrix relating the subelements to the 426 | nodes produced by subcoord. 
427 | 428 | Parameters 429 | ---------- 430 | intervals : int 431 | The element will be subdivided into nx*ny equispaced subelements, where 432 | nx = ny = intervals 433 | 434 | Returns 435 | ------- 436 | conn : ndarray 437 | Element connectivity 438 | 439 | Note 440 | ---- 441 | Node indices are 0-based. 442 | 443 | 444 | Input: 445 | intervals: The element will be subdivided into nx*ny*nz equispaced 446 | subelements, where nx = ny = nz = intervals 447 | """ 448 | 449 | conn = [] 450 | n = intervals + 1 451 | nn = n * n 452 | for k in range(intervals): 453 | knn = k * nn # k *(intervals+1)^2 454 | k1nn = (k + 1) * nn # (k+1)*(intervals+1)^2 455 | for j in range(intervals): 456 | jn_knn = j * n + knn # j *(intervals+1) + k *(intervals+1)^2 457 | j1n_knn = ( 458 | j + 1 459 | ) * n + knn # (j+1)*(intervals+1) + k *(intervals+1)^2 460 | jn_k1nn = j * n + k1nn # j *(intervals+1) + (k+1)*(intervals+1)^2 461 | j1n_k1nn = ( 462 | j + 1 463 | ) * n + k1nn # (j+1)*(intervals+1) + (k+1)*(intervals+1)^2 464 | for i in range(intervals): 465 | i1 = i + 1 466 | # 0-based element number is j*intervals + i 467 | # local node numbers n1, n2, n3, n4, n5, n6, n7, n8 468 | n1 = i + jn_knn 469 | n2 = i1 + jn_knn 470 | n3 = i1 + j1n_knn 471 | n4 = i + j1n_knn 472 | n5 = i + jn_k1nn 473 | n6 = i1 + jn_k1nn 474 | n7 = i1 + j1n_k1nn 475 | n8 = i + j1n_k1nn 476 | conn.append([n1, n2, n3, n4, n5, n6, n7, n8]) 477 | 478 | return np.array(conn, dtype=int) 479 | 480 | def subvols(self, intervals): 481 | """Compute the subelement volumes of an equispaced subdivision of 482 | a hex8 element. 483 | 484 | Parameters 485 | ---------- 486 | intervals : int 487 | The element will be subdivided into nx*ny equispaced subelements, where 488 | nx = ny = intervals 489 | 490 | Returns 491 | ------- 492 | vols : ndarray 493 | Volumes of the subelements 494 | 495 | """ 496 | 497 | coord = self.subcoord(intervals) 498 | conn = self.subconn(intervals) 499 | 500 | m = intervals ** 3 501 | vols = np.zeros(m) 502 | for subel in range(m): 503 | loc_coords = coord[conn[subel]] 504 | vols[subel] = Hex8(loc_coords).volume 505 | 506 | return vols 507 | 508 | 509 | def _midpoint(a, b): 510 | return [(a[0] + b[0]) / 2.0, (a[1] + b[1]) / 2.0, (a[2] + b[2]) / 2.0] 511 | 512 | 513 | def _distsquare(a, b): 514 | return (a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2 + (a[2] - b[2]) ** 2 515 | 516 | 517 | class Tri3: 518 | dim = 2 519 | name = "TRI3" 520 | nnode = 3 521 | 522 | def __init__(self, coord): 523 | self.coord = coord 524 | 525 | @property 526 | def volume(self): 527 | x, y = self.coord[:, 0], self.coord[:, 1] 528 | return 0.5 * abs(x[0] * y[1] - y[0] * x[1]) 529 | 530 | @property 531 | def center(self): 532 | """Compute the coordinates of the center of a tri element. 533 | 534 | Note 535 | ---- 536 | Simple average in physical space. 537 | 538 | """ 539 | return np.average(self.coord, axis=0) 540 | 541 | def subcoord(self, intervals): 542 | """Divides the tri into 4^(intervals-1) new triangles, each with equal 543 | volume. The triangles are recursively divided along their longest edge to 544 | create two smaller triangles. 
545 | 546 | Parameters 547 | ---------- 548 | intervals : int 549 | The element will be subdivided into nx*ny*nz equispaced subelements, 550 | where nx = ny = nz = intervals 551 | """ 552 | if intervals <= 1: 553 | return np.array(self.coord) 554 | triindices = [[0, 1, 2]] 555 | coord = np.array(self.coord) 556 | self._subcoord(coord, triindices, intervals * 2 - 2) 557 | return np.array(coord) 558 | 559 | def subdiv(self, intervals): 560 | """Compute node center list for a subdivided tri element""" 561 | triindices = [[0, 1, 2]] 562 | coord = np.array(self.coord) 563 | if intervals > 1: 564 | self._subcoord(coord, triindices, intervals * 2 - 2) 565 | ntri = len(triindices) 566 | subdiv = [Tri3(coord[triindices[i]]).center for i in range(ntri)] 567 | return np.array(subdiv) 568 | 569 | def subvols(self, intervals): 570 | """Compute volumes of the subdivided tris""" 571 | triindices = [[0, 1, 2]] 572 | coord = np.array(self.coord) 573 | self._subcoord(coord, triindices, intervals * 2 - 2) 574 | ntri = len(triindices) 575 | vol = [Tri3(coord[triindices[i]]).volume for i in range(ntri)] 576 | return np.array(vol) 577 | 578 | def subconn(self, intervals): 579 | """Compute connectivity map for subdivided tris. elem_coords must be 580 | supplied.""" 581 | triindices = [[0, 1, 2]] 582 | if intervals > 1: 583 | coord = np.array(self.coord) 584 | self._subcoord(coord, triindices, intervals * 2 - 2) 585 | return np.array(triindices, dtype=int) 586 | 587 | def _subcoord(self, coord, tris, iterations): 588 | """Main subivision function. 'coord' contains a list of nodes, and 'tris' is 589 | a list containing objects like [2,5,7], where coord[2], coord[5], and 590 | coord[7] would give the nodes of a triangle. This function divides 591 | each triangle in half recursively, up to 'iterations' recursion levels. 592 | Each triangle is divided in half on its longest edge, so the final 593 | triangles should have similar dimensions. 594 | 595 | After running, the lists passed as 'coord' and 'tris' will contain the 596 | node locations and indexes respectively for the subdivided triangles. 597 | """ 598 | 599 | if iterations <= 0: 600 | return 601 | 602 | newtris = [] 603 | for tri in tris: 604 | # find longest edge 605 | longest = 0 606 | longi1 = 0 607 | longi2 = 0 608 | i3 = 0 609 | for idx in range(-1, len(tri) - 1): 610 | ds = _distsquare(coord[idx], coord[idx+1]) 611 | if ds > longest: 612 | longest = ds 613 | longi1 = tri[idx] 614 | longi2 = tri[idx+1] 615 | 616 | i3 = (set(tri) - set((longi1, longi2))).pop() 617 | 618 | # split it 619 | mp = _midpoint(coord[longi1], coord[longi2]) 620 | coord = np.row_stack((coord, mp)) 621 | mpnode = len(coord) - 1 622 | # replace it with the two new tets 623 | newtris.append([longi1, mpnode, i3]) 624 | newtris.append([longi2, mpnode, i3]) 625 | # recurse 626 | self._subcoord(coord, newtris, iterations - 1) 627 | tris[:] = newtris 628 | 629 | 630 | class Tet4: 631 | dim = 3 632 | name = "TET4" 633 | nnode = 4 634 | 635 | def __init__(self, coord): 636 | self.coord = coord 637 | 638 | @property 639 | def volume(self): 640 | amd = [self.coord[0, i] - self.coord[3, i] for i in range(3)] 641 | bmd = [self.coord[1, i] - self.coord[3, i] for i in range(3)] 642 | cmd = [self.coord[2, i] - self.coord[3, i] for i in range(3)] 643 | return abs(np.dot(amd, np.cross(bmd, cmd)) / 6.0) 644 | 645 | @property 646 | def center(self): 647 | """Compute the coordinates of the center of a tet element. 648 | 649 | Note 650 | ---- 651 | Simple average in physical space. 
652 | 653 | """ 654 | return np.average(self.coord, axis=0) 655 | 656 | def subcoord(self, intervals): 657 | """Divides the tet into 4^(intervals-1) new tetrahedrons, each with equal 658 | volume. The tetrahedrons are recursively divided along their longest edge to 659 | create two smaller tetrahedrons. 660 | 661 | Parameters 662 | ---------- 663 | intervals : int 664 | The element will be subdivided into nx*ny*nz equispaced subelements, 665 | where nx = ny = nz = intervals 666 | """ 667 | if intervals <= 1: 668 | return np.array(self.coord) 669 | tetindices = [[0, 1, 2, 3]] 670 | coord = np.array(self.coord) 671 | self._subcoord(coord, tetindices, intervals * 2 - 2) 672 | return np.array(coord) 673 | 674 | def subdiv(self, intervals): 675 | """Compute node center list for a subdivided tet element""" 676 | tetindices = [[0, 1, 2, 3]] 677 | coord = np.array(self.coord) 678 | if intervals > 1: 679 | self._subcoord(coord, tetindices, intervals * 2 - 2) 680 | ntet = len(tetindices) 681 | subdiv = [Tet4(coord[tetindices[i]]).center for i in range(ntet)] 682 | return np.array(subdiv) 683 | 684 | def subvols(self, intervals): 685 | """Compute volumes of the subdivided tets""" 686 | tetindices = [[0, 1, 2, 3]] 687 | coord = np.array(self.coord) 688 | self._subcoord(coord, tetindices, intervals * 2 - 2) 689 | ntet = len(tetindices) 690 | vol = [Tet4(coord[tetindices[i]]).volume for i in range(ntet)] 691 | return np.array(vol) 692 | 693 | def subconn(self, intervals): 694 | """Compute connectivity map for subdivided tets. elem_coords must be 695 | supplied.""" 696 | tetindices = [[0, 1, 2, 3]] 697 | if intervals > 1: 698 | coord = np.array(self.coord) 699 | self._subcoord(coord, tetindices, intervals * 2 - 2) 700 | return np.array(tetindices, dtype=int) 701 | 702 | def _subcoord(self, coord, tets, iterations): 703 | """Main subivision function. 'coord' contains a list of nodes, and 'tets' is 704 | a list containing objects like [2,5,7,8], where coord[2],coord[5],coord[7] 705 | and coord[8] would give the nodes of a tetrahedron. This function divides 706 | each tetrahedron in half recursively, up to 'iterations' recursion levels. 707 | Each tetrahedron is divided in half on its longest edge, so the final 708 | tetrahedrons should have similar dimensions. 
709 | 710 | After running, the lists passed as 'coord' and 'tets' will contain the 711 | node locations and indexes respectively for the subdivided tetrahedrons 712 | 713 | """ 714 | 715 | if iterations == 0: 716 | return 717 | 718 | newtets = [] 719 | for tet in tets: 720 | # find longest edge 721 | longest = 0 722 | longi1 = 0 723 | longi2 = 0 724 | i3 = 0 725 | i4 = 0 726 | for a in tet: 727 | for b in tet: 728 | ds = _distsquare(coord[a], coord[b]) 729 | if ds > longest: 730 | longest = ds 731 | longi1 = a 732 | longi2 = b 733 | 734 | for a in tet: 735 | if a != longi1 and a != longi2: 736 | if i3 == 0: 737 | i3 = a 738 | else: 739 | i4 = a 740 | # split it 741 | mp = _midpoint(coord[longi1], coord[longi2]) 742 | coord = np.row_stack((coord, mp)) 743 | mpnode = len(coord) - 1 744 | # replace it with the two new tets 745 | newtets.append([longi1, mpnode, i3, i4]) 746 | newtets.append([longi2, mpnode, i3, i4]) 747 | # recurse 748 | self._subcoord(coord, newtets, iterations - 1) 749 | tets[:] = newtets 750 | 751 | 752 | class Wedge6: 753 | dim = 3 754 | name = "WEDGE6" 755 | nnode = 6 756 | 757 | def __init__(self, coord): 758 | self.coord = np.array(coord) 759 | 760 | @property 761 | def center(self): 762 | return np.average(self.coord, axis=0) 763 | 764 | @property 765 | def volume(self): 766 | """Computes volume of the wedge given nodes. 767 | 768 | Calculates volume by cutting into three tetrahedra 769 | """ 770 | x, y, z = self.coord[:, 0], self.coord[:, 1], self.coord[:, 2] 771 | 772 | i, j, k = 2, 1, 4 773 | ux, uy, uz = x[i] - x[0], y[i] - y[0], z[i] - z[0] 774 | vx, vy, vz = x[j] - x[0], y[j] - y[0], z[j] - z[0] 775 | wx, wy, wz = x[k] - x[0], y[k] - y[0], z[k] - z[0] 776 | vol = ( 777 | wx * (uy * vz - uz * vy) 778 | + wy * (vx * uz - ux * vz) 779 | + wz * (ux * vy - uy * vx) 780 | ) 781 | 782 | i, j, k = 3, 5, 4 783 | ux, uy, uz = x[i] - x[0], y[i] - y[0], z[i] - z[0] 784 | vx, vy, vz = x[j] - x[0], y[j] - y[0], z[j] - z[0] 785 | wx, wy, wz = x[k] - x[0], y[k] - y[0], z[k] - z[0] 786 | vol += ( 787 | wx * (uy * vz - uz * vy) 788 | + wy * (vx * uz - ux * vz) 789 | + wz * (ux * vy - uy * vx) 790 | ) 791 | 792 | i, j, k = 5, 2, 4 793 | ux, uy, uz = x[i] - x[0], y[i] - y[0], z[i] - z[0] 794 | vx, vy, vz = x[j] - x[0], y[j] - y[0], z[j] - z[0] 795 | wx, wy, wz = x[k] - x[0], y[k] - y[0], z[k] - z[0] 796 | vol += ( 797 | wx * (uy * vz - uz * vy) 798 | + wy * (vx * uz - ux * vz) 799 | + wz * (ux * vy - uy * vx) 800 | ) 801 | 802 | return vol 803 | 804 | def subcoord(self, intervals): 805 | """Divides the wedge into 4^(intervals) new wedges. 
The wedges are 806 | recursively divided along their bottom triangular face's longest edge to 807 | create two and through the middle of vertical extrusion to produce 4 sub 808 | wedges 809 | 810 | Parameters 811 | ---------- 812 | intervals : int 813 | The element will be subdivided into nx*ny*nz equispaced subelements, 814 | where nx = ny = nz = intervals 815 | """ 816 | coord = np.array(self.coord) 817 | if intervals > 1: 818 | wedgeindices = [[0, 1, 2, 3, 4, 5]] 819 | self._subcoord(coord, wedgeindices, intervals * 2 - 2) 820 | return np.array(coord) 821 | 822 | def subdiv(self, intervals): 823 | """Compute node center list for a subdivided wedges element""" 824 | wedgeindices = [[0, 1, 2, 3, 4, 5]] 825 | coord = np.array(self.coord) 826 | if intervals > 1: 827 | self._subcoord(coord, wedgeindices, intervals * 2 - 2) 828 | nwedge = len(wedgeindices) 829 | subdiv = [Wedge6(coord[wedgeindices[i]]).center for i in range(nwedge)] 830 | return np.array(subdiv) 831 | 832 | def subvols(self, intervals): 833 | """Compute volumes of the subdivided wedges""" 834 | wedgeindices = [[0, 1, 2, 3, 4, 5]] 835 | coord = np.array_self.coord 836 | if intervals > 1: 837 | self._subcoord(coord, wedgeindices, intervals * 2 - 2) 838 | nwedge = len(wedgeindices) 839 | vols = [Wedge6(coord[wedgeindices[i]]).volume for i in range(nwedge)] 840 | return np.array(vols) 841 | 842 | def subconn(self, intervals): 843 | """Compute connectivity map for subdivided wedges. elem_coords must be 844 | supplied.""" 845 | wedgeindices = [[0, 1, 2, 3, 4, 5]] 846 | if intervals > 1: 847 | coord = np.array(self.coord) 848 | self._subcoord(coord, wedgeindices, intervals * 2 - 2) 849 | return np.array(wedgeindices, dtype=int) 850 | 851 | def _subcoord(self, coord, wedges, iterations): 852 | 853 | if iterations == 0: 854 | return 855 | 856 | newwedges = [] 857 | for wedge in wedges: 858 | longest = 0 859 | ni = 0 860 | nj = 0 861 | nk = 0 862 | 863 | for i in range(0, 2): 864 | for j in range(i + 1, 3): 865 | ds = _distsquare(coord[wedge[i]], coord[wedge[j]]) 866 | if ds > longest: 867 | longest = ds 868 | ni = i 869 | nj = j 870 | 871 | for k in range(0, 2): 872 | if k != ni and k != nj: 873 | nk = k 874 | 875 | gi = wedge[ni] 876 | gj = wedge[nj] 877 | gk = wedge[nk] 878 | giu = wedge[ni + 3] 879 | gju = wedge[nj + 3] 880 | gku = wedge[nk + 3] 881 | 882 | # SPLIT 883 | mpbtri = _midpoint(coord[gi], coord[gj]) 884 | mpttri = _midpoint(coord[giu], coord[gju]) 885 | mpvi = _midpoint(coord[gi], coord[giu]) 886 | mpvj = _midpoint(coord[gj], coord[gju]) 887 | mpvk = _midpoint(coord[gk], coord[gku]) 888 | mpvm = _midpoint(mpbtri, mpttri) 889 | 890 | old_max = len(coord) 891 | coord = np.row_stack( 892 | ( 893 | coord, 894 | mpbtri, # old_max+1 895 | mpttri, # old_max+2 896 | mpvi, # old_max+3 897 | mpvj, # old_max+4 898 | mpvk, # old_max+5 899 | mpvm, # old_max+6 900 | ) 901 | ) 902 | 903 | newwedges.extend( 904 | [ 905 | # wedge [ni, mpb, nk, vi, vm, vk] 906 | [gi, old_max + 1, gk, old_max + 3, old_max + 6, old_max + 5], 907 | # wedge [mpb, nj, nk, vm, vj, vk] 908 | [old_max + 1, gj, gk, old_max + 6, old_max + 4, old_max + 5], 909 | # wedge [vi, vm, vk, ni+3, mpt, nk+3] 910 | [old_max + 3, old_max + 6, old_max + 5, giu, old_max + 2, gku], 911 | # wedge [vm, vj, vk, mpt, nj+3, nk+3] 912 | [old_max + 6, old_max + 4, old_max + 5, old_max + 2, gju, gku], 913 | ] 914 | ) 915 | 916 | # RECURSE 917 | self._subcoord(coord, newwedges, iterations - 1) 918 | wedges[:] = newwedges 919 | 
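# A minimal usage sketch of the factory() helper defined above. The unit-square
# QUAD4 coordinates are illustrative only and assume the exodus
# (counter-clockwise) node-ordering convention described in the Quad4 docstring.
if __name__ == "__main__":
    quad = factory("QUAD4", [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    print(quad.volume)      # 0.5*((x0-x2)*(y1-y3) + (x1-x3)*(y2-y0)) -> 1.0
    print(quad.subdiv(2))   # 2*2 equispaced quadrature points, one per subelement
    print(quad.subvols(2))  # volumes of the 2*2 subelements, each 0.25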
-------------------------------------------------------------------------------- /exodusii/ex_params.py: -------------------------------------------------------------------------------- 1 | from . import nc 2 | 3 | 4 | class ex_init_params: 5 | def __init__(self, fh): 6 | self.fh = fh 7 | 8 | def __eq__(self, other): 9 | if not isinstance(other, ex_init_params): 10 | return False 11 | for attr in dir(self): 12 | if not attr.startswith("num_"): 13 | continue 14 | if getattr(self, attr) != getattr(other, attr): 15 | return False 16 | return True 17 | 18 | def getdim(self, name, default=None): 19 | return nc.get_dimension(self.fh, name, default=default) 20 | 21 | @property 22 | def num_dim(self): 23 | return self.getdim("num_dim", 0) 24 | 25 | @property 26 | def num_edge_blk(self): 27 | """Number of model edge blocks""" 28 | return self.getdim("num_edge_blk", 0) 29 | 30 | @property 31 | def num_edge_maps(self): 32 | """Number of model edge maps""" 33 | return self.getdim("num_edge_maps", 0) 34 | 35 | @property 36 | def num_edge_sets(self): 37 | """Number of model edge sets""" 38 | return self.getdim("num_edge_sets", 0) 39 | 40 | @property 41 | def num_el_blk(self): 42 | """Number of model element blocks""" 43 | return self.getdim("num_el_blk", 0) 44 | 45 | @property 46 | def num_elem_maps(self): 47 | """Number of model elem maps""" 48 | return self.getdim("num_elem_maps", 0) 49 | 50 | @property 51 | def num_elem_sets(self): 52 | """Number of model elem sets""" 53 | return self.getdim("num_elem_sets", 0) 54 | 55 | @property 56 | def num_face_blk(self): 57 | """Number of model face blocks""" 58 | return self.getdim("num_face_blk", 0) 59 | 60 | @property 61 | def num_face_maps(self): 62 | """Number of model face maps""" 63 | return self.getdim("num_face_maps", 0) 64 | 65 | @property 66 | def num_face_sets(self): 67 | """Number of model face sets""" 68 | return self.getdim("num_face_sets", 0) 69 | 70 | @property 71 | def num_glo_var(self): 72 | return self.getdim("num_glo_var", 0) 73 | 74 | @property 75 | def num_node_maps(self): 76 | """Number of model node maps""" 77 | return self.getdim("num_node_maps", 0) 78 | 79 | @property 80 | def num_node_sets(self): 81 | """Number of model node sets""" 82 | return self.getdim("num_node_sets", 0) 83 | 84 | @property 85 | def num_side_sets(self): 86 | """Number of model side sets""" 87 | return self.getdim("num_side_sets", 0) 88 | -------------------------------------------------------------------------------- /exodusii/exodus_h.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class types(Enum): 5 | node = 0 6 | element = 1 7 | edge = 2 8 | face = 3 9 | 10 | 11 | class maps(Enum): 12 | elem_local_to_global = 0 13 | node_local_to_global = 1 14 | edge_local_to_global = 2 15 | face_local_to_global = 3 16 | elem_block_elem_local_to_global = 10 17 | elem_block_elem_global_to_local = 11 18 | edge_block_edge_local_to_global = 20 19 | edge_block_edge_global_to_local = 21 20 | face_block_face_local_to_global = 30 21 | face_block_face_global_to_local = 31 22 | 23 | 24 | def ex_catstr(*args): 25 | return "".join(str(_) for _ in args) 26 | 27 | 28 | # -------------------------------------------------------- exodusII_inc.h --- # 29 | MAX_VAR_NAME_LENGTH = 20 # Internal use only 30 | 31 | # Default "filesize" for newly created files. 32 | # Set to 0 for normal filesize setting. 
33 | # Set to 1 for EXODUS_LARGE_MODEL setting to be the default 34 | EXODUS_DEFAULT_SIZE = 1 35 | 36 | # Exodus error return codes - function return values: 37 | EX_FATAL = -1 # fatal error flag def 38 | EX_NOERR = 0 # no error flag def 39 | EX_WARN = 1 # warning flag def 40 | 41 | # This file contains defined constants that are used internally in the EXODUS II API. 42 | # 43 | # The first group of constants refer to netCDF variables, attributes, or dimensions in 44 | # which the EXODUS II data are stored. Using the defined constants will allow the 45 | # names of the netCDF entities to be changed easily in the future if needed. The first 46 | # three letters of the constant identify the netCDF entity as a variable (VAR), 47 | # dimension (DIM), or attribute (ATT). 48 | # 49 | # NOTE: The entity name should not have any blanks in it. Blanks are 50 | # technically legal but some netcdf utilities (ncgen in particular) 51 | # fail when they encounter a blank in a name. 52 | 53 | ATT_TITLE = "title" # the database title 54 | ATT_API_VERSION = "api_version" # the EXODUS II api vers number 55 | ATT_VERSION = "version" # the EXODUS II file vers number 56 | ATT_FILESIZE = "file_size" # 1=large, 0=normal 57 | 58 | # word size of floating point numbers in file 59 | ATT_FLT_WORDSIZE = "floating_point_word_size" 60 | 61 | # word size of floating point numbers in file used for db version 2.01 and earlier 62 | ATT_FLT_WORDSIZE_BLANK = "floating point word size" 63 | 64 | ATT_NAME_ELEM_TYPE = "elem_type" # element type names for each element block 65 | 66 | DIM_NUM_NODES = "num_nodes" # number of nodes 67 | DIM_NUM_DIM = "num_dim" # number of dimensions; 2- or 3-d 68 | DIM_NUM_EDGE = "num_edge" # number of edges (over all blks) 69 | DIM_NUM_FACE = "num_face" # number of faces (over all blks) 70 | DIM_NUM_ELEM = "num_elem" # number of elements 71 | DIM_NUM_ELEM_BLK = "num_el_blk" # number of element blocks 72 | DIM_NUM_EDGE_BLK = "num_ed_blk" # number of edge blocks 73 | DIM_NUM_FACE_BLK = "num_fa_blk" # number of face blocks 74 | 75 | DIM_NUM_ELEM_GLOBAL = "num_elems_global" 76 | DIM_NUM_NODE_GLOBAL = "num_nodes_global" 77 | 78 | DIM_NUM_NODE_SET_GLOBAL = "num_ns_global" 79 | DIM_NUM_SIDE_SET_GLOBAL = "num_ss_global" 80 | DIM_NUM_ELEM_BLK_GLOBAL = "num_el_blk_global" 81 | 82 | VAR_ELEM_BLK_COUNT_GLOBAL = "el_blk_cnt_global" 83 | VAR_SIDE_SET_SIDE_COUNT_GLOBAL = "ss_side_cnt_global" 84 | VAR_NODE_SET_NODE_COUNT_GLOBAL = "ns_node_cnt_global" 85 | 86 | VAR_NODE_SET_DF_COUNT_GLOBAL = "ns_df_cnt_global" 87 | VAR_SIDE_SET_DF_COUNT_GLOBAL = "ss_df_cnt_global" 88 | VAR_EDGE_SET_DF_COUNT_GLOBAL = "es_df_cnt_global" 89 | VAR_ELEM_SET_DF_COUNT_GLOBAL = "els_df_cnt_global" 90 | 91 | VAR_ELEM_BLK_IDS_GLOBAL = "el_blk_ids_global" 92 | VAR_NODE_SET_IDS_GLOBAL = "ns_ids_global" 93 | VAR_SIDE_SET_IDS_GLOBAL = "ss_ids_global" 94 | 95 | VAR_COORD = "coord" # nodal coordinates 96 | VAR_COORD_X = "coordx" # X-dimension coordinate 97 | VAR_COORD_Y = "coordy" # Y-dimension coordinate 98 | VAR_COORD_Z = "coordz" # Z-dimension coordinate 99 | VAR_NAME_COORD = "coor_names" # names of coordinates 100 | VAR_NAME_ELEM_BLK = "eb_names" # names of element blocks 101 | VAR_NAME_NODE_SET = "ns_names" # names of node sets 102 | VAR_NAME_SIDE_SET = "ss_names" # names of side sets 103 | VAR_NAME_ELEM_MAP = "emap_names" # names of element maps 104 | VAR_NAME_EDGE_MAP = "edmap_names" # names of edge maps 105 | VAR_NAME_FACE_MAP = "famap_names" # names of face maps 106 | VAR_NAME_NODE_MAP = "nmap_names" # names of node maps 107 | 
VAR_NAME_EDGE_BLK = "ed_names" # names of edge blocks 108 | VAR_NAME_FACE_BLK = "fa_names" # names of face blocks 109 | VAR_NAME_EDGE_SET = "es_names" # names of edge sets 110 | VAR_NAME_FACE_SET = "fs_names" # names of face sets 111 | VAR_NAME_ELEM_SET = "els_names" # names of element sets 112 | VAR_STAT_ELEM_BLK = "eb_status" # element block status 113 | VAR_STAT_EDGE_CONN = "econn_status" # element block edge status 114 | VAR_STAT_FACE_CONN = "fconn_status" # element block face status 115 | VAR_STAT_EDGE_BLK = "ed_status" # edge block status 116 | VAR_STAT_FACE_BLK = "fa_status" # face block status 117 | VAR_ID_ELEM_BLK = "eb_prop1" # element block ids props 118 | VAR_ID_EDGE_BLK = "ed_prop1" # edge block ids props 119 | VAR_ID_FACE_BLK = "fa_prop1" # face block ids props 120 | 121 | DIM_NUM_ATTR = lambda num: ex_catstr("num_attr", num) 122 | # number of elements in element block num 123 | DIM_NUM_ELEM_IN_ELEM_BLK = lambda num: ex_catstr("num_el_in_blk", num) 124 | 125 | # number of nodes per element in element block num 126 | DIM_NUM_NODE_PER_ELEM = lambda num: ex_catstr("num_nod_per_el", num) 127 | 128 | # number of attributes in element block num 129 | DIM_NUM_ATT_IN_ELEM_BLK = lambda num: ex_catstr("num_att_in_blk", num) 130 | 131 | # number of edges in edge block num 132 | DIM_NUM_EDGE_IN_EDGE_BLK = lambda num: ex_catstr("num_ed_in_blk", num) 133 | 134 | # number of nodes per edge in edge block num 135 | DIM_NUM_NODE_PER_EDGE = lambda num: ex_catstr("num_nod_per_ed", num) 136 | 137 | # number of edges per element in element block num 138 | DIM_NUM_EDGE_PER_ELEM = lambda num: ex_catstr("num_edg_per_el", num) 139 | 140 | # number of attributes in edge block num 141 | DIM_NUM_ATT_IN_EDGE_BLK = lambda num: ex_catstr("num_att_in_eblk", num) 142 | 143 | # number of faces in face block num 144 | DIM_NUM_FACE_IN_FACE_BLK = lambda num: ex_catstr("num_fa_in_blk", num) 145 | 146 | # number of nodes per face in face block num 147 | DIM_NUM_NODE_PER_FACE = lambda num: ex_catstr("num_nod_per_fa", num) 148 | 149 | # number of faces per element in element block num 150 | DIM_NUM_FACE_PER_ELEM = lambda num: ex_catstr("num_fac_per_el", num) 151 | 152 | # number of attributes in face block num 153 | DIM_NUM_ATT_IN_FACE_BLK = lambda num: ex_catstr("num_att_in_fblk", num) 154 | DIM_NUM_ATT_IN_NODE_BLK = "num_att_in_nblk" 155 | 156 | # element connectivity for element block num 157 | VAR_ELEM_BLK_CONN = lambda num: ex_catstr("connect", num) 158 | 159 | # array containing number of entity per entity for n-sided face/element blocks 160 | VAR_EBEPEC = lambda num: ex_catstr("ebepecnt", num) 161 | 162 | # list of attributes for element block num 163 | VAR_ELEM_ATTRIB = lambda num: ex_catstr("attrib", num) 164 | 165 | # list of attribute names for element block num 166 | VAR_NAME_ELEM_BLK_ATTRIB = lambda num: ex_catstr("attrib_name", num) 167 | 168 | # list of the numth property for all element blocks 169 | VAR_EB_PROP = lambda num: ex_catstr("eb_prop", num) 170 | 171 | # edge connectivity for element block num 172 | VAR_EDGE_CONN = lambda num: ex_catstr("edgconn", num) 173 | 174 | # edge connectivity for edge block num 175 | VAR_EDGE_BLK_CONN = lambda num: ex_catstr("ebconn", num) 176 | 177 | # list of attributes for edge block num 178 | VAR_EDGE_BLK_ATTRIB = lambda num: ex_catstr("eattrb", num) 179 | # list of attribute names for edge block num 180 | VAR_NAME_EDGE_BLK_ATTRIB = lambda num: ex_catstr("eattrib_name", num) 181 | 182 | VAR_NATTRIB = "nattrb" 183 | VAR_NAME_NATTRIB = "nattrib_name" 184 | 
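For example, the per-block netCDF names produced by these helpers follow directly from ex_catstr:

```
>>> ex_catstr("connect", 1)
'connect1'
>>> DIM_NUM_ELEM_IN_ELEM_BLK(2)   # number of elements in element block 2
'num_el_in_blk2'
>>> VAR_ELEM_BLK_CONN(3)          # connectivity of element block 3
'connect3'
```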
DIM_NUM_ATT_IN_NODE_BLK = "num_att_in_nblk" 185 | VAR_NODE_SET_ATTRIB = lambda num: ex_catstr("nsattrb", num) 186 | VAR_NAME_NODE_SET_ATTRIB = lambda num: ex_catstr("nsattrib_name", num) 187 | DIM_NUM_ATT_IN_NODE_SET = lambda num: ex_catstr("num_att_in_ns", num) 188 | VAR_SIDE_SET_ATTRIB = lambda num: ex_catstr("ssattrb", num) 189 | VAR_NAME_SIDE_SET_ATTRIB = lambda num: ex_catstr("ssattrib_name", num) 190 | DIM_NUM_ATT_IN_SIDE_SET = lambda num: ex_catstr("num_att_in_ss", num) 191 | VAR_EDGE_SET_ATTRIB = lambda num: ex_catstr("esattrb", num) 192 | VAR_NAME_EDGE_SET_ATTRIB = lambda num: ex_catstr("esattrib_name", num) 193 | DIM_NUM_ATT_IN_EDGE_SET = lambda num: ex_catstr("num_att_in_es", num) 194 | VAR_FACE_SET_ATTRIB = lambda num: ex_catstr("fsattrb", num) 195 | VAR_NAME_FACE_SET_ATTRIB = lambda num: ex_catstr("fsattrib_name", num) 196 | DIM_NUM_ATT_IN_FACE_SET = lambda num: ex_catstr("num_att_in_fs", num) 197 | VAR_ELEM_SET_ATTRIB = lambda num: ex_catstr("elsattrb", num) 198 | VAR_NAME_ELEM_SET_ATTRIB = lambda num: ex_catstr("elsattrib_name", num) 199 | DIM_NUM_ATT_IN_ELEM_SET = lambda num: ex_catstr("num_att_in_els", num) 200 | VAR_EDGE_PROP = lambda num: ex_catstr("ed_prop", num) 201 | 202 | # face connectivity for element block num 203 | VAR_FACE_CONN = lambda num: ex_catstr("facconn", num) 204 | 205 | # face connectivity for element block num 206 | VAR_FACE_BLK_CONN = lambda num: ex_catstr("fbconn", num) 207 | 208 | # face connectivity for face block num 209 | VAR_FBEPEC = lambda num: ex_catstr("fbepecnt", num) 210 | 211 | # array containing number of entity per entity for n-sided face/element blocks 212 | VAR_FACE_ATTRIB = lambda num: ex_catstr("fattrb", num) 213 | 214 | # list of attributes for face block num 215 | VAR_NAME_FACE_BLK_ATTRIB = lambda num: ex_catstr("fattrib_name", num) 216 | 217 | # list of attribute names for face block num 218 | VAR_FACE_PROP = lambda num: ex_catstr("fa_prop", num) 219 | 220 | # list of the numth property for all face blocks 221 | ATT_PROP_NAME = "name" # name attached to element 222 | 223 | # block, node set, side set, element map, or map properties 224 | DIM_NUM_SIDE_SET = "num_side_sets" # number of side sets 225 | VAR_SIDE_SET_STAT = "ss_status" # side set status 226 | VAR_SIDE_SET_IDS = "ss_prop1" # side set id properties 227 | 228 | # number of sides in side set num 229 | DIM_NUM_SIDE_SIDE_SET = lambda num: ex_catstr("num_side_ss", num) 230 | 231 | # number of distribution factors in side set num 232 | DIM_NUM_DF_SIDE_SET = lambda num: ex_catstr("num_df_ss", num) 233 | 234 | # the distribution factors for each node in side set num 235 | VAR_DF_SIDE_SET = lambda num: ex_catstr("dist_fact_ss", num) 236 | 237 | # list of elements in side set num 238 | VAR_ELEM_SIDE_SET = lambda num: ex_catstr("elem_ss", num) 239 | 240 | # list of sides in side set 241 | VAR_SIDE_SIDE_SET = lambda num: ex_catstr("side_ss", num) 242 | 243 | # list of the numth property for all side sets 244 | VAR_SIDE_SET_PROP = lambda num: ex_catstr("ss_prop", num) 245 | 246 | DIM_NUM_EDGE_SET = "num_edge_sets" # number of edge sets 247 | VAR_EDGE_SET_STAT = "es_status" # edge set status 248 | VAR_EDGE_SET_IDS = "es_prop1" # edge set id properties 249 | 250 | # number of edges in edge set num 251 | DIM_NUM_EDGE_EDGE_SET = lambda num: ex_catstr("num_edge_es", num) 252 | 253 | DIM_NUM_DF_EDGE_SET = lambda num: ex_catstr("num_df_es", num) 254 | 255 | # number of distribution factors in edge set num 256 | VAR_DF_EDGE_SET = lambda num: ex_catstr("dist_fact_es", num) 257 | 258 | # list 
of edges in edge set num 259 | VAR_EDGE_EDGE_SET = lambda num: ex_catstr("edge_es", num) 260 | 261 | # list of orientations in the edge set. 262 | VAR_ORNT_EDGE_SET = lambda num: ex_catstr("ornt_es", num) 263 | 264 | # list of the numth property for all edge sets 265 | VAR_EDGE_SET_PROP = lambda num: ex_catstr("es_prop", num) 266 | 267 | DIM_NUM_FACE_SET = "num_face_sets" # number of face sets 268 | VAR_FACE_SET_STAT = "fs_status" # face set status 269 | VAR_FACE_SET_IDS = "fs_prop1" # face set id properties 270 | 271 | # number of faces in side set num 272 | DIM_NUM_FACE_FACE_SET = lambda num: ex_catstr("num_face_fs", num) 273 | 274 | # number of distribution factors in face set num 275 | DIM_NUM_DF_FACE_SET = lambda num: ex_catstr("num_df_fs", num) 276 | 277 | # the distribution factors for each node in face set num 278 | VAR_DF_FACE_SET = lambda num: ex_catstr("dist_fact_fs", num) 279 | 280 | # list of elements in face set num 281 | VAR_FACE_FACE_SET = lambda num: ex_catstr("face_fs", num) 282 | 283 | # list of sides in side set 284 | VAR_ORNT_FACE_SET = lambda num: ex_catstr("ornt_fs", num) 285 | 286 | # list of the numth property for all face sets 287 | VAR_FACE_SET_PROP = lambda num: ex_catstr("fs_prop", num) 288 | 289 | DIM_NUM_ELEM_SET = "num_elem_sets" # number of elem sets 290 | 291 | # number of elements in elem set num 292 | DIM_NUM_ELEM_ELEM_SET = lambda num: ex_catstr("num_ele_els", num) 293 | 294 | # number of distribution factors in element set num 295 | DIM_NUM_DF_ELEM_SET = lambda num: ex_catstr("num_df_els", num) 296 | 297 | VAR_ELEM_SET_STAT = "els_status" # elem set status 298 | VAR_ELEM_SET_IDS = "els_prop1" # elem set id properties 299 | 300 | # list of elements in elem set num 301 | VAR_ELEM_ELEM_SET = lambda num: ex_catstr("elem_els", num) 302 | 303 | # list of distribution factors in elem set num 304 | VAR_DF_ELEM_SET = lambda num: ex_catstr("dist_fact_els", num) 305 | 306 | # list of the numth property for all elem sets 307 | VAR_ELEM_SET_PROP = lambda num: ex_catstr("els_prop", num) 308 | 309 | DIM_NUM_NODE_SET = "num_node_sets" # number of node sets 310 | 311 | # number of nodes in node set num 312 | DIM_NUM_NODE_NODE_SET = lambda num: ex_catstr("num_nod_ns", num) 313 | 314 | # number of distribution factors in node set num 315 | DIM_NUM_DF_NODE_SET = lambda num: ex_catstr("num_df_ns", num) 316 | 317 | VAR_NODE_SET_STAT = "ns_status" # node set status 318 | VAR_NODE_SET_IDS = "ns_prop1" # node set id properties 319 | 320 | # list of nodes in node set num 321 | VAR_NODE_NODE_SET = lambda num: ex_catstr("node_ns", num) 322 | 323 | # list of distribution factors in node set num 324 | VAR_DF_NODE_SET = lambda num: ex_catstr("dist_fact_ns", num) 325 | 326 | # list of the numth property for all node sets 327 | VAR_NODE_SET_PROP = lambda num: ex_catstr("ns_prop", num) 328 | 329 | DIM_NUM_QA = "num_qa_rec" # number of QA records 330 | VAR_QA_TITLE = "qa_records" # QA records 331 | DIM_NUM_INFO = "num_info" # number of information records 332 | VAR_INFO = "info_records" # information records 333 | VAR_WHOLE_TIME = "time_whole" # simulation times for whole time steps 334 | VAR_ELEM_TAB = "elem_var_tab" # element variable truth table 335 | VAR_EDGE_BLK_TAB = "edge_var_tab" # edge variable truth table 336 | VAR_FACE_BLK_TAB = "face_var_tab" # face variable truth table 337 | VAR_ELEM_SET_TAB = "elset_var_tab" # elemset variable truth table 338 | VAR_SIDE_SET_TAB = "sset_var_tab" # sideset variable truth table 339 | VAR_FACE_SET_TAB = "fset_var_tab" # faceset variable truth 
table 340 | VAR_EDGE_SET_TAB = "eset_var_tab" # edgeset variable truth table 341 | VAR_NODE_SET_TAB = "nset_var_tab" # nodeset variable truth table 342 | DIM_NUM_GLO_VAR = "num_glo_var" # number of global variables 343 | VAR_NAME_GLO_VAR = "name_glo_var" # names of global variables 344 | VAR_GLO_VAR = "vals_glo_var" # values of global variables 345 | DIM_NUM_NODE_VAR = "num_nod_var" # number of nodal variables 346 | VAR_NAME_NODE_VAR = "name_nod_var" # names of nodal variables 347 | 348 | VAR_NODE_VAR = lambda num: ex_catstr("vals_nod_var", num) # values of nodal variables 349 | 350 | DIM_NUM_ELEM_VAR = "num_elem_var" # number of element variables 351 | VAR_NAME_ELEM_VAR = "name_elem_var" # names of element variables 352 | 353 | # values of element variable num1 in element block num2 354 | VAR_ELEM_VAR = lambda num1, num2: ex_catstr("vals_elem_var", num1, "eb", num2) 355 | 356 | DIM_NUM_EDGE_VAR = "num_edge_var" # number of edge variables 357 | VAR_NAME_EDGE_VAR = "name_edge_var" # names of edge variables 358 | 359 | # values of edge variable num1 in edge block num2 360 | VAR_EDGE_VAR = lambda num1, num2: ex_catstr("vals_edge_var", num1, "eb", num2) 361 | 362 | DIM_NUM_FACE_VAR = "num_face_var" # number of face variables 363 | VAR_NAME_FACE_VAR = "name_face_var" # names of face variables 364 | 365 | # values of face variable num1 in face block num2 366 | VAR_FACE_VAR = lambda num1, num2: ex_catstr("vals_face_var", num1, "fb", num2) 367 | 368 | DIM_NUM_NODE_SET_VAR = "num_nset_var" # number of nodeset variables 369 | VAR_NAME_NODE_SET_VAR = "name_nset_var" # names of nodeset variables 370 | 371 | # values of nodeset variable num1 in nodeset num2 372 | VAR_NODE_SET_VAR = lambda num1, num2: ex_catstr("vals_nset_var", num1, "ns", num2) 373 | 374 | DIM_NUM_EDGE_SET_VAR = "num_eset_var" # number of edgeset variables 375 | VAR_NAME_EDGE_SET_VAR = "name_eset_var" # names of edgeset variables 376 | 377 | # values of edgeset variable num1 in edgeset num2 378 | VAR_EDGE_SET_VAR = lambda num1, num2: ex_catstr("vals_eset_var", num1, "es", num2) 379 | 380 | DIM_NUM_FACE_SET_VAR = "num_fset_var" # number of faceset variables 381 | VAR_NAME_FACE_SET_VAR = "name_fset_var" # names of faceset variables 382 | 383 | # values of faceset variable num1 in faceset num2 384 | VAR_FACE_SET_VAR = lambda num1, num2: ex_catstr("vals_fset_var", num1, "fs", num2) 385 | 386 | DIM_NUM_SIDE_SET_VAR = "num_sset_var" # number of sideset variables 387 | VAR_NAME_SIDE_SET_VAR = "name_sset_var" # names of sideset variables 388 | 389 | # values of sideset variable num1 in sideset num2 390 | VAR_SIDE_SET_VAR = lambda num1, num2: ex_catstr("vals_sset_var", num1, "ss", num2) 391 | 392 | DIM_NUM_ELEM_SET_VAR = "num_elset_var" # number of element set variables 393 | VAR_NAME_ELEM_SET_VAR = "name_elset_var" # names of elemset variables 394 | 395 | # values of elemset variable num1 in elemset num2 396 | VAR_ELEM_SET_VAR = lambda num1, num2: ex_catstr("vals_elset_var", num1, "es", num2) 397 | 398 | # general dimension of length MAX_STR_LENGTH used for name lengths 399 | DIM_STR = "len_string" 400 | DIM_NAME = "len_name" 401 | DIM_NUM_SIDE = "num_side" 402 | 403 | # general dimension of length MAX_LINE_LENGTH used for long strings 404 | DIM_LIN = "len_line" 405 | DIM_N4 = "four" # general dimension of length 4 406 | 407 | 408 | # unlimited (expandable) dimension for time steps 409 | DIM_TIME = "time_step" 410 | 411 | VAR_ELEM_NUM_MAP = "elem_num_map" 412 | VAR_EDGE_NUM_MAP = "edge_num_map" 413 | VAR_FACE_NUM_MAP = "face_num_map" 414 | 
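As a sketch of how these results-variable names might be used with the nc helpers defined later in this package (the file name is hypothetical and error handling is omitted):

```
from exodusii import nc
from exodusii.exodus_h import VAR_NODE_VAR, VAR_NAME_NODE_VAR, VAR_WHOLE_TIME

fh = nc.open("mesh.exo")                        # hypothetical Exodus file
times = nc.get_variable(fh, VAR_WHOLE_TIME)     # "time_whole"
names = nc.get_variable(fh, VAR_NAME_NODE_VAR)  # "name_nod_var"
vals = nc.get_variable(fh, VAR_NODE_VAR(1))     # "vals_nod_var1", all time steps
nc.close(fh)
```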
VAR_NODE_NUM_MAP = "node_num_map" 415 | 416 | DIM_NUM_ELEM_MAP = "num_elem_maps" # number of element maps 417 | VAR_ELEM_MAP = lambda num: ex_catstr("elem_map", num) # the numth element map 418 | 419 | # list of the numth property for all element maps 420 | VAR_ELEM_MAP_PROP = lambda num: ex_catstr("em_prop", num) 421 | 422 | DIM_NUM_EDGE_MAP = "num_edge_maps" # number of edge maps 423 | VAR_EDGE_MAP = lambda num: ex_catstr("edge_map", num) # the numth edge map 424 | 425 | # list of the numth property for all edge maps 426 | VAR_EDGE_MAP_PROP = lambda num: ex_catstr("edm_prop", num) 427 | 428 | DIM_NUM_FACE_MAP = "num_face_maps" # number of face maps 429 | VAR_FACE_MAP = lambda num: ex_catstr("face_map", num) # the numth face map 430 | 431 | # list of the numth property for all face maps 432 | VAR_FACE_MAP_PROP = lambda num: ex_catstr("fam_prop", num) 433 | 434 | DIM_NUM_NODE_MAP = "num_node_maps" # number of node maps 435 | VAR_NODE_MAP = lambda num: ex_catstr("node_map", num) # the numth node map 436 | 437 | # list of the numth property for all node maps 438 | VAR_NODE_MAP_PROP = lambda num: ex_catstr("nm_prop", num) 439 | 440 | EX_ELEM_UNK = (-1,) # unknown entity 441 | EX_ELEM_NULL_ELEMENT = 0 442 | EX_ELEM_TRIANGLE = 1 # Triangle entity 443 | EX_ELEM_QUAD = 2 # Quad entity 444 | EX_ELEM_HEX = 3 # Hex entity 445 | EX_ELEM_WEDGE = 4 # Wedge entity 446 | EX_ELEM_TETRA = 5 # Tetra entity 447 | EX_ELEM_TRUSS = 6 # Truss entity 448 | EX_ELEM_BEAM = 7 # Beam entity 449 | EX_ELEM_SHELL = 8 # Shell entity 450 | EX_ELEM_SPHERE = 9 # Sphere entity 451 | EX_ELEM_CIRCLE = 10 # Circle entity 452 | EX_ELEM_TRISHELL = 11 # Triangular Shell entity 453 | EX_ELEM_PYRAMID = 12 # Pyramid entity 454 | 455 | # ------------------------------------------------------------ exodusII.h --- # 456 | EX_NOCLOBBER = 0 457 | EX_CLOBBER = 1 458 | EX_NORMAL_MODEL = 2 459 | EX_LARGE_MODEL = 4 460 | EX_NETCDF4 = 8 461 | EX_NOSHARE = 16 462 | EX_SHARE = 32 463 | 464 | EX_READ = 0 465 | EX_WRITE = 1 466 | 467 | EX_ELEM_BLOCK = 1 468 | EX_NODE_SET = 2 469 | EX_SIDE_SET = 3 470 | EX_ELEM_MAP = 4 471 | EX_NODE_MAP = 5 472 | EX_EDGE_BLOCK = 6 473 | EX_EDGE_SET = 7 474 | EX_FACE_BLOCK = 8 475 | EX_FACE_SET = 9 476 | EX_ELEM_SET = 10 477 | EX_EDGE_MAP = 11 478 | EX_FACE_MAP = 12 479 | EX_GLOBAL = 13 480 | EX_NODE = 15 # not defined in exodus 481 | EX_EDGE = 16 # not defined in exodus 482 | EX_FACE = 17 # not defined in exodus 483 | EX_ELEM = 18 # not defined in exodus 484 | 485 | MAX_STR_LENGTH = 32 486 | MAX_VAR_NAME_LENGTH = 20 487 | MAX_LINE_LENGTH = 80 488 | MAX_ERR_LENGTH = 256 489 | 490 | EX_VERBOSE = 1 491 | EX_DEBUG = 2 492 | EX_ABORT = 4 493 | 494 | EX_INQ_FILE_TYPE = 1 # inquire EXODUS II file type 495 | EX_INQ_API_VERS = 2 # inquire API version number 496 | EX_INQ_DB_VERS = 3 # inquire database version number 497 | EX_INQ_TITLE = 4 # inquire database title 498 | EX_INQ_DIM = 5 # inquire number of dimensions 499 | EX_INQ_NODES = 6 # inquire number of nodes 500 | EX_INQ_ELEM = 7 # inquire number of elements 501 | EX_INQ_ELEM_BLK = 8 # inquire number of element blocks 502 | EX_INQ_NODE_SETS = 9 # inquire number of node sets 503 | EX_INQ_NS_NODE_LEN = 10 # inquire length of node set node list 504 | EX_INQ_SIDE_SETS = 11 # inquire number of side sets 505 | EX_INQ_SS_NODE_LEN = 12 # inquire length of side set node list 506 | EX_INQ_SS_ELEM_LEN = 13 # inquire length of side set element list 507 | EX_INQ_QA = 14 # inquire number of QA records 508 | EX_INQ_INFO = 15 # inquire number of info records 509 | EX_INQ_TIME = 16 # 
inquire number of time steps in the database 510 | EX_INQ_EB_PROP = 17 # inquire number of element block properties 511 | EX_INQ_NS_PROP = 18 # inquire number of node set properties 512 | EX_INQ_SS_PROP = 19 # inquire number of side set properties 513 | EX_INQ_NS_DF_LEN = 20 # inquire length of node set distribution factor list 514 | EX_INQ_SS_DF_LEN = 21 # inquire length of side set distribution factor list 515 | EX_INQ_LIB_VERS = 22 # inquire API Lib vers number 516 | EX_INQ_EM_PROP = 23 # inquire number of element map properties 517 | EX_INQ_NM_PROP = 24 # inquire number of node map properties 518 | EX_INQ_ELEM_MAP = 25 # inquire number of element maps 519 | EX_INQ_NODE_MAP = 26 # inquire number of node maps 520 | EX_INQ_EDGE = 27 # inquire number of edges 521 | EX_INQ_EDGE_BLK = 28 # inquire number of edge blocks 522 | EX_INQ_EDGE_SETS = 29 # inquire number of edge sets 523 | EX_INQ_ES_LEN = 30 # inquire length of concat edge set edge list 524 | EX_INQ_ES_DF_LEN = 31 # inquire length of concat edge set dist factor list 525 | EX_INQ_EDGE_PROP = 32 # inquire number of properties stored per edge block 526 | EX_INQ_ES_PROP = 33 # inquire number of properties stored per edge set 527 | EX_INQ_FACE = 34 # inquire number of faces 528 | EX_INQ_FACE_BLK = 35 # inquire number of face blocks 529 | EX_INQ_FACE_SETS = 36 # inquire number of face sets 530 | EX_INQ_FS_LEN = 37 # inquire length of concat face set face list 531 | EX_INQ_FS_DF_LEN = 38 # inquire length of concat face set dist factor list 532 | EX_INQ_FACE_PROP = 39 # inquire number of properties stored per face block 533 | EX_INQ_FS_PROP = 40 # inquire number of properties stored per face set 534 | EX_INQ_ES = 41 # inquire number of element sets 535 | EX_INQ_ELS_LEN = 42 # inquire length of concat element set element list 536 | EX_INQ_ELS_DF_LEN = 43 # inquire length of concat element set dist factor list 537 | EX_INQ_ELS_PROP = 44 # inquire number of properties stored per elem set 538 | EX_INQ_EDGE_MAP = 45 # inquire number of edge maps 539 | EX_INQ_FACE_MAP = 46 # inquire number of face maps 540 | EX_INQ_COORD_FRAMES = 47 # inquire number of coordinate frames 541 | -------------------------------------------------------------------------------- /exodusii/exoread.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import glob 3 | import argparse 4 | 5 | from .lineout import lineout 6 | from .file import exodusii_file 7 | from .parallel_file import parallel_exodusii_file 8 | 9 | 10 | class Namespace(argparse.Namespace): 11 | def __init__(self, **kwargs): 12 | self.variables = [] 13 | super(Namespace, self).__init__(**kwargs) 14 | 15 | def __setattr__(self, attr, value): 16 | if value: 17 | if attr in ("globalvar", "element", "face", "edge", "node"): 18 | type = "d" if attr == "edge" else attr[0] 19 | self.variables.append(f"{type}/{value[-1]}") 20 | super(Namespace, self).__setattr__(attr, value) 21 | 22 | 23 | def main(argv=None, file=None): 24 | """Extracts specified variable values from a given file name. By default data is 25 | written to stdout in tabular form. If no variables are selected, then the file 26 | meta data is written instead. 27 | 28 | Default for global variables is to write out the values for all times. 29 | 30 | Default for spatial variables is to write out the values for each object for the 31 | last time slab. 32 | 33 | Special nodal variable names are the geometry coordinates, "COORDINATES", and the 34 | motion displacements, "DISPLACEMENTS". 
When you ask for one of them, you get each 35 | component of the vector. 36 | """ 37 | 38 | argv = argv or sys.argv[1:] 39 | p = argparse.ArgumentParser(description=main.__doc__) 40 | p.add_argument("-V", "--version", action="version", version="%(prog)s 3.0") 41 | 42 | g = p.add_mutually_exclusive_group() 43 | g.add_argument( 44 | "-g", 45 | "--global", 46 | action="append", 47 | dest="globalvar", 48 | help="Select a mesh global variable name to extact", 49 | ) 50 | g.add_argument( 51 | "-e", 52 | "--element", 53 | action="append", 54 | help="Select a mesh element variable name to extact", 55 | ) 56 | g.add_argument( 57 | "-f", 58 | "--face", 59 | action="append", 60 | help="Select a mesh face variable name to extact", 61 | ) 62 | g.add_argument( 63 | "-d", 64 | "--edge", 65 | action="append", 66 | help="Select a mesh edge variable name to extact", 67 | ) 68 | g.add_argument( 69 | "-n", 70 | "--node", 71 | action="append", 72 | help="Select a mesh node variable name to extact", 73 | ) 74 | 75 | g = p.add_mutually_exclusive_group() 76 | g.add_argument( 77 | "-t", 78 | "--time", 79 | type=float, 80 | help="Output the variable at this time. Closest time value is chosen.", 81 | ) 82 | g.add_argument( 83 | "-i", 84 | "--index", 85 | type=int, 86 | help="Output the variable at this time step index. " 87 | "A value of -1 means the last time step in the file. " 88 | "Note that exodus time steps start at one, while this starts at zero.", 89 | ) 90 | g.add_argument( 91 | "-c", 92 | "--cycle", 93 | type=int, 94 | help="Output the variable at this cycle number. " 95 | "Numbers start at zero. A value of -1 means the last time step in the file.", 96 | ) 97 | 98 | p.add_argument( 99 | "--object-index", 100 | action="store_true", 101 | default=False, 102 | help="For non-global variables, include the object index in the output.", 103 | ) 104 | p.add_argument( 105 | "--nolabels", 106 | action="store_true", 107 | default=False, 108 | help="Do not write the variable names and units to the output.", 109 | ) 110 | 111 | p.add_argument( 112 | "-L", 113 | "--lineout", 114 | metavar="{x|X|}/{y|Y|}/{z|Z|}[/T]", 115 | type=lineout.from_cli, 116 | help=lineout.__doc__, 117 | ) 118 | 119 | p.add_argument("file", help="The ExodusII database file.") 120 | 121 | args = p.parse_args(argv, namespace=Namespace()) 122 | f = exo_file(args.file) 123 | if args.variables: 124 | f.print( 125 | *args.variables, 126 | time=args.time, 127 | index=args.index, 128 | cycle=args.cycle, 129 | lineout=args.lineout, 130 | file=file, 131 | labels=not args.nolabels, 132 | ) 133 | else: 134 | f.describe(file=file) 135 | 136 | 137 | def exo_file(filename, *files): 138 | files = _find_files(filename, *files) 139 | if len(files) > 1: 140 | f = parallel_exodusii_file(*files) 141 | elif len(files) == 1: 142 | f = exodusii_file(files[0], mode="r") 143 | return f 144 | 145 | 146 | def _find_files(*files): 147 | 148 | found = [] 149 | for file in files: 150 | globbed_files = glob.glob(file) 151 | if not globbed_files: 152 | raise FileNotFoundError(file) 153 | found.extend(globbed_files) 154 | return found 155 | 156 | 157 | if __name__ == "__main__": 158 | sys.exit(main()) 159 | -------------------------------------------------------------------------------- /exodusii/extension.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import multiprocessing 3 | from .util import compute_connected_average 4 | from .element import factory as element_factory 5 | 6 | __all__ = [ 7 | 
"compute_element_centers", 8 | "compute_edge_centers", 9 | "compute_face_centers", 10 | "compute_element_length", 11 | "compute_element_volumes", 12 | "compute_volume_averaged_elem_variable", 13 | "compute_node_volumes", 14 | "compute_node_variable_values_at_element_center", 15 | ] 16 | 17 | 18 | def compute_element_centers(file, block_id=None, time_step=None): 19 | """Computes the element geometric center. 20 | 21 | Parameters 22 | ---------- 23 | block_id : int 24 | element block ID (not INDEX) 25 | time_step : int 26 | 1-based index of time step 27 | 28 | Returns 29 | ------- 30 | centers : ndarray of float 31 | 32 | Note 33 | ---- 34 | If `time_step` is not None, the center of the element displacement is computed 35 | 36 | """ 37 | if block_id is None: 38 | # compute centers for all blocks 39 | centers = [] 40 | for id in file.get_element_block_ids(): 41 | centers.append(compute_element_centers(file, id, time_step=time_step)) 42 | return np.concatenate(centers, axis=0) 43 | else: 44 | conn = file.get_element_conn(block_id) - 1 45 | coords = file.get_coords(time_step=time_step) 46 | return compute_connected_average(conn, coords) 47 | 48 | 49 | def compute_element_length(file, time): 50 | """Calculate the characteristic element length. 51 | 52 | The length of a 3D element is the cube root of the volume; for a 2D element 53 | it is the square root of the area. 54 | 55 | The characteristic element length is the average element length over the mesh. 56 | """ 57 | 58 | time_step = file.get_time_step(time) 59 | 60 | ndim = file.num_dimensions() 61 | dexp = 1.0 / ndim 62 | 63 | length = 0.0 64 | for block_id in file.get_element_block_ids(): 65 | vols = compute_element_volumes(file, block_id, time_step=time_step) 66 | length += np.sum(np.power(np.abs(vols), dexp)) 67 | 68 | return length / file.num_elems() 69 | 70 | 71 | def compute_edge_centers(file, block_id=None, time_step=None): 72 | """Computes the edge geometric center. 73 | 74 | Parameters 75 | ---------- 76 | block_id : int 77 | edge block ID (not INDEX) 78 | time_step : int 79 | 1-based index of time step 80 | 81 | Returns 82 | ------- 83 | centers : ndarray of float 84 | 85 | Note 86 | ---- 87 | If `time_step` is not None, the center of the edge displacement is computed 88 | 89 | """ 90 | if block_id is None: 91 | # compute centers for all blocks 92 | centers = [] 93 | for id in file.get_edge_block_ids(): 94 | centers.append(compute_edge_centers(file, id, time_step=time_step)) 95 | return np.concatenate(centers, axis=0) 96 | else: 97 | conn = file.get_edge_block_conn(block_id) - 1 98 | coords = file.get_coords(time_step=time_step) 99 | return compute_connected_average(conn, coords) 100 | 101 | 102 | def compute_face_centers(file, block_id=None, time_step=None): 103 | """Computes the face geometric center. 
104 | 105 | Parameters 106 | ---------- 107 | block_id : int 108 | face block ID (not INDEX) 109 | time_step : int 110 | 1-based index of time step 111 | 112 | Returns 113 | ------- 114 | centers : ndarray of float 115 | 116 | Note 117 | ---- 118 | If `time_step` is not None, the center of the face displacement is computed 119 | 120 | """ 121 | if block_id is None: 122 | # compute centers for all blocks 123 | centers = [] 124 | for id in file.get_face_block_ids(): 125 | centers.append(compute_face_centers(file, id, time_step=time_step)) 126 | return np.concatenate(centers, axis=0) 127 | else: 128 | conn = file.get_face_block_conn(block_id) - 1 129 | coords = file.get_coords(time_step=time_step) 130 | return compute_connected_average(conn, coords) 131 | 132 | 133 | def compute_node_variable_values_at_element_center( 134 | file, block_id, var_name, time_step=None 135 | ): 136 | """Computes the value of a node variable at an element's center 137 | 138 | Parameters 139 | ---------- 140 | block_id : int 141 | element block ID (not INDEX) 142 | var_name : str 143 | The nodal variable name 144 | time_step : int 145 | 1-based index of time step 146 | 147 | Returns 148 | ------- 149 | data : ndarray of float 150 | 151 | """ 152 | if block_id is None: 153 | # compute for all blocks 154 | data = [] 155 | for id in file.get_element_block_ids(): 156 | x = compute_node_variable_values_at_element_center( 157 | file, id, var_name, time_step=time_step 158 | ) 159 | data.append(x) 160 | return np.concatenate(data, axis=0) 161 | else: 162 | if var_name == "coordinates": 163 | nvars = file.get_coords() 164 | elif var_name == "displacements": 165 | nvars = file.get_displ(time_step) 166 | else: 167 | nvars = file.get_node_variable_values(var_name, time_step=time_step) 168 | conn = file.get_element_conn(block_id) - 1 169 | return compute_connected_average(conn, nvars) 170 | 171 | 172 | def compute_node_volumes(file, time_step=None): 173 | """Get the node volumes at the time index specified. The time index is 174 | 1-based. If provided, vol_array must be an array.array object of type 175 | storageType(), which is filled with the values; otherwise it is created.""" 176 | 177 | # Basic strategy: For each block, first get the element volumes 178 | # then distribute the element volumes to the nodes. 179 | vol = np.zeros(file.num_nodes()) 180 | 181 | for block_id in file.get_element_block_ids(): 182 | blk = file.get_element_block(block_id) 183 | 184 | nodes_per_i = 1.0 / blk.num_elem_nodes 185 | 186 | # Get the element volumes for a block, then partition the volume 187 | # to the element's nodes 188 | element_volumes = compute_element_volumes(file, block_id, time_step) 189 | 190 | conn = file.get_element_conn(block_id) - 1 191 | # Now, partition the element volume and distribute it to the nodes. 192 | for element in range(blk.num_block_elems): 193 | node_vol_part = nodes_per_i * element_volumes[element] 194 | vol[conn[element]] += node_vol_part 195 | 196 | return vol 197 | 198 | 199 | def compute_volume_averaged_elem_variable( 200 | file, block_id, time_step, func, intervals=5, zfill=None, processes=None 201 | ): 202 | """Get the cell-average of a variable for block block_id at time_step. 203 | 204 | If the exoobj mesh is 2D and zfill is provided, zfill is appended to the x 205 | and y values in restructured_coords for all nodes. 
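For illustration only (the file name, block id, time step, and exact
solution below are hypothetical):

>>> from exodusii.file import exodusii_file
>>> file = exodusii_file("mesh.exo", mode="r")
>>> def exact(x, t):
...     return x[0] * t
>>> ave = compute_volume_averaged_elem_variable(file, 1, 5, exact)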
206 | 207 | """ 208 | processes = processes or 1 209 | 210 | # Get the time that matches the solution time_index (which 211 | # might not be the same as the test_time) 212 | exact_time = file.get_time(time_step) 213 | 214 | elem_blk = file.get_element_block(block_id) 215 | elem_type = elem_blk.elem_type 216 | 217 | coord = file.get_coords(time_step=time_step) 218 | if file.num_dimensions() == 2 and zfill is not None: 219 | coord = np.column_stack((coord, np.zeros(coord.shape[0]))) 220 | 221 | conn = file.get_element_conn(block_id) - 1 222 | if processes <= 2: 223 | averaged = _compute_ave(elem_type, func, exact_time, conn, coord, intervals) 224 | else: 225 | count = elem_blk.num_block_elems 226 | nproc = processes - 1 227 | pipes = [(None, None) for i in range(nproc)] 228 | procs = [None for i in range(nproc)] 229 | for procno in range(nproc): 230 | start = int((procno * count) / nproc) 231 | end = int(((procno + 1) * count) / nproc) 232 | pipes[procno] = multiprocessing.Pipe(False) 233 | p = multiprocessing.Process( 234 | target=_compute_ave, 235 | args=( 236 | elem_type, 237 | func, 238 | exact_time, 239 | conn[start:end], 240 | coord, 241 | intervals, 242 | pipes[procno][1], 243 | ), 244 | ) 245 | procs[procno] = p 246 | p.start() 247 | averaged = np.zeros(count) 248 | for procno in range(nproc): 249 | p = procs[procno] 250 | start = int((procno * count) / nproc) 251 | end = int(((procno + 1) * count) / nproc) 252 | pipe = pipes[procno][0] 253 | averaged[start:end] = pipe.recv() 254 | pipe.close() 255 | p.join() 256 | 257 | return averaged 258 | 259 | 260 | def _compute_ave(elem_type, fun, time, conn, coord, intervals, pipe=None): 261 | averaged = np.zeros(len(conn)) 262 | for (iel, ix) in enumerate(conn): 263 | el = element_factory(elem_type, coord[ix]) 264 | centers = el.subdiv(intervals) 265 | vols = el.subvols(intervals) 266 | exact = np.array([fun(x, time) for x in centers]) 267 | averaged[iel] = np.sum(vols * exact) / np.sum(vols) 268 | if pipe is None: 269 | return averaged 270 | else: 271 | pipe.send(averaged) 272 | pipe.close() 273 | 274 | 275 | def compute_element_volumes(file, block_id, time_step=None): 276 | """Computes the element volumes. 277 | 278 | Parameters 279 | ---------- 280 | block_id : int 281 | element block ID (not INDEX) 282 | time_step : int 283 | 1-based index of time step 284 | 285 | Returns 286 | ------- 287 | volumes : ndarray of float 288 | 289 | Note 290 | ---- 291 | If `time_step` is not None, the volume of the displaced element displacement 292 | 293 | """ 294 | coords = file.get_coords(time_step=time_step) 295 | elem_blk = file.get_element_block(block_id) 296 | efactory = lambda x: element_factory(elem_blk.elem_type, x) 297 | 298 | # Connectivity is 1 based 299 | conn = file.get_element_conn(block_id) - 1 300 | 301 | # Now, compute the volumes. 
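# Each row of `conn` holds the 0-based node ids of one element; the element
# factory builds a geometry object for the block's element type, and its
# `volume` property gives the element volume (the element area in 2D).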
302 | vol = np.zeros(elem_blk.num_block_elems) 303 | for (iel, ix) in enumerate(conn): 304 | el = efactory(coords[ix]) 305 | vol[iel] = el.volume 306 | 307 | return vol 308 | -------------------------------------------------------------------------------- /exodusii/find_in_region.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import numpy as np 3 | from collections import OrderedDict as ordered_dict 4 | 5 | from .file import exodusii_file 6 | from .region import unbounded_time_domain 7 | from .extension import compute_element_centers 8 | from .parallel_file import parallel_exodusii_file 9 | 10 | 11 | def find_node_data_in_region( 12 | files, vars, region, time_domain=None, use_displaced_coords=False 13 | ): 14 | """Finds node data in a finite element mesh region 15 | 16 | Parameters 17 | ---------- 18 | files : list of str 19 | List of ExodusII files to read 20 | vars : list of str 21 | Node variables to read from files 22 | region : object 23 | An object implementing a `contains` method that takes as input an array of 24 | coordinates and returns a boolean array of the same length containing True if 25 | the point is in the region and False otherwise. 26 | time_domain : object 27 | An object implementing a `contains` method that takes an array of times 28 | as input and returns a boolean array of the same length containing True if 29 | the time should be queried and False otherwise. 30 | use_displaced_coords : bool 31 | Use displaced coordinates for determining geometric region 32 | 33 | Returns 34 | ------- 35 | data : dict 36 | data[cycle] = cycle_data 37 | where cycle_data is a dictionary containing ndarrays for the nodal 38 | coordinates and each node variable. 39 | 40 | cycle_data["cycle"] = int 41 | cycle_data["time"] = float 42 | cycle_data["X"] = ndarray 43 | cycle_data["Y"] = ndarray 44 | cycle_data["Z"] = ndarray [only if 3d] 45 | cycle_data["var1"] = ndarray 46 | ... 
47 | cycle_data["varn"] = ndarray 48 | 49 | Examples 50 | -------- 51 | >>> region = cylinder((0, 12.5e-6), (None, 12.5e-6), 6e-6) 52 | >>> time_domain = bound_time_domain(0, None) 53 | >>> vars = ("FORCEX", "FORCEY") 54 | >>> data = find_node_data_in_region(files, vars, region, time_domain=time_domain) 55 | >>> data[0]["time"] 56 | 0.000 57 | >>> data[0]["cycle"] 58 | 0 59 | >>> data[0]["FORCEX"] 60 | ndarray([3.4529e+3, ..., 5.3914e+4]) 61 | 62 | """ 63 | 64 | if isinstance(files, (str, exodusii_file, parallel_exodusii_file)): 65 | files = [files] 66 | 67 | data = ordered_dict() 68 | time_domain = time_domain or unbounded_time_domain() 69 | 70 | for (i, file) in enumerate(files): 71 | 72 | logging.info(f"Processing file {i + 1} of {len(files)} files") 73 | 74 | if not isinstance(file, (exodusii_file, parallel_exodusii_file)): 75 | file = exodusii_file(file, mode="r") 76 | 77 | if i == 0: 78 | times = file.get_times() 79 | cycles = time_domain.contains(times).nonzero()[0] 80 | 81 | if not use_displaced_coords: 82 | # Precompute element centers for all cycles 83 | xc = file.get_coords() 84 | dimension = 1 if xc.ndim == 1 else xc.shape[1] 85 | if dimension != region.dimension: 86 | raise ValueError("Coordinate dimension does not match region") 87 | ix = region.contains(xc) 88 | if not np.any(ix): 89 | continue 90 | 91 | for cycle in cycles: 92 | 93 | if use_displaced_coords: 94 | xc = file.get_coords(time_step=cycle + 1) 95 | dimension = 1 if xc.ndim == 1 else xc.shape[1] 96 | if dimension != region.dimension: 97 | raise ValueError("Coordinate dimension does not match region") 98 | ix = region.contains(xc) 99 | if not np.any(ix): 100 | continue 101 | 102 | xd = [xc[ix]] 103 | for var in vars: 104 | elem_data = file.get_node_variable_values(var, time_step=cycle + 1) 105 | xd.append(elem_data[ix]) 106 | xd = np.column_stack(xd) 107 | if cycle in data: 108 | data[cycle] = np.row_stack((data[cycle], xd)) 109 | else: 110 | data[cycle] = xd 111 | 112 | for (cycle, xd) in data.items(): 113 | # Sort by coordinate 114 | ix = np.argsort(xd[:, 0]) 115 | cycle_data = {"cycle": cycle, "time": times[cycle]} 116 | for (i, dim) in enumerate("XYZ"[:dimension]): 117 | cycle_data[dim] = xd[ix, i] 118 | for (i, var) in enumerate(vars, start=dimension): 119 | cycle_data[var] = xd[ix, i] 120 | data[cycle] = cycle_data 121 | 122 | return data 123 | 124 | 125 | def find_element_data_in_region( 126 | files, vars, region, time_domain=None, block_ids=None, use_displaced_coords=False 127 | ): 128 | """Finds element data in a finite element mesh region 129 | 130 | Parameters 131 | ---------- 132 | files : list of str 133 | List of ExodusII files to read 134 | vars : list of str 135 | Element variables to read from files 136 | region : object 137 | An object implementing a `contains` method that takes as input an array of 138 | coordinates and returns a boolean array of the same length containing True if 139 | the point is in the region and False otherwise. 140 | time_domain : object 141 | An object implementing a `contains` method that take an array of times 142 | as input and returns a boolean array of the same length containg True if 143 | the time should be queried and False otherwise. 144 | block_ids : list of int 145 | Get element data only from these blocks. 
If None, get data from all blocks 146 | use_displaced_coords : bool 147 | Use displaced coordinates for determining geometric region 148 | 149 | Returns 150 | ------- 151 | data : dict 152 | data[cycle] = cycle_data 153 | where cycle_data is a dictionary containing ndarrays for the nodal 154 | coordinates and each node variable. 155 | 156 | cycle_data["cycle"] = int 157 | cycle_data["time"] = float 158 | cycle_data["X"] = ndarray 159 | cycle_data["Y"] = ndarray 160 | cycle_data["Z"] = ndarray [only if 3d] 161 | cycle_data["var1"] = ndarray 162 | ... 163 | cycle_data["varn"] = ndarray 164 | 165 | Examples 166 | -------- 167 | >>> region = cylinder((0, 12.5e-6), (None, 12.5e-6), 6e-6) 168 | >>> time_domain = bound_time_domain(0, None) 169 | >>> vars = ("BE_MAG", "VOID_FRC") 170 | >>> data = find_element_data_in_region(files, vars, region, time_domain=time_domain) 171 | >>> data[0]["time"] 172 | 0.000 173 | >>> data[0]["cycle"] 174 | 0 175 | >>> data[0]["BE_MAG"] 176 | ndarray([3.4529e+3, ..., 5.3914e+4]) 177 | 178 | """ 179 | 180 | if isinstance(files, (str, exodusii_file, parallel_exodusii_file)): 181 | files = [files] 182 | 183 | _block_ids = block_ids 184 | time_domain = time_domain or unbounded_time_domain() 185 | 186 | data = ordered_dict() 187 | for (i, file) in enumerate(files): 188 | 189 | logging.info(f"Processing file {i + 1} of {len(files)} files") 190 | 191 | if not isinstance(file, (exodusii_file, parallel_exodusii_file)): 192 | file = exodusii_file(file, mode="r") 193 | 194 | if i == 0: 195 | times = file.get_times() 196 | cycles = time_domain.contains(times).nonzero()[0] 197 | 198 | block_ids = file.get_element_block_ids() if _block_ids is None else _block_ids 199 | for block_id in block_ids: 200 | num_elem = file.num_elems_in_blk(block_id) 201 | if not num_elem: 202 | continue 203 | 204 | if not use_displaced_coords: 205 | # Precompute element centers for all cycles 206 | xe = compute_element_centers(file, block_id) 207 | dimension = 1 if xe.ndim == 1 else xe.shape[1] 208 | if dimension != region.dimension: 209 | raise ValueError("Coordinate dimension does not match region") 210 | ix = region.contains(xe) 211 | if not np.any(ix): 212 | continue 213 | 214 | for cycle in cycles: 215 | 216 | if use_displaced_coords: 217 | xe = compute_element_centers(file, block_id, time_step=cycle + 1) 218 | dimension = 1 if xe.ndim == 1 else xe.shape[1] 219 | if dimension != region.dimension: 220 | raise ValueError("Coordinate dimension does not match region") 221 | ix = region.contains(xe) 222 | if not np.any(ix): 223 | continue 224 | 225 | xd = [xe[ix]] 226 | for var in vars: 227 | elem_data = file.get_element_variable_values( 228 | block_id, var, time_step=cycle + 1 229 | ) 230 | xd.append(elem_data[ix]) 231 | xd = np.column_stack(xd) 232 | if cycle in data: 233 | data[cycle] = np.row_stack((data[cycle], xd)) 234 | else: 235 | data[cycle] = xd 236 | 237 | for (cycle, xd) in data.items(): 238 | # Sort by coordinate 239 | ix = np.argsort(xd[:, 0]) 240 | cycle_data = {"cycle": cycle, "time": times[cycle]} 241 | for (i, dim) in enumerate("XYZ"[:dimension]): 242 | cycle_data[dim] = xd[ix, i] 243 | for (i, var) in enumerate(vars, start=dimension): 244 | cycle_data[var] = xd[ix, i] 245 | data[cycle] = cycle_data 246 | 247 | return data 248 | -------------------------------------------------------------------------------- /exodusii/lineout.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from functools import cmp_to_key 3 | 4 | from 
.util import string_types 5 | 6 | 7 | class lineout: 8 | """Restrict the nodes/elements/edges/faces to those whose coordinates are along a 9 | line parallel to an axis. 10 | 11 | """ 12 | 13 | def __init__(self, *, x=None, y=None, z=None, tol=None): 14 | self.x = x 15 | self.y = y 16 | self.z = z 17 | self.tol = tol 18 | self.needs_displacements = self.x == "X" or self.y == "Y" or self.z == "Z" 19 | 20 | @property 21 | def spec(self): 22 | return [self.x, self.y, self.z] 23 | 24 | @classmethod 25 | def from_cli(cls, arg): 26 | """Instantiate the lineout object using the command line interface. Use lower 27 | case x/y/z for original or material coordinates and upper case for displaced 28 | or Lagrangian coordinates. In 2D, both an X and a Y entry are required and 29 | for 3D, a Z entry. So "x/1.0" in 2D would be a lineout along x with y=1.0, 30 | while "2.0/y" would be a lineout in y with x=2.0. The last number with a "T" 31 | in front is an optional tolerance used in selecting the coordinate locations. 32 | A 3D example is "1.0/Y/3.0/T0.1" which is a lineout in y using displaced 33 | coordinates with x in the range (0.9,1.1) and z in (2.9,3.1). 34 | 35 | """ 36 | parts = [_.strip() for _ in arg.split("/") if _.split()] 37 | 38 | tol = None 39 | if parts[-1].startswith(("t", "T")): 40 | try: 41 | tol = float(parts[-1][1:]) 42 | except (ValueError, TypeError, SyntaxError): 43 | raise ValueError("lineout: time parameter must be a float") from None 44 | parts = parts[:-1] 45 | 46 | if len(parts) > 3: 47 | raise ValueError("lineout: expected at most 3 spatial specifiers") 48 | 49 | x = cls.read_spatial_spec(parts[0], "x") 50 | y = cls.read_spatial_spec(None if len(parts) < 2 else parts[1], "y") 51 | z = cls.read_spatial_spec(None if len(parts) < 3 else parts[2], "z") 52 | 53 | return cls(x=x, y=y, z=z, tol=tol) 54 | 55 | @staticmethod 56 | def read_spatial_spec(spec, coord): 57 | if spec is None: 58 | return None 59 | try: 60 | return float(spec) 61 | except (ValueError, TypeError, SyntaxError): 62 | pass 63 | if spec.lower() != coord: 64 | raise ValueError( 65 | f"lineout: expected specifier " 66 | f"{spec!r} to be {coord!r}, {coord.upper()!r}, or a float" 67 | ) 68 | return spec 69 | 70 | def apply(self, *args): 71 | """Removes points that are not along a line. 72 | 73 | If self.needs_displacements, the displacements are added to the coordinates 74 | first. 
In this case, the columns are expected to be: 75 | 76 | 0 1 2 3 4 5 6 77 | 1D index DISPLX COORDX 78 | 2D index DISPLX DISPLY COORDX COORDY 79 | 3D index DISPLX DISPLY DISPLZ COORDX COORDY COORDZ 80 | 81 | Otherwise, the columns are expected to be: 82 | 83 | 0 1 2 3 84 | 1D index COORDX 85 | 2D index COORDX COORDY 86 | 3D index COORDX COORDY COORDZ 87 | 88 | """ 89 | isstructured = False 90 | if len(args) == 1: 91 | # Structured array 92 | isstructured = True 93 | header = list(args[0].dtype.names) 94 | data = np.array(args[0].tolist()) 95 | elif len(args) == 2: 96 | header, data = args 97 | else: 98 | raise TypeError( 99 | f"apply() takes 1 or 2 positional arguments but {len(args)} were given" 100 | ) 101 | if isinstance(data, np.ndarray) and data.dtype.names is not None: 102 | # Don't work with structured arrays 103 | data = np.asarray(data.tolist()) 104 | start = 1 if header[0].lower() == "index" else 0 105 | dim = 3 if "COORDZ" in header else 2 if "COORDY" in header else 1 106 | pairs = [(start + i, start + dim + i) for i in range(dim)] 107 | dim = len(pairs) 108 | 109 | if self.needs_displacements: 110 | assert "DISPLX" in header 111 | # add the displacements to the coordinates, then remove the displacements 112 | for a, b in pairs: 113 | data[:, b] += data[:, a] 114 | cols = [a for a, _ in pairs] 115 | header = np.delete(header, cols, axis=0).tolist() 116 | data = np.delete(data, cols, axis=1) 117 | header[start : start + len(pairs)] = [ 118 | f"LOCATION{'XYZ'[i]}" for i in range(len(pairs)) 119 | ] 120 | 121 | if self.tol is None and len(data): 122 | self.tol = self.compute_tol_from_bounding_box(data[:, start : start + dim]) 123 | 124 | sort_order = [] 125 | indices_to_remove = [] 126 | for i in range(dim): 127 | idx = start + i 128 | if isinstance(self.spec[i], float): 129 | indices_to_remove.insert(0, idx) # want largest index first 130 | # compute new list excluding points that are not within tolerance 131 | filtered = [] 132 | for row in data: 133 | if abs(self.spec[i] - row[idx]) < self.tol: 134 | filtered.append(row) 135 | data = np.array(filtered) 136 | else: 137 | sort_order.append(idx) 138 | 139 | if sort_order: 140 | # sort by the free column(s) 141 | data = sorted(data, key=cmp_to_key(self.linecmp(sort_order))) 142 | 143 | if indices_to_remove: 144 | # remove the LOCATION columns of restricted coordinates 145 | header = np.delete(header, indices_to_remove, axis=0).tolist() 146 | if len(data): 147 | data = np.delete(data, indices_to_remove, axis=1) 148 | else: 149 | data = np.empty((0, len(header))) 150 | 151 | data = np.asarray(data) 152 | 153 | if isstructured: 154 | formats = [(name, "f8") for name in header] 155 | dtype = np.dtype(formats) 156 | return np.array(list(zip(*data.T)), dtype=dtype) 157 | 158 | return header, data 159 | 160 | def compute_tol_from_bounding_box(self, data): 161 | # tolerance not given; first compute mesh bounding box 162 | dim = data.shape[1] 163 | bbox = [None] * 6 164 | bbox[0:2] = [np.min(data[:, 0]), np.max(data[:, 0])] 165 | if dim >= 2: 166 | bbox[2:4] = [np.min(data[:, 1]), np.max(data[:, 1])] 167 | if dim > 2: 168 | bbox[4:6] = [np.min(data[:, 2]), np.max(data[:, 2])] 169 | 170 | # from the direction(s) being restricted, compute tolerance 171 | def tol_from_bbox(x, bx, by): 172 | fac, puny = 1.0e-4, 1.0e-30 173 | return None if not isinstance(x, float) else fac * max(by - bx, puny) 174 | 175 | xtol = tol_from_bbox(self.x, bbox[0], bbox[1]) 176 | ytol = None if dim < 2 else tol_from_bbox(self.y, bbox[2], bbox[3]) 177 | ztol = None if 
dim < 3 else tol_from_bbox(self.z, bbox[4], bbox[5]) 178 | return min([xtol or 1, ytol or 1, ztol or 1]) 179 | 180 | def linecmp(self, sortidx): 181 | if len(sortidx) == 1: 182 | i1 = sortidx[0] 183 | 184 | def linecmp(a, b): 185 | if isinstance(a[0], string_types): 186 | return -1 187 | if isinstance(b[0], string_types): 188 | return 1 189 | return cmp(a[i1], b[i1]) 190 | 191 | elif len(sortidx) == 2: 192 | i1, i2 = sortidx[0:2] 193 | 194 | def linecmp(a, b): 195 | if isinstance(a[0], string_types): 196 | return -1 197 | if isinstance(b[0], string_types): 198 | return 1 199 | c = cmp(a[i1], b[i1]) 200 | if c == 0: 201 | return cmp(a[i2], b[i2]) 202 | return c 203 | 204 | else: 205 | i1, i2, i3 = sortidx[0:3] 206 | 207 | def linecmp(a, b): 208 | if isinstance(a[0], string_types): 209 | return -1 210 | if isinstance(b[0], string_types): 211 | return 1 212 | c = cmp(a[i1], b[i1]) 213 | if c == 0: 214 | c = cmp(a[i2], b[i2]) 215 | if c == 0: 216 | return cmp(a[i3], b[i3]) 217 | return c 218 | 219 | return linecmp 220 | 221 | 222 | def cmp(x, y): 223 | """ 224 | Replacement for built-in function cmp that was removed in Python 3 225 | 226 | Compare the two objects x and y and return an integer according to 227 | the outcome. The return value is negative if x < y, zero if x == y 228 | and strictly positive if x > y. 229 | """ 230 | return bool(x > y) - bool(x < y) 231 | -------------------------------------------------------------------------------- /exodusii/nc.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from functools import wraps 3 | from .util import stringify, string_kinds 4 | from .config import config 5 | 6 | 7 | if not config.use_netcdf4_if_possible: 8 | _netcdf4 = False 9 | from .netcdf import netcdf_file as Dataset 10 | else: 11 | try: 12 | _netcdf4 = True 13 | from netCDF4 import Dataset 14 | except (ImportError, ValueError): 15 | _netcdf4 = False 16 | from .netcdf import netcdf_file as Dataset 17 | 18 | if _netcdf4: 19 | import warnings 20 | 21 | # netCDF4 uses numpy.tostring, which is deprecated 22 | warnings.filterwarnings("ignore") 23 | 24 | 25 | def library(): 26 | return "netcdf4" if _netcdf4 else "netcdf" 27 | 28 | 29 | def open(filename, mode="r"): 30 | if _netcdf4: 31 | try: 32 | fh = Dataset(filename, mode=mode, format="NETCDF4_CLASSIC") 33 | except (OSError, TypeError): 34 | fh = Dataset(filename, mode=mode, format="NETCDF3_64BIT_OFFSET") 35 | else: 36 | fh = Dataset(filename, mode=mode) 37 | return fh 38 | 39 | 40 | def close(*files): 41 | for file in files: 42 | try: 43 | file.close() 44 | except Exception: 45 | pass 46 | 47 | 48 | def filename(fh): 49 | return fh.filepath() if _netcdf4 else fh.filename 50 | 51 | 52 | def cache(fun): 53 | sentinel = object() 54 | global_cache = dict() 55 | cache_get = global_cache.get 56 | 57 | def make_key(fh, name, **kwds): 58 | s = ", ".join(f"{k}={v}" for (k, v) in kwds.items()) 59 | return f"{filename(fh)}::{fun.__name__}::{name}({s})" 60 | 61 | @wraps(fun) 62 | def wrapper(fh, name, **kwds): 63 | key = make_key(fh, name, **kwds) 64 | val = cache_get(key, sentinel) 65 | if val is not sentinel: 66 | return val 67 | val = fun(fh, name, **kwds) 68 | global_cache[key] = val 69 | return val 70 | 71 | return wrapper 72 | 73 | 74 | def get_variable(fh, name, default=None, raw=False): 75 | if name not in fh.variables: 76 | return default 77 | var = fh.variables[name] 78 | if raw: 79 | return var 80 | if _netcdf4: 81 | val = var[:].data 82 | else: 83 | val = var.data 84 | if 
isinstance(val, np.ndarray) and val.dtype.kind in string_kinds: 85 | val = stringify(val) 86 | elif isinstance(val, bytes): 87 | val = stringify(val) 88 | return val 89 | 90 | 91 | def get_dimension(fh, name, default=None): 92 | if name not in fh.dimensions: 93 | return default 94 | x = fh.dimensions[name] 95 | dim = x if not _netcdf4 else x.size 96 | if dim is None and name != "time_step": 97 | return 0 98 | return int(dim) 99 | 100 | 101 | def setncattr(fh, variable, name, value): 102 | if _netcdf4: 103 | fh.variables[variable].setncattr(name, value) 104 | else: 105 | setattr(fh.variables[variable], name, value) 106 | 107 | 108 | def create_dimension(fh, name, value): 109 | fh.createDimension(name, value) 110 | 111 | 112 | def create_variable(fh, id, type, shape): 113 | kind = {str: "c", int: "i", float: "f"}[type] 114 | fh.createVariable(id, kind, shape) 115 | 116 | 117 | def fill_variable(fh, name, *args): 118 | value = args[-1] 119 | if len(args) == 1: 120 | fh.variables[name][:] = value 121 | elif len(args) == 2: 122 | i = args[0] 123 | fh.variables[name][i, :] = value 124 | elif len(args) == 3: 125 | i, j = args[0:2] 126 | fh.variables[name][i, j, :] = value 127 | elif len(args) == 4: 128 | i, j, k = args[0:3] 129 | fh.variables[name][i, j, k, :] = value 130 | else: 131 | raise ValueError("Unknown fill shape") 132 | -------------------------------------------------------------------------------- /exodusii/put_solution.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .file import ExodusIIFile 3 | from . import exodus_h as ex 4 | 5 | 6 | def put_nodal_solution(filename, nodmap, elemap, coord, elecon, elem_blocks, u): 7 | 8 | exo = ExodusIIFile(filename, mode="w") 9 | fh = exo.fh 10 | 11 | # initialize file with parameters 12 | num_nodes, num_dim = coord.shape 13 | num_elem, node_per_elem = elecon.shape 14 | num_elem_block = len(elem_blocks) 15 | num_node_sets = 0 16 | num_side_sets = 0 17 | exo.put_init( 18 | "FETK Nodal Solution", 19 | num_dim, 20 | num_nodes, 21 | num_elem, 22 | num_elem_block, 23 | num_node_sets, 24 | num_side_sets, 25 | ) 26 | exo.create_dimension(ex.DIM_NUM_GLO_VAR, 1) 27 | exo.create_variable(ex.VALS_GLO_VAR, float, (ex.DIM_TIME_STEP,)) 28 | 29 | node_variable_names = ["displ%s" % _ for _ in "xyz"[:num_dim]] 30 | num_node_variables = len(node_variable_names) 31 | exo.create_dimension(ex.DIM_NUM_NOD_VAR, num_node_variables) 32 | exo.createVariable( 33 | ex.VAR_NAME_NOD_VAR, ex.CHAR, (ex.DIM_NUM_NOD_VAR, ex.DIM_LEN_STRING) 34 | ) 35 | for (k, node_variable) in enumerate(node_variable_names): 36 | key = ex.adjstr(node_variable) 37 | fh.variables[ex.VAR_NAME_NOD_VAR][k, :] = key 38 | fh.createVariable( 39 | ex.VALS_NOD_VAR(k + 1), ex.FLOAT, (ex.DIM_TIME_STEP, ex.DIM_NUM_NOD) 40 | ) 41 | 42 | u0 = np.zeros_like(u) 43 | fh.variables[ex.VAR_TIME_WHOLE][0] = 0.0 44 | fh.variables[ex.VALS_GLO_VAR][0] = 0.0 45 | for (k, label) in enumerate(node_variable_names): 46 | fh.variables[ex.VALS_NOD_VAR(k + 1)][0] = u0[:, k] 47 | 48 | fh.variables[ex.VAR_TIME_WHOLE][1] = 1.0 49 | fh.variables[ex.VALS_GLO_VAR][1] = 1.0 50 | for (k, label) in enumerate(node_variable_names): 51 | fh.variables[ex.VALS_NOD_VAR(k + 1)][1] = u[:, k] 52 | 53 | exo.update() 54 | exo.close() 55 | -------------------------------------------------------------------------------- /exodusii/region.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | class bounded_time_domain: 5 | 
"""Represents a bounded time region [t0, tf] 6 | 7 | Parameters 8 | ---------- 9 | t_min : float 10 | min of domain 11 | if None, t_min is set to 0. 12 | t_max : float 13 | max of domain 14 | if None, t_max is set to bounded_time_domain.tmax 15 | 16 | Notes 17 | ----- 18 | provides a single public method `contains` that determines whether a time 19 | (or times) is contained inside the time domain. 20 | 21 | """ 22 | 23 | tmax = 1e20 24 | 25 | def __init__(self, t_min=None, t_max=None): 26 | self.t_min = t_min or 0.0 27 | self.t_max = t_max or bounded_time_domain.tmax 28 | 29 | def contains(self, times): 30 | return np.array((times >= self.t_min) & (times <= self.t_max)) 31 | 32 | 33 | class unbounded_time_domain: 34 | """Represents an unbounded time region 35 | 36 | Notes 37 | ----- 38 | provides a single public method `contains` that determines whether a time 39 | (or times) is contained inside the time domain. 40 | 41 | """ 42 | 43 | def contains(self, times): 44 | return np.array([True] * len(times), dtype=bool) 45 | 46 | 47 | class cylinder: 48 | """Region defining a cylinder in 2 or 3d space 49 | 50 | Parameters 51 | ---------- 52 | point1, point2 : array_like 53 | x, y[, z] coordinates. If x, y, or z is None, it is replaced with -/+pmax 54 | in point1 and point 2, respectively. 55 | radius : float 56 | The radius of the cylinder 57 | 58 | Notes 59 | ----- 60 | cylinder provides a single public method `contains` that determines if a point 61 | (or points) is contained inside the cylinder. 62 | 63 | """ 64 | 65 | pmax = 1e20 66 | 67 | def __init__(self, point1, point2, radius): 68 | self.dimension = len(point1) 69 | if self.dimension not in (2, 3): 70 | raise ValueError("Expected cylinder point dimension to be 2 or 3") 71 | if len(point2) != self.dimension: 72 | raise ValueError("Inconsistent point dimensions") 73 | self.p1 = self.aspoint(point1, -cylinder.pmax) 74 | self.p2 = self.aspoint(point2, cylinder.pmax) 75 | self.radius = radius 76 | 77 | @staticmethod 78 | def aspoint(p, default): 79 | return np.asarray([x if x is not None else default for x in p]) 80 | 81 | def contains(self, points): 82 | """Determine with points is contained in the cylinder 83 | 84 | Parameters 85 | ---------- 86 | points : array_like 87 | points[i] are the x, y[, z] coordinates of the point to be queried 88 | 89 | Returns 90 | ------- 91 | a : ndarray of bool 92 | a[i] is True if points[i] is in the cylinder 93 | 94 | """ 95 | if self.dimension == 2: 96 | return self._contains2d(points) 97 | points = np.asarray(points) 98 | one_point = points.ndim == 1 99 | if one_point: 100 | points = points[np.newaxis, :] 101 | axis = self.p2 - self.p1 102 | # points lie between end points of the cylinder 103 | condition1 = np.einsum("ij,j->i", points - self.p1, axis) >= 0 104 | condition2 = np.einsum("ij,j->i", points - self.p2, axis) <= 0 105 | # points lie within curved surface of cylinder 106 | cp = np.cross(points - self.p1, axis) 107 | norm = np.abs(cp) if cp.ndim == 1 else np.linalg.norm(cp, axis=1) 108 | condition3 = norm <= self.radius * np.linalg.norm(axis) 109 | contains = np.array(condition1 & condition2 & condition3) 110 | return contains[0] if one_point else contains 111 | 112 | def _contains2d(self, points): 113 | """Specialized version for 2D cylinder""" 114 | assert self.dimension == 2 115 | # A cylinder in 2d is really just a rectangle""" 116 | axis = self.p2 - self.p1 117 | u = np.array([-axis[1], axis[0]]) 118 | r = self.radius * u / np.sqrt(np.dot(u, u)) 119 | a, b = self.p1 - r, self.p2 - r 120 | 
c, d = self.p2 + r, self.p1 + r 121 | return convex_polygon.contains([a, b, c, d], points) 122 | 123 | 124 | class convex_polygon: 125 | dimension = 2 126 | 127 | def __init__(self, *args, **kwargs): 128 | raise NotImplementedError("convex_polygon is an abstract base class") 129 | 130 | @staticmethod 131 | def is_convex(*vertices): 132 | vertices = [np.asarray(v) for v in vertices] 133 | for i in range(len(vertices)): 134 | p3 = vertices[i - 2] 135 | p2 = vertices[i - 1] 136 | p1 = vertices[i] 137 | if convex_polygon.angle_between_points(p1, p2, p3) > np.pi: 138 | return False 139 | return True 140 | 141 | @staticmethod 142 | def angle_between_points(a, b, c): 143 | x = np.arctan2(c[1] - b[1], c[0] - b[0]) 144 | y = np.arctan2(a[1] - b[1], a[0] - b[0]) 145 | angle = x - y 146 | return angle + np.pi if angle < 0 else angle 147 | 148 | @staticmethod 149 | def contains(vertices, points): 150 | """Determine whether points are contained in the polygon 151 | 152 | Parameters 153 | ---------- 154 | points : array_like 155 | points[i] are the x, y coordinates of the point to be queried 156 | 157 | Returns 158 | ------- 159 | a : ndarray of bool 160 | a[i] is True if points[i] is in the polygon 161 | 162 | """ 163 | points = np.asarray(points) 164 | one_point = points.ndim == 1 165 | if one_point: 166 | points = points[np.newaxis, :] 167 | contains = np.array([True] * len(points)) 168 | for i in range(len(vertices)): 169 | a = vertices[i - 1] 170 | b = vertices[i] 171 | edge = b - a 172 | v = points - a 173 | contains &= edge[0] * v[:, 1] - v[:, 0] * edge[1] >= 0 174 | return contains[0] if one_point else contains 175 | 176 | 177 | class quad(convex_polygon): 178 | """Region defining a quadrilateral 179 | 180 | Parameters 181 | ---------- 182 | a, b, c, d : array_like 183 | x, y coordinates of points in quad 184 | 185 | Notes 186 | ----- 187 | quad provides a single public method `contains` that determines if a point 188 | (or points) is contained inside the quad. 189 | 190 | The points are ordered counter-clockwise: 191 | 192 | d-------------------------------c 193 | | | 194 | | | 195 | a-------------------------------b 196 | 197 | """ 198 | 199 | def __init__(self, a, b, c, d): 200 | for p in (a, b, c, d): 201 | if len(p) != self.dimension: 202 | raise ValueError("Expected quad points to be 2D") 203 | if not convex_polygon.is_convex(a, b, c, d): 204 | raise ValueError("vertices do not form a convex polygon") 205 | self.a = np.asarray(a) 206 | self.b = np.asarray(b) 207 | self.c = np.asarray(c) 208 | self.d = np.asarray(d) 209 | 210 | def contains(self, points): 211 | vertices = [self.a, self.b, self.c, self.d] 212 | return convex_polygon.contains(vertices, points) 213 | 214 | 215 | class rectangle(quad): 216 | """Region defining a rectangle in 2d space 217 | 218 | Parameters 219 | ---------- 220 | origin : array_like 221 | x, y coordinates of the bottom left-hand corner 222 | width : float 223 | The width of the rectangle 224 | height : float 225 | The height of the rectangle 226 | 227 | Notes 228 | ----- 229 | rectangle provides a single public method `contains` that determines if a point 230 | (or points) is contained inside the rectangle. 
231 | 232 | The origin of the rectangle is as shown below 233 | _______________________________ 234 | | | 235 | | h 236 | o_____________ w _______________| 237 | 238 | """ 239 | 240 | pmax = 1e20 241 | 242 | def __init__(self, origin, width=None, height=None): 243 | if len(origin) != self.dimension: 244 | raise ValueError("Expected rectangle origin to be 2D") 245 | 246 | width = width or rectangle.pmax 247 | if width <= 0: 248 | raise ValueError("Expected rectangle width > 0") 249 | 250 | height = height or width 251 | if height <= 0: 252 | raise ValueError("Expected rectangle height > 0") 253 | 254 | origin = np.asarray(origin) 255 | self.a = origin 256 | self.b = origin + np.array([width, 0]) 257 | self.c = origin + np.array([width, height]) 258 | self.d = origin + np.array([0, height]) 259 | 260 | 261 | class sphere: 262 | """Region defining a sphere in 3d space 263 | 264 | Parameters 265 | ---------- 266 | center : array_like 267 | x, y, z coordinates of center of sphere 268 | radius : float 269 | The radius of the sphere 270 | 271 | Notes 272 | ----- 273 | sphere provides a single public method `contains` that determines if a point 274 | (or points) is contained inside the sphere. 275 | 276 | """ 277 | 278 | pmax = 1e20 279 | 280 | def __init__(self, center, radius): 281 | self.dimension = 3 282 | if len(center) != self.dimension: 283 | raise ValueError("Expected sphere center to be 3D") 284 | self.center = np.asarray(center) 285 | 286 | if radius <= 0: 287 | raise ValueError("Expected sphere radius > 0") 288 | self.radius = radius 289 | 290 | def contains(self, points): 291 | """Determine whether points are contained in the sphere 292 | 293 | Parameters 294 | ---------- 295 | points : array_like 296 | points[i] are the x, y, z coordinates of the point to be queried 297 | 298 | Returns 299 | ------- 300 | a : ndarray of bool 301 | a[i] is True if points[i] is in the sphere 302 | 303 | """ 304 | p = np.asarray(points) 305 | one_point = p.ndim == 1 306 | if one_point: 307 | p = p[np.newaxis, :] 308 | 309 | cx, cy, cz = self.center 310 | x, y, z = p[:, 0], p[:, 1], p[:, 2] 311 | condition = (x - cx) ** 2 + (y - cy) ** 2 + (z - cz) ** 2 <= self.radius**2 312 | contains = np.array(condition) 313 | 314 | return contains[0] if one_point else contains 315 | 316 | 317 | class circle: 318 | """Region defining a circle in 2d space 319 | 320 | Parameters 321 | ---------- 322 | center : array_like 323 | x, y coordinates of center of circle 324 | radius : float 325 | The radius of the circle 326 | 327 | Notes 328 | ----- 329 | circle provides a single public method `contains` that determines if a point 330 | (or points) is contained inside the circle. 
331 | 332 | """ 333 | 334 | pmax = 1e20 335 | 336 | def __init__(self, center, radius): 337 | self.dimension = 2 338 | if len(center) != self.dimension: 339 | raise ValueError("Expected circle center to be 3D") 340 | self.center = np.asarray(center) 341 | 342 | if radius <= 0: 343 | raise ValueError("Expected circle radius > 0") 344 | self.radius = radius 345 | 346 | def contains(self, points): 347 | """Determine with points is contained in the circle 348 | 349 | Parameters 350 | ---------- 351 | points : array_like 352 | points[i] are the x, y coordinates of the point to be queried 353 | 354 | Returns 355 | ------- 356 | a : ndarray of bool 357 | a[i] is True if points[i] is in the circle 358 | 359 | """ 360 | p = np.asarray(points) 361 | one_point = p.ndim == 1 362 | if one_point: 363 | p = p[np.newaxis, :] 364 | 365 | cx, cy = self.center 366 | x, y = p[:, 0], p[:, 1] 367 | condition = (x - cx) ** 2 + (y - cy) ** 2 <= self.radius**2 368 | contains = np.array(condition) 369 | 370 | return contains[0] if one_point else contains 371 | -------------------------------------------------------------------------------- /exodusii/similar.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def similar(file1, file2, times=None): 5 | """Compare two Exodus files, except the solution.""" 6 | from .file import ExodusIIFile 7 | 8 | if not isinstance(file1, ExodusIIFile): 9 | file1 = ExodusIIFile(file1) 10 | 11 | if not isinstance(file2, ExodusIIFile): 12 | file2 = ExodusIIFile(file2) 13 | 14 | if file1.num_dimensions() != file2.num_dimensions(): 15 | raise ValueError("Files do not have the same dimension") 16 | 17 | if file1.num_element_blocks() != file2.num_element_blocks(): 18 | raise ValueError("Files do not have same number of element blocks") 19 | 20 | if file1.num_nodes() != file2.num_nodes(): 21 | raise ValueError("Files do not have same number of nodes") 22 | 23 | if file1.num_elems() != file2.num_elems(): 24 | raise ValueError("Files do not have same number of elements") 25 | 26 | if not compare_varnames( 27 | file1.get_node_variable_names(), file2.get_node_variable_names() 28 | ): 29 | raise ValueError("Files do not define the same node variables") 30 | 31 | if not compare_varnames( 32 | file1.get_edge_variable_names(), file2.get_edge_variable_names() 33 | ): 34 | raise ValueError("Files do not define the same edge variables") 35 | 36 | if not compare_varnames( 37 | file1.get_face_variable_names(), file2.get_face_variable_names() 38 | ): 39 | raise ValueError("Files do not define the same face variables") 40 | 41 | if not compare_varnames( 42 | file1.get_element_variable_names(), file2.get_element_variable_names() 43 | ): 44 | raise ValueError("Files do not define the same element variables") 45 | 46 | if not np.allclose(file1.get_element_block_ids(), file1.get_element_block_ids()): 47 | raise ValueError("Files do not define the same element block IDs") 48 | 49 | for block_id in file1.get_element_block_ids(): 50 | conn1 = file1.get_element_conn(block_id) 51 | conn2 = file2.get_element_conn(block_id) 52 | if not np.allclose(conn1, conn2): 53 | raise ValueError("Files do not have the same node connectivity") 54 | 55 | coords1 = file1.get_coords() 56 | coords2 = file2.get_coords() 57 | if not np.allclose(coords1, coords2): 58 | raise ValueError("Files do not have the same node coordinates") 59 | 60 | if times is not None: 61 | if not compare_times(file1.get_times(), file2.get_times(), times): 62 | raise ValueError("Files do not 
contain the same times") 63 | 64 | return True 65 | 66 | 67 | def compare_varnames(arg1, arg2): 68 | if arg1 is None and arg2 is None: 69 | return True 70 | elif arg1 is None and arg2 is not None: 71 | return False 72 | elif arg2 is None and arg1 is not None: 73 | return False 74 | list1 = [x.lower() for x in arg1] 75 | list2 = [x.lower() for x in arg2] 76 | return all([x in list2 for x in list1]) 77 | 78 | 79 | def compare_times(times1, times2, times): 80 | found1, found2 = False, False 81 | for time in times: 82 | for t in times1: 83 | if abs(time - t) < 1e-12: 84 | found1 = True 85 | break 86 | for t in times2: 87 | if abs(time - t) < 1e-12: 88 | found2 = True 89 | break 90 | return found1 and found2 91 | -------------------------------------------------------------------------------- /exodusii/util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import subprocess 4 | import numpy as np 5 | from contextlib import contextmanager 6 | 7 | 8 | string_kinds = ("U", "S") 9 | string_types = (str,) 10 | 11 | 12 | def stringify(a): 13 | if isinstance(a, str): 14 | return a 15 | elif isinstance(a, bytes): 16 | return a.decode() 17 | elif isinstance(a, np.ndarray): 18 | if len(a.shape) == 1: 19 | return "".join(decode(x) for x in a if x).rstrip() 20 | elif len(a.shape) == 2: 21 | return np.array([stringify(row) for row in a]) 22 | elif len(a.shape) == 3: 23 | x = [stringify(row) for row in a] 24 | return np.array([" ".join(_) for _ in x]) 25 | else: 26 | raise TypeError(f"Cannot stringify arrays with shape {a.shape}") 27 | else: 28 | raise TypeError(f"Cannot stringify items of type {type(a).__name__}") 29 | 30 | 31 | def decode(x): 32 | if isinstance(x, np.ma.core.MaskedConstant): 33 | return "" 34 | return x.decode() 35 | 36 | 37 | def index(array, val): 38 | if isinstance(array, (list, tuple)): 39 | return array.index(val) 40 | (ix,) = np.where(array == val) 41 | if not len(ix): 42 | raise ValueError(f"{val} is not in array") 43 | return ix[0] 44 | 45 | 46 | def is_exe(path): 47 | return os.path.isfile(path) and os.access(path, os.X_OK) 48 | 49 | 50 | def which(*args): 51 | """Like ``which`` on the command line""" 52 | path = os.getenv("PATH").split(os.pathsep) 53 | for name in args: 54 | exe = os.path.abspath(name) 55 | if is_exe(exe): 56 | return exe 57 | for directory in path: 58 | exe = os.path.join(directory, name) 59 | if is_exe(exe): 60 | return exe 61 | raise ValueError( 62 | f"Required executable {args[0]} not found. 
Make sure it is in your path" 63 | ) 64 | 65 | 66 | @contextmanager 67 | def working_dir(dirname): 68 | cwd = os.getcwd() 69 | os.chdir(dirname) 70 | yield 71 | os.chdir(cwd) 72 | 73 | 74 | def epu(*files): 75 | """Concatenate exodus files""" 76 | if not files: 77 | return None 78 | if len(files) == 1: 79 | return files[0] 80 | 81 | epu = which("epu") 82 | workdir = os.path.dirname(files[0]) 83 | if not os.path.isdir(workdir): 84 | raise ValueError(f"{workdir} is not a directory") 85 | with working_dir(workdir): 86 | files = [os.path.basename(f) for f in files] 87 | for file in files: 88 | if not os.path.exists(file): 89 | raise ValueError(f"{file} is not a file") 90 | # determine the basename 91 | parts = files[0].split(".") 92 | try: 93 | base, suff, p, n = parts 94 | except ValueError: 95 | raise ValueError("Expected files `base.suf.#p.#n`") from None 96 | cmd = [epu, "-auto", files[0]] 97 | f = ".epu.log" 98 | with open(f, "w") as fh: 99 | p = subprocess.Popen(cmd, stdout=fh, stderr=subprocess.STDOUT) 100 | p.wait() 101 | if p.returncode != 0: 102 | os.rename(f, f[1:]) 103 | raise SystemExit(f"Exodus file concatenation failed, see {f[1:]}") 104 | else: 105 | os.remove(f) 106 | 107 | os.rename(os.path.join(workdir, f"{base}.{suff}"), f"{base}.{suff}") 108 | return f"{base}.{suff}" 109 | 110 | 111 | def compute_connected_average(conn, values): 112 | """Computes the average of values 113 | 114 | Parameters 115 | ---------- 116 | conn : ndarray of int 117 | Entity connectivity, 0 based 118 | values : ndarray of float 119 | Nodal values 120 | 121 | Returns 122 | ------- 123 | averaged : ndarray of float 124 | 125 | """ 126 | num_ent = len(conn) 127 | num_dimensions = values.shape[1] 128 | averaged = np.zeros((num_ent, num_dimensions)) 129 | for (e, ix) in enumerate(conn): 130 | for dim in range(num_dimensions): 131 | x = values[ix, dim] 132 | averaged[e, dim] = x.sum() / conn.shape[1] 133 | return averaged 134 | 135 | 136 | def streamify(file): 137 | if isinstance(file, string_types): 138 | return open(file, "w"), True 139 | else: 140 | return file, False 141 | 142 | 143 | def fmt_join(*, fmt, items, sep=" "): 144 | return sep.join(fmt % item for item in items) 145 | 146 | 147 | def fuzzy_compare(arg1, arg2): 148 | """Compares arg1 to arg2. The comparison is case insensitive and spaces, '-', and 149 | '_' are stripped from both ends. Treats '-', '_' as a space. Multiple spaces 150 | treated as one space. 151 | """ 152 | regex = re.compile(r"[-_]") 153 | transform = lambda s: " ".join(regex.sub(" ", s).split()).lower() 154 | return transform(arg1) == transform(arg2) 155 | 156 | 157 | def find_index(sequence, arg, strict=True): 158 | """Searches for 'arg' in 'sequence', comparing exactly when strict is True and with 159 | fuzzy_compare() otherwise. Returns the index into 'sequence' if found, or None if not. 
160 | """ 161 | for (i, item) in enumerate(sequence): 162 | if strict: 163 | if arg == item: 164 | return i 165 | elif fuzzy_compare(arg, item): 166 | return i 167 | return None 168 | 169 | 170 | def find_nearest(array, value): 171 | array = np.asarray(array) 172 | idx = (np.abs(array - value)).argmin() 173 | return idx, array[idx] 174 | 175 | 176 | def check_bounds(array, value, tol=1e-12): 177 | if value + tol <= np.amin(array): 178 | return False 179 | elif value - tol >= np.amax(array): 180 | return False 181 | return True 182 | 183 | 184 | def contains(array, value): 185 | return value in list(array) 186 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "exodusii" 3 | version = "1.0.0" 4 | description = "Python wrappers to the ExodusII finite element database model" 5 | dependencies = ["netcdf4"] 6 | 7 | [project.scripts] 8 | "exoread.py" = "exodusii:exoread" 9 | 10 | [build-system] 11 | requires = ["setuptools>=64", "setuptools-scm[toml]>=6.2.3"] 12 | build-backend = "setuptools.build_meta" 13 | -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | 4 | 5 | @pytest.fixture(scope="function") 6 | def datadir(): 7 | test_dir = os.path.dirname(os.path.realpath(__file__)) 8 | yield os.path.join(test_dir, "data") 9 | -------------------------------------------------------------------------------- /test/data/edges.base.exo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/edges.base.exo -------------------------------------------------------------------------------- /test/data/edges.exo.4.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/edges.exo.4.0 -------------------------------------------------------------------------------- /test/data/edges.exo.4.1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/edges.exo.4.1 -------------------------------------------------------------------------------- /test/data/edges.exo.4.2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/edges.exo.4.2 -------------------------------------------------------------------------------- /test/data/edges.exo.4.3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/edges.exo.4.3 -------------------------------------------------------------------------------- /test/data/mkmesh.gen: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/mkmesh.gen -------------------------------------------------------------------------------- /test/data/mkmesh.par.2.0: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/mkmesh.par.2.0 -------------------------------------------------------------------------------- /test/data/mkmesh.par.2.1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/mkmesh.par.2.1 -------------------------------------------------------------------------------- /test/data/noh.exo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/noh.exo -------------------------------------------------------------------------------- /test/data/noh.exo.3.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/noh.exo.3.0 -------------------------------------------------------------------------------- /test/data/noh.exo.3.1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/noh.exo.3.1 -------------------------------------------------------------------------------- /test/data/noh.exo.3.2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sandialabs/exodusii/6535a48ae8fc83e6f7be7203c994bc88ff146880/test/data/noh.exo.3.2 -------------------------------------------------------------------------------- /test/parallel_read.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import glob 5 | import numpy as np 6 | import exodusii 7 | from exodusii.exodus_h import maps 8 | 9 | 10 | def test_exodusii_read_1(datadir): 11 | 12 | files = glob.glob(os.path.join(datadir, "noh.exo.?.?")) 13 | exof = exodusii.exo_file(*files) 14 | assert exof.num_dimensions() == 2 15 | assert exof.storage_type() == "d" 16 | 17 | assert exof.get_info_records() is None 18 | 19 | nums = [ 20 | exof.num_elem_blk(), 21 | exof.num_node_sets(), 22 | exof.num_side_sets(), 23 | exof.num_elem_maps(), 24 | exof.num_node_maps(), 25 | exof.num_edge_blk(), 26 | exof.num_edge_sets(), 27 | exof.num_face_blk(), 28 | exof.num_face_sets(), 29 | exof.num_elem_sets(), 30 | exof.num_edge_maps(), 31 | exof.num_face_maps(), 32 | ] 33 | assert nums == [2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] 34 | 35 | assert exof.get_coord_names().tolist() == ["X", "Y"] 36 | 37 | coords = exof.get_coords() 38 | assert coords.shape[1] == 2 39 | xL = [ 40 | 0.0, 41 | 1.0, 42 | 2.0, 43 | 3.0, 44 | 4.0, 45 | 5.0, 46 | 6.0, 47 | 7.0, 48 | 8.0, 49 | 9.0, 50 | 10.0, 51 | 0.0, 52 | 1.0, 53 | 2.0, 54 | 3.0, 55 | 4.0, 56 | 5.0, 57 | 6.0, 58 | 7.0, 59 | 8.0, 60 | 9.0, 61 | 10.0, 62 | ] 63 | yL = [ 64 | 0.0, 65 | 0.0, 66 | 0.0, 67 | 0.0, 68 | 0.0, 69 | 0.0, 70 | 0.0, 71 | 0.0, 72 | 0.0, 73 | 0.0, 74 | 0.0, 75 | 1.0, 76 | 1.0, 77 | 1.0, 78 | 1.0, 79 | 1.0, 80 | 1.0, 81 | 1.0, 82 | 1.0, 83 | 1.0, 84 | 1.0, 85 | 1.0, 86 | ] 87 | assert np.allclose(coords[:, 0], xL) 88 | assert np.allclose(coords[:, 1], yL) 89 | 90 | assert exof.get_element_block_ids().tolist() == [1, 2] 91 | assert exof.get_node_set_ids().tolist() == [10, 20, 30, 40] 92 
| assert exof.get_element_block_id(1) == 1 93 | assert exof.get_node_set_id(4) == 40 94 | 95 | info = exof.get_element_block(1) 96 | assert info.elem_type == "QUAD" 97 | assert info.num_block_elems == 5 98 | assert info.num_elem_nodes == 4 99 | assert info.num_elem_attrs == 0 100 | 101 | info = exof.get_element_block(2) 102 | assert info.elem_type == "QUAD" 103 | assert info.num_block_elems == 5 104 | assert info.num_elem_nodes == 4 105 | assert info.num_elem_attrs == 0 106 | 107 | assert exof.get_node_set_params(10).num_nodes == 2 108 | assert exof.get_node_set_params(20).num_nodes == 2 109 | assert exof.get_node_set_params(30).num_nodes == 11 110 | assert exof.get_node_set_params(40).num_nodes == 11 111 | 112 | ia = np.array( 113 | [0, 1, 12, 11, 1, 2, 13, 12, 2, 3, 14, 13, 3, 4, 15, 14, 4, 5, 16, 15] 114 | ) 115 | conn = exof.get_element_conn(1) 116 | assert np.allclose(conn.flatten(), ia + 1) 117 | 118 | assert np.allclose([1, 12], exof.get_node_set_nodes(10)) 119 | assert np.allclose([11, 22], exof.get_node_set_nodes(20)) 120 | assert np.allclose([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], exof.get_node_set_nodes(30)) 121 | assert np.allclose( 122 | [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22], exof.get_node_set_nodes(40) 123 | ) 124 | 125 | assert np.allclose([1, 2, 3, 4], exof.get_element_id_map(exof.files[0])) 126 | assert np.allclose( 127 | [1, 2, 3, 4, 5, 12, 13, 14, 15, 16], 128 | exof.get_node_id_map(exof.files[0]), 129 | ) 130 | 131 | assert np.allclose( 132 | exof.get_times(), 133 | [ 134 | 0.0, 135 | 0.0018, 136 | 0.00396, 137 | 0.006552, 138 | 0.0096624, 139 | 0.01339488, 140 | 0.017873856, 141 | 0.0232486272, 142 | 0.02969835264, 143 | 0.037438023168, 144 | 0.0467256278016, 145 | 0.05787075336192, 146 | 0.071244904034304, 147 | 0.0872938848411648, 148 | 0.106552661809398, 149 | 0.129663194171277, 150 | 0.157395833005533, 151 | 0.190674999606639, 152 | 0.230609999527967, 153 | 0.27853199943356, 154 | 0.336038399320273, 155 | 0.405046079184327, 156 | 0.487855295021193, 157 | 0.587226354025431, 158 | 0.706471624830517, 159 | 0.849565949796621, 160 | 1.02127913975594, 161 | 1.22733496770713, 162 | 1.47460196124856, 163 | 1.77132235349827, 164 | 2.12738682419793, 165 | ], 166 | ) 167 | 168 | assert exof.get_global_variable_names().tolist() == [ 169 | "DT_HYDRO", 170 | "DT_HYDROCFL", 171 | "DT_SOUND", 172 | "DT_ELASTIC", 173 | "DT_MATVEL", 174 | "DT_ARTVIS", 175 | "DT_VOLUME", 176 | "ALE_REMESH_CNT", 177 | "NSTEPS", 178 | "CPU", 179 | "GRIND", 180 | "CPUNOIO", 181 | "GRINDNOIO", 182 | "MEMORY_PMAX", 183 | "MEMFRAGS_PMAX", 184 | "MEMORY_PMIN", 185 | "MEMFRAGS_PMIN", 186 | "TMSTP_CONT_EL", 187 | "TMSTP_CONT_PROC", 188 | "ETOT", 189 | "EINT", 190 | "ESOURCE", 191 | "EERROR", 192 | "PERROR", 193 | "PTOT", 194 | "PINT", 195 | "TM_STEP", 196 | "MASSTOT", 197 | "NODEMASS", 198 | "MASSERR", 199 | "MASSGAIN", 200 | "MASSLOSS", 201 | "EKIN", 202 | "EKINRZERR", 203 | "EKINRZTAL", 204 | "ETOTHYDRZERR", 205 | "PTOTHYDRZERR", 206 | "EINTRZFIXADD", 207 | "EINTRZLOSTUPDATING", 208 | "PINTRZLOSTUPDATING", 209 | "EKINLAGSTEP", 210 | "ETOTHYDLAGSTEP", 211 | "EINTGAIN", 212 | "EINTLOSS", 213 | "EKINGAIN", 214 | "EKINLOSS", 215 | "EVELBC", 216 | "PKIN", 217 | "EPDV", 218 | "ENONPDV", 219 | "PPDV", 220 | "PNONPDV", 221 | "PINTRZFIXTOT", 222 | "PVELBC", 223 | "XMOM", 224 | "YMOM", 225 | "ZMOM", 226 | "XMOMLAG", 227 | "YMOMLAG", 228 | "ZMOMLAG", 229 | "XMOMRZERR", 230 | "YMOMRZERR", 231 | "ZMOMRZERR", 232 | "NUM_SOLKIN_RESETS", 233 | "MAT_MASS_1", 234 | "MAT_MOM_X_1", 235 | "MAT_MOM_Y_1", 236 | "MAT_ETOT_1", 
237 | "MAT_EK_1", 238 | "MAT_EINT_1", 239 | "MAT_MIN_TEMP_1", 240 | "MAT_MAX_TEMP_1", 241 | "MAT_MIN_DENS_1", 242 | "MAT_MAX_DENS_1", 243 | ] 244 | assert exof.get_node_variable_names().tolist() == [ 245 | "DISPLX", 246 | "DISPLY", 247 | "VELOCITY_X", 248 | "VELOCITY_Y", 249 | ] 250 | assert exof.get_element_variable_names().tolist() == [ 251 | "DENSITY", 252 | "ENERGY_1", 253 | "PROC_ID", 254 | "VOID_FRC", 255 | "VOLFRC_1", 256 | ] 257 | 258 | assert exof.get_displ_variable_names()[0] == "DISPLX" 259 | 260 | gvarL = [ 261 | 0.200582879393687, 262 | 0.245792238995311, 263 | 12.4840883973724, 264 | 12.4840883973724, 265 | 0.70980262403434, 266 | 0.249127814687454, 267 | 0.200582879393687, 268 | 0.0, 269 | 2.0, 270 | 0.0921449661254883, 271 | 0.00329089164733887, 272 | 0.0, 273 | 0.0, 274 | 327.0234375, 275 | 0.0, 276 | 326.515625, 277 | 0.0, 278 | 6.0, 279 | 1.0, 280 | 2.75, 281 | 0.00287593306393069, 282 | 0.0, 283 | 1.70002900645727e-16, 284 | 1.01694148207696e-13, 285 | 1.01634750022269e-13, 286 | 0.91515882589384, 287 | 0.00216, 288 | 10.0, 289 | 10.0, 290 | 0.0, 291 | 0.0, 292 | 0.0, 293 | 2.74712406693607, 294 | 0.0, 295 | 0.0, 296 | 0.0, 297 | 0.0, 298 | 0.0, 299 | 0.0, 300 | 0.0, 301 | 2.74712406693607, 302 | 2.75, 303 | 0.0, 304 | 0.0, 305 | 0.0, 306 | 0.0, 307 | 9.478660739328e-64, 308 | -0.915158825893738, 309 | 1.2922561445999e-06, 310 | 0.00395353472268889, 311 | 0.000598266733611063, 312 | 0.997756816059669, 313 | 0.0, 314 | 3.0169348418e-61, 315 | 5.5, 316 | 0.0, 317 | 0.0, 318 | 5.5, 319 | 0.0, 320 | 0.0, 321 | 0.0, 322 | 0.0, 323 | 0.0, 324 | 0.0, 325 | 10.0, 326 | 5.5, 327 | 0.0, 328 | 2.75, 329 | 2.74712406693607, 330 | 0.00287593306393069, 331 | 1.21e-43, 332 | 704885553397428.0, 333 | 1.0, 334 | 1.00395991914769, 335 | ] 336 | gvar = exof.get_all_global_variable_values(3) 337 | assert np.allclose(gvar, gvarL) 338 | 339 | velxL = [ 340 | 1.0, 341 | 1.0, 342 | 1.0, 343 | 1.0, 344 | 0.999999978628986, 345 | 0.994734772493227, 346 | 0.00526522750677266, 347 | 2.13710140534428e-08, 348 | 6.11150627070137e-21, 349 | 0.0, 350 | 0.0, 351 | 1.0, 352 | 1.0, 353 | 1.0, 354 | 1.0, 355 | 0.999999978628986, 356 | 0.994734772493227, 357 | 0.00526522750677266, 358 | 2.13710140534428e-08, 359 | 6.11150627070137e-21, 360 | 0.0, 361 | 0.0, 362 | ] 363 | velx = exof.get_node_variable_values("VELOCITY_X", time_step=4) 364 | assert len(velx) == len(velxL) 365 | assert np.allclose(velx, velxL) 366 | 367 | engyL1 = [ 368 | 2.59816018018567e-35, 369 | 8.29069561528397e-15, 370 | 4.10477981379954e-06, 371 | 0.0101737788835637, 372 | 0.103766889750046, 373 | ] 374 | engy = exof.get_element_variable_values(1, "ENERGY_1", 31) 375 | assert len(engy) == len(engyL1) 376 | assert np.allclose(engy, engyL1) 377 | 378 | engyL2 = [ 379 | 0.28334004018958, 380 | 0.103766889750046, 381 | 0.0101737788835637, 382 | 4.1047798137994e-06, 383 | 8.29070241942468e-15, 384 | ] 385 | engy = exof.get_element_variable_values(2, "ENERGY_1", 31) 386 | assert len(engy) == len(engyL2) 387 | assert np.allclose(engy, engyL2) 388 | 389 | engyL = engyL1 + engyL2 390 | engy = exof.get_element_variable_values(None, "ENERGY_1", 31) 391 | assert len(engy) == len(engyL) 392 | assert np.allclose(engy, engyL) 393 | 394 | tsL = [ 395 | 1.8000000000000002e-03, 396 | 1.8000000000000002e-03, 397 | 2.1600000000000000e-03, 398 | 2.5920000000000001e-03, 399 | 3.1104000000000001e-03, 400 | 3.7324799999999998e-03, 401 | 4.4789760000000000e-03, 402 | 5.3747711999999996e-03, 403 | 6.4497254399999990e-03, 404 | 7.7396705279999985e-03, 405 | 
9.2876046335999985e-03, 406 | 1.1145125560319998e-02, 407 | 1.3374150672383997e-02, 408 | 1.6048980806860794e-02, 409 | 1.9258776968232954e-02, 410 | 2.3110532361879543e-02, 411 | 2.7732638834255450e-02, 412 | 3.3279166601106538e-02, 413 | 3.9934999921327846e-02, 414 | 4.7921999905593413e-02, 415 | 5.7506399886712092e-02, 416 | 6.9007679864054511e-02, 417 | 8.2809215836865416e-02, 418 | 9.9371059004238496e-02, 419 | 1.1924527080508619e-01, 420 | 1.4309432496610341e-01, 421 | 1.7171318995932408e-01, 422 | 2.0605582795118890e-01, 423 | 2.4726699354142667e-01, 424 | 2.9672039224971197e-01, 425 | 3.5606447069965436e-01, 426 | ] 427 | ts = exof.get_global_variable_values("TM_STEP") 428 | assert len(ts) == len(tsL) 429 | assert np.allclose(ts, tsL) 430 | 431 | vL = [ 432 | 1.0, 433 | 0.9991, 434 | 0.99711574964, 435 | 0.994734772493, 436 | 0.991880991528, 437 | 0.98846350487, 438 | 0.984374734605, 439 | 0.979488078236, 440 | 0.973655459212, 441 | 0.966705028309, 442 | 0.95843938673, 443 | 0.948634916753, 444 | 0.937043122046, 445 | 0.923395311499, 446 | 0.907412498792, 447 | 0.888822959682, 448 | 0.867390275439, 449 | 0.842954435118, 450 | 0.815486876904, 451 | 0.785156158094, 452 | 0.752393464328, 453 | 0.717937232906, 454 | 0.682828124286, 455 | 0.648327635188, 456 | 0.615752544501, 457 | 0.586246600528, 458 | 0.560525941575, 459 | 0.538624922599, 460 | 0.519713479061, 461 | 0.502336759139, 462 | 0.485852780561, 463 | ] 464 | v = exof.get_node_variable_history("VELOCITY_X", 6) 465 | assert len(v) == len(vL) 466 | assert np.allclose(v, vL) 467 | 468 | eL = [ 469 | 4.9368e-61, 470 | 4.93680133294e-61, 471 | 3.03459399385e-12, 472 | 8.35805930438e-11, 473 | 6.12654077006e-10, 474 | 2.82997861173e-09, 475 | 1.02552350281e-08, 476 | 3.20073624819e-08, 477 | 9.03501695757e-08, 478 | 2.37400453261e-07, 479 | 5.91332274304e-07, 480 | 1.41334733801e-06, 481 | 3.26840718583e-06, 482 | 7.35486975052e-06, 483 | 1.61675206433e-05, 484 | 3.48020804957e-05, 485 | 7.34552970585e-05, 486 | 0.000152063791808, 487 | 0.000308565227843, 488 | 0.000612845535871, 489 | 0.00118865930759, 490 | 0.0022447788773, 491 | 0.00411283766336, 492 | 0.00728102875914, 493 | 0.0123991686804, 494 | 0.0202141071934, 495 | 0.031386178943, 496 | 0.0461691796204, 497 | 0.0640648397757, 498 | 0.0837768855597, 499 | 0.10376688975, 500 | ] 501 | e = exof.get_element_variable_history("ENERGY_1", 7) 502 | 503 | assert len(e) == len(eL) 504 | assert np.allclose(eL, e) 505 | 506 | exof.close() 507 | 508 | 509 | def test_exodusii_read_mkmesh(tmpdir, datadir): 510 | from exodusii.util import working_dir 511 | 512 | with working_dir(tmpdir.strpath): 513 | exof = exodusii.exo_file( 514 | os.path.join(datadir, "mkmesh.par.2.0"), 515 | os.path.join(datadir, "mkmesh.par.2.1"), 516 | ) 517 | assert exof.num_dimensions() == 2 518 | assert exof.storage_type() == "d" 519 | assert np.allclose([7, 8, 9, 10, 11, 12], exof.get_node_set_nodes(100)) 520 | assert np.allclose([1, 2, 3, 4, 5, 6], exof.get_node_set_nodes(101)) 521 | assert np.allclose( 522 | [1.1, 2.1, 3.1, 4.1, 5.1, 6.1], exof.get_node_set_dist_facts(101) 523 | ) 524 | 525 | assert np.allclose([3, 2], exof.num_elems_in_all_blks()) 526 | assert 3 == exof.num_elems_in_blk(10) 527 | assert 2 == exof.num_elems_in_blk(20) 528 | 529 | elem_map = {(0, 1): 1, (0, 2): 2, (1, 1): 3, (1, 2): 4, (1, 3): 5} 530 | assert exof.get_mapping(maps.elem_local_to_global) == elem_map 531 | 532 | node_map = { 533 | (0, 1): 1, 534 | (0, 2): 2, 535 | (0, 3): 7, 536 | (0, 4): 8, 537 | (0, 5): 3, 538 | (0, 6): 9, 539 | 
(1, 1): 4, 540 | (1, 2): 5, 541 | (1, 3): 6, 542 | (1, 4): 10, 543 | (1, 5): 11, 544 | (1, 6): 12, 545 | (1, 7): 3, 546 | (1, 8): 9, 547 | } 548 | assert exof.get_mapping(maps.node_local_to_global) == node_map 549 | 550 | ss = exof.get_side_set(200) 551 | assert np.allclose([4, 3, 3, 3, 3, 3], ss.sides) 552 | 553 | ss = exof.get_side_set(201) 554 | assert np.allclose([1, 2, 3, 4, 5, 5], ss.elems) 555 | assert np.allclose([1, 1, 1, 1, 1, 2], ss.sides) 556 | 557 | df = exof.get_side_set_dist_facts(201) 558 | dfL = [1.1, 1.2, 2.1, 2.2, 3.1, 3.2, 4.1, 4.2, 5.1, 5.2, 6.1, 6.2] 559 | assert np.allclose(df, dfL) 560 | -------------------------------------------------------------------------------- /test/parallel_write.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import glob 4 | import exodusii 5 | from exodusii.util import working_dir 6 | 7 | 8 | def test_exodusii_parallel_write(tmpdir, datadir): 9 | with working_dir(tmpdir): 10 | name = "edges" 11 | files = glob.glob(f"{datadir}/{name}.exo.*") 12 | file = exodusii.exo_file(*files) 13 | joined = file.write(f"{name}.exo") 14 | basefile = os.path.join(datadir, f"{name}.base.exo") 15 | assert os.path.exists(basefile) 16 | base = exodusii.exo_file(basefile) 17 | dimensions = "~four|len_line|len_string" 18 | assert exodusii.allclose(base, joined, dimensions=dimensions, variables=None) 19 | -------------------------------------------------------------------------------- /test/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --durations=20 -ra 3 | testpaths = . 4 | norecursedirs = data 5 | python_files = *.py 6 | filterwarnings = 7 | ignore::DeprecationWarning 8 | -------------------------------------------------------------------------------- /test/region.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import exodusii 3 | 4 | 5 | def test_region_cylinder_2d_0(): 6 | p1 = [0.0, 0.0] 7 | p2 = [1.0, 0.0] 8 | radius = 0.5 9 | region = exodusii.region.cylinder(p1, p2, radius) 10 | a = [0, -radius] 11 | b = [1, -radius] 12 | c = [1, radius] 13 | d = [0, radius] 14 | points = [a, b, c, d, [b[0], 1.005 * b[1]], [c[0], 1.005 * c[1]]] 15 | assert region.contains(points[0]) 16 | assert region.contains(points[1]) 17 | assert region.contains(points[2]) 18 | assert region.contains(points[3]) 19 | assert not region.contains(points[4]) 20 | assert not region.contains(points[5]) 21 | contains = region.contains(points) 22 | assert contains[0] 23 | assert contains[1] 24 | assert contains[2] 25 | assert contains[3] 26 | assert not contains[4] 27 | assert not contains[5] 28 | 29 | 30 | def test_region_cylinder_2d_1(): 31 | p1 = [0.0, 0.0] 32 | p2 = [1.0, 1.0] 33 | radius = 0.5 34 | region = exodusii.region.cylinder(p1, p2, radius) 35 | x = 0.5 * np.sqrt(2) / 2 36 | a = [0.99 * x, -0.99 * x] 37 | b = [1 + 0.99 * x, 1 - 0.99 * x] 38 | c = [1 - 0.99 * x, 1 + 0.99 * x] 39 | d = [-0.99 * x, 0.99 * x] 40 | points = [a, b, c, d, [b[0], 1.005 * b[1]], [c[0], 1.005 * c[1]]] 41 | assert region.contains(points[0]) 42 | assert region.contains(points[1]) 43 | assert region.contains(points[2]) 44 | assert region.contains(points[3]) 45 | assert not region.contains(points[4]) 46 | assert not region.contains(points[5]) 47 | contains = region.contains(points) 48 | assert contains[0] 49 | assert contains[1] 50 | assert contains[2] 51 | assert contains[3] 52 | assert not 
contains[4] 53 | assert not contains[5] 54 | 55 | 56 | def test_region_cylinder_3d(): 57 | p1 = [0.0, 0.0, 0.0] 58 | p2 = [1.0, 0.0, 0.0] 59 | radius = 0.5 60 | region = exodusii.region.cylinder(p1, p2, radius) 61 | points = [[0, 0.5, 0], [0, 1.5, 0]] 62 | assert region.contains(points[0]) 63 | assert not region.contains(points[1]) 64 | contains = region.contains(points) 65 | assert contains[0] 66 | assert not contains[1] 67 | 68 | 69 | def test_region_rectangle(): 70 | origin = [0.0, -2.5] 71 | region = exodusii.region.rectangle(origin, 5.0, 5.0) 72 | points = [[0, 2.5], [-3.0, 2.5]] 73 | assert region.contains(points[0]) 74 | assert not region.contains(points[1]) 75 | contains = region.contains(points) 76 | assert contains[0] 77 | assert not contains[1] 78 | 79 | 80 | def test_region_quad(): 81 | region = exodusii.region.quad([1.0, 1.0], [5.0, 2.0], [6.0, 5.0], [0.0, 3.0]) 82 | points = [[2, 2.5], [4.0, 0.0]] 83 | assert region.contains(points[0]) 84 | assert not region.contains(points[1]) 85 | contains = region.contains(points) 86 | assert contains[0] 87 | assert not contains[1] 88 | 89 | 90 | def test_region_circle(): 91 | region = exodusii.region.circle([0.0, 0.0], 1.0) 92 | points = [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [-1.0, 0], [0.0, -1.0], [0, 1.1]] 93 | for point in points[:-1]: 94 | assert region.contains(point) 95 | assert not region.contains(points[-1]) 96 | contains = region.contains(points) 97 | assert all(contains[:-1]) 98 | assert not contains[-1] 99 | 100 | 101 | def test_region_sphere(): 102 | region = exodusii.region.sphere([0.0, 0.0, 0.0], 1.0) 103 | points = [ 104 | [0.0, 0.0, 0.0], 105 | [0.0, 1.0, 0.0], 106 | [1.0, 0.0, 0.0], 107 | [0.0, 0, -1.0], 108 | [0.0, -1.0, 0], 109 | [0, 1.1, 0.8], 110 | ] 111 | for point in points[:-1]: 112 | assert region.contains(point) 113 | assert not region.contains(points[-1]) 114 | contains = region.contains(points) 115 | assert all(contains[:-1]) 116 | assert not contains[-1] 117 | -------------------------------------------------------------------------------- /test/serial_read.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import numpy as np 5 | import exodusii 6 | 7 | 8 | def test_exodusii_read_0(datadir): 9 | 10 | noh = os.path.join(datadir, "noh.exo") 11 | exof = exodusii.File(noh) 12 | assert exof.filename == noh 13 | assert exof.title() == "PAMGEN Inline Mesh" 14 | exof.close() 15 | 16 | 17 | def test_exodusii_read_1(datadir): 18 | 19 | noh = os.path.join(datadir, "noh.exo") 20 | exof = exodusii.File(noh) 21 | assert exof.num_dimensions() == 2 22 | assert exof.storage_type() == "d" 23 | 24 | assert exof.get_info_records() == [ 25 | "EPU: wsblade008, OS: Linux 2.6.18-238.9.1.el5, #1 SMP Fri Mar 18 12:42:39 EDT 20" # noqa: E501 26 | ] 27 | 28 | nums = [ 29 | exof.num_elem_blk(), 30 | exof.num_node_sets(), 31 | exof.num_side_sets(), 32 | exof.num_elem_maps(), 33 | exof.num_node_maps(), 34 | exof.num_edge_blk(), 35 | exof.num_edge_sets(), 36 | exof.num_face_blk(), 37 | exof.num_face_sets(), 38 | exof.num_elem_sets(), 39 | exof.num_edge_maps(), 40 | exof.num_face_maps(), 41 | ] 42 | assert nums == [2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] 43 | 44 | assert exof.get_coord_names().tolist() == ["X", "Y"] 45 | 46 | coords = exof.get_coords() 47 | assert coords.shape[1] == 2 48 | xL = [ 49 | 0.0, 50 | 1.0, 51 | 2.0, 52 | 3.0, 53 | 4.0, 54 | 5.0, 55 | 6.0, 56 | 7.0, 57 | 8.0, 58 | 9.0, 59 | 10.0, 60 | 0.0, 61 | 1.0, 62 | 2.0, 63 | 3.0, 64 | 4.0, 65 | 5.0, 66 
| 6.0, 67 | 7.0, 68 | 8.0, 69 | 9.0, 70 | 10.0, 71 | ] 72 | yL = [ 73 | 0.0, 74 | 0.0, 75 | 0.0, 76 | 0.0, 77 | 0.0, 78 | 0.0, 79 | 0.0, 80 | 0.0, 81 | 0.0, 82 | 0.0, 83 | 0.0, 84 | 1.0, 85 | 1.0, 86 | 1.0, 87 | 1.0, 88 | 1.0, 89 | 1.0, 90 | 1.0, 91 | 1.0, 92 | 1.0, 93 | 1.0, 94 | 1.0, 95 | ] 96 | assert np.allclose(coords[:, 0], xL) 97 | assert np.allclose(coords[:, 1], yL) 98 | 99 | assert exof.get_element_block_ids().tolist() == [1, 2] 100 | assert exof.get_node_set_ids().tolist() == [10, 20, 30, 40] 101 | assert exof.get_element_block_id(1) == 1 102 | assert exof.get_node_set_id(4) == 40 103 | assert exof.get_element_block_iid(1) == 1 104 | assert exof.get_element_block_iid(2) == 2 105 | assert exof.get_node_set_iid(10) == 1 106 | assert exof.get_node_set_iid(20) == 2 107 | assert exof.get_node_set_iid(30) == 3 108 | assert exof.get_node_set_iid(40) == 4 109 | 110 | info = exof.get_element_block(1) 111 | assert info.elem_type == "QUAD" 112 | assert info.num_block_elems == 5 113 | assert info.num_elem_nodes == 4 114 | assert info.num_elem_attrs == 0 115 | 116 | info = exof.get_element_block(2) 117 | assert info.elem_type == "QUAD" 118 | assert info.num_block_elems == 5 119 | assert info.num_elem_nodes == 4 120 | assert info.num_elem_attrs == 0 121 | 122 | assert exof.get_node_set_params(10).num_nodes == 2 123 | assert exof.get_node_set_params(20).num_nodes == 2 124 | assert exof.get_node_set_params(30).num_nodes == 11 125 | assert exof.get_node_set_params(40).num_nodes == 11 126 | 127 | ia = np.array( 128 | [0, 1, 12, 11, 1, 2, 13, 12, 2, 3, 14, 13, 3, 4, 15, 14, 4, 5, 16, 15] 129 | ) 130 | conn = exof.get_element_conn(1) 131 | assert np.allclose(conn.flatten(), ia + 1) 132 | 133 | assert np.allclose([1, 12], exof.get_node_set_nodes(10)) 134 | assert np.allclose([11, 22], exof.get_node_set_nodes(20)) 135 | assert np.allclose([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], exof.get_node_set_nodes(30)) 136 | assert np.allclose( 137 | [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22], exof.get_node_set_nodes(40) 138 | ) 139 | 140 | assert np.allclose([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], exof.get_element_id_map()) 141 | assert np.allclose( 142 | [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22], 143 | exof.get_node_id_map(), 144 | ) 145 | 146 | assert np.allclose( 147 | exof.get_times(), 148 | [ 149 | 0.0, 150 | 0.0018, 151 | 0.00396, 152 | 0.006552, 153 | 0.0096624, 154 | 0.01339488, 155 | 0.017873856, 156 | 0.0232486272, 157 | 0.02969835264, 158 | 0.037438023168, 159 | 0.0467256278016, 160 | 0.05787075336192, 161 | 0.071244904034304, 162 | 0.0872938848411648, 163 | 0.106552661809398, 164 | 0.129663194171277, 165 | 0.157395833005533, 166 | 0.190674999606639, 167 | 0.230609999527967, 168 | 0.27853199943356, 169 | 0.336038399320273, 170 | 0.405046079184327, 171 | 0.487855295021193, 172 | 0.587226354025431, 173 | 0.706471624830517, 174 | 0.849565949796621, 175 | 1.02127913975594, 176 | 1.22733496770713, 177 | 1.47460196124856, 178 | 1.77132235349827, 179 | 2.12738682419793, 180 | ], 181 | ) 182 | 183 | assert exof.get_global_variable_names().tolist() == [ 184 | "DT_HYDRO", 185 | "DT_HYDROCFL", 186 | "DT_SOUND", 187 | "DT_ELASTIC", 188 | "DT_MATVEL", 189 | "DT_ARTVIS", 190 | "DT_VOLUME", 191 | "ALE_REMESH_CNT", 192 | "NSTEPS", 193 | "CPU", 194 | "GRIND", 195 | "CPUNOIO", 196 | "GRINDNOIO", 197 | "MEMORY_PMAX", 198 | "MEMFRAGS_PMAX", 199 | "MEMORY_PMIN", 200 | "MEMFRAGS_PMIN", 201 | "TMSTP_CONT_EL", 202 | "TMSTP_CONT_PROC", 203 | "ETOT", 204 | "EINT", 205 | "ESOURCE", 206 | "EERROR", 
207 | "PERROR", 208 | "PTOT", 209 | "PINT", 210 | "TM_STEP", 211 | "MASSTOT", 212 | "NODEMASS", 213 | "MASSERR", 214 | "MASSGAIN", 215 | "MASSLOSS", 216 | "EKIN", 217 | "EKINRZERR", 218 | "EKINRZTAL", 219 | "ETOTHYDRZERR", 220 | "PTOTHYDRZERR", 221 | "EINTRZFIXADD", 222 | "EINTRZLOSTUPDATING", 223 | "PINTRZLOSTUPDATING", 224 | "EKINLAGSTEP", 225 | "ETOTHYDLAGSTEP", 226 | "EINTGAIN", 227 | "EINTLOSS", 228 | "EKINGAIN", 229 | "EKINLOSS", 230 | "EVELBC", 231 | "PKIN", 232 | "EPDV", 233 | "ENONPDV", 234 | "PPDV", 235 | "PNONPDV", 236 | "PINTRZFIXTOT", 237 | "PVELBC", 238 | "XMOM", 239 | "YMOM", 240 | "ZMOM", 241 | "XMOMLAG", 242 | "YMOMLAG", 243 | "ZMOMLAG", 244 | "XMOMRZERR", 245 | "YMOMRZERR", 246 | "ZMOMRZERR", 247 | "NUM_SOLKIN_RESETS", 248 | "MAT_MASS_1", 249 | "MAT_MOM_X_1", 250 | "MAT_MOM_Y_1", 251 | "MAT_ETOT_1", 252 | "MAT_EK_1", 253 | "MAT_EINT_1", 254 | "MAT_MIN_TEMP_1", 255 | "MAT_MAX_TEMP_1", 256 | "MAT_MIN_DENS_1", 257 | "MAT_MAX_DENS_1", 258 | ] 259 | assert exof.get_node_variable_names().tolist() == [ 260 | "DISPLX", 261 | "DISPLY", 262 | "VELOCITY_X", 263 | "VELOCITY_Y", 264 | ] 265 | assert exof.get_element_variable_names().tolist() == [ 266 | "DENSITY", 267 | "ENERGY_1", 268 | "PROC_ID", 269 | "VOID_FRC", 270 | "VOLFRC_1", 271 | ] 272 | 273 | assert exof.get_displ_variable_names()[0] == "DISPLX" 274 | 275 | gvarL = [ 276 | 0.200582879393687, 277 | 0.245792238995311, 278 | 12.4840883973724, 279 | 12.4840883973724, 280 | 0.70980262403434, 281 | 0.249127814687454, 282 | 0.200582879393687, 283 | 0.0, 284 | 2.0, 285 | 0.0921449661254883, 286 | 0.00329089164733887, 287 | 0.0, 288 | 0.0, 289 | 327.0234375, 290 | 0.0, 291 | 326.515625, 292 | 0.0, 293 | 6.0, 294 | 1.0, 295 | 2.75, 296 | 0.00287593306393069, 297 | 0.0, 298 | 1.70002900645727e-16, 299 | 1.01694148207696e-13, 300 | 1.01634750022269e-13, 301 | 0.91515882589384, 302 | 0.00216, 303 | 10.0, 304 | 10.0, 305 | 0.0, 306 | 0.0, 307 | 0.0, 308 | 2.74712406693607, 309 | 0.0, 310 | 0.0, 311 | 0.0, 312 | 0.0, 313 | 0.0, 314 | 0.0, 315 | 0.0, 316 | 2.74712406693607, 317 | 2.75, 318 | 0.0, 319 | 0.0, 320 | 0.0, 321 | 0.0, 322 | 9.478660739328e-64, 323 | -0.915158825893738, 324 | 1.2922561445999e-06, 325 | 0.00395353472268889, 326 | 0.000598266733611063, 327 | 0.997756816059669, 328 | 0.0, 329 | 3.0169348418e-61, 330 | 5.5, 331 | 0.0, 332 | 0.0, 333 | 5.5, 334 | 0.0, 335 | 0.0, 336 | 0.0, 337 | 0.0, 338 | 0.0, 339 | 0.0, 340 | 10.0, 341 | 5.5, 342 | 0.0, 343 | 2.75, 344 | 2.74712406693607, 345 | 0.00287593306393069, 346 | 1.21e-43, 347 | 704885553397428.0, 348 | 1.0, 349 | 1.00395991914769, 350 | ] 351 | gvar = exof.get_all_global_variable_values(3) 352 | assert np.allclose(gvar, gvarL) 353 | 354 | velxL = [ 355 | 1.0, 356 | 1.0, 357 | 1.0, 358 | 1.0, 359 | 0.999999978628986, 360 | 0.994734772493227, 361 | 0.00526522750677266, 362 | 2.13710140534428e-08, 363 | 6.11150627070137e-21, 364 | 0.0, 365 | 0.0, 366 | 1.0, 367 | 1.0, 368 | 1.0, 369 | 1.0, 370 | 0.999999978628986, 371 | 0.994734772493227, 372 | 0.00526522750677266, 373 | 2.13710140534428e-08, 374 | 6.11150627070137e-21, 375 | 0.0, 376 | 0.0, 377 | ] 378 | velx = exof.get_node_variable_values("VELOCITY_X", time_step=4) 379 | assert len(velx) == len(velxL) 380 | assert np.allclose(velx, velxL) 381 | 382 | engyL1 = [ 383 | 2.59816018018567e-35, 384 | 8.29069561528397e-15, 385 | 4.10477981379954e-06, 386 | 0.0101737788835637, 387 | 0.103766889750046, 388 | ] 389 | engy = exof.get_element_variable_values(1, "ENERGY_1", 31) 390 | assert len(engy) == len(engyL1) 391 | assert 
np.allclose(engy, engyL1) 392 | 393 | engyL2 = [ 394 | 0.28334004018958, 395 | 0.103766889750046, 396 | 0.0101737788835637, 397 | 4.1047798137994e-06, 398 | 8.29070241942468e-15, 399 | ] 400 | engy = exof.get_element_variable_values(2, "ENERGY_1", 31) 401 | assert len(engy) == len(engyL2) 402 | assert np.allclose(engy, engyL2) 403 | 404 | engyL = engyL1 + engyL2 405 | engy = exof.get_element_variable_values(None, "ENERGY_1", 31) 406 | assert len(engy) == len(engyL) 407 | assert np.allclose(engy, engyL) 408 | 409 | tsL = [ 410 | 1.8000000000000002e-03, 411 | 1.8000000000000002e-03, 412 | 2.1600000000000000e-03, 413 | 2.5920000000000001e-03, 414 | 3.1104000000000001e-03, 415 | 3.7324799999999998e-03, 416 | 4.4789760000000000e-03, 417 | 5.3747711999999996e-03, 418 | 6.4497254399999990e-03, 419 | 7.7396705279999985e-03, 420 | 9.2876046335999985e-03, 421 | 1.1145125560319998e-02, 422 | 1.3374150672383997e-02, 423 | 1.6048980806860794e-02, 424 | 1.9258776968232954e-02, 425 | 2.3110532361879543e-02, 426 | 2.7732638834255450e-02, 427 | 3.3279166601106538e-02, 428 | 3.9934999921327846e-02, 429 | 4.7921999905593413e-02, 430 | 5.7506399886712092e-02, 431 | 6.9007679864054511e-02, 432 | 8.2809215836865416e-02, 433 | 9.9371059004238496e-02, 434 | 1.1924527080508619e-01, 435 | 1.4309432496610341e-01, 436 | 1.7171318995932408e-01, 437 | 2.0605582795118890e-01, 438 | 2.4726699354142667e-01, 439 | 2.9672039224971197e-01, 440 | 3.5606447069965436e-01, 441 | ] 442 | ts = exof.get_global_variable_values("TM_STEP") 443 | assert len(ts) == len(tsL) 444 | assert np.allclose(ts, tsL) 445 | 446 | vL = [ 447 | 1.0, 448 | 0.9991, 449 | 0.99711574964, 450 | 0.994734772493, 451 | 0.991880991528, 452 | 0.98846350487, 453 | 0.984374734605, 454 | 0.979488078236, 455 | 0.973655459212, 456 | 0.966705028309, 457 | 0.95843938673, 458 | 0.948634916753, 459 | 0.937043122046, 460 | 0.923395311499, 461 | 0.907412498792, 462 | 0.888822959682, 463 | 0.867390275439, 464 | 0.842954435118, 465 | 0.815486876904, 466 | 0.785156158094, 467 | 0.752393464328, 468 | 0.717937232906, 469 | 0.682828124286, 470 | 0.648327635188, 471 | 0.615752544501, 472 | 0.586246600528, 473 | 0.560525941575, 474 | 0.538624922599, 475 | 0.519713479061, 476 | 0.502336759139, 477 | 0.485852780561, 478 | ] 479 | v = exof.get_node_variable_history("VELOCITY_X", 6) 480 | assert len(v) == len(vL) 481 | assert np.allclose(v, vL) 482 | 483 | eL = [ 484 | 4.9368e-61, 485 | 4.93680133294e-61, 486 | 3.03459399385e-12, 487 | 8.35805930438e-11, 488 | 6.12654077006e-10, 489 | 2.82997861173e-09, 490 | 1.02552350281e-08, 491 | 3.20073624819e-08, 492 | 9.03501695757e-08, 493 | 2.37400453261e-07, 494 | 5.91332274304e-07, 495 | 1.41334733801e-06, 496 | 3.26840718583e-06, 497 | 7.35486975052e-06, 498 | 1.61675206433e-05, 499 | 3.48020804957e-05, 500 | 7.34552970585e-05, 501 | 0.000152063791808, 502 | 0.000308565227843, 503 | 0.000612845535871, 504 | 0.00118865930759, 505 | 0.0022447788773, 506 | 0.00411283766336, 507 | 0.00728102875914, 508 | 0.0123991686804, 509 | 0.0202141071934, 510 | 0.031386178943, 511 | 0.0461691796204, 512 | 0.0640648397757, 513 | 0.0837768855597, 514 | 0.10376688975, 515 | ] 516 | e = exof.get_element_variable_history("ENERGY_1", 7) 517 | assert len(e) == len(eL) 518 | assert np.allclose(eL, e) 519 | 520 | exof.close() 521 | 522 | 523 | def test_exodusii_read_mkmesh(datadir): 524 | # use mkmesh files to test reading distribution factors 525 | 526 | exof = exodusii.File(os.path.join(datadir, "mkmesh.gen")) 527 | assert exof.num_dimensions() == 2 528 | 
assert exof.storage_type() == "d" 529 | 530 | ns = exof.get_node_set_nodes(100) 531 | assert np.allclose(ns, [7, 8, 9, 10, 11, 12]) 532 | ns = exof.get_node_set_nodes(101) 533 | assert np.allclose(ns, [1, 2, 3, 4, 5, 6]) 534 | 535 | df = exof.get_node_set_dist_facts(101) 536 | dfL = [1.1, 2.1, 3.1, 4.1, 5.1, 6.1] 537 | assert np.allclose(df, dfL) 538 | 539 | ss = exof.get_side_set(200) 540 | print(ss) 541 | assert np.allclose(ss.elems, [1, 1, 2, 3, 4, 5]) 542 | assert np.allclose(ss.sides, [4, 3, 3, 3, 3, 3]) 543 | ss = exof.get_side_set(201) 544 | print(ss) 545 | assert np.allclose(ss.elems, [1, 2, 3, 4, 5, 5]) 546 | assert np.allclose(ss.sides, [1, 1, 1, 1, 1, 2]) 547 | df = exof.get_side_set_dist_facts(201) 548 | dfL = [1.1, 1.2, 2.1, 2.2, 3.1, 3.2, 4.1, 4.2, 5.1, 5.2, 6.1, 6.2] 549 | assert np.allclose(df, dfL) 550 | 551 | exof.close() 552 | 553 | 554 | def test_exodusii_allclose(datadir): 555 | noh = os.path.join(datadir, "noh.exo") 556 | f1 = exodusii.exo_file(noh) 557 | f2 = exodusii.exo_file(noh) 558 | assert exodusii.allclose(f1, f2) 559 | -------------------------------------------------------------------------------- /test/serial_write.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | 4 | import exodusii 5 | import exodusii.util as util 6 | 7 | 8 | def test_exodusii_write_1(tmpdir): 9 | # this test sets some initialization values then reads back those values 10 | with util.working_dir(tmpdir.strpath): 11 | f = "baz.exo" 12 | with exodusii.File(f, mode="w") as exof: 13 | exof.put_init(f"Test {f}", 2, 1, 2, 3, 4, 5) 14 | 15 | with exodusii.File(f, mode="r") as exof: 16 | assert exof.title() == f"Test {f}", exof.title() 17 | assert exof.num_dimensions() == 2 18 | assert exof.num_nodes() == 1 19 | assert exof.num_elems() == 2 20 | assert exof.num_blks() == 3 21 | assert exof.num_node_sets() == 4 22 | assert exof.num_side_sets() == 5 23 | 24 | 25 | def test_exodusii_write_2(tmpdir): 26 | # this test sets some initialization values then reads back those values 27 | with util.working_dir(tmpdir.strpath): 28 | baz = np.linspace(0, 10, 25) 29 | foo = np.linspace(10, 20, 25) 30 | spam = np.linspace(20, 30, 25) 31 | data = {"baz": baz, "foo": foo, "spam": spam} 32 | times = np.linspace(0, 100, 25) 33 | f = exodusii.write_globals(data, times, "Test") 34 | with exodusii.File(f, mode="r") as exof: 35 | assert exof.title() == "Test" 36 | assert exof.num_dimensions() == 1 37 | assert exof.num_nodes() == 0 38 | assert exof.num_elems() == 0 39 | assert exof.num_blks() == 0 40 | assert exof.num_node_sets() == 0 41 | assert exof.num_side_sets() == 0 42 | assert sorted(exof.get_global_variable_names()) == ["baz", "foo", "spam"] 43 | 44 | values = exof.get_global_variable_values("baz") 45 | assert np.allclose(values, baz) 46 | 47 | values = exof.get_global_variable_values("foo") 48 | assert np.allclose(values, foo) 49 | 50 | values = exof.get_global_variable_values("spam") 51 | assert np.allclose(values, spam) 52 | 53 | values = exof.get_times() 54 | assert np.allclose(values, times) 55 | 56 | 57 | def test_exodusii_write_3(tmpdir): 58 | # this test sets more values then reads back those values 59 | ndim = 2 60 | type = 'QUAD' 61 | node_count = 4 62 | cell_count = 1 63 | x = np.array([0.0, 1.0, 0.0, 1.0]) 64 | y = np.array([0.0, 0.0, 1.0, 1.0]) 65 | conn = np.array([[0, 1, 2, 3]]) 66 | block_names = ['cell'] 67 | sideset_names = ['sideset_1', 'sideset_2'] 68 | sideset_cells = [[0], [0, 0]] 69 | sideset_sides = [[1], [2, 3]] 
70 | with util.working_dir(tmpdir.strpath): 71 | with exodusii.File('write_3.exo', mode="w") as exof: 72 | exof.put_init('Write_3', ndim, node_count, cell_count, 73 | len(block_names), 0, len(sideset_names)) 74 | exof.put_coord(x, y) 75 | exof.put_element_block(1, type, cell_count, node_count) 76 | exof.put_element_block_name(1, 'block_1') 77 | exof.put_element_conn(1, conn) 78 | # Side set 1 79 | exof.put_side_set_param(1, len(sideset_cells[0])) 80 | exof.put_side_set_name(1, sideset_names[0]) 81 | exof.put_side_set_sides(1, sideset_cells[0], sideset_sides[0]) 82 | # Side set 2 83 | exof.put_side_set_param(2, len(sideset_cells[1])) 84 | exof.put_side_set_name(2, sideset_names[1]) 85 | exof.put_side_set_sides(2, sideset_cells[1], sideset_sides[1]) 86 | with exodusii.File('write_3.exo', mode="r") as exof: 87 | assert exof.title() == "Write_3" 88 | assert exof.num_dimensions() == 2 89 | assert exof.num_nodes() == 4 90 | assert exof.num_elems() == 1 91 | assert exof.num_blks() == 1 92 | assert exof.num_node_sets() == 0 93 | assert exof.num_side_sets() == 2 94 | coords = exof.get_coords() 95 | assert coords.shape == (4,2) 96 | xf = coords[:,0] 97 | assert np.allclose(x, xf) 98 | yf = coords[:,1] 99 | assert np.allclose(y, yf) 100 | blk = exof.get_element_block(1) 101 | assert blk.name == 'block_1' 102 | ss = exof.get_side_set(1) 103 | assert ss.name == 'sideset_1' 104 | ss = exof.get_side_set(2) 105 | assert ss.name == 'sideset_2' 106 | 107 | 108 | def test_exodusii_write_4(tmpdir): 109 | # this test sets multiple block names at once then reads back those values 110 | ndim = 2 111 | type = 'QUAD' 112 | node_count = 4 113 | cell_count = 1 114 | x = np.array([0.0, 1.0, 0.0, 1.0]) 115 | y = np.array([0.0, 0.0, 1.0, 1.0]) 116 | block_names = ['block1', 'block2', 'block3'] 117 | with util.working_dir(tmpdir.strpath): 118 | with exodusii.File('write_4.exo', mode="w") as exof: 119 | exof.put_init('Write_4', ndim, node_count, cell_count, 120 | len(block_names), 0, 0) 121 | exof.put_coord(x, y) 122 | exof.put_element_block(1, type, cell_count, node_count) 123 | exof.put_element_block(2, type, cell_count, node_count) 124 | exof.put_element_block(3, type, cell_count, node_count) 125 | exof.put_element_block_names(block_names) 126 | with exodusii.File('write_4.exo', mode="r") as exof: 127 | assert exof.title() == "Write_4" 128 | assert exof.num_dimensions() == 2 129 | assert exof.num_nodes() == 4 130 | assert exof.num_elems() == 1 131 | assert exof.num_blks() == 3 132 | assert exof.num_node_sets() == 0 133 | assert exof.num_side_sets() == 0 134 | blk = exof.get_element_block(1) 135 | assert blk.name == block_names[0] 136 | blk = exof.get_element_block(2) 137 | assert blk.name == block_names[1] 138 | blk = exof.get_element_block(3) 139 | assert blk.name == block_names[2] 140 | 141 | -------------------------------------------------------------------------------- /test/test.sh: -------------------------------------------------------------------------------- 1 | export PYTHONPATH=`pwd`/.. 2 | pytest "$@" 3 | --------------------------------------------------------------------------------
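
For reference, a minimal usage sketch follows. It is not part of the repository; it simply strings together calls that the test suite above exercises (`exodusii.File`, `get_times`, `get_coords`, `get_node_variable_names`, `get_node_variable_values`, and `exodusii.region.circle`). The data path points at the sample file shipped under `test/data`; substitute your own Exodus file.

```python
# Hedged usage sketch: assumes the package is installed and an Exodus file is
# available (here, the noh.exo sample used by the tests).
import exodusii

with exodusii.File("test/data/noh.exo", mode="r") as exof:
    print(exof.title())                           # database title
    print(exof.num_dimensions())                  # spatial dimension
    times = exof.get_times()                      # array of output times
    coords = exof.get_coords()                    # node coordinates, shape (num_nodes, ndim)
    names = exof.get_node_variable_names()        # e.g. DISPLX, DISPLY, VELOCITY_X, ...
    velx = exof.get_node_variable_values("VELOCITY_X", time_step=4)

# Regions select points geometrically, e.g. the nodes inside a unit circle at the origin
region = exodusii.region.circle([0.0, 0.0], 1.0)
inside = region.contains(coords)                  # boolean mask over the nodes
```

The same `contains` pattern applies to the cylinder, quad, rectangle, and sphere regions defined in exodusii/region.py.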