├── tests ├── __init__.py ├── test_testdata.py ├── test_pointset.py ├── test_lineset.py ├── test_composite.py ├── test_surface.py ├── test_assets.py ├── test_textures.py ├── test_base.py ├── test_attributes.py └── test_blockmodel.py ├── assets ├── v1 │ └── test_file.omf └── v2 │ └── test_file.omf ├── docs ├── images │ ├── LineSet.png │ ├── PointSet.png │ ├── Surface.png │ ├── VolumeGrid.png │ ├── ImageTexture.png │ ├── ProjectExport.png │ ├── ProjectImport.png │ ├── LineSetGeometry.png │ ├── SurfaceGeometry.png │ ├── PointSetGeometry.png │ ├── VolumeGridGeometry.png │ └── SurfaceGridGeometry.png ├── index.rst ├── content │ ├── io.rst │ ├── arrays.rst │ ├── projects.rst │ ├── lineset.rst │ ├── composite.rst │ ├── api.rst │ ├── pointset.rst │ ├── surface.rst │ ├── blockmodel.rst │ ├── base.rst │ ├── attributes.rst │ ├── textures.rst │ └── examples.rst ├── test_docs.py ├── Makefile ├── make.bat └── conf.py ├── notebooks ├── zordercurve.png ├── z_order_utils.py ├── cbi_plot.py └── cbi.py ├── .bumpversion.cfg ├── omf ├── compat │ ├── __init__.py │ ├── interface.py │ └── omf_v1.py ├── __init__.py ├── pointset.py ├── composite.py ├── lineset.py ├── surface.py ├── texture.py ├── fileio.py ├── base.py └── attribute.py ├── .github └── workflows │ ├── publish.yml │ └── run-tests.yml ├── LICENSE ├── .gitignore ├── pyproject.toml ├── README.rst ├── code_of_conduct.md └── CONTRIBUTING.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assets/v1/test_file.omf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/assets/v1/test_file.omf -------------------------------------------------------------------------------- /assets/v2/test_file.omf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/assets/v2/test_file.omf -------------------------------------------------------------------------------- /docs/images/LineSet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/LineSet.png -------------------------------------------------------------------------------- /docs/images/PointSet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/PointSet.png -------------------------------------------------------------------------------- /docs/images/Surface.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/Surface.png -------------------------------------------------------------------------------- /docs/images/VolumeGrid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/VolumeGrid.png -------------------------------------------------------------------------------- /notebooks/zordercurve.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/notebooks/zordercurve.png -------------------------------------------------------------------------------- /docs/images/ImageTexture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/ImageTexture.png -------------------------------------------------------------------------------- /docs/images/ProjectExport.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/ProjectExport.png -------------------------------------------------------------------------------- /docs/images/ProjectImport.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/ProjectImport.png -------------------------------------------------------------------------------- /docs/images/LineSetGeometry.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/LineSetGeometry.png -------------------------------------------------------------------------------- /docs/images/SurfaceGeometry.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/SurfaceGeometry.png -------------------------------------------------------------------------------- /docs/images/PointSetGeometry.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/PointSetGeometry.png -------------------------------------------------------------------------------- /docs/images/VolumeGridGeometry.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/VolumeGridGeometry.png -------------------------------------------------------------------------------- /docs/images/SurfaceGridGeometry.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmggroup/omf-python/HEAD/docs/images/SurfaceGridGeometry.png -------------------------------------------------------------------------------- /.bumpversion.cfg: 
-------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 2.0.0a0 3 | files = omf/fileio.py README.rst docs/conf.py 4 | commit = True 5 | tag = True 6 | 7 | -------------------------------------------------------------------------------- /omf/compat/__init__.py: -------------------------------------------------------------------------------- 1 | """compat: Readers for older file versions""" 2 | from .interface import IOMFReader, InvalidOMFFile, WrongVersionError 3 | from . import omf_v1 4 | 5 | compatible_omf_readers = [ 6 | omf_v1.Reader, 7 | ] 8 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. _index: 2 | 3 | .. include:: ../README.rst 4 | 5 | **Contents:** 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | content/api 11 | content/examples 12 | content/io 13 | 14 | 15 | Index 16 | ***** 17 | 18 | * :ref:`genindex` 19 | 20 | .. * :ref:`modindex` 21 | .. * :ref:`search` 22 | -------------------------------------------------------------------------------- /docs/content/io.rst: -------------------------------------------------------------------------------- 1 | .. _io: 2 | 3 | ########## 4 | OMF IO API 5 | ########## 6 | 7 | Save 8 | **** 9 | 10 | .. image:: /images/ProjectExport.png 11 | 12 | .. autofunction:: omf.fileio.save 13 | 14 | 15 | Load 16 | **** 17 | 18 | .. image:: /images/ProjectImport.png 19 | 20 | .. autofunction:: omf.fileio.load 21 | -------------------------------------------------------------------------------- /docs/content/arrays.rst: -------------------------------------------------------------------------------- 1 | .. _arrays: 2 | 3 | Array Types 4 | *********** 5 | 6 | Array 7 | ----- 8 | 9 | .. autoclass:: omf.attribute.Array 10 | 11 | StringList 12 | ---------- 13 | 14 | .. 
autoclass:: omf.attribute.StringList 15 | 16 | ArrayInstanceProperty 17 | --------------------- 18 | 19 | .. autoclass:: omf.attribute.ArrayInstanceProperty 20 | -------------------------------------------------------------------------------- /docs/content/projects.rst: -------------------------------------------------------------------------------- 1 | .. _projects: 2 | 3 | Projects 4 | ******** 5 | 6 | Projects contain a list of :ref:`pointsets`, :ref:`linesets`, :ref:`surfaces`, 7 | :ref:`blockmodels`, and :ref:`composites`. Projects can be serialized and 8 | deserialized to file using :meth:`omf.fileio.save` and :meth:`omf.fileio.load`. 9 | 10 | For more details on how to build a project, see the :ref:`examples`. 11 | 12 | .. autoclass:: omf.base.Project 13 | -------------------------------------------------------------------------------- /tests/test_testdata.py: -------------------------------------------------------------------------------- 1 | """Tests that the files in the testdata folder can be read. This folder is excluded from source control.""" 2 | import os 3 | from . import test_assets 4 | 5 | 6 | class TestTestdata(test_assets.TestAssets): 7 | """This test looks for a 'testdata' folder in the root of this repository. 8 | All .omf files in that folder will be loaded and validated.""" 9 | 10 | search_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "testdata")) 11 | -------------------------------------------------------------------------------- /docs/content/lineset.rst: -------------------------------------------------------------------------------- 1 | .. _linesets: 2 | 3 | LineSets 4 | ******** 5 | 6 | .. image:: /images/LineSet.png 7 | :scale: 80% 8 | 9 | Element 10 | ------- 11 | 12 | .. image:: /images/LineSetGeometry.png 13 | :width: 80% 14 | :align: center 15 | 16 | .. autoclass:: omf.lineset.LineSet 17 | 18 | Attributes 19 | ---------- 20 | 21 | Attributes is a list of :ref:`attributes `. 
For LineSets, 22 | :code:`location='vertices'` and :code:`location='segments'` are valid. 23 | 24 | -------------------------------------------------------------------------------- /docs/content/composite.rst: -------------------------------------------------------------------------------- 1 | .. _composites: 2 | 3 | Composites 4 | ********** 5 | 6 | Composites are used to compose multiple other elements into 7 | a single, more complex, grouped object. 8 | 9 | 10 | Element 11 | ------- 12 | 13 | .. autoclass:: omf.composite.Composite 14 | 15 | Attributes 16 | ---------- 17 | 18 | Attributes is a list of :ref:`attributes `. For Composite Elements, 19 | only :code:`location='elements'` is valid. However, attributes may also be 20 | defined on the child :code:`elements` 21 | 22 | -------------------------------------------------------------------------------- /docs/content/api.rst: -------------------------------------------------------------------------------- 1 | .. _api: 2 | 3 | OMF API Index 4 | ============= 5 | 6 | The OMF API contains tools for creating :ref:`projects` and adding 7 | :ref:`pointsets`, :ref:`linesets`, :ref:`surfaces`, :ref:`blockmodels`, 8 | and :ref:`composites`. These different elements may have 9 | :ref:`attributes` or image :ref:`textures`. 10 | 11 | .. 
toctree:: 12 | :maxdepth: 2 13 | 14 | projects 15 | pointset 16 | lineset 17 | surface 18 | blockmodel 19 | composite 20 | attributes 21 | textures 22 | arrays 23 | base 24 | -------------------------------------------------------------------------------- /tests/test_pointset.py: -------------------------------------------------------------------------------- 1 | """Tests for PointSet validation""" 2 | import datetime 3 | import numpy as np 4 | 5 | import omf 6 | 7 | 8 | def test_pointset(): 9 | """Test pointset geometry validation""" 10 | elem = omf.pointset.PointSet() 11 | elem.vertices = np.random.rand(10, 3) 12 | assert elem.validate() 13 | assert elem.location_length("vertices") == 10 14 | elem.metadata = { 15 | "color": "green", 16 | "date_created": datetime.datetime.utcnow(), 17 | "version": "v1.3", 18 | } 19 | assert elem.validate() 20 | -------------------------------------------------------------------------------- /docs/content/pointset.rst: -------------------------------------------------------------------------------- 1 | .. _pointsets: 2 | 3 | PointSets 4 | ********* 5 | 6 | .. image:: /images/PointSet.png 7 | :scale: 80% 8 | 9 | Element 10 | ------- 11 | 12 | .. image:: /images/PointSetGeometry.png 13 | :width: 80% 14 | :align: center 15 | 16 | .. autoclass:: omf.pointset.PointSet 17 | 18 | Attributes 19 | ---------- 20 | 21 | Attributes is a list of :ref:`attributes `. For PointSets, only 22 | :code:`location='vertices'` is valid. 23 | 24 | Textures 25 | -------- 26 | 27 | Textures is a list of :ref:`textures` mapped to the PointSet. 28 | -------------------------------------------------------------------------------- /docs/content/surface.rst: -------------------------------------------------------------------------------- 1 | .. _surfaces: 2 | 3 | Surfaces 4 | ******** 5 | 6 | .. image:: /images/Surface.png 7 | 8 | Elements 9 | -------- 10 | 11 | .. image:: /images/SurfaceGeometry.png 12 | :align: center 13 | 14 | .. 
autoclass:: omf.surface.Surface 15 | 16 | .. image:: /images/SurfaceGridGeometry.png 17 | :align: center 18 | 19 | .. autoclass:: omf.surface.TensorGridSurface 20 | 21 | Attributes 22 | ---------- 23 | 24 | Attributes is a list of :ref:`attributes `. For Surfaces, 25 | :code:`location='vertices'` and :code:`location='faces'` are valid. 26 | 27 | Textures 28 | -------- 29 | 30 | Textures is a list of :ref:`textures` mapped to the Surface. 31 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish omf 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | env: 9 | TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} 10 | TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} 11 | 12 | jobs: 13 | publish: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - name: checkout repo 18 | uses: actions/checkout@v3 19 | 20 | - name: Set up Python 3.10 21 | uses: actions/setup-python@v4 22 | with: 23 | python-version: 3.10 24 | 25 | - name: Build 26 | run: | 27 | pip install --upgrade pip . 
flit twine 28 | flit build 29 | twine upload dist/* 30 | -------------------------------------------------------------------------------- /tests/test_lineset.py: -------------------------------------------------------------------------------- 1 | """Tests for LineSet validation""" 2 | import numpy as np 3 | import pytest 4 | 5 | import omf 6 | 7 | 8 | def test_lineset(): 9 | """Test lineset geometry validation""" 10 | elem = omf.lineset.LineSet() 11 | elem.vertices = np.random.rand(10, 3) 12 | assert elem.validate() 13 | elem.segments = np.random.randint(9, size=[5, 2]) 14 | assert elem.validate() 15 | assert elem.location_length("vertices") == 10 16 | assert elem.location_length("segments") == 5 17 | elem.segments.array[0, 0] = -1 18 | with pytest.raises(ValueError): 19 | elem.validate() 20 | elem.segments.array[0, 0] = 10 21 | with pytest.raises(ValueError): 22 | elem.validate() 23 | -------------------------------------------------------------------------------- /docs/content/blockmodel.rst: -------------------------------------------------------------------------------- 1 | .. _blockmodels: 2 | 3 | Block Models 4 | ************ 5 | 6 | .. image:: /images/VolumeGrid.png 7 | :scale: 80% 8 | 9 | Element 10 | ------- 11 | 12 | .. image:: /images/VolumeGridGeometry.png 13 | :width: 80% 14 | :align: center 15 | 16 | .. autoclass:: omf.blockmodel.TensorGridBlockModel 17 | 18 | .. autoclass:: omf.blockmodel.RegularBlockModel 19 | 20 | .. autoclass:: omf.blockmodel.RegularSubBlockModel 21 | 22 | .. autoclass:: omf.blockmodel.OctreeSubBlockModel 23 | 24 | .. autoclass:: omf.blockmodel.ArbitrarySubBlockModel 25 | 26 | Attributes 27 | ---------- 28 | 29 | Attributes is a list of :ref:`attributes `. For block models, 30 | :code:`location='parent_blocks'` and :code:`location='sub_blocks'` are valid. 
31 | -------------------------------------------------------------------------------- /tests/test_composite.py: -------------------------------------------------------------------------------- 1 | """Tests for Composite Element validation""" 2 | import numpy as np 3 | import properties 4 | import pytest 5 | 6 | import omf 7 | 8 | 9 | def test_composite(): 10 | """Test composite element validation""" 11 | elem = omf.Composite() 12 | elem.elements = [ 13 | omf.PointSet(vertices=np.random.rand(10, 3)), 14 | omf.PointSet(vertices=np.random.rand(10, 3)), 15 | ] 16 | assert elem.validate() 17 | assert elem.location_length("elements") == 2 18 | elem.attributes = [ 19 | omf.NumericAttribute(array=[1.0, 2.0], location="elements"), 20 | ] 21 | assert elem.validate() 22 | elem.attributes[0].array = [1.0, 2.0, 3.0] 23 | with pytest.raises(properties.ValidationError): 24 | elem.validate() 25 | -------------------------------------------------------------------------------- /omf/compat/interface.py: -------------------------------------------------------------------------------- 1 | """interface.py: Interface that all OMF readers must adhere to.""" 2 | import abc 3 | 4 | from ..base import Project 5 | 6 | 7 | class WrongVersionError(ValueError): 8 | """Raised if the initial version check failed""" 9 | 10 | 11 | class InvalidOMFFile(ValueError): 12 | """Raised if loading the file failed""" 13 | 14 | 15 | # pylint: disable=too-few-public-methods 16 | class IOMFReader(abc.ABC): 17 | """Interface for readers of older OMF file versions.""" 18 | 19 | @abc.abstractmethod 20 | def __init__(self, filename: str): 21 | pass 22 | 23 | @abc.abstractmethod 24 | def load(self, include_binary: bool = True, project_json: str = None) -> Project: 25 | """Attempt to load the specified file. 26 | See :func:`~omf.load` for parameters. 
27 | :raises: 28 | WrongVersionError: 29 | """ 30 | -------------------------------------------------------------------------------- /omf/__init__.py: -------------------------------------------------------------------------------- 1 | """omf: API library for Open Mining Format file interchange format""" 2 | from .base import Project 3 | from .blockmodel import ( 4 | ArbitrarySubBlockModel, 5 | OctreeSubBlockModel, 6 | RegularBlockModel, 7 | RegularSubBlockModel, 8 | TensorGridBlockModel, 9 | ) 10 | from .composite import Composite 11 | from .attribute import ( 12 | Array, 13 | CategoryAttribute, 14 | CategoryColormap, 15 | ContinuousColormap, 16 | DiscreteColormap, 17 | NumericAttribute, 18 | StringAttribute, 19 | VectorAttribute, 20 | ) 21 | from .lineset import LineSet 22 | from .pointset import PointSet 23 | from .surface import Surface, TensorGridSurface 24 | from .texture import ProjectedTexture, UVMappedTexture 25 | 26 | from .fileio import load, save, __version__ 27 | 28 | __author__ = "Global Mining Guidelines Group" 29 | __license__ = "MIT License" 30 | __copyright__ = "Copyright 2021 Global Mining Guidelines Group" 31 | -------------------------------------------------------------------------------- /docs/content/base.rst: -------------------------------------------------------------------------------- 1 | .. _base: 2 | 3 | Other Classes 4 | ************* 5 | 6 | Project Element 7 | --------------- 8 | 9 | Available elements are :ref:`pointsets`, :ref:`linesets`, :ref:`surfaces`, 10 | :ref:`blockmodels`, and :ref:`composites`; :ref:`projects` are built with elements. 11 | 12 | .. autoclass:: omf.base.ProjectElement 13 | 14 | Project Element Attribute 15 | ------------------------- 16 | 17 | .. autoclass:: omf.base.ProjectElementAttribute 18 | 19 | Content Model 20 | ------------- 21 | 22 | .. autoclass:: omf.base.ContentModel 23 | 24 | Base Model 25 | ---------- 26 | 27 | .. 
autoclass:: omf.base.BaseModel 28 | 29 | Metadata Classes 30 | ---------------- 31 | 32 | .. autoclass:: omf.base.ProjectMetadata 33 | 34 | .. autoclass:: omf.base.ElementMetadata 35 | 36 | .. autoclass:: omf.base.AttributeMetadata 37 | 38 | .. autoclass:: omf.base.BaseMetadata 39 | 40 | .. autoclass:: omf.base.ArbitraryMetadataDict 41 | 42 | .. autoclass:: omf.base.StringDateTime 43 | -------------------------------------------------------------------------------- /.github/workflows/run-tests.yml: -------------------------------------------------------------------------------- 1 | name: Run OMF tests 2 | 3 | env: 4 | app: omf 5 | 6 | on: 7 | push: 8 | branches: 9 | - master 10 | - dev 11 | pull_request: 12 | branches: 13 | - master 14 | - dev 15 | 16 | jobs: 17 | tests: 18 | runs-on: ubuntu-latest 19 | 20 | strategy: 21 | matrix: 22 | python-version: [ "3.7", "3.8", "3.9", "3.10", "3.11"] 23 | 24 | steps: 25 | - name: checkout repo 26 | uses: actions/checkout@v3 27 | 28 | - name: Set up Python ${{ matrix.python-version }} 29 | uses: actions/setup-python@v4 30 | with: 31 | python-version: ${{ matrix.python-version }} 32 | 33 | - name: Install dependencies 34 | run: | 35 | pip install --upgrade pip .[docs,lint,test] 36 | 37 | - name: Unit tests 38 | run: | 39 | pytest 40 | 41 | - name: Docs tests 42 | run: | 43 | nosetests --logging-level=INFO docs 44 | 45 | - name: Pylint 46 | run: | 47 | pylint $app tests 48 | 49 | - name: Black 50 | run: | 51 | black --check $app tests 52 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Global Mining Guidelines Group 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | 
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /omf/pointset.py: -------------------------------------------------------------------------------- 1 | """pointset.py: PointSet element definition""" 2 | import properties 3 | from .base import ProjectElement 4 | from .attribute import ArrayInstanceProperty 5 | from .texture import HasTexturesMixin 6 | 7 | 8 | class PointSet(ProjectElement, HasTexturesMixin): 9 | """Point set element defined by vertices""" 10 | 11 | schema = "org.omf.v2.element.pointset" 12 | 13 | origin = properties.Vector3( 14 | "Origin of the PointSet relative to Project coordinate reference system", 15 | default=[0.0, 0.0, 0.0], 16 | ) 17 | 18 | vertices = ArrayInstanceProperty( 19 | "Spatial coordinates of points relative to project origin", 20 | shape=("*", 3), 21 | dtype=float, 22 | ) 23 | 24 | _valid_locations = ("vertices",) 25 | 26 | def location_length(self, location): 27 | """Return correct attribute length based on location""" 28 | return self.num_nodes 29 | 30 | @property 31 | def num_nodes(self): 32 | """Number of nodes (vertices)""" 33 | return 
len(self.vertices.array) 34 | 35 | @property 36 | def num_cells(self): 37 | """Number of cell centers (same as nodes)""" 38 | return self.num_nodes 39 | -------------------------------------------------------------------------------- /tests/test_surface.py: -------------------------------------------------------------------------------- 1 | """Tests for Surface validation""" 2 | import numpy as np 3 | import pytest 4 | 5 | import omf 6 | 7 | 8 | def test_surface(): 9 | """Test surface geometry validation""" 10 | elem = omf.surface.Surface() 11 | elem.vertices = np.random.rand(10, 3) 12 | elem.triangles = np.random.randint(9, size=[5, 3]) 13 | assert elem.validate() 14 | assert elem.location_length("vertices") == 10 15 | assert elem.location_length("faces") == 5 16 | elem.triangles.array[0, 0] = -1 17 | with pytest.raises(ValueError): 18 | elem.validate() 19 | elem.triangles.array[0, 0] = 10 20 | with pytest.raises(ValueError): 21 | elem.validate() 22 | 23 | 24 | def test_surfacegrid(): 25 | """Test surface grid geometry validation""" 26 | elem = omf.surface.TensorGridSurface() 27 | elem.tensor_u = [1.0, 1.0] 28 | elem.tensor_v = [2.0, 2.0, 2.0] 29 | assert elem.validate() 30 | assert elem.location_length("vertices") == 12 31 | assert elem.location_length("faces") == 6 32 | elem.axis_v = [1.0, 1.0, 0] 33 | with pytest.raises(ValueError): 34 | elem.validate() 35 | elem.axis_v = "Y" 36 | elem.offset_w = np.random.rand(12) 37 | elem.validate() 38 | elem.offset_w = np.random.rand(6) 39 | with pytest.raises(ValueError): 40 | elem.validate() 41 | -------------------------------------------------------------------------------- /docs/content/attributes.rst: -------------------------------------------------------------------------------- 1 | .. _attributes: 2 | 3 | Attributes 4 | ********** 5 | 6 | ProjectElements include a list of ProjectElementAttribute. These specify mesh location 7 | ('vertices', 'faces', etc.) as well as the array, name, and 8 | description. 
See class descriptions below for specific types of Attributes. 9 | 10 | Mapping attribute array values to a mesh is straightforward for unstructured meshes 11 | (those defined by vertices, segments, triangles, etc); the order of the attribute 12 | array corresponds to the order of the associated mesh parameter. 13 | For grid meshes, however, mapping 1D attribute array to the 2D or 3D grid requires 14 | correctly ordered ijk unwrapping. 15 | 16 | NumericAttribute 17 | ---------------- 18 | 19 | .. autoclass:: omf.attribute.NumericAttribute 20 | 21 | VectorAttribute 22 | --------------- 23 | 24 | .. autoclass:: omf.attribute.VectorAttribute 25 | 26 | StringAttribute 27 | --------------- 28 | 29 | .. autoclass:: omf.attribute.StringAttribute 30 | 31 | CategoryAttribute 32 | ----------------- 33 | 34 | .. autoclass:: omf.attribute.CategoryAttribute 35 | 36 | ContinuousColormap 37 | ------------------ 38 | 39 | .. autoclass:: omf.attribute.ContinuousColormap 40 | 41 | DiscreteColormap 42 | ---------------- 43 | 44 | .. autoclass:: omf.attribute.DiscreteColormap 45 | 46 | CategoryColormap 47 | ---------------- 48 | 49 | .. autoclass:: omf.attribute.CategoryColormap 50 | -------------------------------------------------------------------------------- /docs/content/textures.rst: -------------------------------------------------------------------------------- 1 | .. _textures: 2 | 3 | Textures 4 | ******** 5 | 6 | Projected Texture 7 | ----------------- 8 | 9 | Projected textures are images that exist in space and are mapped to their 10 | corresponding elements. Unlike attributes, they do not need to correspond to mesh 11 | nodes or cell centers. This image shows how textures are mapped to a surface. 12 | Their position is defined by a corner and axis vectors then they 13 | are mapped laterally to the element position. 14 | 15 | .. 
image:: /images/ImageTexture.png 16 | 17 | Like attributes, multiple textures can be applied to a element; simply provide a 18 | list of textures. Each of these textures provides a corner point and two 19 | extent vectors for the plane defining where images rests. 20 | The `axis_*` properties define the extent of that image out from the corner. 21 | Given a rectangular PNG image, the `corner` is the bottom left, 22 | `corner + axis_u` is the bottom right, and `corner + axis_v` is the top left. 23 | This allows the image to be rotated and/or skewed. 24 | These values are independent of the corresponding Surface; in fact, there is 25 | nothing requiring the image to actually align with the Surface. 26 | 27 | .. autoclass:: omf.texture.ProjectedTexture 28 | 29 | 30 | UV Mapped Textures 31 | ------------------ 32 | 33 | Rather than being projected onto points or a surface, UV Mapped Textures 34 | are given normalized UV coordinates which correspond to element 35 | vertices. This allows arbitrary mapping of images to surfaces. 36 | 37 | .. autoclass:: omf.texture.UVMappedTexture 38 | 39 | Image 40 | ----- 41 | 42 | .. 
autoclass:: omf.texture.Image 43 | -------------------------------------------------------------------------------- /tests/test_assets.py: -------------------------------------------------------------------------------- 1 | """Tests that the files in the assets folder can be read""" 2 | import os 3 | 4 | import omf 5 | 6 | 7 | class TestAssets: 8 | """Tests that the files in the assets folder can be read""" 9 | 10 | search_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "assets")) 11 | 12 | @classmethod 13 | def pytest_generate_tests(cls, metafunc): # pylint: disable=missing-function-docstring 14 | metafunc.parametrize("path", cls.iter_assets(), ids=cls.idfn) 15 | 16 | @classmethod 17 | def iter_assets(cls): 18 | """Yields the full path of all omf files inside cls.search_dir""" 19 | for dir_, _, files in os.walk(cls.search_dir): 20 | for filename in files: 21 | _, ext = os.path.splitext(filename) 22 | if ext.lower() == ".omf": 23 | yield os.path.join(dir_, filename) 24 | 25 | @classmethod 26 | def idfn(cls, path): 27 | """Generates a test-name from a given filename""" 28 | if not isinstance(path, str): 29 | return "test" 30 | path, name = os.path.split(path) 31 | _, path = os.path.split(path) 32 | return f"{path}.{name}" 33 | 34 | def test_assets(self, path): 35 | """Tests that the file can be loaded with/without binary data""" 36 | omf.base.BaseModel._INSTANCES = {} # pylint: disable=W0212 37 | omf.load(path, include_binary=False) 38 | 39 | omf.base.BaseModel._INSTANCES = {} # pylint: disable=W0212 40 | new_proj = omf.load(path) 41 | assert new_proj is not None 42 | assert new_proj.validate() 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | 
env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | docs/_themes/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # IPython Notebook 71 | .ipynb_checkpoints 72 | Untitled*.ipynb 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | venv/ 85 | ENV/ 86 | 87 | # Spyder project settings 88 | .spyderproject 89 | 90 | # Rope project settings 91 | .ropeproject 92 | 93 | .DS_Store 94 | deps/ 95 | 96 | cover/ 97 | .idea/ 98 | example.omf 99 | example.png 100 | testdata/ 101 | -------------------------------------------------------------------------------- /docs/test_docs.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from __future__ import division 3 | from __future__ import print_function 4 | from __future__ import unicode_literals 5 | 6 | import os 7 | import subprocess 8 | import unittest 9 | 10 | 11 | class TestDoc(unittest.TestCase): 12 | @property 13 | def docs_dir(self): 14 | dirname, filename = 
os.path.split(os.path.abspath(__file__)) 15 | return os.path.sep.join(dirname.split(os.path.sep)[:-1] + ["docs"]) 16 | 17 | def setUp(self): 18 | self.build_dir = os.path.join(self.docs_dir, "_build") 19 | os.makedirs(self.build_dir, exist_ok=True) 20 | 21 | self.doctrees_dir = os.path.join(self.build_dir, "doctrees") 22 | os.makedirs(self.doctrees_dir, exist_ok=True) 23 | 24 | self.html_dir = os.path.join(self.build_dir, "html") 25 | os.makedirs(self.html_dir, exist_ok=True) 26 | 27 | def test_html(self): 28 | check = subprocess.call( 29 | [ 30 | "sphinx-build", 31 | "-nW", 32 | "-b", 33 | "html", 34 | "-d", 35 | "{}".format(self.doctrees_dir), 36 | "{}".format(self.docs_dir), 37 | "{}".format(self.html_dir), 38 | ] 39 | ) 40 | assert check == 0 41 | 42 | def test_linkcheck(self): 43 | check = subprocess.call( 44 | [ 45 | "sphinx-build", 46 | "-nW", 47 | "-b", 48 | "linkcheck", 49 | "-d", 50 | "{}".format(self.doctrees_dir), 51 | "{}".format(self.docs_dir), 52 | "{}".format(self.build_dir), 53 | ] 54 | ) 55 | assert check == 0 56 | 57 | 58 | if __name__ == "__main__": 59 | unittest.main() 60 | -------------------------------------------------------------------------------- /tests/test_textures.py: -------------------------------------------------------------------------------- 1 | """Tests for texture validation""" 2 | import os 3 | import numpy as np 4 | import png 5 | import properties 6 | import pytest 7 | 8 | import omf 9 | 10 | 11 | def setup_texture(func): 12 | """Function wrapper to create png image""" 13 | 14 | def new_func(): 15 | """Create png image and pass to func""" 16 | dirname, _ = os.path.split(os.path.abspath(__file__)) 17 | pngfile = os.path.sep.join([dirname, "out.png"]) 18 | img = ["110010010011", "101011010100", "110010110101", "100010010011"] 19 | img = [[int(val) for val in value] for value in img] 20 | writer = png.Writer(len(img[0]), len(img), greyscale=True, bitdepth=16) 21 | with open(pngfile, "wb") as file: 22 | writer.write(file, img) 
class Composite(ProjectElement):
    """Placeholder Composite class that will be replaced in a few lines of code

    The properties library does not allow updating forward references
    so we need to add Composite to the valid element types after the
    class is created, then create an identical subclass so the docs
    update.

    We should switch from properties to pydantic, which does allow
    updating forward refs...
    """

    # Child elements; after the patch below the union also accepts
    # nested Composite instances.
    elements = properties.List(
        "Elements grouped into one composite element",
        prop=properties.Union(
            "",
            (
                RegularBlockModel,
                RegularSubBlockModel,
                OctreeSubBlockModel,
                TensorGridBlockModel,
                ArbitrarySubBlockModel,
                LineSet,
                PointSet,
                Surface,
                TensorGridSurface,
            ),
        ),
        default=list,
    )

    # The only attribute location a composite exposes is its element list.
    _valid_locations = ("elements",)

    def location_length(self, location):
        """Composite attributes may only be defined on each element

        Each element within the composite may also have its own
        attributes.
        """
        return len(self.elements)


# Patch the union of valid element types so a Composite can contain other
# Composites: the properties library cannot resolve forward references, so
# the class is appended to its own union after creation.
composite_props = Composite._props["elements"].prop.props  # pylint: disable=E1101
Composite._props["elements"].prop.props = composite_props + (Composite,)  # pylint: disable=E1101


# Identical subclass so the generated documentation picks up the patched
# union above (see the placeholder class docstring).
class Composite(Composite):  # pylint: disable=E0102
    """Object constructed from other primitive elements and composites"""

    schema = "org.omf.v2.composite"
segments = ArrayInstanceProperty( 25 | "Endpoint vertex indices of line segments; if segments is not " 26 | "specified, the vertices are connected in order, equivalent to " 27 | "segments=[[0, 1], [1, 2], [2, 3], ...]", 28 | shape=("*", 2), 29 | dtype=int, 30 | required=False, 31 | ) 32 | 33 | _valid_locations = ("vertices", "segments") 34 | 35 | def location_length(self, location): 36 | """Return correct attribute length based on location""" 37 | if location == "segments": 38 | return self.num_cells 39 | return self.num_nodes 40 | 41 | @property 42 | def num_nodes(self): 43 | """Number of nodes (vertices)""" 44 | return len(self.vertices.array) 45 | 46 | @property 47 | def num_cells(self): 48 | """Number of cells (segments)""" 49 | if self.segments is None: 50 | return len(self.vertices.array) - 1 51 | return len(self.segments.array) 52 | 53 | @properties.validator 54 | def _validate_mesh(self): 55 | """Ensures segment indices are valid""" 56 | if self.segments is None: 57 | return True 58 | if np.min(self.segments.array) < 0: 59 | raise properties.ValidationError("Segments may only have positive integers") 60 | if np.max(self.segments.array) >= len(self.vertices.array): 61 | raise properties.ValidationError("Segments expects more vertices than provided") 62 | return True 63 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "omf" 3 | authors = [{ name = "Global Mining Guidelines Group", email = "info@gmggroup.org" }] 4 | description = "API Library for Open Mining Format" 5 | keywords = ["mining", "data", "interchange"] 6 | readme = "README.rst" 7 | requires-python = ">=3.7" 8 | license = { file = "LICENSE" } 9 | dynamic = ["version"] 10 | 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Programming Language :: Python", 14 | "Topic :: Scientific/Engineering", 15 | "Topic :: 
Scientific/Engineering :: Mathematics", 16 | "Topic :: Scientific/Engineering :: Physics", 17 | "Operating System :: Microsoft :: Windows", 18 | "Operating System :: POSIX", 19 | "Operating System :: Unix", 20 | "Operating System :: MacOS", 21 | "Natural Language :: English", 22 | ] 23 | 24 | dependencies = [ 25 | "numpy>=1.20", 26 | "properties==0.6.1", 27 | "pypng", 28 | "vectormath>=0.2.0", 29 | ] 30 | 31 | [project.urls] 32 | homepage = "https://gmggroup.org" 33 | repository = "https://github.com/gmggroup/omf" 34 | documentation = "https://readthedocs.org/projects/omf" 35 | 36 | [project.optional-dependencies] 37 | dev = [ 38 | "jupyter", 39 | "matplotlib", 40 | ] 41 | 42 | docs = [ 43 | "sphinx", 44 | "sphinx_rtd_theme", 45 | ] 46 | 47 | lint = [ 48 | "black", 49 | "pylint", 50 | ] 51 | 52 | test = [ 53 | "mock", 54 | "nose-py3", 55 | "pytest==7.2.1", 56 | "pytest-cov==4.0.0", 57 | "pytest-rst==0.1.5", 58 | ] 59 | 60 | [build-system] 61 | requires = ["flit_core>=3.2,<4"] 62 | build-backend = "flit_core.buildapi" 63 | 64 | [tool.black] 65 | line-length = 120 66 | target-version = ['py37', 'py38', 'py39', 'py310', 'py311'] 67 | 68 | [tool.pylint.'CLASSES'] 69 | exclude-protected = "_asdict,_fields,_replace,_source,_make,_props,_backend" 70 | 71 | [tool.pylint.'FORMAT'] 72 | max-line-length = 120 73 | 74 | [tool.pylint.'MESSAGES CONTROL'] 75 | disable = "consider-using-f-string" 76 | 77 | [tool.pylint.'SIMILARITIES'] 78 | min-similarity-lines = 20 79 | ignore-comments = "yes" 80 | ignore-docstrings = "yes" 81 | ignore-imports = "yes" 82 | 83 | [tool.pylint.'TYPECHECK'] 84 | generated-members = "_backend,array" 85 | 86 | [tool.pytest.ini_options] 87 | minversion = "7.2" 88 | required_plugins = "pytest-rst" 89 | testpaths = ["docs", "tests"] 90 | 91 | -------------------------------------------------------------------------------- /notebooks/z_order_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Given a 
import numpy as np

dimension = 3  # blocks are always indexed in 3D (u, v, w)
level_bits = 4  # enough to store eight refinement levels
max_bits = 8  # bits per pointer component, enough for UInt32 indexing
total_bits = max_bits * dimension + level_bits


# The code below is written generally; for this implementation the
# dimension could be hard-coded and the loops replaced with masked
# integer tricks (e.g. 0b01010101-style constants).
# See https://en.wikipedia.org/wiki/Z-order_curve


def bitrange(x, width, start, end):
    """Extract the bit range [start, end) of ``x`` as an integer.

    Bits are counted from the most-significant end of a ``width``-bit
    number; ``start`` is inclusive, ``end`` exclusive.
    """
    return (x >> (width - end)) & ((1 << (end - start)) - 1)


def get_index(pointer, level):
    """Interleave the pointer bits (Morton/Z-order) and append the level.

    Bit ``i`` of the interleaved value is bit ``i // dimension`` of
    component ``pointer[i % dimension]``; the level occupies the lowest
    ``level_bits`` bits of the result.
    """
    interleaved = 0
    for i in range(max_bits * dimension):
        component_bit = (pointer[i % dimension] >> (i // dimension)) & 1
        interleaved |= component_bit << i
    return (interleaved << level_bits) + level


def get_pointer(index):
    """Invert :func:`get_index`, returning ``(pointer, level)``."""
    level = index & ((1 << level_bits) - 1)
    interleaved = index >> level_bits

    width = max_bits * dimension
    pointer = [0] * dimension
    for i in range(width):
        bit = (interleaved >> (width - i - 1)) & 1
        pointer[i % dimension] |= bit << ((width - i - 1) // dimension)
    pointer.reverse()
    return pointer, level


def level_width(level):
    """Return the block width (in smallest-level units) at ``level``."""
    total_levels = 8
    # Guard against out-of-range levels; remove for raw speed if needed.
    assert 0 <= level < total_levels
    return 2 ** (total_levels - level)


def _print_example(pointer, level):
    """Print the worked round-trip example from the module docstring."""

    ind = get_index(pointer, level)
    pnt, lvl = get_pointer(ind)
    assert (pointer == pnt) & (level == lvl)

    def print_binary(num, frm):
        bstr = "{0:b}".format(num).rjust(max_bits, "0")
        print("".join([frm(b) for b in bstr]) + " = " + str(num))

    print("{0:b}".format(level).rjust(level_bits, "0").rjust(total_bits, " ") + " = " + str(level))
    print_binary(pointer[0], lambda b: "  " + b + "")
    print_binary(pointer[1], lambda b: " " + b + " ")
    print_binary(pointer[2], lambda b: "" + b + "  ")
    print("{0:b}".format(ind).rjust(total_bits, "0") + " = " + str(ind))

    return ind
image:: https://img.shields.io/pypi/v/omf.svg 5 | :target: https://pypi.org/project/omf/ 6 | :alt: Latest PyPI version 7 | 8 | .. image:: https://readthedocs.org/projects/omf/badge/?version=stable 9 | :target: https://omf.readthedocs.io/en/latest/ 10 | :alt: Documentation 11 | 12 | .. image:: https://img.shields.io/badge/license-MIT-blue.svg 13 | :target: https://github.com/gmggroup/omf/blob/master/LICENSE 14 | :alt: MIT license 15 | 16 | .. image:: https://github.com/gmggroup/omf/actions/workflows/run-tests.yml/badge.svg 17 | :target: https://github.com/gmggroup/omf/actions 18 | :alt: Github actions 19 | 20 | 21 | Version: 2.0.0a0 22 | 23 | API library for Open Mining Format, a new standard for mining data backed by 24 | the `Global Mining Guidelines Group `_. 25 | 26 | .. warning:: 27 | **Pre-Release Notice** 28 | 29 | Version 2 of the Open Mining Format (OMF) and the associated Python API 30 | is under active development, and subject to backwards-incompatible changes 31 | at any time. The latest stable release of Version 1 is 32 | `available on PyPI `_. 33 | 34 | Why? 35 | ---- 36 | 37 | An open-source serialization format and API library to support data interchange 38 | across the entire mining community. 39 | 40 | Scope 41 | ----- 42 | 43 | This library provides an abstracted object-based interface to the underlying 44 | OMF serialization format, which enables rapid development of the interface while 45 | allowing for future changes under the hood. 46 | 47 | Goals 48 | ----- 49 | 50 | - The goal of Open Mining Format is to standardize data formats across the 51 | mining community and promote collaboration 52 | - The goal of the API library is to provide a well-documented, object-based 53 | interface for serializing OMF files 54 | 55 | Alternatives 56 | ------------ 57 | 58 | OMF is intended to supplement the many alternative closed-source file formats 59 | used in the mining community. 
60 | 61 | Connections 62 | ----------- 63 | 64 | This library makes use of the `properties `_ 65 | open-source project, which is designed and publicly supported by 66 | `Seequent `_. 67 | 68 | Installation 69 | ------------ 70 | 71 | To install the repository, ensure that you have 72 | `pip installed `_ and run: 73 | 74 | .. code:: 75 | 76 | pip install --pre omf 77 | 78 | Or from `github `_: 79 | 80 | .. code:: 81 | 82 | git clone https://github.com/gmggroup/omf.git 83 | cd omf 84 | pip install -e . 85 | 86 | 87 | 3D Visualization 88 | ---------------- 89 | 90 | To easily visualize OMF project files and data objects in a pure Python environment, 91 | check out omfvista_ which provides a module for loading OMF datasets into PyVista_ 92 | mesh objects for 3D visualization and analysis. 93 | 94 | .. _omfvista: https://github.com/OpenGeoVis/omfvista 95 | .. _PyVista: https://github.com/pyvista/pyvista 96 | -------------------------------------------------------------------------------- /notebooks/cbi_plot.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import z_order_utils 4 | from mpl_toolkits.mplot3d import axes3d, Axes3D 5 | 6 | 7 | def _get_vecs(bc, bs): 8 | 9 | vec_x = np.arange(bc[0] + 1) * np.repeat(bs[0], bc[0] + 1) 10 | vec_y = np.arange(bc[1] + 1) * np.repeat(bs[1], bc[1] + 1) 11 | vec_z = np.arange(bc[2] + 1) * np.repeat(bs[2], bc[2] + 1) 12 | return vec_x, vec_y, vec_z 13 | 14 | 15 | def _plot_rbm(bc, bs, corner, vecs=None, ax=None): 16 | if ax is None: 17 | plt.figure() 18 | ax = plt.subplot(111, projection="3d") 19 | 20 | if vecs is None: 21 | vec_x, vec_y, vec_z = _get_vecs(bc, bs) 22 | else: 23 | vec_x, vec_y, vec_z = vecs 24 | 25 | x_lines = [] 26 | for z in vec_z: 27 | for y in vec_y: 28 | x_lines += [[vec_x[0], y, z], [vec_x[-1], y, z], [np.nan] * 3] 29 | x_lines = np.array(x_lines) 30 | 31 | y_lines = [] 32 | for z in vec_z: 33 | for x in vec_x: 34 | 
def plot_rsbm(rsbm, ax=None):
    """Plot a RegularSubBlockModel: the parent grid plus each sub-blocked cell."""
    parent_count = rsbm.parent_block_count
    parent_size = rsbm.parent_block_size
    sub_blocked = rsbm.is_sub_blocked
    sub_count = rsbm.sub_block_count
    sub_size = rsbm.sub_block_size
    corner = rsbm.corner

    # Draw the parent grid first, then overlay a sub-grid on every parent
    # cell that is flagged as sub-blocked.
    ax = _plot_rbm(parent_count, parent_size, corner, ax=ax)

    for k in range(parent_count[2]):
        for j in range(parent_count[1]):
            for i in range(parent_count[0]):
                if not sub_blocked[rsbm._get_parent_index([i, j, k])]:
                    continue
                sub_corner = corner + parent_size * np.array([i, j, k])
                _plot_rbm(sub_count, sub_size, sub_corner, ax=ax)
class BaseSurfaceElement(ProjectElement, HasTexturesMixin):
    """Base class for surface elements

    Concrete subclasses must implement :attr:`num_nodes` and
    :attr:`num_cells`; attributes may be attached to either location.
    """

    # Attributes may live on the vertices ("vertices") or faces ("faces").
    _valid_locations = ("vertices", "faces")

    origin = properties.Vector3(
        "Origin of the Mesh relative to Project coordinate reference system",
        default=[0.0, 0.0, 0.0],
    )

    def location_length(self, location):
        """Return correct attribute length based on location"""
        if location == "faces":
            return self.num_cells
        return self.num_nodes

    @property
    def num_nodes(self):
        """get number of nodes"""
        raise NotImplementedError()

    @property
    def num_cells(self):
        """get number of cells"""
        raise NotImplementedError()
by triangles""" 39 | 40 | schema = "org.omf.v2.element.surface" 41 | 42 | vertices = ArrayInstanceProperty( 43 | "Spatial coordinates of vertices relative to project origin", 44 | shape=("*", 3), 45 | dtype=float, 46 | ) 47 | 48 | triangles = ArrayInstanceProperty( 49 | "Vertex indices of surface triangles", 50 | shape=("*", 3), 51 | dtype=int, 52 | ) 53 | 54 | @property 55 | def num_nodes(self): 56 | """get number of nodes""" 57 | return len(self.vertices.array) 58 | 59 | @property 60 | def num_cells(self): 61 | """get number of cells""" 62 | return len(self.triangles.array) 63 | 64 | @properties.validator 65 | def _validate_mesh(self): 66 | """Ensure triangles values are valid indices""" 67 | if np.min(self.triangles.array) < 0: 68 | raise properties.ValidationError("Triangles may only have positive integers") 69 | if np.max(self.triangles.array) >= len(self.vertices.array): 70 | raise properties.ValidationError("Triangles expects more vertices than provided") 71 | return True 72 | 73 | 74 | class TensorGridSurface(BaseSurfaceElement): # pylint: disable=R0901 75 | """Surface element defined by grid with variable spacing in both dimensions""" 76 | 77 | schema = "org.omf.v2.element.surfacetensorgrid" 78 | 79 | tensor_u = properties.List( 80 | "Grid cell widths, u-direction", 81 | properties.Float("", min=0.0), 82 | coerce=True, 83 | ) 84 | tensor_v = properties.List( 85 | "Grid cell widths, v-direction", 86 | properties.Float("", min=0.0), 87 | coerce=True, 88 | ) 89 | axis_u = properties.Vector3( 90 | "Vector orientation of u-direction", 91 | default="X", 92 | length=1, 93 | ) 94 | axis_v = properties.Vector3( 95 | "Vector orientation of v-direction", 96 | default="Y", 97 | length=1, 98 | ) 99 | offset_w = ArrayInstanceProperty( 100 | "Node offset", 101 | shape=("*",), 102 | dtype=float, 103 | required=False, 104 | ) 105 | 106 | @property 107 | def num_nodes(self): 108 | """Number of nodes (vertices)""" 109 | return (len(self.tensor_u) + 1) * (len(self.tensor_v) + 
1) 110 | 111 | @property 112 | def num_cells(self): 113 | """Number of cells (faces)""" 114 | return len(self.tensor_u) * len(self.tensor_v) 115 | 116 | @properties.validator 117 | def _validate_mesh(self): 118 | """Check if mesh content is built correctly""" 119 | if not np.abs(self.axis_u.dot(self.axis_v)) < 1e-6: 120 | raise properties.ValidationError("axis_u and axis_v must be orthogonal") 121 | if self.offset_w is properties.undefined or self.offset_w is None: 122 | return True 123 | if len(self.offset_w.array) != self.num_nodes: 124 | raise properties.ValidationError( 125 | "Length of offset_w, {zlen}, must equal number of nodes, " 126 | "{nnode}".format(zlen=len(self.offset_w.array), nnode=self.num_nodes) 127 | ) 128 | return True 129 | -------------------------------------------------------------------------------- /omf/texture.py: -------------------------------------------------------------------------------- 1 | """texture.py: contains Texture definitions""" 2 | import io 3 | import uuid 4 | 5 | import properties 6 | 7 | from .base import BaseModel, ContentModel 8 | from .attribute import ArrayInstanceProperty 9 | 10 | 11 | class Image(BaseModel): 12 | """Class to validate and serialize a PNG image 13 | 14 | Data type and size are computed directly from the image. 15 | 16 | Serializing and deserializing this class requires passing an additional 17 | keyword argument :code:`binary_dict` where the image binary is persisted. 18 | The serialized JSON includes image metadata and a UUID; this UUID 19 | is the key in the binary_dict. 
20 | """ 21 | 22 | schema = "org.omf.v2.image.png" 23 | 24 | image = properties.ImagePNG( 25 | "PNG image file", 26 | serializer=lambda *args, **kwargs: None, 27 | deserializer=lambda *args, **kwargs: None, 28 | ) 29 | 30 | def __init__(self, image=None, **kwargs): 31 | super().__init__(**kwargs) 32 | if image is not None: 33 | self.image = image 34 | 35 | @properties.StringChoice("Image data type string", choices=["png"]) 36 | def data_type(self): 37 | """Image type descriptor, currently only PNGs are supported""" 38 | return "png" 39 | 40 | @properties.Integer("Size of image in bytes") 41 | def size(self): 42 | """Total size of the array in bits""" 43 | if self.image is None: 44 | return None 45 | size = self.image.seek(0, 2) 46 | self.image.seek(0) 47 | return size 48 | 49 | def serialize(self, include_class=True, save_dynamic=False, **kwargs): 50 | output = super().serialize(include_class=include_class, save_dynamic=True, **kwargs) 51 | image_uid = str(uuid.uuid4()) 52 | binary_dict = kwargs.get("binary_dict", None) 53 | if binary_dict is not None: 54 | self.image.seek(0) 55 | binary_dict.update({image_uid: self.image.read()}) 56 | output.update({"image": image_uid}) 57 | return output 58 | 59 | @classmethod 60 | def deserialize(cls, value, trusted=False, strict=False, assert_valid=False, **kwargs): 61 | binary_dict = kwargs.get("binary_dict", {}) 62 | if not isinstance(value, dict): 63 | pass 64 | elif "image" not in value: 65 | pass 66 | elif value["image"] in binary_dict: 67 | return cls(io.BytesIO(binary_dict[value["image"]])) 68 | return cls() 69 | 70 | 71 | class ProjectedTexture(ContentModel): 72 | """Image located in space to be projected at its normal onto an element""" 73 | 74 | schema = "org.omf.v2.texture.projected" 75 | 76 | origin = properties.Vector3( 77 | "Origin point of the texture", 78 | default=[0.0, 0.0, 0.0], 79 | ) 80 | axis_u = properties.Vector3( 81 | "Vector corresponding to the image x-axis", 82 | default="X", 83 | ) 84 | axis_v = 
class HasTexturesMixin(properties.HasProperties):
    """Mixin adding an optional list of image textures to an element."""

    textures = properties.List(
        "Images mapped on the element",
        prop=properties.Union("", (ProjectedTexture, UVMappedTexture)),
        required=False,
        default=list,
    )

    @properties.validator
    def _validate_textures(self):
        """Check each UV-mapped texture against the element's vertex count."""
        # Elements without a node count cannot be checked here.
        if not hasattr(self, "num_nodes"):
            return True
        for index, texture in enumerate(self.textures):
            # Projected textures are not tied to vertices, so skip them.
            if isinstance(texture, ProjectedTexture):
                continue
            coordinate_count = len(texture.uv_coordinates.array)
            if coordinate_count != self.num_nodes:
                raise properties.ValidationError(
                    "texture[{index}] length {datalen} does not match "
                    "vertices length {meshlen}".format(
                        index=index,
                        datalen=coordinate_count,
                        meshlen=self.num_nodes,
                    )
                )
        return True
__version__ = "2.0.0a0"
OMF_VERSION = "2.0"


def save(project, filename, mode="x"):
    """Serialize a OMF project to a file

    The .omf file is a ZIP archive containing the project JSON
    with pointers to separate files for each binary array/image.

    **Inputs:**

    * **project** - Instance of :class:`omf.base.Project` to be saved
    * **filename** - Name and path of output OMF file. If not already present,
      ".omf" will be appended
    * **mode** - Valid values are "w" or "x" - if file exists, "w" will
      overwrite and "x" will error. Default is "x"

    Returns the name of the written file (with ".omf" appended if needed).
    Raises ValueError for an invalid mode or, in "x" mode, an existing file.
    """
    if mode not in ("w", "x"):
        raise ValueError("File mode must be 'w' or 'x'")
    if not filename.endswith(".omf"):
        filename = filename + ".omf"
    if mode == "x" and os.path.exists(filename):
        raise ValueError("File already exists: {}".format(filename))
    # One timestamp stamped on every archive member so the archive is
    # internally consistent; timezone-aware now() replaces deprecated utcnow()
    time_tuple = datetime.datetime.now(datetime.timezone.utc).timetuple()[:6]
    project.validate()
    # serialize() fills binary_dict with {key: bytes} for each array/image,
    # leaving only JSON-safe pointers in serial_dict
    binary_dict = {}
    serial_dict = project.serialize(binary_dict=binary_dict, include_class=False)
    serial_dict["version"] = OMF_VERSION
    with zipfile.ZipFile(
        file=filename,
        mode="w",
        compression=zipfile.ZIP_DEFLATED,
        allowZip64=True,
    ) as zip_file:
        serial_info = zipfile.ZipInfo(
            filename="project.json",
            date_time=time_tuple,
        )
        serial_info.compress_type = zipfile.ZIP_DEFLATED
        zip_file.writestr(serial_info, json.dumps(serial_dict).encode("utf-8"))
        for key, value in binary_dict.items():
            binary_info = zipfile.ZipInfo(
                filename="{}".format(key),
                date_time=time_tuple,
            )
            binary_info.compress_type = zipfile.ZIP_DEFLATED
            zip_file.writestr(binary_info, value)
    return filename
# pylint: disable=too-few-public-methods
class _Reader(compat.IOMFReader):
    """Reader for OMF files in the current (2.0) format"""

    def __init__(self, filename: str):
        self._filename = filename

    def load(self, include_binary: bool = True, project_json: str = None) -> Project:
        """Read the zip archive and deserialize the contained project

        Raises :class:`compat.WrongVersionError` when the file appears to be
        another OMF version (so compatibility readers can be tried next) and
        :class:`compat.InvalidOMFFile` when the file cannot be read at all.
        """
        # NOTE(review): project_json is accepted for interface compatibility
        # with compat readers but is not honored here -- TODO confirm whether
        # it should be substituted for the archived project.json
        project_dict = {}
        binary_dict = {}
        project_version = None

        try:
            with zipfile.ZipFile(file=self._filename, mode="r") as zip_file:
                for info in zip_file.infolist():
                    with zip_file.open(info, mode="r") as file:
                        if info.filename == "project.json":
                            project_dict = json.load(file)
                            # Missing "version" leaves project_version None,
                            # which triggers the explicit check below rather
                            # than an opaque wrapped KeyError
                            project_version = project_dict.pop("version", None)
                        elif include_binary:
                            binary_dict[info.filename] = file.read()

        except zipfile.BadZipFile as exc:
            # Not a zip archive at all -- possibly an older OMF format,
            # so signal the caller to try the compatibility readers
            raise compat.WrongVersionError(exc) from exc

        except Exception as exc:
            raise compat.InvalidOMFFile(exc) from exc

        if project_version is None:
            raise compat.InvalidOMFFile(f"Unsupported format: {self._filename}")
        if project_version != OMF_VERSION:
            raise compat.WrongVersionError(f"Unsupported file version: {project_version}")

        return Project.deserialize(value=project_dict, binary_dict=binary_dict, trusted=True)


def load(filename: str, include_binary: bool = True, project_json: str = None) -> Project:
    """Deserialize an OMF file into a project

    **Inputs:**

    * **filename** - Name and path of input OMF file
    * **include_binary** - If True, binary data from the OMF file will be
      loaded into memory. Default is True
    * **project_json** - Alternative JSON used to construct the output OMF
      project. By default, the project JSON from the OMF file is used.

    The most common use of this function is simply to load an entire OMF
    file:

    .. code::

        import omf
        proj = omf.load('my_project.omf')

    However, if the OMF file is too big, you may partially load it with
    something like:

    .. code::

        import omf
        proj_no_bin = omf.load('my_project.omf', include_binary=False)
        ...  # Mutate proj_no_bin to include only the desired elements/attributes
        proj = omf.load('my_project.omf', project_json=proj_no_bin.serialize())
    """
    # Try the current-format reader first, then any registered
    # compatibility readers; WrongVersionError means "try the next one"
    for reader_cls in [_Reader] + compat.compatible_omf_readers:
        try:
            reader = reader_cls(filename)
            return reader.load(include_binary=include_binary, project_json=project_json)
        except compat.WrongVersionError:
            continue
    # Bug fix: the f-string previously had no placeholder and always
    # printed the literal text instead of the offending file name
    raise compat.InvalidOMFFile(f"Unsupported file: {filename}")
code:: python 13 | :name: test_doc 14 | 15 | import datetime 16 | import numpy as np 17 | import os 18 | import png 19 | import omf 20 | 21 | # setup sample files 22 | dir = os.getcwd() 23 | png_file = os.path.join(dir, "example.png") 24 | omf_file = os.path.join(dir, "example.omf") 25 | for f in (png_file, omf_file): 26 | if os.path.exists(f): 27 | os.remove(f) 28 | img = ["110010010011", "101011010100", "110010110101", "100010010011"] 29 | img = [[int(val) for val in value] for value in img] 30 | writer = png.Writer(len(img[0]), len(img), greyscale=True, bitdepth=16) 31 | with open(png_file, "wb") as file: 32 | writer.write(file, img) 33 | 34 | proj = omf.Project( 35 | name="Test project", 36 | description="Just some assorted elements", 37 | ) 38 | pts = omf.PointSet( 39 | name="Random Points", 40 | description="Just random points", 41 | vertices=np.random.rand(100, 3), 42 | attributes=[ 43 | omf.NumericAttribute( 44 | name="rand attr", 45 | array=np.random.rand(100), 46 | location="vertices", 47 | ), 48 | omf.NumericAttribute( 49 | name="More rand attr", 50 | array=np.random.rand(100), 51 | location="vertices", 52 | ), 53 | ], 54 | textures=[ 55 | omf.ProjectedTexture( 56 | name="test image", 57 | image=png_file, 58 | origin=[0, 0, 0], 59 | axis_u=[1, 0, 0], 60 | axis_v=[0, 1, 0], 61 | ), 62 | omf.ProjectedTexture( 63 | name="test image", 64 | image=png_file, 65 | origin=[0, 0, 0], 66 | axis_u=[1, 0, 0], 67 | axis_v=[0, 0, 1], 68 | ), 69 | ], 70 | metadata={ 71 | "color": "green", 72 | }, 73 | ) 74 | lin = omf.LineSet( 75 | name="Random Line", 76 | vertices=np.random.rand(100, 3), 77 | segments=np.floor(np.random.rand(50, 2) * 100).astype(int), 78 | attributes=[ 79 | omf.NumericAttribute( 80 | name="rand vert attr", 81 | array=np.random.rand(100), 82 | location="vertices", 83 | ), 84 | omf.NumericAttribute( 85 | name="rand segment attr", 86 | array=np.random.rand(50), 87 | location="segments", 88 | ), 89 | ], 90 | metadata={ 91 | "color": "#0000FF", 92 | }, 93 | 
) 94 | surf = omf.Surface( 95 | name="trisurf", 96 | vertices=np.random.rand(100, 3), 97 | triangles=np.floor(np.random.rand(50, 3) * 100).astype(int), 98 | attributes=[ 99 | omf.NumericAttribute( 100 | name="rand vert attr", 101 | array=np.random.rand(100), 102 | location="vertices", 103 | ), 104 | omf.NumericAttribute( 105 | name="rand face attr", 106 | array=np.random.rand(50), 107 | location="faces", 108 | ), 109 | ], 110 | metadata={ 111 | "color": [100, 200, 200], 112 | }, 113 | ) 114 | grid = omf.TensorGridSurface( 115 | name="gridsurf", 116 | tensor_u=np.ones(10).astype(float), 117 | tensor_v=np.ones(15).astype(float), 118 | origin=[50.0, 50.0, 50.0], 119 | axis_u=[1.0, 0, 0], 120 | axis_v=[0, 0, 1.0], 121 | offset_w=np.random.rand(11 * 16), 122 | attributes=[ 123 | omf.NumericAttribute( 124 | name="rand vert attr", 125 | array=np.random.rand(11 * 16), 126 | location="vertices", 127 | ), 128 | omf.NumericAttribute( 129 | name="rand face attr", 130 | array=np.random.rand(10 * 15), 131 | location="faces", 132 | ), 133 | ], 134 | textures=[ 135 | omf.ProjectedTexture( 136 | name="test image", 137 | image=png_file, 138 | origin=[2.0, 2.0, 2.0], 139 | axis_u=[5.0, 0, 0], 140 | axis_v=[0, 2.0, 5.0], 141 | ), 142 | ], 143 | ) 144 | vol = omf.TensorGridBlockModel( 145 | name="vol", 146 | tensor_u=np.ones(10).astype(float), 147 | tensor_v=np.ones(15).astype(float), 148 | tensor_w=np.ones(20).astype(float), 149 | origin=[10.0, 10.0, -10], 150 | attributes=[ 151 | omf.NumericAttribute( 152 | name="random attr", location="cells", array=np.random.rand(10 * 15 * 20) 153 | ), 154 | ], 155 | ) 156 | 157 | proj.elements = [pts, lin, surf, grid, vol] 158 | 159 | proj.metadata = { 160 | "coordinate_reference_system": "epsg 3857", 161 | "date_created": datetime.datetime.utcnow(), 162 | "version": "v1.3", 163 | "revision": "10", 164 | } 165 | 166 | assert proj.validate() 167 | 168 | omf.save(proj, omf_file) 169 | 170 | 171 | Piecewise building example: 172 | 173 | .. 
code:: python 174 | 175 | ... 176 | pts = omf.PointSet() 177 | pts.name = 'Random Points', 178 | pts.vertices = np.random.rand(100, 3) 179 | ... 180 | -------------------------------------------------------------------------------- /code_of_conduct.md: -------------------------------------------------------------------------------- 1 | # Open Mining Format Code of Conduct 2 | 3 | - [Overview](#Overview) 4 | - [Principles](#Principles) 5 | - [Reporting](#Reporting) 6 | - [Antitrust Compliance](#Antitrust-Compliance) 7 | 8 | ## Overview 9 | 10 | When attempting to overcome challenges alone, the obstacles can seem 11 | monumental. When approached together, however, anything is possible. 12 | 13 | The Global Mining Guidelines Group (GMG) is a community of mining 14 | companies, OEMs, OTMs, research organizations, and consultants 15 | from around the world who recognize that innovation does not 16 | happen in silos. Together, this diverse community addresses 17 | common challenges and creates tangible deliverables, 18 | enabling a productive, safe, and sustainable future for mining. 19 | 20 | A diverse community brings diverse ideas and perspectives to complex 21 | problems; this is paramount to the success of any open source project, 22 | including OMF. However, conflicting viewpoints and disagreement can 23 | quickly escalate into harassment and aggression when amplified by 24 | misunderstanding, miscommunication, and taking disagreement personally. 25 | 26 | ## Principles 27 | 28 | By embracing the following principles, guidelines, and actions to 29 | follow or avoid, you will help us make the OMF community welcoming 30 | and productive. 31 | 32 | - **Be friendly and patient.** 33 | 34 | - **Be welcoming.** We strive to be a community that welcomes and 35 | supports people of all backgrounds and identities. 
This includes, 36 | but is not limited to, members of any race, ethnicity, culture, 37 | national origin, color, immigration status, social and economic 38 | class, educational level, sex, sexual orientation, gender identity 39 | and expression, age, physical appearance, family status, 40 | technological or professional choices, academic discipline, religion, 41 | mental ability, and physical ability. 42 | 43 | - **Be considerate.** Your work will be used by other people, and you 44 | in turn will depend on the work of others. Any decision you take will 45 | affect users and colleagues, and you should take those consequences 46 | into account when making decisions. Remember that we're a world-wide 47 | community. You may be communicating with someone with a different 48 | primary language or cultural background. 49 | 50 | - **Be respectful.** Not all of us will agree all the time, but 51 | disagreement is no excuse for poor behavior or poor manners. We might 52 | all experience some frustration now and then, but we cannot allow that 53 | frustration to turn into a personal attack. It’s important to remember 54 | that a community where people feel uncomfortable or threatened is not 55 | a productive one. 56 | 57 | - **Be careful in the words that you choose.** Be kind to others. Do not 58 | insult or put down other community members. Harassment and other 59 | exclusionary behavior are not acceptable. This includes, but is not 60 | limited to: 61 | - Violent threats or violent language directed against another 62 | person 63 | - Discriminatory jokes and language 64 | - Posting sexually explicit or violent material 65 | - Posting (or threatening to post) other people's personally 66 | identifying information ("doxing") 67 | - Personal insults, especially those using racist or sexist terms 68 | - Unwelcome sexual attention 69 | - Advocating for, or encouraging, any of the above behavior 70 | - Repeated harassment of others. 
In general, if someone asks you to 71 | stop, then stop 72 | 73 | - **Moderate your expectations.** Please respect that community members 74 | choose how they spend their time in the project. A thoughtful question 75 | about your expectations is preferable to demands for another person's 76 | time. 77 | 78 | - **When we disagree, try to understand why.** Disagreements, both 79 | social and technical, happen all the time and the OMF community is no 80 | exception. Try to understand where others are coming from, as seeing 81 | a question from their viewpoint may help find a new path forward. And 82 | don’t forget that it is human to err: blaming each other doesn’t get 83 | us anywhere, while we can learn from mistakes to find better 84 | solutions. 85 | 86 | - **A simple apology can go a long way.** It can often de-escalate a 87 | situation, and telling someone that you are sorry is an act of empathy 88 | that doesn’t automatically imply an admission of guilt. 89 | 90 | ## Reporting 91 | 92 | As a member of our community, you are also a steward of these values. 93 | Not all problems need to be resolved via formal processes, and often a 94 | quick, friendly but clear word in an online message or in person can 95 | help resolve a misunderstanding and de-escalate things. 96 | 97 | An informal enforcement of this process may be inadequate if there is 98 | urgency, risk to someone, no one is comfortable speaking out, the 99 | offender is unresponsive, etc. In that case, please file a report 100 | by emailing [Heather Turnbull](mailto:hturnbull@gmggroup.org), 101 | the Operations Manager at GMG. Reports will be kept anonymous and action 102 | will be taken. 103 | 104 | ## Antitrust Compliance 105 | 106 | All participants in GMG activity must comply with applicable 107 | antitrust laws. Please refer to the detailed guidelines provided on the 108 | [GMG governance page](https://gmggroup.org/about-us/governance/). 
109 | 110 | #### Attribution 111 | 112 | This document is modified from other codes of conduct, courtesy of the 113 | [*Speak Up!*](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html), 114 | [*Django*](https://www.djangoproject.com/conduct), and 115 | [*Jupyter*](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md) 116 | Projects. 117 | 118 | All content on this page is licensed under a 119 | [*Creative Commons Attribution*](http://creativecommons.org/licenses/by/3.0/) 120 | license. 121 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to the Open Mining Format 2 | 3 | There are many ways to contribute to OMF: 4 | - [**Become a member** of GMG](#membership) 5 | - [**Raise issues** for any questions/feedback/problems you have](#issues) 6 | - [**Write code** to directly move the format forward](#code) 7 | - [**Provide documentation** and examples](#docs) 8 | 9 | ## High-level involvement through Global Mining Guidelines Group 10 | 11 | The Open Mining Format has been created through the Data Exchange for 12 | Mine Software Project under the 13 | [Data Access and Usage Working Group](https://gmggroup.org/groups/data-access-and-usage-dau/). 14 | This sub-committee is involved with all aspects of the success of OMF, 15 | including industry engagement and organization, technical design and 16 | development, and outreach and marketing. 17 | [Learn more about GMG](https://gmggroup.org/) or 18 | [become a member](https://gmggroup.org/about-us/membership/) 19 | 20 | ## Raising questions, feedback, problems 21 | 22 | Given the *open* nature of OMF, all technical conversations and 23 | development happen, here, on GitHub, visible to everyone. 
To 24 | participate, all you need is a [free GitHub account](https://github.com/join); 25 | there is no requirement to be a member of GMG or even part of 26 | the mining community. 27 | 28 | Any questions/feedback/problems should be raised as 29 | [issues](https://github.com/gmggroup/omf/issues). You may comment on 30 | existing, relevant issues or 31 | [create a new issue](https://github.com/gmggroup/omf/issues/new). There 32 | is no restriction on what issues are "supposed to" look like; this is 33 | a place for anyone to voice anything about OMF. Examples include: 34 | - detailed technical questions around implementation 35 | - problems encountered when attempting to support OMF 36 | - confusion around the documentation 37 | - feature requests for the format 38 | - questions around the high-level goal of an open standard 39 | - suggestions around non-technical aspects, such as marketing and engagement 40 | - etc! 41 | 42 | If the question is non-technical, follow up may happen outside of 43 | GitHub, but this is a place to start. 44 | 45 | To ensure the OMF community remains welcoming and productive, please read and 46 | follow our [Code of Conduct](code_of_conduct.md). 47 | 48 | ## Contributing to code development 49 | 50 | Anyone can submit pull requests to the OMF repository. Preferably these 51 | are related to an 52 | [existing bug](https://github.com/gmggroup/omf/issues?q=is%3Aopen+is%3Aissue+label%3Abug) 53 | or a feature included in an 54 | [upcoming milestone](https://github.com/gmggroup/omf/milestones). 55 | [Low-hanging-fruit issues](https://github.com/gmggroup/omf/issues?q=is%3Aopen+is%3Aissue+label%3A%22%3Aarrow_down%3A+%3Agreen_apple%3A%22) 56 | are great for first-time contributors. If the solution to the issue is 57 | unclear, please follow up and ask for clarification; if nothing else, 58 | it's useful to know what people are working on. 
If your pull request 59 | does not have an existing issue, consider [creating an issue](#issues) 60 | first, just to add context and promote discussion. 61 | 62 | When working on your contribution, you may 63 | [fork](https://help.github.com/en/articles/fork-a-repo) the OMF repo to 64 | your personal or company GitHub organization and develop there. 65 | Alternatively, if you are interested in being identified as a contributor 66 | to the [GMG GitHub organization](https://github.com/gmggroup), 67 | reach out to [Heather Turnbull](mailto:hturnbull@gmggroup.org), 68 | the Operations Manager at GMG (note: this is distinct from 69 | [GMG membership](#membership)). Once you are a contributor on GitHub, you may 70 | [create feature branches](https://help.github.com/en/articles/creating-and-deleting-branches-within-your-repository) 71 | directly in the GMG OMF repository. 72 | 73 | When creating a branch, consider naming it in the format 74 | `GH-##/human_readable_description`, where "##" is the related 75 | issue number. Strive for as much inter-linking as possible of pull 76 | requests, issues numbers, commits, etc. 77 | 78 | When submitting a pull request, please base off the `dev` branch. 79 | Contributions will be collected here, then version-bumped and deployed 80 | via pull request from `dev` to `master` as appropriate. 81 | 82 | Finally, everyone appreciates unit tests, code documentation, and 83 | consistent style (just run [black](https://black.readthedocs.io/en/stable/)). 84 | And, to ensure the OMF community remains welcoming 85 | and productive, read and follow our [Code of Conduct](code_of_conduct.md). 86 | 87 | ### Contributing documentation and examples 88 | 89 | The most useful contributions for the success of OMF are documentation 90 | and examples that can be shared with everyone. To use the format, 91 | people must understand the format. 
Documentation comes in many forms; 92 | it can include: 93 | - Technical documentation of the code and API 94 | - Description of how the API relates to real objects 95 | - Workflows describing specific implementations of OMF in prose, code, 96 | or screenshots 97 | - Example OMF files, ideally along side source files of other formats 98 | and description of the import/export process. 99 | - etc! 100 | 101 | Contributing documentation and examples is also more flexible than 102 | code contributions. Documentation hosted on 103 | [readthedocs](https://omf.readthedocs.io/en/stable/) is directly built 104 | from the GitHub repository, so you may contribute documentation there 105 | by submitting a PR. However, it is also entirely valid to 106 | [create a new issue](https://github.com/gmggroup/omf/issues/new) and 107 | attach any files or text you have there. Somebody will take those 108 | submissions and put them in the appropriate place. 109 | 110 | By documenting and highlighting diverse, successful OMF use-cases, we 111 | are able to demonstrate early industry engagement, and this will promote 112 | further adoption. 
113 | 114 | -------------------------------------------------------------------------------- /tests/test_base.py: -------------------------------------------------------------------------------- 1 | """Tests for BaseModel class behaviors""" 2 | import datetime 3 | import json 4 | 5 | import numpy as np 6 | import properties 7 | import properties.extras 8 | import pytest 9 | 10 | import omf 11 | 12 | 13 | class Metadata(properties.HasProperties): 14 | """Metadata class for testing""" 15 | 16 | meta_int = properties.Integer("", required=False) 17 | meta_string = properties.String("", required=False) 18 | meta_color = properties.Color("", required=False) 19 | meta_anything = properties.Property("", required=False) 20 | meta_date = omf.base.StringDateTime("", required=False) 21 | 22 | 23 | def test_metadata_property(): 24 | """Test metadata validates predefined keys but allows any keys""" 25 | 26 | class WithMetadata(properties.HasProperties): 27 | """Test class with metadata""" 28 | 29 | metadata = omf.base.ArbitraryMetadataDict( 30 | "Some metadata", 31 | Metadata, 32 | default=dict, 33 | ) 34 | 35 | with pytest.raises(AttributeError): 36 | WithMetadata._props["metadata"].metadata_class = object # pylint: disable=E1101 37 | 38 | has_metadata = WithMetadata() 39 | assert has_metadata.validate() 40 | has_metadata.metadata["meta_int"] = 5 41 | assert has_metadata.validate() 42 | has_metadata.metadata["meta_int"] = "not an int" 43 | with pytest.raises(properties.ValidationError): 44 | has_metadata.validate() 45 | has_metadata.metadata["meta_int"] = 5 46 | has_metadata.metadata["meta_string"] = "a string" 47 | assert has_metadata.validate() 48 | has_metadata.metadata["meta_color"] = "red" 49 | assert has_metadata.validate() 50 | assert has_metadata.metadata["meta_color"] == (255, 0, 0) 51 | has_metadata.metadata["meta_anything"] = "a string" 52 | assert has_metadata.validate() 53 | has_metadata.metadata["meta_anything"] = Metadata 54 | with 
class MyModelWithInt(omf.base.BaseModel):
    """Test class with one integer property"""

    schema = "my.model.with.int"
    my_int = properties.Integer("")


class MyModelWithIntAndInstance(omf.base.BaseModel):
    """Test class with an integer property and an instance property"""

    schema = "my.model.with.int.and.instance"
    my_int = properties.Integer("")
    my_model = properties.Instance("", omf.base.BaseModel)


@pytest.mark.parametrize("include_class", [True, False])
def test_uid_model_serialize(include_class):
    """Test BaseModel correctly serializes to flat dictionary"""
    # Build a three-level nesting: outer -> middle -> innermost
    innermost = MyModelWithInt()
    middle = MyModelWithIntAndInstance(my_int=1, my_model=innermost)
    outer = MyModelWithIntAndInstance(my_int=0, my_model=middle)
    # Expected output mirrors the nesting; each level carries its schema
    inner_expected = {"schema": "my.model.with.int"}
    middle_expected = {
        "schema": "my.model.with.int.and.instance",
        "my_int": 1,
        "my_model": inner_expected,
    }
    expected = {
        "schema": "my.model.with.int.and.instance",
        "my_int": 0,
        "my_model": middle_expected,
    }
    if include_class:
        # __class__ entries are only present when explicitly requested
        inner_expected["__class__"] = "MyModelWithInt"
        middle_expected["__class__"] = "MyModelWithIntAndInstance"
        expected["__class__"] = "MyModelWithIntAndInstance"
    assert outer.serialize(include_class=include_class) == expected


def test_deserialize():
    """Test deserialize correctly builds BaseModel from registry"""
    source = {
        "my_int": 0,
        "my_model": {
            "my_int": 1,
            "schema": "my.model.with.int",
        },
        "schema": "my.model.with.int.and.instance",
    }
    result = omf.base.BaseModel.deserialize(source, trusted=True)
    # The registry resolves schema strings to their registered classes
    assert isinstance(result, MyModelWithIntAndInstance)
    # pylint: disable=E1101
    assert result.my_int == 0
    assert isinstance(result.my_model, MyModelWithInt)
    assert result.my_model.my_int == 1
    # pylint: enable=E1101


class MockArray(omf.base.BaseModel):
    """Test array class"""

    schema = "test.base.mock.array"
    array = np.array([1, 2, 3])


class MockAttribute(omf.base.ProjectElementAttribute):
    """Test attribute class"""

    schema = "test.mock.attribute"

    array = MockArray()
172 | element.attributes = [MockAttribute(location="faces")] 173 | with pytest.raises(ValueError): 174 | element.validate() 175 | element.attributes = [MockAttribute(location="vertices")] 176 | with pytest.raises(ValueError): 177 | element.validate() 178 | element.location_length = lambda _: 3 179 | assert element.validate() 180 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make ' where is one of" 25 | @echo " html to make standalone HTML files" 26 | @echo " dirhtml to make HTML files named index.html in directories" 27 | @echo " singlehtml to make a single large HTML file" 28 | @echo " pickle to make pickle files" 29 | @echo " json to make JSON files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " applehelp to make an Apple Help Book" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | @echo " coverage to run coverage check of the documentation (if enabled)" 49 | 50 | .PHONY: clean 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | .PHONY: html 55 | html: 56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 57 | @echo 58 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 59 | 60 | .PHONY: dirhtml 61 | dirhtml: 62 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 63 | @echo 64 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
65 | 66 | .PHONY: singlehtml 67 | singlehtml: 68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 69 | @echo 70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 71 | 72 | .PHONY: pickle 73 | pickle: 74 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 75 | @echo 76 | @echo "Build finished; now you can process the pickle files." 77 | 78 | .PHONY: json 79 | json: 80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 81 | @echo 82 | @echo "Build finished; now you can process the JSON files." 83 | 84 | .PHONY: htmlhelp 85 | htmlhelp: 86 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 87 | @echo 88 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 89 | ".hhp project file in $(BUILDDIR)/htmlhelp." 90 | 91 | .PHONY: qthelp 92 | qthelp: 93 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 94 | @echo 95 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 96 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 97 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/omf.qhcp" 98 | @echo "To view the help file:" 99 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/omf.qhc" 100 | 101 | .PHONY: applehelp 102 | applehelp: 103 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 104 | @echo 105 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 106 | @echo "N.B. You won't be able to view it unless you put it in" \ 107 | "~/Library/Documentation/Help or install it in your application" \ 108 | "bundle." 109 | 110 | .PHONY: devhelp 111 | devhelp: 112 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 113 | @echo 114 | @echo "Build finished." 
115 | @echo "To view the help file:" 116 | @echo "# mkdir -p $$HOME/.local/share/devhelp/omf" 117 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/omf" 118 | @echo "# devhelp" 119 | 120 | .PHONY: epub 121 | epub: 122 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 123 | @echo 124 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 125 | 126 | .PHONY: latex 127 | latex: 128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 129 | @echo 130 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 131 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 132 | "(use \`make latexpdf' here to do that automatically)." 133 | 134 | .PHONY: latexpdf 135 | latexpdf: 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo "Running LaTeX files through pdflatex..." 138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 140 | 141 | .PHONY: latexpdfja 142 | latexpdfja: 143 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 144 | @echo "Running LaTeX files through platex and dvipdfmx..." 145 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 146 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 147 | 148 | .PHONY: text 149 | text: 150 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 151 | @echo 152 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 153 | 154 | .PHONY: man 155 | man: 156 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 157 | @echo 158 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 159 | 160 | .PHONY: texinfo 161 | texinfo: 162 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 163 | @echo 164 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 165 | @echo "Run \`make' in that directory to run these through makeinfo" \ 166 | "(use \`make info' here to do that automatically)." 
167 | 168 | .PHONY: info 169 | info: 170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 171 | @echo "Running Texinfo files through makeinfo..." 172 | make -C $(BUILDDIR)/texinfo info 173 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 174 | 175 | .PHONY: gettext 176 | gettext: 177 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 178 | @echo 179 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 180 | 181 | .PHONY: changes 182 | changes: 183 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 184 | @echo 185 | @echo "The overview file is in $(BUILDDIR)/changes." 186 | 187 | .PHONY: linkcheck 188 | linkcheck: 189 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 190 | @echo 191 | @echo "Link check complete; look for any errors in the above output " \ 192 | "or in $(BUILDDIR)/linkcheck/output.txt." 193 | 194 | .PHONY: doctest 195 | doctest: 196 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 197 | @echo "Testing of doctests in the sources finished, look at the " \ 198 | "results in $(BUILDDIR)/doctest/output.txt." 199 | 200 | .PHONY: coverage 201 | coverage: 202 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 203 | @echo "Testing of coverage in the sources finished, look at the " \ 204 | "results in $(BUILDDIR)/coverage/python.txt." 205 | 206 | .PHONY: xml 207 | xml: 208 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 209 | @echo 210 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 211 | 212 | .PHONY: pseudoxml 213 | pseudoxml: 214 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 215 | @echo 216 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
@ECHO OFF

REM Command file for Sphinx documentation (Windows counterpart of Makefile).
REM FIX(review): the qthelp help text pointed Qt Assistant at omf.ghc; the
REM generated collection file is omf.qhc (the Makefile target agrees).

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
if "%PYTHON%" == "" (
	set PYTHON=python
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^` where ^ is one of
	echo. html to make standalone HTML files
	echo. dirhtml to make HTML files named index.html in directories
	echo. singlehtml to make a single large HTML file
	echo. pickle to make pickle files
	echo. json to make JSON files
	echo. htmlhelp to make HTML files and a HTML help project
	echo. qthelp to make HTML files and a qthelp project
	echo. devhelp to make HTML files and a Devhelp project
	echo. epub to make an epub
	echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo. text to make text files
	echo. man to make manual pages
	echo. texinfo to make Texinfo files
	echo. gettext to make PO message catalogs
	echo. changes to make an overview over all changed/added/deprecated items
	echo. xml to make Docutils-native XML files
	echo. pseudoxml to make pseudoxml-XML files for display purposes
	echo. linkcheck to check all external links for integrity
	echo. doctest to run all doctests embedded in the documentation if enabled
	echo. coverage to run coverage check of the documentation if enabled
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)


REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok

:sphinx_python

set SPHINXBUILD=%PYTHON% -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

:sphinx_ok

if "%1" == "graphs" (
	pyreverse -my -A -o pdf -p omf ../omf/*.py
	goto end
)

if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\omf.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\omf.qhc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)

if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

if "%1" == "coverage" (
	%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
	goto end
)

if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)

if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)

:end
--------------------------------------------------------------------------------
/notebooks/cbi.py:
--------------------------------------------------------------------------------
import numpy as np
import properties
import z_order_utils


class BaseMetadata(properties.HasProperties):
    name = properties.String("Name of the block model", default="")
    description = properties.String("Description of the block model", default="")
    # Other named metadata?


class BaseOrientation(properties.HasProperties):
    """Origin and orthonormal-ish u/v/w axes shared by all block models."""

    origin = properties.Vector3(
        "Origin of the block model, where axes extend from",
        default="ZERO",
    )
    axis_u = properties.Vector3("Vector orientation of u-direction", default="X")
    axis_v = properties.Vector3("Vector orientation of v-direction", default="Y")
    axis_w = properties.Vector3("Vector orientation of w-direction", default="Z")


class RegularBlockModel(BaseMetadata, BaseOrientation):
    """Block model with a fixed block size and a fixed count per axis."""

    block_size = properties.Vector3(
        "Size of each block",
    )
    block_count = properties.List(
        "Number of blocks in each dimension",
        min_length=3,
        max_length=3,
        prop=properties.Integer("", min=1),
    )


class TensorBlockModel(BaseMetadata, BaseOrientation):
    """Block model whose cell widths vary per axis (tensor grid)."""

    tensor_u = properties.Array("Tensor cell widths, u-direction", shape=("*",), dtype=float)
    tensor_v = properties.Array("Tensor cell widths, v-direction", shape=("*",), dtype=float)
    tensor_w = properties.Array("Tensor cell widths, w-direction", shape=("*",), dtype=float)

    @property
    def block_count(self):
        # Counts are derived from the tensor lengths rather than stored.
        return [
            len(self.tensor_u),
            len(self.tensor_v),
            len(self.tensor_w),
        ]

    @property
    def num_blocks(self):
        return np.prod(self.block_count)


class BaseCompressedBlockStorage(properties.HasProperties):
    """Parent-block grid plus a compressed block index (CBI).

    The CBI is a cumulative array of length num_parent_blocks + 1; the
    sub-blocks of parent i occupy attribute rows _cbi[i]:_cbi[i+1].
    """

    parent_block_size = properties.Vector3(
        "Size of each parent block",
    )
    parent_block_count = properties.List(
        "Number of parent blocks in each dimension",
        min_length=3,
        max_length=3,
        prop=properties.Integer("", min=1),
    )

    @property
    def num_parent_blocks(self):
        return np.prod(self.parent_block_count)

    @property
    def num_blocks(self):
        # Total sub-blocks = final cumulative entry of the CBI.
        return self.compressed_block_index[-1]

    @property
    def is_sub_blocked(self):
        # Boolean array per parent: True where a parent holds more than one block.
        self.compressed_block_index  # assert that _cbi exists
        return (self._cbi[1:] - self._cbi[:-1]) > 1

    def _get_starting_cbi(self):
        # One block per parent before any refinement (overridden by subclasses).
        return np.arange(self.num_parent_blocks + 1, dtype="uint32")

    @property
    def compressed_block_index(self):
        # Need the block counts to exist
        assert self._props["parent_block_count"].assert_valid(self, self.parent_block_count)
        if "sub_block_count" in self._props:
            assert self._props["sub_block_count"].assert_valid(self, self.sub_block_count)
        # Note: We could have some warnings here, if the above change
        # It is probably less relevant as these are not targeted
        # to be used in a dynamic context?

        # If the sub block storage does not exist, create it
        if not hasattr(self, "_cbi"):
            # Each parent cell has a single attribute before refinement
            self._cbi = self._get_starting_cbi()
        return self._cbi

    def _get_parent_index(self, ijk):
        # Flatten a validated (i, j, k) into a column-major linear parent index.
        pbc = self.parent_block_count
        assert len(ijk) == 3  # Should be a 3 length integer tuple/list
        assert (0 <= ijk[0] < pbc[0]) & (0 <= ijk[1] < pbc[1]) & (0 <= ijk[2] < pbc[2]), "Must be valid ijk index"

        (parent_index,) = np.ravel_multi_index(
            [[ijk[0]], [ijk[1]], [ijk[2]]],  # Index into the block model
            self.parent_block_count,  # shape of the parent
            order="F",  # Explicit column major ordering, "i moves fastest"
        )
        return parent_index


class RegularSubBlockModel(BaseMetadata, BaseOrientation, BaseCompressedBlockStorage):
    """Sub-blocked model where every refined parent splits into the same
    regular sub_block_count grid."""

    sub_block_count = properties.List(
        "Number of sub blocks in each sub-blocked parent",
        min_length=3,
        max_length=3,
        prop=properties.Integer("", min=1),
    )

    @property
    def sub_block_size(self):
        return self.parent_block_size / np.array(self.sub_block_count)

    def refine(self, ijk):
        """Refine parent ijk; returns (parent_index, attribute slice bounds)."""
        self.compressed_block_index  # assert that _cbi exists
        parent_index = self._get_parent_index(ijk)
        # Adding "num_sub_blocks" - 1, because the parent was already counted
        self._cbi[parent_index + 1 :] += np.prod(self.sub_block_count) - 1
        # Attribute index is where to insert into attribute arrays
        attribute_index = tuple(self._cbi[parent_index : parent_index + 2])
        return parent_index, attribute_index

    # Note: Perhaps if there is an unrefined RSBM,
    # then OMF should serialize as a RBM?


class OctreeSubBlockModel(BaseMetadata, BaseOrientation, BaseCompressedBlockStorage):
    """Sub-blocked model refined as an octree; sub-blocks are encoded as
    z-order curve indices via z_order_utils."""

    @property
    def z_order_curves(self):
        # Concatenated, per-parent-sorted z-order indices of every sub-block.
        forest = self._get_forest()
        cbi = self.compressed_block_index
        curves = np.zeros(self.num_blocks, dtype="uint32")
        for i, tree in enumerate(forest):
            curves[cbi[i] : cbi[i + 1]] = sorted(tree)
        return curves

    def _get_forest(self):
        """Want a set before we create the array.
        This may not be useful for less dynamic implementations.
        """
        if not hasattr(self, "_forest"):
            # Do your part for the planet:
            # Plant trees in every parent block.
            self._forest = [{0} for _ in range(self.num_parent_blocks)]
        return self._forest

    def _refine_child(self, ijk, ind):
        # Split sub-block `ind` of parent ijk into its 8 octree children.

        self.compressed_block_index  # assert that _cbi exists
        parent_index = self._get_parent_index(ijk)
        tree = self._get_forest()[parent_index]

        if ind not in tree:
            raise IndexError(ind)

        p, lvl = z_order_utils.get_pointer(ind)
        w = z_order_utils.level_width(lvl + 1)

        # 8 children at the next level, offset by the child-level width.
        children = [
            [p[0], p[1], p[2], lvl + 1],
            [p[0] + w, p[1], p[2], lvl + 1],
            [p[0], p[1] + w, p[2], lvl + 1],
            [p[0] + w, p[1] + w, p[2], lvl + 1],
            [p[0], p[1], p[2] + w, lvl + 1],
            [p[0] + w, p[1], p[2] + w, lvl + 1],
            [p[0], p[1] + w, p[2] + w, lvl + 1],
            [p[0] + w, p[1] + w, p[2] + w, lvl + 1],
        ]

        for child in children:
            tree.add(z_order_utils.get_index(child[:3], child[3]))
        tree.remove(ind)

        # Adding "num_sub_blocks" - 1, because the parent was already counted
        self._cbi[parent_index + 1 :] += 7

        return children


class ArbitrarySubBlockModel(BaseMetadata, BaseOrientation, BaseCompressedBlockStorage):
    """Sub-blocked model storing explicit per-parent centroid/size lists."""

    def _get_starting_cbi(self):
        """Unlike octree and rsbm, this has zero sub-blocks to start with."""
        return np.zeros(self.num_parent_blocks + 1, dtype="uint32")

    def _get_lists(self):
        """Want a set before we create the array.
        This may not be useful for less dynamic implementations.
        """
        if not hasattr(self, "_lists"):
            # Do your part for the planet:
            # Plant trees in every parent block.
            self._lists = [(np.zeros((0, 3)), np.zeros((0, 3))) for _ in range(self.num_parent_blocks)]
        return self._lists

    def _add_sub_blocks(self, ijk, new_centroids, new_sizes):
        # Append explicit sub-blocks (centroid + size rows) to parent ijk.
        self.compressed_block_index  # assert that _cbi exists
        parent_index = self._get_parent_index(ijk)
        centroids, sizes = self._get_lists()[parent_index]

        if not isinstance(new_centroids, np.ndarray):
            new_centroids = np.array(new_centroids)
        new_centroids = new_centroids.reshape((-1, 3))

        if not isinstance(new_sizes, np.ndarray):
            new_sizes = np.array(new_sizes)
        new_sizes = new_sizes.reshape((-1, 3))

        assert (new_centroids.size % 3 == 0) & (new_sizes.size % 3 == 0) & (new_centroids.size == new_sizes.size)

        # TODO: Check that the centroid exists in the block

        self._lists[parent_index] = (
            np.r_[centroids, new_centroids],
            np.r_[sizes, new_sizes],
        )

        self._cbi[parent_index + 1 :] += new_sizes.size // 3
--------------------------------------------------------------------------------
/omf/base.py:
--------------------------------------------------------------------------------
"""base.py: OMF Project and base classes for its components"""
import json

import properties
import properties.extras


class BaseModel(properties.HasProperties):
    """BaseModel is a HasProperties subclass with schema

    When deserializing, this class prioritizes schema value over __class__
    to decide the class.
    """

    schema = ""

    def serialize(self, include_class=True, save_dynamic=False, **kwargs):
        # Augment the standard HasProperties payload with the schema string.
        output = super().serialize(include_class, save_dynamic, **kwargs)
        output.update({"schema": self.schema})
        return output

    @classmethod
    def deserialize(cls, value, trusted=False, strict=False, assert_valid=False, **kwargs):
        # Map the serialized schema string back to a registered class by
        # scanning the HasProperties registry; __class__ is overridden.
        # NOTE(review): this mutates the caller's dict (pop/update) — confirm
        # callers do not reuse `value` afterwards.
        schema = value.pop("schema", "")
        for class_name, class_value in cls._REGISTRY.items():
            if not hasattr(class_value, "schema"):
                continue
            if class_value.schema == schema:
                value.update({"__class__": class_name})
                break
        return super().deserialize(value, trusted, strict, assert_valid, **kwargs)


class StringDateTime(properties.DateTime):
    """DateTime property validated to be a string"""

    def validate(self, instance, value):
        # Coerce the validated datetime to its JSON string form.
        value = super().validate(instance, value)
        return self.to_json(value)


class BaseMetadata(properties.HasProperties):
    """Validated metadata properties for all objects"""

    date_created = StringDateTime(
        "Date object was created",
        required=False,
    )
    date_modified = StringDateTime(
        "Date object was modified",
        required=False,
    )


class ProjectMetadata(BaseMetadata):
    """Validated metadata properties for Projects"""

    coordinate_reference_system = properties.String(
        "EPSG or Proj4 plus optional local transformation string",
        required=False,
    )
    author = properties.String(
        "Author of the project",
        required=False,
    )
    revision = properties.String(
        "Revision",
        required=False,
    )
    date = StringDateTime(
        "Date associated with the project data",
        required=False,
    )


class ElementMetadata(BaseMetadata):
    """Validated metadata properties for Elements"""

    coordinate_reference_system = properties.String(
        "EPSG or Proj4 plus optional local transformation string",
        required=False,
    )
    color = properties.Color(
        "Solid element color",
        required=False,
    )
    opacity = properties.Float(
        "Element opacity",
        min=0,
        max=1,
        required=False,
    )


class AttributeMetadata(BaseMetadata):
    """Validated metadata properties for Attributes"""

    units = properties.String(
        "Units of attribute values",
        required=False,
    )


class ArbitraryMetadataDict(properties.Dictionary):
    """Custom property class for metadata dictionaries

    This property accepts JSON-compatible dictionary with any arbitrary
    fields. However, an additional :code:`metadata_class` is specified
    to validate specific fields.
    """

    @property
    def metadata_class(self):
        """HasProperties class to validate metadata fields against"""
        return self._metadata_class

    @metadata_class.setter
    def metadata_class(self, value):
        if not issubclass(value, properties.HasProperties):
            raise AttributeError("metadata_class must be HasProperites subclass")
        self._metadata_class = value  # pylint: disable=W0201

    def __init__(self, doc, metadata_class, **kwargs):
        self.metadata_class = metadata_class
        # Keys are always plain strings.
        kwargs.update({"key_prop": properties.String("")})
        super().__init__(doc, **kwargs)

    def validate(self, instance, value):
        """Validate the dictionary and any property defined in metadata_class

        This also reassigns the dictionary after validation, so any
        coerced values persist.
        """
        new_value = super().validate(instance, value)
        # Only keys declared on metadata_class are individually validated;
        # everything else passes through as arbitrary metadata.
        filtered_value = properties.utils.filter_props(
            self.metadata_class,
            new_value,
        )[0]
        try:
            for key, val in filtered_value.items():
                new_value[key] = self.metadata_class._props[key].validate(instance, val)
        except properties.ValidationError as err:
            raise properties.ValidationError(
                "Invalid metadata: {}".format(err),
                reason="invalid",
                prop=self.name,
                instance=instance,
            ) from err
        # The whole dict must round-trip through JSON.
        try:
            json.dumps(new_value)
        except TypeError as err:
            raise properties.ValidationError(
                "Metadata is not JSON compatible",
                reason="invalid",
                prop=self.name,
                instance=instance,
            ) from err
        # Re-assign so coerced values persist on the instance (returns the
        # original `value`; the setattr triggers validation of new_value).
        if not self.equal(value, new_value):
            setattr(instance, self.name, new_value)
        return value

    @property
    def info(self):
        """Description of the property, supplemental to the basic doc"""
        info = (
            "an arbitrary JSON-serializable dictionary, with certain keys "
            "validated against :class:`{cls} <{pref}.{cls}>`".format(
                cls=self.metadata_class.__name__,
                pref=self.metadata_class.__module__,
            )
        )
        return info


class ContentModel(BaseModel):
    """ContentModel is a model with name, description, and metadata"""

    name = properties.String(
        "Title of the object",
        default="",
    )
    description = properties.String(
        "Description of the object",
        default="",
    )
    metadata = ArbitraryMetadataDict(
        "Basic object metadata",
        metadata_class=BaseMetadata,
        default=dict,
    )


class ProjectElementAttribute(ContentModel):
    """Attribute with values at specific locations on the mesh"""

    location = properties.StringChoice(
        "Location of the attribute on mesh",
        choices=(
            "vertices",
            "segments",
            "faces",
            "cells",
            "parent_blocks",
            "sub_blocks",
            "elements",
        ),
    )
    metadata = ArbitraryMetadataDict(
        "Attribute metadata",
        metadata_class=AttributeMetadata,
        default=dict,
    )

    @property
    def array(self):
        """Attribute subclasses should override array"""
        raise ValueError("Cannot access array of base ProjectElementAttribute")


class ProjectElement(ContentModel):
    """Base class for all OMF elements

    ProjectElement subclasses must define their geometry.
    """

    attributes = properties.List(
        "Attributes defined on the element",
        prop=ProjectElementAttribute,
        required=False,
        default=list,
    )
    metadata = ArbitraryMetadataDict(
        "Element metadata",
        metadata_class=ElementMetadata,
        default=dict,
    )

    # Subclasses must set this to the tuple of location strings they support.
    _valid_locations = None

    def location_length(self, location):
        """Return correct attribute length based on location"""
        raise NotImplementedError()

    @properties.validator
    def _validate_attributes(self):
        """Check if element is built correctly"""
        assert self._valid_locations, "ProjectElement needs _valid_locations"
        for i, attr in enumerate(self.attributes):
            if attr.location not in self._valid_locations:  # pylint: disable=W0212
                raise properties.ValidationError(
                    "Invalid location {loc} - valid values: {locs}".format(
                        loc=attr.location,
                        locs=", ".join(self._valid_locations),  # pylint: disable=W0212
                    )
                )
            valid_length = self.location_length(attr.location)
            if len(attr.array.array) != valid_length:
                raise properties.ValidationError(
                    "attributes[{index}] length {attrlen} does not match "
                    "{loc} length {meshlen}".format(
                        index=i,
                        attrlen=len(attr.array.array),
                        loc=attr.location,
                        meshlen=valid_length,
                    )
                )
        return True


class
Project(ContentModel): 271 | """OMF Project for holding all elements and metadata 272 | 273 | Save these objects to OMF files with :meth:`omf.fileio.save` and 274 | load them with :meth:`omf.fileio.load` 275 | """ 276 | 277 | schema = "org.omf.v2.project" 278 | 279 | elements = properties.List( 280 | "Project Elements", 281 | prop=ProjectElement, 282 | default=list, 283 | ) 284 | 285 | metadata = ArbitraryMetadataDict( 286 | "Project metadata", 287 | metadata_class=ProjectMetadata, 288 | default=dict, 289 | ) 290 | 291 | origin = properties.Vector3( 292 | "Origin for all elements in the project relative to the coordinate reference system", 293 | default=[0.0, 0.0, 0.0], 294 | ) 295 | -------------------------------------------------------------------------------- /tests/test_attributes.py: -------------------------------------------------------------------------------- 1 | """Tests for attribute object validation""" 2 | import datetime 3 | 4 | import numpy as np 5 | import properties 6 | import pytest 7 | 8 | import omf 9 | 10 | 11 | # pylint: disable=W0143 12 | def test_scalar_array(): 13 | """Test array init and access works correctly""" 14 | arr = omf.attribute.Array(np.array([1, 2, 3], dtype="uint8")) 15 | assert arr.array.dtype.kind == "u" 16 | assert np.array_equal(arr.array, [1, 2, 3]) 17 | assert arr.data_type == "Uint8Array" 18 | assert arr.shape == [3] 19 | assert arr.size == 3 20 | binary_dict = {} 21 | output = arr.serialize(include_class=False, binary_dict=binary_dict) 22 | assert len(binary_dict) == 1 23 | assert output == { 24 | "schema": "org.omf.v2.array.numeric", 25 | "data_type": "Uint8Array", 26 | "shape": [3], 27 | "size": 3, 28 | "array": list(binary_dict.keys())[0], 29 | } 30 | new_arr = omf.attribute.Array.deserialize(output, binary_dict=binary_dict) 31 | assert properties.equal(arr, new_arr) 32 | 33 | 34 | def test_invalid_array(): 35 | """Test Array class without valid array""" 36 | arr = omf.attribute.Array() 37 | assert arr.data_type is 
None 38 | assert arr.shape is None 39 | assert arr.size is None 40 | assert isinstance(omf.attribute.Array.deserialize(""), omf.attribute.Array) 41 | assert isinstance(omf.attribute.Array.deserialize({}), omf.attribute.Array) 42 | 43 | 44 | def test_invalid_string_list(): 45 | """Test StringList class without valid array""" 46 | arr = omf.attribute.StringList() 47 | assert arr.data_type is None 48 | assert arr.shape is None 49 | assert arr.size is None 50 | assert isinstance(omf.attribute.StringList.deserialize(""), omf.attribute.StringList) 51 | assert isinstance(omf.attribute.StringList.deserialize({}), omf.attribute.StringList) 52 | 53 | 54 | def test_boolean_array(): 55 | """Test boolean array bits""" 56 | arr = omf.attribute.Array(np.array([[1, 1], [0, 0]], dtype="bool")) 57 | assert arr.array.dtype.kind == "b" 58 | assert arr.data_type == "BooleanArray" 59 | assert arr.shape == [2, 2] 60 | assert arr.size == 1 61 | binary_dict = {} 62 | output = arr.serialize(include_class=False, binary_dict=binary_dict) 63 | assert len(binary_dict) == 1 64 | assert output == { 65 | "schema": "org.omf.v2.array.numeric", 66 | "data_type": "BooleanArray", 67 | "shape": [2, 2], 68 | "size": 1, 69 | "array": list(binary_dict.keys())[0], 70 | } 71 | new_arr = omf.attribute.Array.deserialize(output, binary_dict=binary_dict) 72 | assert properties.equal(arr, new_arr) 73 | 74 | 75 | def test_datetime_list(): 76 | """Test string list gives datetime data_type""" 77 | arr = omf.attribute.StringList(["1995-08-12T18:00:00Z", "1995-08-13T18:00:00Z"]) 78 | assert arr.data_type == "DateTimeArray" 79 | assert arr.shape == [2] 80 | binary_dict = {} 81 | output = arr.serialize(include_class=False, binary_dict=binary_dict) 82 | assert len(binary_dict) == 1 83 | assert output == { 84 | "schema": "org.omf.v2.array.string", 85 | "data_type": "DateTimeArray", 86 | "shape": [2], 87 | "size": 48, 88 | "array": list(binary_dict.keys())[0], 89 | } 90 | 91 | 92 | def test_string_list(): 93 | """Test 
def test_string_list():
    """Test string list gives string data_type"""
    # deserialize ignores the bogus shape/data_type/size inputs and
    # recomputes them from the JSON list decoded out of binary_dict
    arr = omf.attribute.StringList.deserialize(
        {
            "shape": "",
            "data_type": "",
            "size": "",
            "array": "a",
        },
        binary_dict={"a": b'["a", "b", "c"]'},
    )
    assert arr.data_type == "StringArray"
    assert arr.shape == [3]
    assert arr.size == 15
    assert len(arr) == 3
    assert arr[0] == "a"
    assert arr[1] == "b"
    assert arr[2] == "c"
    # serializing without a binary_dict omits the "array" UUID entry
    output = arr.serialize(include_class=False)
    assert output == {
        "schema": "org.omf.v2.array.string",
        "data_type": "StringArray",
        "shape": [3],
        "size": 15,
    }


# pylint: enable=W0143


def test_array_instance_prop():
    """Test ArrayInstanceProperty validates correctly"""

    class HasArray(properties.HasProperties):
        """Test class for ArrayInstanceProperty"""

        arr = omf.attribute.ArrayInstanceProperty(
            "Array instance",
            shape=("*", 3),
            dtype=float,
        )

    harr = HasArray()
    harr.arr = np.array([[1.0, 2, 3], [4, 5, 6]])
    assert harr.validate()
    assert np.array_equal(harr.arr.array, [[1.0, 2, 3], [4, 5, 6]])
    assert harr.arr.data_type == "Float64Array"  # pylint: disable=E1101
    assert harr.arr.shape == [2, 3]
    # six float64 values at 8 bytes each
    assert harr.arr.size == 8 * 6

    # wrong shape (1D) and wrong dtype (int) must both be rejected
    with pytest.raises(properties.ValidationError):
        harr.arr = np.array([1.0, 2, 3])
    with pytest.raises(properties.ValidationError):
        harr.arr = np.array([[1, 2, 3], [4, 5, 6]])


def test_vector_data_dimensionality():
    """Test only 2D and 3D arrays are valid for vector data"""
    vattr = omf.attribute.VectorAttribute(array=[[1, 1], [2, 2], [3, 3]])
    assert vattr.array.shape == [3, 2]
    vattr = omf.attribute.VectorAttribute(array=[[1, 1, 1], [2, 2, 2], [3, 3, 3]])
    assert vattr.array.shape == [3, 3]
    # scalar lists (1D) and 4-component vectors are invalid
    with pytest.raises(properties.ValidationError):
        omf.attribute.VectorAttribute(array=[1, 2, 3])
    with pytest.raises(properties.ValidationError):
        omf.attribute.VectorAttribute(array=[[1, 2, 3, 4]])


def test_continuous_colormap():
    """Test continuous colormap validation

    Renamed from ``test_contiuous_colormap`` to fix the typo in the name;
    nothing references the test by name so discovery is unaffected.
    """
    cmap = omf.attribute.ContinuousColormap()
    # limits must be ascending
    with pytest.raises(properties.ValidationError):
        cmap.limits = [1.0, 0.0]
    cmap.gradient = [[0, 0, 0]] * 100
    cmap.limits = [0.0, 1.0]
    # mutating the list in place bypasses the property setter, so the
    # error only surfaces on an explicit validate() call
    cmap.limits[0] = 2.0
    with pytest.raises(properties.ValidationError):
        cmap.validate()
    cmap.limits[0] = 0.0
    cmap.validate()
    # gradient entries must be valid RGB components (0-255)
    with pytest.raises(properties.ValidationError):
        cmap.gradient = np.array([[0, 0, -1]])
    with pytest.raises(properties.ValidationError):
        cmap.gradient = np.array([[0, 0, 256]])


def test_discrete_colormap():
    """Test discrete colormap validation"""
    cmap = omf.attribute.DiscreteColormap()
    cmap.end_points = [0.5]
    cmap.end_inclusive = [True]
    cmap.colors = [[0, 100, 0], [100, 0, 0]]
    assert cmap.validate()
    # required invariants: len(end_inclusive) == len(end_points) and
    # len(colors) == len(end_points) + 1 -- break each in turn
    cmap.end_points = [-0.5, 0.5]
    with pytest.raises(properties.ValidationError):
        cmap.validate()
    cmap.end_points = [0.5]
    cmap.end_inclusive = [True, False]
    with pytest.raises(properties.ValidationError):
        cmap.validate()
    cmap.end_inclusive = [True]
    cmap.colors = [[0, 100, 0]]
    with pytest.raises(properties.ValidationError):
        cmap.validate()
    cmap.colors = [[0, 100, 0], [100, 0, 0], [0, 0, 100]]
    with pytest.raises(properties.ValidationError):
        cmap.validate()
    # repeated end points are allowed as long as the list never decreases
    cmap.end_points = [0.5, 1, 1]
    cmap.end_inclusive = [True, False, True]
    cmap.colors = [[0, 100, 0], [100, 0, 0], [0, 0, 100], [100, 100, 100]]
    assert cmap.validate()
    # strictly decreasing anywhere is rejected on assignment
    with pytest.raises(properties.ValidationError):
        cmap.end_points = [0.5, 1, 0.5]
    with pytest.raises(properties.ValidationError):
        cmap.end_points = [1.5, 1, 0.5]
def test_category_colormap():
    """Test legend validation"""
    # matching indices/values lengths are valid, with or without colors
    legend = omf.attribute.CategoryColormap(
        name="test",
        indices=[0, 1, 2],
        values=["x", "y", "z"],
    )
    assert legend.validate()
    legend.colors = [[0, 0, 0], [0, 0, 255], [255, 0, 0]]
    assert legend.validate()
    # indices/values length mismatch is invalid
    legend = omf.attribute.CategoryColormap(
        name="test",
        indices=[0, 1, 2],
        values=["x", "y"],
    )
    with pytest.raises(properties.ValidationError):
        legend.validate()
    # when colors are given their length must match values
    legend = omf.attribute.CategoryColormap(
        name="test",
        indices=[0, 1, 2],
        values=["x", "y", "z"],
        colors=[[0, 0, 0], [0, 0, 255], [255, 0, 0], [255, 255, 255]],
    )
    with pytest.raises(properties.ValidationError):
        legend.validate()


def test_category_data():
    """Test mapped data validation"""
    mattr = omf.attribute.CategoryAttribute()
    # indices not present in the colormap (-1, -10) act as "no data"
    mattr.array = [0, 2, 1, -1]
    mattr.categories = omf.attribute.CategoryColormap(
        name="letter",
        indices=[0, 1, 2],
        values=["x", "y", "z"],
    )
    mattr.location = "vertices"
    assert mattr.validate()
    # non-integer index arrays are rejected
    with pytest.raises(properties.ValidationError):
        mattr.array = [0.5, 1.5, 2.5]
    mattr.array = [-10, 0, 1]
    assert mattr.validate()
    mattr.array.array[0] = 0
    mattr.categories.colors = ["red", "blue", "green"]
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
    # datetime.datetime.now(datetime.timezone.utc) is the replacement --
    # confirm the metadata validator accepts aware datetimes first.
    mattr.metadata = {
        "units": "m",
        "date_created": datetime.datetime.utcnow(),
        "version": "v1.3",
    }
    assert mattr.validate()
    # colors list shorter than values list is invalid
    mattr.categories.colors = ["red", "blue"]
    with pytest.raises(properties.ValidationError):
        mattr.validate()
    # category values must be strings, not numbers
    with pytest.raises(properties.ValidationError):
        mattr.categories = omf.attribute.CategoryColormap(
            name="numeric",
            indices=[0, 1, 2],
            values=[0.5, 0.6, 0.7],
        )
def test_basemodel_schema():
    """Checks for a unique schema name"""
    # Walk the subclass tree starting at BaseModel, recording for every
    # class whether it is a leaf (has no subclasses of its own).
    frontier = {omf.base.BaseModel}
    leaf_status = {}
    while frontier:
        current = frontier.pop()
        children = current.__subclasses__()
        leaf_status[current] = not children
        frontier.update(children)

    # Leaf classes must each declare a non-empty, globally unique schema;
    # intermediate (abstract) classes must leave schema empty.
    used_schemas = set()
    for cls_, leaf in leaf_status.items():
        if not leaf:
            assert cls_.schema == ""
            continue
        assert cls_.schema != ""
        assert cls_.schema not in used_schemas
        used_schemas.add(cls_.schema)
# -- General configuration ------------------------------------------------

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.mathjax",
    "sphinx.ext.viewcode",
]

# Paths that contain templates, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "omf"
copyright = "2019, Global Mining Guidelines Group"
author = "Global Mining Guidelines Group"

# The short X.Y version.
version = "2.0"
# The full version, including alpha/beta/rc tags.
release = "2.0.0a0"

# The language for content autogenerated by Sphinx.
language = "en"

# Patterns, relative to the source directory, to ignore when looking for
# source files.  FIX: this value was previously assigned twice -- first
# ["_build"], then silently clobbered by ["_static"] further down the file.
# Both directories are now excluded in a single assignment.
exclude_patterns = ["_build", "_static"]

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# Use the Read the Docs theme when available, falling back to the built-in
# default theme.  Only ImportError is caught (rather than a bare except)
# so genuine bugs inside the theme package still surface.
try:
    import sphinx_rtd_theme

    html_theme = "sphinx_rtd_theme"
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
    html_theme = "default"

# Custom sidebar templates applied to every page.
html_sidebars = {
    "**": [
        "globaltoc.html",
        "searchbox.html",
    ],
}

# Output file base name for HTML help builder.
htmlhelp_basename = "omfdoc"


# -- Options for LaTeX output ---------------------------------------------

# No LaTeX customization; all keys left at their Sphinx defaults.
latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "omf.tex",
        "omf Documentation",
        "Global Mining Guidelines Group",
        "manual",
    ),
]


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "omf", "omf Documentation", [author], 1)]


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files.
List of tuples 287 | # (source start file, target name, title, author, 288 | # dir menu entry, description, category) 289 | texinfo_documents = [ 290 | ( 291 | master_doc, 292 | "omf", 293 | "omf Documentation", 294 | author, 295 | "omf", 296 | "omf Documentation.", 297 | "Miscellaneous", 298 | ), 299 | ] 300 | 301 | # Documents to append as an appendix to all manuals. 302 | # texinfo_appendices = [] 303 | 304 | # If false, no module index is generated. 305 | # texinfo_domain_indices = True 306 | 307 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 308 | # texinfo_show_urls = 'footnote' 309 | 310 | # If true, do not generate a @detailmenu in the "Top" node's menu. 311 | # texinfo_no_detailmenu = False 312 | 313 | 314 | # Example configuration for intersphinx: refer to the Python standard library. 315 | intersphinx_mapping = { 316 | "https://docs.python.org/": None, 317 | # 'http://docs.scipy.org/doc/numpy/': None, 318 | # 'http://docs.scipy.org/doc/scipy/reference/': None, 319 | "http://propertiespy.readthedocs.io/en/latest/": None, 320 | } 321 | linkcheck_ignore = ["https://gmggroup.org"] 322 | linkcheck_retries = 10 323 | 324 | 325 | import sphinx.environment 326 | from docutils.utils import get_source_line 327 | 328 | 329 | def _supress_nonlocal_image_warn(self, msg, node, **kwargs): 330 | if not msg.startswith("nonlocal image URI found:"): 331 | self._warnfunc(msg, "%s:%s" % get_source_line(node), **kwargs) 332 | 333 | 334 | sphinx.environment.BuildEnvironment.warn_node = _supress_nonlocal_image_warn 335 | -------------------------------------------------------------------------------- /omf/attribute.py: -------------------------------------------------------------------------------- 1 | """attribute.py: different ProjectElementAttribute classes""" 2 | import json 3 | import uuid 4 | 5 | import numpy as np 6 | import properties 7 | 8 | from .base import BaseModel, ContentModel, ProjectElementAttribute 9 | 10 | 11 | DATA_TYPE_LOOKUP_TO_NUMPY = { 
# Mapping from OMF data_type strings to the numpy dtypes they describe.
DATA_TYPE_LOOKUP_TO_NUMPY = {
    "Int8Array": np.dtype("int8"),
    "Uint8Array": np.dtype("uint8"),
    "Int16Array": np.dtype("int16"),
    "Uint16Array": np.dtype("uint16"),
    "Int32Array": np.dtype("int32"),
    "Uint32Array": np.dtype("uint32"),
    "Int64Array": np.dtype("int64"),
    "Uint64Array": np.dtype("uint64"),
    "Float32Array": np.dtype("float32"),
    "Float64Array": np.dtype("float64"),
    "BooleanArray": np.dtype("bool"),
}
# Reverse mapping: numpy dtype -> OMF data_type string.
DATA_TYPE_LOOKUP_TO_STRING = {value: key for key, value in DATA_TYPE_LOOKUP_TO_NUMPY.items()}


class Array(BaseModel):
    """Class to validate and serialize a 1D or 2D numpy array

    Data type, size, shape are computed directly from the array.

    Serializing and deserializing this class requires passing an additional
    keyword argument :code:`binary_dict` where the array binary is persisted.
    The serialized JSON includes array metadata and a UUID; this UUID
    is the key in the binary_dict.
    """

    schema = "org.omf.v2.array.numeric"

    array = properties.Array(
        "1D or 2D numpy array wrapped by the Array instance",
        shape={("*",), ("*", "*")},
        dtype=(int, float, bool),
        # binary payload is handled separately via binary_dict, so the
        # JSON serializer/deserializer for this property are no-ops
        serializer=lambda *args, **kwargs: None,
        deserializer=lambda *args, **kwargs: None,
    )

    def __init__(self, array=None, **kwargs):
        super().__init__(**kwargs)
        if array is not None:
            self.array = array

    def __len__(self):
        return self.array.__len__()

    def __getitem__(self, i):
        return self.array.__getitem__(i)

    @properties.validator
    def _validate_data_type(self):
        """Reject arrays whose dtype has no OMF data_type string"""
        if self.array.dtype not in DATA_TYPE_LOOKUP_TO_STRING:
            raise properties.ValidationError(
                "bad dtype: {} - Array must have dtype in {}".format(
                    self.array.dtype,
                    ", ".join([dtype.name for dtype in DATA_TYPE_LOOKUP_TO_STRING]),
                )
            )
        return True

    @properties.StringChoice("Array data type string", choices=list(DATA_TYPE_LOOKUP_TO_NUMPY))
    def data_type(self):
        """Array type descriptor, determined directly from the array"""
        if self.array is None:
            return None
        return DATA_TYPE_LOOKUP_TO_STRING.get(self.array.dtype, None)

    @properties.List(
        "Shape of the array",
        properties.Integer(""),
    )
    def shape(self):
        """Array shape, determined directly from the array"""
        if self.array is None:
            return None
        return list(self.array.shape)

    @properties.Integer("Size of array in bytes")
    def size(self):
        """Total size of the array in bytes, determined directly from the array"""
        if self.array is None:
            return None
        if self.data_type == "BooleanArray":  # pylint: disable=W0143
            # booleans are bit-packed on serialization: 8 values per byte
            return int(np.ceil(self.array.size / 8))
        return self.array.size * self.array.itemsize

    def serialize(self, include_class=True, save_dynamic=False, **kwargs):
        """Serialize metadata to JSON and, when a binary_dict is supplied,
        persist the raw array bytes under a fresh UUID key"""
        output = super().serialize(include_class=include_class, save_dynamic=True, **kwargs)
        binary_dict = kwargs.get("binary_dict", None)
        if binary_dict is not None:
            array_uid = str(uuid.uuid4())
            if self.data_type == "BooleanArray":  # pylint: disable=W0143
                array_binary = np.packbits(self.array, axis=None).tobytes()
            else:
                array_binary = self.array.tobytes()
            binary_dict.update({array_uid: array_binary})
            output.update({"array": array_uid})
        return output

    @classmethod
    def deserialize(cls, value, trusted=False, strict=False, assert_valid=False, **kwargs):
        """Rebuild an Array from serialized metadata plus binary_dict bytes;
        malformed input falls through to an empty Array"""
        binary_dict = kwargs.get("binary_dict", {})
        if not isinstance(value, dict):
            pass
        elif any(key not in value for key in ["shape", "data_type", "array"]):
            pass
        elif value["array"] in binary_dict:
            array_binary = binary_dict[value["array"]]
            array_dtype = DATA_TYPE_LOOKUP_TO_NUMPY[value["data_type"]]
            if value["data_type"] == "BooleanArray":
                int_arr = np.frombuffer(array_binary, dtype="uint8")
                # FIX: np.product is a deprecated alias removed in NumPy 2.0;
                # np.prod is the supported spelling with identical behavior
                bit_arr = np.unpackbits(int_arr)[: np.prod(value["shape"])]
                arr = bit_arr.astype(array_dtype)
            else:
                arr = np.frombuffer(array_binary, dtype=array_dtype)
            arr = arr.reshape(value["shape"])
            return cls(arr)
        return cls()


class ArrayInstanceProperty(properties.Instance):
    """Instance property for OMF Array objects

    This is a custom :class:`Instance <properties.Instance>` property
    that has :code:`instance_class` set as :class:`Array <omf.attribute.Array>`.
    It exposes additional keyword arguments that further validate the
    shape and data type of the array.

    **Available keywords**:

    * **shape** - Valid array shape(s), as described by :class:`properties.Array`
    * **dtype** - Valid array dtype(s), as described by :class:`properties.Array`
    """

    def __init__(self, doc, **kwargs):
        if "instance_class" in kwargs:
            raise AttributeError("ArrayInstanceProperty does not allow custom instance_class")
        # inner Array property used purely as a shape/dtype validator
        self.validator_prop = properties.Array(
            "",
            shape={("*",), ("*", "*")},
            dtype=(int, float, bool),
        )
        super().__init__(doc, instance_class=Array, **kwargs)

    @property
    def shape(self):
        """Required shape of the Array instance's array property"""
        return self.validator_prop.shape

    @shape.setter
    def shape(self, value):
        self.validator_prop.shape = value

    @property
    def dtype(self):
        """Required dtype of the Array instance's array property"""
        return self.validator_prop.dtype

    @dtype.setter
    def dtype(self, value):
        self.validator_prop.dtype = value

    def validate(self, instance, value):
        """Validate the Array instance, then its wrapped numpy array"""
        self.validator_prop.name = self.name
        value = super().validate(instance, value)
        if value.array is not None:
            value.array = self.validator_prop.validate(instance, value.array)
        return value

    @property
    def info(self):
        info = "{instance_info} with shape {shape} and dtype {dtype}".format(
            instance_info=super().info,
            shape=self.shape,
            dtype=self.dtype,
        )
        return info
class StringList(BaseModel):
    """Class to validate and serialize a large list of strings

    Data type, size, shape are computed directly from the list.

    Serializing and deserializing this class requires passing an additional
    keyword argument :code:`binary_dict` where the string list is persisted.
    The serialized JSON includes array metadata and a UUID; this UUID
    is the key in the binary_dict.
    """

    schema = "org.omf.v2.array.string"

    array = properties.List(
        "List of datetimes or strings",
        properties.String(""),
        # list payload is stored in binary_dict as JSON, so JSON
        # serializer/deserializer for this property are no-ops
        serializer=lambda *args, **kwargs: None,
        deserializer=lambda *args, **kwargs: None,
    )

    def __init__(self, array=None, **kwargs):
        super().__init__(**kwargs)
        if array is not None:
            self.array = array

    def __len__(self):
        return self.array.__len__()

    def __getitem__(self, i):
        return self.array.__getitem__(i)

    @properties.StringChoice("List data type string", choices=["DateTimeArray", "StringArray"])
    def data_type(self):
        """Array type descriptor, determined directly from the array"""
        if self.array is None:
            return None
        # probe with a DateTime list validator: if every entry parses as a
        # datetime this is a DateTimeArray, otherwise a plain StringArray
        try:
            properties.List("", properties.DateTime("")).validate(self, self.array)
        except properties.ValidationError:
            return "StringArray"
        return "DateTimeArray"

    @properties.List(
        "Shape of the string list",
        properties.Integer(""),
        min_length=1,
        max_length=1,
    )
    def shape(self):
        """Array shape, determined directly from the array"""
        if self.array is None:
            return None
        return [len(self.array)]

    @properties.Integer("Size of string list dumped to JSON in bytes")
    def size(self):
        """Total size of the string list in bytes"""
        if self.array is None:
            return None
        # size reflects the JSON encoding, matching what serialize() stores
        return len(json.dumps(self.array))

    def serialize(self, include_class=True, save_dynamic=False, **kwargs):
        """Serialize metadata to JSON; when binary_dict is supplied,
        persist the list as UTF-8 JSON bytes under a fresh UUID key"""
        output = super().serialize(include_class=include_class, save_dynamic=True, **kwargs)
        binary_dict = kwargs.get("binary_dict", None)
        if binary_dict is not None:
            array_uid = str(uuid.uuid4())
            binary_dict.update({array_uid: bytes(json.dumps(self.array), "utf8")})
            output.update({"array": array_uid})
        return output

    @classmethod
    def deserialize(cls, value, trusted=False, strict=False, assert_valid=False, **kwargs):
        """Rebuild a StringList from serialized metadata plus binary_dict
        bytes; malformed input falls through to an empty StringList"""
        binary_dict = kwargs.get("binary_dict", {})
        if not isinstance(value, dict):
            pass
        elif any(key not in value for key in ["shape", "data_type", "array"]):
            pass
        elif value["array"] in binary_dict:
            arr = json.loads(binary_dict[value["array"]].decode("utf8"))
            return cls(arr)
        return cls()
class ContinuousColormap(ContentModel):
    """Color gradient with min/max values, used with NumericAttribute

    When this colormap is applied to a numeric attribute the attribute
    values between the limits are colored based on the gradient values.
    Any attribute value below and above the limits are colored with the
    first and last gradient values, respectively.

    .. code::

        # gradient
        #
        # RGB4 -        x - - - - - - ->
        # RGB3 -       /
        # RGB2 -      /
        # RGB1 -     /
        # RGB0 - <- x
        #    <------|---|--------------> attribute values
        #            limits
    """

    schema = "org.omf.v2.colormap.scalar"

    gradient = ArrayInstanceProperty(
        "N x 3 Array of RGB values between 0 and 255 which defines the color gradient",
        shape=("*", 3),
        dtype=int,
    )
    limits = properties.List(
        "Attribute range associated with the gradient",
        prop=properties.Float(""),
        min_length=2,
        max_length=2,
        default=properties.undefined,
    )

    @properties.validator("gradient")
    def _check_gradient_values(self, change):
        """Ensure gradient values are all between 0 and 255"""
        arr = change["value"].array
        if arr is None:
            # no underlying array assigned yet; nothing to check
            return
        # round-trip through uint8: any component outside 0-255 changes
        # value, which is how out-of-range entries are detected
        arr_uint8 = arr.astype("uint8")
        if not np.array_equal(arr, arr_uint8):
            raise properties.ValidationError("Gradient must be an array of RGB values between 0 and 255")
        # keep the compact uint8 representation
        change["value"].array = arr_uint8

    @properties.validator("limits")
    def _check_limits_on_change(self, change):
        """Ensure limits are valid"""
        if change["value"][0] > change["value"][1]:
            raise properties.ValidationError("Colormap limits[0] must be <= limits[1]")


class DiscreteColormap(ContentModel):
    """Colormap for grouping discrete intervals of NumericAttribute

    This colormap creates n+1 intervals where n is the length of end_points.
    Attribute values between -inf and the first end point correspond to
    the first color; attribute values between the first and second end point
    correspond to the second color; and so on until attribute values between
    the last end point and inf correspond to the last color.

    The end_inclusive property dictates if attribute values that equal the
    end point are in the lower interval (end_inclusive is True) or the upper
    interval (end_inclusive is False).

    .. code::

        # colors
        #
        # RGB2                 x - - - - ->
        #
        # RGB1        x - - - o
        #
        # RGB0 <- - - - - - o
        #
        #    <--------|--------|------------> attribute values
        #              end_points
    """

    schema = "org.omf.v2.colormap.discrete"

    end_points = properties.List(
        "Attribute values associated with edge of color intervals",
        prop=properties.Float(""),
        default=properties.undefined,
    )
    end_inclusive = properties.List(
        "True if corresponding end_point is included in lower interval; False if end_point is in upper interval",
        prop=properties.Boolean(""),
        default=properties.undefined,
    )
    colors = properties.List(
        "Colors for each interval",
        prop=properties.Color(""),
        min_length=1,
        default=properties.undefined,
    )

    @properties.validator
    def _validate_lengths(self):
        # invariants: len(end_inclusive) == len(end_points) and
        # len(colors) == len(end_points) + 1 (one color per interval)
        if len(self.end_points) != len(self.end_inclusive):
            pass
        elif len(self.colors) == len(self.end_points) + 1:
            return True
        raise properties.ValidationError(
            "Discrete colormap colors length must be one greater than end_points and end_inclusive values"
        )

    @properties.validator("end_points")
    def _validate_end_points_monotonic(self, change):
        # adjacent duplicates are allowed; only decreases are rejected
        for i in range(len(change["value"]) - 1):
            diff = change["value"][i + 1] - change["value"][i]
            if diff < 0:
                raise properties.ValidationError("end_points must be monotonically increasing")


class NumericAttribute(ProjectElementAttribute):
    """Attribute with scalar values and optional continuous or discrete colormap"""

    schema = "org.omf.v2.attribute.numeric"

    array = ArrayInstanceProperty(
        "Numeric values at locations on a mesh (see location parameter); these values must be scalars",
        shape=("*",),
    )
    colormap = properties.Union(
        "colormap associated with the attribute",
        [ContinuousColormap, DiscreteColormap],
        required=False,
    )
396 | "Numeric values at locations on a mesh (see location parameter); these values must be scalars", 397 | shape=("*",), 398 | ) 399 | colormap = properties.Union( 400 | "colormap associated with the attribute", 401 | [ContinuousColormap, DiscreteColormap], 402 | required=False, 403 | ) 404 | 405 | 406 | class VectorAttribute(ProjectElementAttribute): 407 | """Attribute with 2D or 3D vector values 408 | 409 | This attribute type cannot have a colormap, since you cannot map colormaps 410 | to vectors. 411 | """ 412 | 413 | schema = "org.omf.v2.attribute.vector" 414 | 415 | array = ArrayInstanceProperty( 416 | "Numeric vectors at locations on a mesh (see location parameter); these vectors may be 2D or 3D", 417 | shape={("*", 2), ("*", 3)}, 418 | ) 419 | 420 | 421 | class StringAttribute(ProjectElementAttribute): 422 | """Attribute with a list of strings or datetimes 423 | 424 | This attribute type cannot have a colormap; to use colors with strings, 425 | use :class:`omf.attribute.CategoryAttribute` instead. 426 | """ 427 | 428 | schema = "org.omf.v2.attribute.string" 429 | 430 | array = properties.Instance( 431 | "String values at locations on a mesh (see " 432 | "location parameter); these values may be DateTimes or " 433 | "arbitrary strings", 434 | StringList, 435 | ) 436 | 437 | 438 | class CategoryColormap(ContentModel): 439 | """Legends to be used with CategoryAttribute 440 | 441 | Every index in the CategoryAttribute array must correspond to a string 442 | value (the "category") and may additionally correspond to a color. 443 | 444 | .. 
code:: 445 | 446 | # values colors 447 | # 448 | # -- RGB2 x 449 | # 450 | # -- RGB1 x 451 | # 452 | # -- RGB0 x 453 | # 454 | # | | | <- attribute values 455 | # indices 456 | """ 457 | 458 | schema = "org.omf.v2.colormap.category" 459 | 460 | indices = properties.List( 461 | "indices corresponding to CategoryAttribute array values", 462 | properties.Integer(""), 463 | ) 464 | values = properties.List( 465 | "values for mapping indexed attribute", 466 | properties.String(""), 467 | ) 468 | colors = properties.List( 469 | "colors corresponding to values", 470 | properties.Color(""), 471 | required=False, 472 | ) 473 | 474 | @properties.validator 475 | def _validate_lengths(self): 476 | """Validate indices, values, and colors are all the same length""" 477 | if len(self.indices) != len(self.values): 478 | pass 479 | elif self.colors is None or len(self.colors) == len(self.values): 480 | return True 481 | raise properties.ValidationError("Legend colors and values must be the same length") 482 | 483 | 484 | class CategoryAttribute(ProjectElementAttribute): 485 | """Attribute of indices linked to category values 486 | 487 | To specify no data, index value in the array should be any value 488 | not present in the categories. 
489 | """ 490 | 491 | schema = "org.omf.v2.attribute.category" 492 | 493 | array = ArrayInstanceProperty( 494 | "indices into the category values for locations on a mesh", 495 | shape=("*",), 496 | dtype=int, 497 | ) 498 | categories = properties.Instance( 499 | "categories into which the indices map", 500 | CategoryColormap, 501 | ) 502 | -------------------------------------------------------------------------------- /omf/compat/omf_v1.py: -------------------------------------------------------------------------------- 1 | """omf_v1.py: Reader for OMF V1 files.""" 2 | import contextlib 3 | import io 4 | import json 5 | import struct 6 | import uuid 7 | import zlib 8 | 9 | import numpy as np 10 | import properties 11 | 12 | from .interface import IOMFReader, InvalidOMFFile, WrongVersionError 13 | 14 | from .. import attribute, base, blockmodel, lineset, pointset, surface, texture 15 | 16 | COMPATIBILITY_VERSION = b"OMF-v0.9.0" 17 | _default = object() 18 | 19 | 20 | # pylint: disable=too-few-public-methods 21 | class Reader(IOMFReader): 22 | """Reader for OMF V1 files.""" 23 | 24 | def __init__(self, filename: str): 25 | self._filename = filename 26 | self._f = None 27 | self._include_binary = True 28 | self._project = None 29 | self.__cache = {} # uuid -> reusable item 30 | 31 | def load(self, include_binary: bool = True, project_json: bool = None): 32 | self._include_binary = include_binary 33 | try: 34 | with open(self._filename, "rb") as self._f: 35 | project_uuid, json_start = self._read_header() 36 | self._project = self._read_json(json_start) 37 | try: 38 | return self._convert_project(project_uuid) 39 | except properties.ValidationError as exc: 40 | raise InvalidOMFFile(exc) from exc 41 | finally: 42 | self._reset() 43 | 44 | def _reset(self): 45 | self._f = None 46 | self._project = None 47 | self._include_binary = True 48 | self.__cache = {} # uuid -> reusable item 49 | 50 | def _read_header(self): 51 | """Checks magic number and version; gets project 
uid and json start""" 52 | self._f.seek(0) 53 | if self._f.read(4) != b"\x84\x83\x82\x81": 54 | raise WrongVersionError(f"Unsupported format: {self._filename}") 55 | file_version = struct.unpack("<32s", self._f.read(32))[0] 56 | file_version = file_version[0 : len(COMPATIBILITY_VERSION)] 57 | if file_version != COMPATIBILITY_VERSION: 58 | raise WrongVersionError("Unsupported file version: {}".format(file_version)) 59 | project_uuid = uuid.UUID(bytes=struct.unpack("<16s", self._f.read(16))[0]) 60 | json_start = struct.unpack("