├── .gitignore
├── LICENSE
├── README.md
├── pershombox
│   ├── __init__.py
│   ├── _software_backends
│   │   ├── __init__.py
│   │   ├── dipha_adapter.py
│   │   ├── hera_adapter.py
│   │   ├── perseus_adapter.py
│   │   ├── resource_handler.py
│   │   └── software_backends.cfg
│   ├── dgm_util.py
│   ├── lebedev.py
│   ├── pht.py
│   ├── pht_metric.py
│   └── toplex.py
└── tutorials
    ├── .gitignore
    ├── cubical_complex_persistence_diagrams.ipynb
    ├── discrete_2d_npht.ipynb
    ├── shared_code.py
    └── toplex_persistence_diagrams.ipynb
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | __pycache__
3 |
4 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Christoph Hofer
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # tda-toolkit
2 |
3 | This repository contains persistent homology related code which can be used
4 | to implement the approaches from [1] and [2] (see [References](#references)).
5 |
6 | *A final code release is planned for late 2017.*
7 |
8 | # Installation
9 |
10 | The `pershombox` package depends on some third-party software tools which we do not provide here.
11 | To install `pershombox`, you have to obtain those executables yourself and tell `pershombox` where
12 | to find them by editing the corresponding entries in
13 | `pershombox/_software_backends/software_backends.cfg`.
14 |
15 | The list below shows where to find the executables/sources and which `software_backends.cfg` entry
16 | corresponds to each.
17 | **Do not forget to `chmod +x` the executables on Unix-based systems!**
18 |
19 | 1. `DIPHA`: [Source code](https://github.com/DIPHA/dipha).
20 | Entry: `dipha`.
21 |
22 | 2. `Perseus`: [Source code or precompiled executables](http://people.maths.ox.ac.uk/nanda/perseus/index.html).
23 | Entry: `perseus`.
24 |
25 | 3. `hera`: [Source code](https://bitbucket.org/grey_narn/hera.git).
26 | We need the `wasserstein_dist` executable in `geom_matching/wasserstein`. Entry: `hera_wasserstein_dist`.
27 |
28 | We plan to also support [Dionysus (v2)](http://mrzv.org/software/dionysus2/) in the future.
29 |
30 | ## Exemplary DIPHA installation
31 |
32 | ```bash
33 | git clone https://github.com/DIPHA/dipha.git
34 | cd dipha
35 | mkdir build && cd build
36 | cmake ..
37 | make -j4
38 | ```
39 |
40 | Then edit `software_backends.cfg` accordingly:
41 |
42 | ```bash
43 | [paths]
44 | # Configure the paths to the backend software here
45 | # e.g., dipha=/home/myHome/dipha
46 | # do not forget to chmod +x on Unix-based systems
47 |
48 | dipha=/home/myHome/dipha/build/dipha
49 | ```
--------------------------------------------------------------------------------
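Once the paths are set, a quick sanity check is to import `pershombox` and list the backend configuration errors (an empty list means every backend was found). The snippet below is a minimal sketch: it assumes the repository root is on `sys.path` and that `get_backend_cfg_errors` and `cubical_complex_persistence_diagrams` are exported at package level, as the resource handler warning and the tutorials suggest.

```python
import numpy as np

import pershombox

# Backends that could not be resolved; an empty list means the configuration is fine.
print(pershombox.get_backend_cfg_errors())

# Smoke test for the DIPHA backend: persistence diagrams of a tiny filtrated
# 2D cubical complex (the same toy example used in the tutorials).
cubical_complex = np.array([[0, 2, 2],
                            [1, 3, 2],
                            [1, 1, 0]])
print(pershombox.cubical_complex_persistence_diagrams(cubical_complex))
```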
244 | """
245 | Calculates the persistence diagrams of a filtrated cubical complex.
246 |
247 | :param filtrated_cubical_complex: A d-dimensional array such that
248 |
249 | filtrated_cubical_complex[x_1, ... , x_d] = f(x_1, ... , x_d)
250 |
251 | where f is the filtration and (x_1, ..., x_d) are the coordinates of the vertex with respect to the canonical
252 | basis on the unit-spaced grid in the positive quadrant.
253 |
254 | :return:
255 | List with the points of the persistence diagram of dimension k at position k.
256 | """
257 | filtrated_cubical_complex = numpy.array(filtrated_cubical_complex)
258 | dimension = filtrated_cubical_complex.ndim
259 |
260 | with __tmp_dir_fact() as tmp_dir:
261 |
262 | image_data_file_path = os.path.join(tmp_dir, "image_data")
263 | persistence_diagram_file_path = os.path.join(tmp_dir, "persistence_diagram")
264 |
265 | with open(image_data_file_path, "bw") as f:
266 | _ImageDataFile(filtrated_cubical_complex).write_to_binary_file(f)
267 |
268 | _run_dipha(image_data_file_path,
269 | persistence_diagram_file_path,
270 | limit_dimensions,
271 | dual,
272 | benchmark)
273 |
274 | with open(persistence_diagram_file_path, "rb") as f:
275 | diagram = _PersistenceDiagramFile.load_from_binary_file(f)
276 |
277 | tmp = {}
278 | for i in range(dimension):
279 | tmp[i] = []
280 |
281 | for dimension, birth, death in diagram.points:
282 | if dimension < 0:
283 | dimension = -dimension - 1
284 | if not set_inf_to_max_filt_val:
285 | death = float('inf')
286 |
287 | tmp[dimension].append((birth, death))
288 |
289 | return [tmp[key] for key in tmp.keys()]
290 |
291 |
292 | def persistence_diagrams_of_VR_complex_from_distance_matrix(distance_matrix: numpy.array,
293 | upper_dimension: int,
294 | dual: bool = False,
295 | benchmark: bool = False) -> [[tuple]]:
296 | distance_matrix = numpy.array(distance_matrix)
297 |
298 | with __tmp_dir_fact() as tmp_dir:
299 | distance_matrix_file_path = os.path.join(tmp_dir, "distance_matrix")
300 | persistence_diagram_file_path = os.path.join(tmp_dir, "persistence_diagram")
301 |
302 | with open(distance_matrix_file_path, "bw") as f:
303 | _DistanceMatrixFile(distance_matrix).write_to_binary_file(f)
304 |
305 | _run_dipha(distance_matrix_file_path,
306 | persistence_diagram_file_path,
307 | upper_dimension,
308 | dual,
309 | benchmark)
310 |
311 | with open(persistence_diagram_file_path, "rb") as f:
312 | diagram = _PersistenceDiagramFile.load_from_binary_file(f)
313 |
314 | tmp = {}
315 | for i in range(upper_dimension):
316 | tmp[i] = []
317 |
318 | for dimension, birth, death in diagram.points:
319 | if dimension < 0:
320 | dimension = -dimension - 1
321 | death = float('inf')
322 |
323 | tmp[dimension].append((birth, death))
324 |
325 | return [tmp[key] for key in tmp.keys()]
326 |
327 |
328 | # endregion
329 |
330 |
331 |
332 |
--------------------------------------------------------------------------------
/pershombox/_software_backends/hera_adapter.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import os
3 | from subprocess import check_output
4 | from subprocess import DEVNULL
5 | from .resource_handler import get_path, Backends
6 | from tempfile import TemporaryDirectory
7 |
8 |
9 | __stdout = DEVNULL
10 | __stderr = DEVNULL
11 |
12 |
13 | def _get_hera_wasserstein_dist_path():
14 | return get_path(Backends.hera_wasserstein_dist)
15 |
16 |
17 | def wasserstein_distance(dgm_1: [[]], dgm_2: [[]], degree: float=2.0, internal_norm='inf', relative_error: float=0.01)->float:
18 | """
19 | Calculates the Wasserstein distance between two persistence diagrams.
20 |
21 | Parameters
22 | ----------
23 | dgm_1
24 | dgm_2
25 | degree: Wasserstein degree
26 | internal_norm: Internal norm used. 'inf' sets to infinity norm, q >= 1 to q-norm.
27 | relative_error
28 |
29 | Returns
30 | -------
31 |
32 | """
33 |
34 | # region parameter checking
35 |
36 | degree = float(degree)
37 | if degree < 1.0:
38 | raise ValueError("""Value range of parameter degree is [1, inf) given was {}""".format(degree))
39 | degree = '{:.10f}'.format(degree)
40 |
41 | if not internal_norm == 'inf':
42 | internal_norm = float(internal_norm)
43 |
44 | if internal_norm < 1.0:
45 | raise ValueError("""Value range of parameter internal_norm is [1, inf] given was {}""".format(internal_norm))
46 |
47 | internal_norm = '{:.10f}'.format(internal_norm)
48 |
49 | relative_error = float(relative_error)
50 | if relative_error < 0:
51 | raise ValueError("""Value range of parameter relative_error is [0, inf) given was {}""".format(relative_error))
52 | relative_error = '{:.10f}'.format(relative_error)
53 |
54 | #endregion
55 |
56 | with TemporaryDirectory() as tmp_dir:
57 | dgm_1_file_path = os.path.join(tmp_dir, 'dgm_1')
58 | dgm_2_file_path = os.path.join(tmp_dir, 'dgm_2')
59 |
60 | numpy.savetxt(dgm_1_file_path, numpy.array(dgm_1), delimiter=' ')
61 | numpy.savetxt(dgm_2_file_path, numpy.array(dgm_2), delimiter=' ')
62 |
63 | cmd = [_get_hera_wasserstein_dist_path(),
64 | dgm_1_file_path,
65 | dgm_2_file_path,
66 | degree,
67 | relative_error,
68 | internal_norm]
69 |
70 | out = check_output(cmd)
71 |
72 | return float(out.rstrip())
73 |
--------------------------------------------------------------------------------
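A minimal usage sketch of the adapter above, assuming the hera `wasserstein_dist` backend is configured; diagrams are plain lists of (birth, death) pairs, which the adapter writes to temporary text files for hera.

```python
from pershombox._software_backends.hera_adapter import wasserstein_distance

# Two toy persistence diagrams as lists of (birth, death) pairs.
dgm_1 = [(0.0, 1.0), (0.5, 2.0)]
dgm_2 = [(0.0, 1.1), (0.5, 1.8)]

# 2-Wasserstein distance with the internal infinity norm.
print(wasserstein_distance(dgm_1, dgm_2, degree=2.0, internal_norm='inf'))
```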
/pershombox/_software_backends/perseus_adapter.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy
3 | from subprocess import call
4 | from tempfile import TemporaryDirectory
5 | from subprocess import DEVNULL
6 | from .resource_handler import get_path, Backends
7 |
8 |
9 | __stdout = DEVNULL
10 | __stderr = DEVNULL
11 |
12 |
13 | def _get_perseus_path():
14 | return get_path(Backends.perseus)
15 |
16 |
17 | def _call_perseus(complex_type, complex_file_string):
18 | def get_dim_from_dgm_file(name):
19 | x = name.split('.txt')[0]
20 | x = x.split('_')[1]
21 | return int(x)
22 |
23 | with TemporaryDirectory() as tmp_dir:
24 | comp_file_path = os.path.join(tmp_dir, 'complex.txt')
25 | perseus_path = _get_perseus_path()
26 |
27 | with open(comp_file_path, 'w') as comp_file:
28 | comp_file.write(complex_file_string)
29 |
30 | call([perseus_path, complex_type, comp_file_path, tmp_dir + '/'], stdout=__stdout, stderr=__stderr)
31 |
32 | # Perseus writes diagram files named like '_0.txt', '_1.txt', ... (the output prefix is the tmp dir), plus '_betti.txt'
33 | diagram_files = [name for name in os.listdir(tmp_dir) if name.startswith('_') and name != '_betti.txt']
34 |
35 | dgms = {}
36 | for name in diagram_files:
37 | dim = get_dim_from_dgm_file(name)
38 | dgm_file_path = os.path.join(tmp_dir, name)
39 |
40 | if os.stat(dgm_file_path).st_size == 0:
41 | dgms[dim] = []
42 |
43 | else:
44 | dgm = numpy.loadtxt(dgm_file_path)
45 |
46 | if dgm.ndim == 2:
47 | dgms[dim] = dgm.tolist()
48 | elif dgm.ndim == 1:
49 | dgms[dim] = [dgm.tolist()]
50 | else:
51 | raise ValueError('Oddly shaped array read from dgm_file_path.')
52 |
53 | return dgms
54 |
55 |
56 | class PerseusAdapterException(Exception):
57 | pass
58 |
--------------------------------------------------------------------------------
/pershombox/_software_backends/resource_handler.py:
--------------------------------------------------------------------------------
1 | import os
2 | import warnings
3 | from configparser import ConfigParser
4 | from subprocess import call, DEVNULL
5 | from enum import Enum
6 |
7 |
8 | __CFG_FILE_NAME = 'software_backends.cfg'
9 |
10 |
11 | __cfg_path = os.path.join(os.path.dirname(__file__), __CFG_FILE_NAME)
12 |
13 |
14 | class Backends(Enum):
15 | dipha = 'dipha'
16 | perseus = 'perseus'
17 | hera_wasserstein_dist = 'hera_wasserstein_dist'
18 |
19 |
20 | __fall_backs = {
21 | Backends.dipha: 'dipha',
22 | Backends.hera_wasserstein_dist: 'hera',
23 | Backends.perseus: 'perseus'
24 | }
25 |
26 |
27 | __paths_or_errors = {
28 | Backends.dipha: None,
29 | Backends.hera_wasserstein_dist: None,
30 | Backends.perseus: None
31 | }
32 |
33 |
34 | parser = ConfigParser()
35 | parser.read(__cfg_path)
36 |
37 |
38 | class SoftwareBackendError(Exception):
39 | pass
40 |
41 |
42 | def init_backend(backend: Backends):
43 | path = parser.get('paths', backend.value)
44 |
45 | if path == '':
46 | path = __fall_backs[backend]
47 |
48 | try:
49 | call([path], stdout=DEVNULL, stderr=DEVNULL)
50 |
51 | except Exception as ex:
52 | __paths_or_errors[backend] = ex
53 |
54 | else:
55 | __paths_or_errors[backend] = path
56 |
57 |
58 | def get_path(backend: Backends)->str:
59 | path_or_error = __paths_or_errors[backend]
60 |
61 | if isinstance(path_or_error, Exception):
62 | ex_text = "{} backend software is not available.".format(backend.value)
63 | new_ex = SoftwareBackendError(ex_text)
64 | raise new_ex from path_or_error
65 |
66 | else:
67 | return path_or_error
68 |
69 |
70 | def get_backend_cfg_errors():
71 | return [(b.value, e) for b, e in __paths_or_errors.items() if isinstance(e, Exception)]
72 |
73 |
74 | def init_software_backends():
75 |
76 | for software_backend in Backends:
77 | init_backend(software_backend)
78 |
79 | backend_errors = get_backend_cfg_errors()
80 | if len(backend_errors) > 0:
81 |
82 | error_text = ""
83 | error_text += "The following backends are not properly configured\n"
84 |
85 | for b, _ in backend_errors:
86 | error_text += (b) + '\n'
87 |
88 | error_text += "Using stuff dependent on those backends will cause runtime errors.\n"
89 | error_text += "You can get all errors by calling pershombox.get_backend_cfg_errors().\n"
90 |
91 | warnings.warn(error_text, UserWarning)
--------------------------------------------------------------------------------
/pershombox/_software_backends/software_backends.cfg:
--------------------------------------------------------------------------------
1 | [paths]
2 | # Configure the paths to the backend software here
3 | # e.g., dipha=/home/myHome/dipha
4 | # do not forget to chmod +x on Unix-based systems
5 |
6 | dipha=
7 |
8 | hera_wasserstein_dist=
9 |
10 | perseus=
--------------------------------------------------------------------------------
/pershombox/dgm_util.py:
--------------------------------------------------------------------------------
1 | def de_essentialize(persistence_diagram: [[]], maximal_filtration_value: float)->[[]]:
2 | """
3 | Replaces all essental classes with points dying at maximal_filtration_value.
4 | Example:
5 | de_essentialize([(1, 2), (0, inf)], 10) = [(1, 2), (0, 10)]
6 |
7 | :param persistence_diagram:
8 | :param maximal_filtration_value: value which replaces inf
9 | :return:
10 | """
11 | if len(persistence_diagram) == 0:
12 | return persistence_diagram
13 |
14 | else:
15 | dgm = [tuple(p) for p in persistence_diagram]
16 |
17 | de_essentialized_dgm = [p for p in dgm if p[1] != float('inf')] + \
18 | [(p[0], maximal_filtration_value) for p in dgm if p[1] == float('inf')]
19 |
20 | return de_essentialized_dgm
21 |
--------------------------------------------------------------------------------
/pershombox/lebedev.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import itertools
3 | import functools
4 |
5 |
6 |
7 | # region Octahedral rotation group
8 | """
9 | Implementation of Octahedral Rotation group with matrix representation from [2]
10 |
11 | [2]:
12 | @article{Lenz2009,
13 | author = {Lenz, Reiner},
14 | file = {:home/anonymous/data/library/papers/Lenz - 2009 - Octahedral Filters for 3D Image Processing.pdf:pdf},
15 | journal = {Proceedings SSBA 2009},
16 | mendeley-groups = {group theory},
17 | pages = {109--112},
18 | title = {{Octahedral Filters for 3D Image Processing}},
19 | year = {2009}
20 | }
21 | """
22 |
23 |
24 | class OctahedralMatrixRotationGroup2Generators:
25 | """
26 | Implementation of the octahedral rotation group. Isomorphic to S4, generated by two elements:
27 |
28 | G = < A, D | DDD = AAAA = ADDA(1/D) = ADA(1/D)(1/A)(1/A)(1/D) = e >
29 |
30 | """
31 | # The two generator elements in matrix representation
32 | _R1 = np.array(((0, 0, 1), (1, 0, 0), (0, 1, 0)))
33 | _R2 = np.array(((0, 1, 0), (-1, 0, 0), (0, 0, 1)))
34 |
35 | # Identifiers of generators in string word
36 | _R1_id = 'D'
37 | _R2_id = 'A'
38 |
39 | # Complete representation as shortest words
40 | _elements_as_words = ['', 'A', 'D', 'AA', 'AD', 'DA', 'DD', 'AAA', 'AAD', 'ADA', 'ADD', 'DAA', 'DAD', 'DDA',
41 | 'AADA', 'AADD', 'ADAA', 'ADAD', 'DADA', 'DADD', 'DDAA', 'DDAD', 'DADAA', 'DADAD']
42 |
43 | @property
44 | def elements(self):
45 | return self._elements_as_words
46 |
47 | @property
48 | def generators(self):
49 | return [self._R1_id, self._R2_id]
50 |
51 | @classmethod
52 | def word_to_matrix(cls, word):
53 | matrix_word = []
54 | for c in word:
55 | if c == 'D':
56 | matrix_word.append(cls._R1)
57 | if c == 'A':
58 | matrix_word.append(cls._R2)
59 |
60 | return functools.reduce(np.matmul, matrix_word)
61 |
62 | def __iter__(self):
63 | return iter(self._elements_as_words)
64 |
65 |
66 | # endregion
67 |
68 |
69 | # region Lebedev orbits
70 | """
71 | Class names were given with respect to [1].
72 | Here, a Lebedev point is a tuple of length 2 where
73 | point[0] -> string identifier of the class to which the Lebedev point belongs.
74 | point[1] -> its identifier within the class.
75 |
76 | [1]: https://en.wikipedia.org/wiki/Lebedev_quadrature#Construction
77 |
78 | This region implements:
79 | 1. Lebedev point sets a1, a2, a3
80 |
81 | 2. A Lebedev Grid with 26 points.
82 |
83 | """
84 |
85 |
86 | class _LebedevOrbitBase:
87 | """
88 | Base class of a LebedevOrbit. A Lebedev Orbit is a point set on S^2 which is invariant
89 | under the Octahedral Symmetry Group.
90 | """
91 | # Possible point numbers for points of child class. Should be [str]
92 | _point_numbers = None
93 |
94 | # id connecting a LebedevPoint with its class. Should be str
95 | _string_id = None
96 | _points = None
97 |
98 | def __init__(self):
99 | self._spherical_dict = self._initial_state_spherical_dict()
100 |
101 | @staticmethod
102 | def _initial_state_spherical_dict():
103 | raise NotImplementedError("Abstract method.")
104 |
105 | @classmethod
106 | def _get_points(cls):
107 | for num in cls._point_numbers:
108 | yield (cls._string_id, num)
109 |
110 | def _check_point(self, point):
111 | point = tuple(point)
112 |
113 | if len(point) != 2:
114 | raise ValueError("{} is no point of a Lebedev point set".format(point))
115 |
116 | if point[0] != self._string_id:
117 | raise ValueError("{} does not contain to {}.".format(point, type(self).__name__))
118 |
119 | if point[1] not in self._point_numbers:
120 | raise ValueError("{} has no point with number {}.".format(type(self).__name__, point.number))
121 |
122 | return point
123 |
124 | @staticmethod
125 | def __snap_to_zero_one(value):
126 | if np.isclose([value], [1]):
127 | return 1
128 | elif np.isclose([value], [0]):
129 | return 0
130 | else:
131 | return value
132 |
133 | def to_cartesian(self, point: tuple)->tuple:
134 | """
135 | Returns cartesian coordinates of point.
136 |
137 | Parameters
138 | ----------
139 | point : Lebedev Point.
140 |
141 | Returns
142 | -------
143 | tuple. (x,y,z) cartesian coordinates of point.
144 | """
145 | point = self._check_point(point)
146 |
147 | polar = self.to_spherical(point)
148 | theta = polar[0] * np.pi / 180
149 | phi = polar[1] * np.pi / 180
150 |
151 | x = np.cos(theta) * np.sin(phi)
152 | y = np.sin(theta) * np.sin(phi)
153 | z = np.cos(phi)
154 |
155 | return tuple(map(self.__snap_to_zero_one, [x, y, z]))
156 |
157 | def to_spherical(self, point: tuple):
158 | """
159 | Returns spherical coordinates of point.
160 |
161 | Parameters
162 | ----------
163 | point :
164 | tuple. Lebedev Point.
165 |
166 | Returns
167 | -------
168 | tuple. (theta, phi) spherical coordinates in degree of point:
169 |
170 | (theta, phi) \in [-180, 180] x [0, 180]
171 | """
172 | point = self._check_point(point)
173 | point_number = point[1]
174 | return self._spherical_dict[point_number]
175 |
176 | def point_permutation_by_generator(self, octahedral_rotation_group_matrix_implementation):
177 | """
178 | Calculates the permutation on the orbit induced by the generator elements of
179 | octahedral_rotation_group_implementation.
180 |
181 | Parameters
182 | ----------
183 | octahedral_rotation_group_matrix_implementation :
184 | type. A class which implements a generative representation of S4 in matrix form.
185 | Expected call implementation:
186 | self.generators
187 | self.word_to_matrix(...)
188 |
189 | Returns
190 | -------
191 | dict. return_value['xy'][point_id] = (Matrix Representation of 'xy')(point_id)
192 |
193 | """
194 | O = octahedral_rotation_group_matrix_implementation()
195 | return_value = {}
196 | generator_words = O.generators
197 | for generator in generator_words:
198 | mapping = {}
199 | for p in self:
200 | generator_matrix = O.word_to_matrix(generator)
201 |
202 | rotated_point = np.matmul(generator_matrix, self.to_cartesian(p))
203 |
204 | for pp in self:
205 | if np.isclose(self.to_cartesian(pp), rotated_point).all():
206 | mapping[p] = pp
207 | break
208 | return_value[generator] = mapping
209 | return return_value
210 |
211 | @property
212 | def points(self)->list:
213 | """
214 | Gives the points of the orbit.
215 |
216 | Returns
217 | -------
218 | list.
219 | """
220 | return list(self._get_points())
221 |
222 | def __iter__(self):
223 | return self._get_points()
224 |
225 | def __len__(self):
226 | return len(self._point_numbers)
227 |
228 |
229 | class LebedevOrbit_a1(_LebedevOrbitBase):
230 | """
231 | Orbit of (1, 0, 0).
232 | """
233 | _point_numbers = [str(i + 1) for i in range(6)]
234 | _string_id = 'a1'
235 |
236 | def __init__(self):
237 | super().__init__()
238 |
239 | @staticmethod
240 | def _initial_state_spherical_dict():
241 | return {'1': (0, 90), '2': (180, 90), '3': (90, 90),
242 | '4': (-90, 90), '5': (90, 0), '6': (90, 180)}
243 |
244 | def to_cartesian(self, point):
245 | cart = super().to_cartesian(point)
246 | return tuple([int(c) for c in cart])
247 |
248 |
249 | class LebedevOrbit_a2(_LebedevOrbitBase):
250 | """
251 | Orbit of 1/sqrt(2) * (1, 1, 0).
252 | """
253 | _point_numbers = [str(i + 1) for i in range(12)]
254 | _string_id = 'a2'
255 |
256 | def __init__(self):
257 | super().__init__()
258 |
259 | @staticmethod
260 | def _initial_state_spherical_dict():
261 | return {'1': (90, 45),
262 | '2': (90, 135),
263 | '3': (-90, 45),
264 | '4': (-90, 135),
265 | '5': (0, 45),
266 | '6': (0, 135),
267 | '7': (180, 45),
268 | '8': (180, 135),
269 | '9': (45, 90),
270 | '10': (-45, 90),
271 | '11': (135, 90),
272 | '12': (-135, 90)}
273 |
274 |
275 | class LebedevOrbit_a3(_LebedevOrbitBase):
276 | """
277 | Orbit of 1/sqrt(3) * (1, 1, 1)
278 | """
279 | _point_numbers = [str(i + 1) for i in range(8)]
280 | _string_id = 'a3'
281 |
282 | def __init__(self):
283 | super().__init__()
284 |
285 | @staticmethod
286 | def _initial_state_spherical_dict():
287 | return {'1': (45, 54.735610317245346),
288 | '2': (45, 125.264389682754654),
289 | '3': (-45, 54.735610317245346),
290 | '4': (-45, 125.264389682754654),
291 | '5': (135, 54.735610317245346),
292 | '6': (135, 125.264389682754654),
293 | '7': (-135, 54.735610317245346),
294 | '8': (-135, 125.264389682754654)}
295 |
296 |
297 | def LebedevOrbit_b_k_Meta(typical_point: tuple):
298 | """
299 | class LebedevPoints_b_k(_LebedevOrbitBase):
300 | pass
301 |
302 | return LebedevPoints_b_k
303 | """
304 | raise NotImplementedError()
305 |
306 |
307 | def LebedevOrbit_c_k_Meta(typical_point: tuple):
308 | raise NotImplementedError()
309 |
310 |
311 | def LebedevOrbit_d_k_Meta(typical_point: tuple):
312 | raise NotImplementedError()
313 |
314 |
315 | # endregion
316 |
317 |
318 | # region LebedevGrids
319 | """
320 | A LebedevGrid is a union of distinct Lebedev point sets.
321 | """
322 | def LebedevGridMeta(lebedev_point_sets: [_LebedevOrbitBase])->type:
323 | """
324 |
325 | Parameters
326 | ----------
327 | lebedev_point_sets :
328 | LebedevPointBase. The LebedevPointBase derivations from which the grid will be built.
329 |
330 | Returns
331 | -------
332 | type. A LebedevGrid class built from the chosen LebedevPointSets.
333 | """
334 | class LebedevGrid:
335 | _lebedev_point_sets_types = lebedev_point_sets
336 |
337 | def __init__(self):
338 | self._point_set_instances = {}
339 | for point_set_type in self._lebedev_point_sets_types:
340 | self._point_set_instances[point_set_type._string_id] = point_set_type()
341 |
342 | def point_permutation_by_generator(self, octahedral_rotation_group_matrix_implementation):
343 | """
344 | Calculates the permutation on the grid induced by the generator elements of
345 | octahedral_rotation_group_implementation.
346 |
347 | Parameters
348 | ----------
349 | octahedral_rotation_group_matrix_implementation :
350 | type. A class which implements a generative representation of S4 in matrix form.
351 | Expected call implementation:
352 | self.generators
353 | self.word_to_matrix(...)
354 |
355 | Returns
356 | -------
357 | dict. return_value['xy'][point_id] = (Matrix Representation of 'xy')(point_id)
358 |
359 | """
360 | point_set_instances = self._point_set_instances.values()
361 | word_to_point_mappings = []
362 | for psi in point_set_instances:
363 | wpm = psi.point_permutation_by_generator(octahedral_rotation_group_matrix_implementation)
364 | word_to_point_mappings.append(wpm)
365 |
366 | G = OctahedralMatrixRotationGroup2Generators()
367 | generator_words = G.generators
368 |
369 | return_value = {}
370 | for w in generator_words:
371 | return_value[w] = {}
372 |
373 | for wpm in word_to_point_mappings:
374 | for w in generator_words:
375 | try:
376 | len_before_update = len(return_value[w])
377 | return_value[w].update(wpm[w])
378 | if len_before_update + len(wpm[w]) != len(return_value[w]):
379 | raise AssertionError(
380 | """
381 | It seems that two orbits are not disjoint.
382 | """
383 | )
384 |
385 | except KeyError:
386 | # If we get here then one of the point set instances does not generate a permutation
387 | # dict for w
388 | raise AssertionError(
389 | """
390 | {} seems not to act on one of the orbits.
391 | """.format(w)
392 | )
393 |
394 | return return_value
395 |
396 | @property
397 | def points(self):
398 | return list(self.__iter__())
399 |
400 | def _check_point(self, point):
401 | point = tuple(point)
402 |
403 | if point[0] not in self._point_set_instances.keys():
404 | raise ValueError('{} does not belong to this grid.'.format(point))
405 |
406 | return point
407 |
408 | def to_spherical(self, point):
409 | """
410 | Returns spherical coordinates of point.
411 | Parameters
412 | ----------
413 | point :
414 | tuple. Lebedev Point.
415 |
416 | Returns
417 | -------
418 | tuple. (theta, phi) spherical coordinates in degree of point:
419 |
420 | (theta, phi) \in [-180, 180] x [0, 180]
421 | """
422 | point = self._check_point(point)
423 | point_class_id = point[0]
424 | return self._point_set_instances[point_class_id].to_spherical(point)
425 |
426 | def to_cartesian(self, point):
427 | """
428 | Returns cartesian coordinates of point.
429 |
430 | Parameters
431 | ----------
432 | point : Lebedev Point.
433 |
434 | Returns
435 | -------
436 | tuple. (x,y,z) cartesian coordinates of point.
437 | """
438 | point = self._check_point(point)
439 | point_class_id = point[0]
440 | return self._point_set_instances[point_class_id].to_cartesian(point)
441 |
442 | def __iter__(self):
443 | return itertools.chain(*self._point_set_instances.values())
444 |
445 | def __len__(self):
446 | return sum([len(point_set_instance) for point_set_instance in self._point_set_instances.values()])
447 |
448 | return LebedevGrid
449 |
450 |
451 | LebedevGrid26 = LebedevGridMeta([LebedevOrbit_a1, LebedevOrbit_a2, LebedevOrbit_a3])
452 |
453 |
454 | # endregion
455 |
456 |
457 | # region Lebedev Integration
458 | """
459 | implemented after https://people.sc.fsu.edu/~jburkardt/datasets/sphere_lebedev_rule/sphere_lebedev_rule.html
460 | """
461 |
462 |
463 | class _Lebedev26Integrator:
464 | """
465 | Implementation for 26 point rule. Using weights from
466 | https://people.sc.fsu.edu/~jburkardt/datasets/sphere_lebedev_rule/lebedev_007.txt
467 | """
468 | _grid = LebedevGrid26()
469 | _w = {'a1': 0.047619047619048,
470 | 'a2': 0.038095238095238,
471 | 'a3': 0.032142857142857}
472 |
473 | @classmethod
474 | def _check_function(cls, function):
475 | if set(cls._grid) != function.keys():
476 | raise ValueError('{} is not a valid function from the LebedevGrid26 to R.'.format(function))
477 |
478 | @classmethod
479 | def weight(cls, lebedev_point):
480 | return cls._w[lebedev_point[0]]
481 |
482 | @classmethod
483 | def integrate(cls, function: dict):
484 | cls._check_function(function)
485 |
486 | return 4 * np.pi * sum([f_value * cls.weight(leb_point) for leb_point, f_value in function.items()])
487 |
488 |
489 | def lebedev_26_integration(function: dict)->float:
490 | """
491 | Integration of function residing on 26 point Lebedev grid.
492 |
493 | Parameters
494 | ----------
495 | function :
496 | dict. Expects Lebedev points, i.e., ('a1', '1') ... ('a3', '8'), as keys.
497 |
498 | Returns
499 | -------
500 | float.
501 |
502 | """
503 | return _Lebedev26Integrator.integrate(function)
504 |
505 |
506 | # endregion
507 |
508 |
509 | # region Octahedral rotation group action on the set of lebedev grid functions
510 | """
511 | A Lebedev Grid Function, f, is a dict with keys in a LebedevGrid.
512 | The octahedral rotation group, O, induces an action, sigma, on M = {f: f is a Lebedev Grid Function}
513 | by setting
514 |
515 | sigma(f, rho) := f(rho(.)).
516 |
517 | This region implements:
518 | 1. A representation of O on Lebedev point classes.
519 |
520 | 2. sigma
521 | """
522 |
523 |
524 | class ActionOctahedralRotationGroupOnLebedevGridFunctions:
525 | def __init__(self, lebedev_grid: type, octahedral_rotation_group_matrix_implementation: type):
526 | self._lebedev_orbit_instance = lebedev_grid()
527 | self._group_instance = octahedral_rotation_group_matrix_implementation()
528 | self._recursion_tails = self._generate_recursion_tails(octahedral_rotation_group_matrix_implementation)
529 |
530 | def _generate_recursion_tails(self,
531 | octahedral_rotation_group_matrix_implementation):
532 | grid = self._lebedev_orbit_instance
533 | permutation_by_generator = grid.point_permutation_by_generator(octahedral_rotation_group_matrix_implementation)
534 |
535 | recursion_tails = {}
536 |
537 | def get_recursion_tail_for_generator_word(permut):
538 | def recursion_tail_for_generator_word(f):
539 | f_new = {}
540 | for k, v in permut.items():
541 | f_new[v] = f[k]
542 |
543 | return f_new
544 | return recursion_tail_for_generator_word
545 |
546 | for word, permutation in permutation_by_generator.items():
547 | recursion_tails[word] = get_recursion_tail_for_generator_word(permutation)
548 |
549 | return recursion_tails
550 |
551 | def _check_f(self, f):
552 | if set(f.keys()) != set(self._lebedev_orbit_instance.points):
553 | raise ValueError(
554 | """
555 | {} is not a function from a Lebedev Grid.
556 | """.format(f)
557 | )
558 |
559 | def _check_w(self, word):
560 | generators = self._group_instance.generators
561 |
562 | for c in word:
563 | if c not in generators:
564 | raise ValueError(
565 | """
566 | {} does not belong to {}. It is not a combination of its generators.
567 | """.format(word, type(self._group_instance))
568 | )
569 |
570 | def _execute(self, f, w):
571 | """
572 | This is a right action on the set of functions from the lebedev grid/orbit.
573 | """
574 | if w == '':
575 | return f
576 |
577 | if w in self._recursion_tails:
578 | return self._recursion_tails[w](f)
579 | else:
580 | return self._execute(self._execute(f, w[0]), w[1:])
581 |
582 | def __call__(self, f: dict, word: str):
583 | self._check_f(f)
584 | self._check_w(word)
585 | return self._execute(f, word)
586 |
587 |
588 | # endregion
--------------------------------------------------------------------------------
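A quick sanity check for the 26-point quadrature above (a sketch; the import path assumes the package root is on `sys.path`): the 26 weights sum to 1, so integrating the constant function 1 returns the surface area of the unit sphere, 4π.

```python
import numpy as np

from pershombox.lebedev import LebedevGrid26, lebedev_26_integration

grid = LebedevGrid26()

# The constant function 1 on all 26 Lebedev points.
f = {point: 1.0 for point in grid.points}

# The weights sum to 1, so the quadrature evaluates to 4*pi, the surface area of S^2.
print(lebedev_26_integration(f), 4 * np.pi)
```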
/pershombox/pht.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | from ._software_backends.dipha_adapter import persistence_diagrams_of_filtrated_cubical_complex
3 | from .dgm_util import de_essentialize
4 | from .lebedev import LebedevGrid26
5 |
6 |
7 | # region helpers
8 |
9 | def _snap_zero_one(value):
10 | if numpy.isclose([value], [1]):
11 | return 1
12 | elif numpy.isclose([value], [0]):
13 | return 0
14 | else:
15 | return value
16 |
17 |
18 | # endregion
19 |
20 |
21 | # region height functions
22 |
23 |
24 | class NormalizedBarycentricHeightFiltration:
25 | def __init__(self, vertices, direction):
26 | vertices = numpy.array(vertices)
27 | direction = numpy.array(direction)
28 | self._direction = direction / numpy.linalg.norm(direction)
29 |
30 | if vertices.shape[1] != direction.shape[0]:
31 | raise ValueError('shape of vertices and direction do not comply!')
32 |
33 | self._barycenter = sum(vertices) / len(vertices)
34 | self._radius = max([numpy.linalg.norm(vertex - self._barycenter) for vertex in vertices])
35 |
36 | def __call__(self, vertex):
37 | return (numpy.dot(vertex - self._barycenter, self._direction) + self._radius) / (2 * self._radius)
38 |
39 |
40 | class BarycentricHeightFiltration:
41 | def __init__(self, vertices, direction):
42 | vertices = numpy.array(vertices)
43 | direction = numpy.array(direction)
44 | self._direction = direction / numpy.linalg.norm(direction)
45 |
46 | if vertices.shape[1] != direction.shape[0]:
47 | raise ValueError('shape of vertices and direction do not comply!')
48 |
49 | self._barycenter = sum(vertices) / len(vertices)
50 |
51 | def __call__(self, vertex):
52 | return numpy.dot(vertex - self._barycenter, self._direction)
53 |
54 |
55 | # endregion
56 |
57 |
58 | def calculate_discrete_NPHT_2d(binary_cubical_complex: numpy.array,
59 | number_of_directions)->list:
60 | """
61 | Calculates the NPHT of a 2d cubical complex using equidistant directions on S^1.
62 |
63 | :param binary_cubical_complex: 2d binary array; True entries are the vertices of the complex.
64 | :param number_of_directions: number of equidistant directions.
65 | :return: list indexed by direction; each entry is the list of persistence diagrams by dimension.
66 | """
67 |
68 | binary_cubical_complex = binary_cubical_complex.astype(bool)
69 |
70 | if binary_cubical_complex.ndim != 2:
71 | raise ValueError("binary_cubical_complex must have dimension 2.")
72 |
73 | vertices = [v for v, b in numpy.ndenumerate(binary_cubical_complex) if b]
74 |
75 | return_value = []
76 | # Spherical coordinates without PI as multiplicative factor
77 | spherical_coordinates = numpy.linspace(0, 2, number_of_directions + 1)[:-1]
78 | # _snap_zero_one guarantees that (1, 0), (-1, 0), (0, 1), (0, -1) are in cartesian_coordinates.
79 | cartesian_coordinates = [(_snap_zero_one(numpy.cos(t*numpy.pi)),
80 | _snap_zero_one(numpy.sin(t*numpy.pi)))
81 | for t in spherical_coordinates]
82 |
83 | for v_cart in cartesian_coordinates:
84 |
85 | filtration = NormalizedBarycentricHeightFiltration(vertices, v_cart)
86 |
87 | filtrated_complex = numpy.empty(binary_cubical_complex.shape)
88 | filtrated_complex.fill(float('inf'))
89 |
90 | f_values = []
91 | for v in vertices:
92 | f_v = filtration(v)
93 | f_values.append(f_v)
94 | filtrated_complex[v] = f_v
95 |
96 | f_max = max(f_values)
97 |
98 | dgms = persistence_diagrams_of_filtrated_cubical_complex(filtrated_complex)
99 | dgms = [de_essentialize(dgm, f_max) for dgm in dgms]
100 |
101 | return_value.append(dgms)
102 | return return_value
103 |
104 |
105 | class GeneralPersistentHomologyTransform3d:
106 | def __init__(self, height_function_type: type, grid_type: type):
107 | self._height_function_type = height_function_type
108 | self._grid_type = grid_type
109 |
110 | def __call__(self, binary_cubical_complex: numpy.array, de_essentialized=True)->dict:
111 | binary_cubical_complex = binary_cubical_complex.astype(bool)
112 |
113 | if binary_cubical_complex.ndim != 3:
114 | raise ValueError("simplicial_complex must have dimension 3.")
115 |
116 | vertices = [v for v, b in numpy.ndenumerate(binary_cubical_complex) if b]
117 | grid = self._grid_type()
118 | return_value = {}
119 |
120 | for direction in grid:
121 | filtrated_complex = numpy.empty(binary_cubical_complex.shape)
122 | filtrated_complex.fill(float('inf'))
123 |
124 | filtration = self._height_function_type(vertices,
125 | grid.to_cartesian(direction))
126 |
127 | f_values = []
128 | for v in vertices:
129 | f_v = filtration(v)
130 | f_values.append(f_v)
131 | filtrated_complex[v] = f_v
132 |
133 | f_max = max(f_values)
134 |
135 | dgms = persistence_diagrams_of_filtrated_cubical_complex(filtrated_complex)
136 | dgms = [de_essentialize(dgm, f_max) for dgm in dgms]
137 |
138 | return_value[direction] = dgms
139 |
140 | return return_value
141 |
142 |
143 | def calculate_discrete_NPHT_3d_Lebedev26(binary_cubical_complex: numpy.array):
144 | """
145 | Calculates the NPHT of a 3d binary cubical complex with respect to the Lebedev grid with 26 directions.
146 |
147 | :param binary_cubical_complex: 3d binary array; True entries are the vertices of the complex.
148 | :return: dict mapping each Lebedev point (direction) to the list of persistence diagrams by dimension.
149 | """
150 | f = GeneralPersistentHomologyTransform3d(BarycentricHeightFiltration,
151 | LebedevGrid26)
152 |
153 | return f(binary_cubical_complex)
154 |
--------------------------------------------------------------------------------
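A sketch of computing a discrete 2D NPHT with the module above; it requires the DIPHA backend, and the import path assumes the package root is on `sys.path`.

```python
import numpy as np

from pershombox.pht import calculate_discrete_NPHT_2d

# Binary image of a filled square; True entries are the vertices of the cubical complex.
img = np.zeros((16, 16), dtype=bool)
img[4:12, 4:12] = True

# NPHT sampled in 8 equidistant directions on S^1 (runs DIPHA once per direction).
npht = calculate_discrete_NPHT_2d(img, 8)

# npht[i][k] is the persistence diagram of dimension k for direction i.
print(len(npht), npht[0][0])
```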
/pershombox/pht_metric.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import scipy.integrate
3 |
4 | from .lebedev import lebedev_26_integration, \
5 | OctahedralMatrixRotationGroup2Generators, \
6 | ActionOctahedralRotationGroupOnLebedevGridFunctions, \
7 | LebedevGrid26
8 |
9 |
10 | from ._software_backends.hera_adapter import wasserstein_distance
11 |
12 |
13 | class Distance_NPHT_2d:
14 | def __init__(self,
15 | wasserstein_degree=2,
16 | wasserstein_internal_norm=2,
17 | included_dimensions=(0, 1),
18 | minimize_over_rotations=True):
19 | """
20 |
21 | Parameters
22 | ----------
23 | wasserstein_degree:
24 | int. p-parameter of the Wasserstein distance used inside.
25 |
26 | wasserstein_internal_norm:
27 | int or str. Internal norm used in the Wasserstein distance. Use 'inf' for the infinity norm or q >= 1 for the q-norm.
28 |
29 | included_dimensions :
30 | Controls which dimensions of the npht are used.
31 | (0,1) -> dimension 0 and 1
32 | (0) -> dimension 0
33 | (1) -> dimension 1
34 | minimize_over_rotations:
35 | bool. If false the min over the rotation group is not searched.
36 | """
37 | self.p = wasserstein_degree
38 | self.q = wasserstein_internal_norm
39 | self.included_dimensions = included_dimensions
40 | self.minimize_over_rotations = bool(minimize_over_rotations)
41 |
42 | def __call__(self, t_1: [[[]]], t_2: [[[]]]):
43 | """
44 | Calculate the approximated npht distance between both arguments.
45 |
46 | Parameters
47 | ----------
48 | t_1 : [[[]]]. Discretised npht over equidistantly distributed directions on S^1.
49 |
50 | t_1[i][j] persistence diagram of dimension j in direction i.
51 |
52 | t_2 : [[[]]]. like t_1.
53 |
54 | Returns
55 | -------
56 | float.
57 | """
58 | self._check_parameters(t_1, t_2)
59 |
60 | # n -> number of shifts
61 | if self.minimize_over_rotations:
62 | n = len(t_1)
63 | else:
64 | n = 1
65 |
66 | abscissa = numpy.linspace(0, 2 * numpy.pi, n + 1)
67 |
68 | integration_results = []
69 | for shift in range(n):
70 | ordinates_shifted = []
71 | for i in range(n):
72 | dgm_t_1_dir = [t_1[i][dim] for dim in self.included_dimensions]
73 | dgm_t_2_dir = [t_2[(i + shift) % n][dim] for dim in self.included_dimensions]
74 |
75 | y = sum(
76 | [wasserstein_distance(dgm_c_1, dgm_c_2, degree=self.p, internal_norm=self.q)
77 | for dgm_c_1, dgm_c_2
78 | in zip(dgm_t_1_dir, dgm_t_2_dir)])
79 | ordinates_shifted.append(y)
80 |
81 | # Last point twice to emulate closed curve integral
82 | ordinates_shifted.append(ordinates_shifted[0])
83 |
84 | integration_results.append(scipy.integrate.simps(ordinates_shifted, abscissa))
85 |
86 | return min(integration_results)
87 |
88 | @staticmethod
89 | def _check_parameters(t_1, t_2):
90 | if len(t_1) != len(t_2):
91 | raise ValueError("Expected len(t_1) == len(t_2)")
92 |
93 |
94 | class DistanceNPHT3D_Lebedev26:
95 | def __init__(self,
96 | wasserstein_degree: int=2,
97 | wasserstein_internal_norm=2,
98 | included_dimensions: tuple=(0, 1, 2),
99 | minimize_over_rotations=True):
100 | """
101 | Parameters
102 | ----------
103 | wasserstein_degree: int. p-parameter of the Wasserstein distance used inside.
104 |
105 | wasserstein_internal_norm:
106 | int or str. Internal norm used in the Wasserstein distance. Use 'inf' for the infinity norm or q >= 1 for the q-norm.
107 |
108 | included_dimensions :
109 | tuple. Controls which dimensions of the npht are used.
110 | (0,1,2) -> dimension 0, 1, 2
111 | (0) -> dimension 0
112 | ...
113 | (0, 2) -> dimension 0, 2
114 | minimize_over_rotations:
115 | bool. If false the min over the rotation group is not searched.
116 | """
117 | self.p = wasserstein_degree
118 | self.q = wasserstein_internal_norm
119 | self.included_dimensions = tuple(included_dimensions)
120 | self.minimize_over_rotations = bool(minimize_over_rotations)
121 |
122 | def __call__(self, t_1: [[[]]], t_2: [[[]]]):
123 | """
124 | Calculate the approximated npht distance between both arguments.
125 |
126 | Parameters
127 | ----------
128 | t_1 : [[[]]]. Discretised npht over 26 point Lebedev Grid.
129 |
130 | t_1[lebedev_point][j] persistence diagram of dimension j in direction lebedev_point.
131 |
132 | t_2 : [[[]]]. like t_1.
133 |
134 | Returns
135 | -------
136 | float.
137 | """
138 | self._check_parameters(t_1, t_2)
139 |
140 | if self.minimize_over_rotations:
141 | return self._calculate_rotation_optimized_distance(t_1, t_2)
142 | else:
143 | return self._calculate_distance(t_1, t_2)
144 |
145 | def _calculate_distance(self, t_1, t_2):
146 | function = {}
147 |
148 | for lebedev_point in t_1.keys():
149 | diagrams_t_1 = t_1[lebedev_point]
150 | diagrams_t_2 = t_2[lebedev_point]
151 |
152 | value = sum([
153 | wasserstein_distance(diagrams_t_1[dim], diagrams_t_2[dim],
154 | degree=self.p, internal_norm=self.q)
155 | for dim in range(3) if dim in self.included_dimensions
156 | ])
157 |
158 | function[lebedev_point] = value
159 |
160 | return lebedev_26_integration(function)
161 |
162 | def _calculate_rotation_optimized_distance(self, t_1, t_2):
163 |
164 | distances = []
165 | O = OctahedralMatrixRotationGroup2Generators()
166 | sigma = ActionOctahedralRotationGroupOnLebedevGridFunctions(LebedevGrid26,
167 | OctahedralMatrixRotationGroup2Generators)
168 |
169 | for element in O:
170 | t_2_rotated = sigma(t_2, element)
171 | distances.append(self._calculate_distance(t_1, t_2_rotated))
172 |
173 | return min(distances)
174 |
175 | @staticmethod
176 | def _check_parameters(t_1, t_2):
177 | if t_1.keys() != t_2.keys():
178 | raise ValueError("Expected t_1.keys() == t_2.keys()")
179 |
180 |
181 | # region functional interface
182 |
183 |
184 | def distance_npht2D(npht_1: [[[]]],
185 | npht_2: [[[]]],
186 | wasserstein_degree=2,
187 | wasserstein_internal_norm=2,
188 | included_dimensions=(0, 1),
189 | minimize_over_rotations=True)->float:
190 | """
191 | Calculate the approximated npht distance between npht_1 and npht_2.
192 |
193 | Parameters
194 | ----------
195 | npht_1 : [[[]]]. Discretised npht over equidistantly distributed directions on S^1.
196 |
197 | npht_1[i][j] is the persistence diagram of dimension j in direction i.
198 |
199 | npht_2 : like npht_1
200 |
201 | wasserstein_degree : int. p-parameter of the Wasserstein distance used inside.
202 |
203 | wasserstein_internal_norm:
204 | int or str. Internal norm used in the Wasserstein distance. Use 'inf' for the infinity norm or q >= 1 for the q-norm.
205 |
206 | included_dimensions : tuple. Controls which dimensions of the npht are used.
207 | (0,1) -> dimension 0 and 1
208 | (0) -> dimension 0
209 | (1) -> dimension 1
210 |
211 | minimize_over_rotations : bool. If false the min over the rotation group is not searched.
212 |
213 | Returns
214 | -------
215 | """
216 | f = Distance_NPHT_2d(wasserstein_degree=wasserstein_degree,
217 | wasserstein_internal_norm=wasserstein_internal_norm,
218 | included_dimensions=included_dimensions,
219 | minimize_over_rotations=minimize_over_rotations)
220 |
221 | return f(npht_1, npht_2)
222 |
223 |
224 | def distance_npht3D_lebedev_26(npht_1: [[[]]],
225 | npht_2: [[[]]],
226 | wasserstein_degree: int=2,
227 | wasserstein_internal_norm=2,
228 | included_dimensions: tuple = (0, 1, 2),
229 | minimize_over_rotations=True)->float:
230 | """
231 | Calculate the approximated npht distance between npht_1 and npht_2.
232 |
233 | Parameters
234 | ----------
235 | npht_1 : [[[]]]. Discretised npht over the 26-point Lebedev grid.
236 |
237 | npht_1[lebedev_point][j] is the persistence diagram of dimension j in direction lebedev_point.
238 | npht_2 : like npht_1
239 |
240 | wasserstein_degree : int. p-parameter of the Wasserstein distance used inside.
241 |
242 | wasserstein_internal_norm:
243 | int or str. Internal norm used in the Wasserstein distance. Use 'inf' for the infinity norm or q >= 1 for the q-norm.
244 |
245 | included_dimensions : tuple. Controls which dimensions of the npht are used.
246 | (0,1,2) -> dimension 0, 1, 2
247 | (0) -> dimension 0
248 | ...
249 | (0, 2) -> dimension 0, 2
250 |
251 | minimize_over_rotations : bool. If false the min over the rotation group is not searched.
252 |
253 | Returns
254 | -------
255 | """
256 | f = DistanceNPHT3D_Lebedev26(wasserstein_degree=wasserstein_degree,
257 | wasserstein_internal_norm=wasserstein_internal_norm,
258 | included_dimensions=included_dimensions,
259 | minimize_over_rotations=minimize_over_rotations)
260 |
261 | return f(npht_1, npht_2)
262 |
263 |
264 | # endregion
--------------------------------------------------------------------------------
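Putting the pieces together, a sketch of comparing two shapes with the 2D NPHT distance; it requires both the DIPHA and hera backends, and restricts the comparison to dimension-0 diagrams to keep the example small.

```python
import numpy as np

from pershombox.pht import calculate_discrete_NPHT_2d
from pershombox.pht_metric import distance_npht2D

square = np.zeros((16, 16), dtype=bool)
square[4:12, 4:12] = True

rectangle = np.zeros((16, 16), dtype=bool)
rectangle[2:14, 6:10] = True

# NPHTs over 8 equidistant directions; the metric then minimizes over rotational shifts.
npht_square = calculate_discrete_NPHT_2d(square, 8)
npht_rectangle = calculate_discrete_NPHT_2d(rectangle, 8)

# Compare only the dimension-0 diagrams of both transforms.
print(distance_npht2D(npht_square, npht_rectangle, included_dimensions=(0,)))
```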
/pershombox/toplex.py:
--------------------------------------------------------------------------------
1 | from ._software_backends.perseus_adapter import _call_perseus
2 |
3 |
4 | class Toplex:
5 | def __init__(self, toplices: [tuple], filtration_values: [], deessentialize=False):
6 | self.simplices = [tuple(t) for t in toplices]
7 | self.deessentialize = deessentialize
8 | self.filtration = filtration_values
9 |
10 | self._check_state()
11 |
12 | def _check_state(self):
13 | if len(self.simplices) != len(self.filtration):
14 | raise ToplexException("Simplices and filtration are not consistent: assumed to have the same length.")
15 |
16 | @property
17 | def filtration(self):
18 | return [self._internal_filt_to_filt[v] for v in self._internal_filt]
19 |
20 | @filtration.setter
21 | def filtration(self, filt):
22 | self._filt_to_internal_filt = {}
23 | self._internal_filt_to_filt = {}
24 | for i, v in enumerate(sorted(list(set(filt)))):
25 | self._filt_to_internal_filt[v] = i + 1
26 | self._internal_filt_to_filt[i + 1] = v
27 |
28 | self._internal_filt = [self._filt_to_internal_filt[v] for v in filt]
29 | self._internal_filt_to_filt[-1] = max(filt) if self.deessentialize else float('inf')
30 |
31 | def _simplex_to_string_iter(self):
32 | def num_iter(simplex, filtration_value):
33 | yield str(len(simplex) - 1)
34 |
35 | for n in simplex:
36 | yield str(n)
37 |
38 | yield str(filtration_value)
39 |
40 | for s, f in zip(self.simplices, self._internal_filt):
41 | yield ' '.join(num_iter(s, f))
42 |
43 | def _get_complex_string(self):
44 |
45 | header = '1\n'
46 | vertices = '\n'.join([s for s in self._simplex_to_string_iter()])
47 |
48 | return header + vertices
49 |
50 | def _convert_dgm_from_internal_filt_to_filt(self, dgm):
51 | # points = [p for p in dgm if p[1] != -1]
52 | # essential_points = [p for p in dgm if p[1] == -1]
53 |
54 | points = [[self._internal_filt_to_filt[p[0]], self._internal_filt_to_filt[p[1]]] for p in dgm]
55 | # essential_points = [[self._internal_filt_to_filt[p[0]], float('inf')] for p in essential_points]
56 |
57 | # return points + essential_points
58 | return points
59 |
60 | def calculate_persistence_diagrams(self):
61 |
62 | complex_string = self._get_complex_string()
63 | dgms = _call_perseus('nmfsimtop', complex_string)
64 |
65 | return_value = []
66 |
67 | homology_dimension_upper_bound = max([len(s) for s in self.simplices])
68 | for dim in range(homology_dimension_upper_bound):
69 | if dim in dgms:
70 | return_value.append(self._convert_dgm_from_internal_filt_to_filt(dgms[dim]))
71 | else:
72 | return_value.append([])
73 |
74 | return return_value
75 |
76 |
77 | def toplex_persistence_diagrams(toplices: [tuple], filtration_values: [], deessentialize=False):
78 | """
79 | Calculates the persistence diagrams for the given toplex using the given
80 | filtration. A toplex is a representation of a simplicial complex in which only the
81 | highest-dimensional simplices are listed, i.e. the toplex
82 | {[1,2]} stands for the simplicial complex {[1], [2], [1,2]}.
83 |
84 | :param toplices: List of toplices given as numeric tuples.
85 | Each entry of a toplex tuple stands
86 | for a vertex, e.g. [1, 2, 3] is the 2-simplex built from the vertices 1, 2, 3.
87 |
88 | :param filtration_values: List which gives the filtration value of each toplex
89 | listed in toplices.
90 |
91 | :param deessentialize: If True the death time of essential classes is mapped to max(filtration_values).
92 | If False the death time is mapped to float('inf').
93 |
94 | :return: List of persistence diagrams, one per dimension, i.e. [[(birth, death), ...], ...].
95 | """
96 | toplex = Toplex(toplices, filtration_values, deessentialize=deessentialize)
97 | return toplex.calculate_persistence_diagrams()
98 |
99 |
100 | class ToplexException(Exception):
101 | pass
--------------------------------------------------------------------------------
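A sketch of `toplex_persistence_diagrams` on a filtrated triangle (requires the Perseus backend; the import path assumes the package root is on `sys.path`).

```python
from pershombox.toplex import toplex_persistence_diagrams

# Three edges entering at time 1 form a loop; the filled triangle entering at time 2 closes it.
toplices = [(1, 2), (2, 3), (1, 3), (1, 2, 3)]
filtration_values = [1, 1, 1, 2]

dgms = toplex_persistence_diagrams(toplices, filtration_values)

# dgms[k] lists the (birth, death) pairs of dimension k; expect a 1-cycle born at 1 and dying at 2,
# and one essential connected component whose death is inf (deessentialize=False).
print(dgms)
```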
/tutorials/.gitignore:
--------------------------------------------------------------------------------
1 | .ipynb_checkpoints
2 |
--------------------------------------------------------------------------------
/tutorials/cubical_complex_persistence_diagrams.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import os\n",
10 | "import sys\n",
11 | "import numpy as np\n",
12 | "import matplotlib.pyplot as plt\n",
13 | "\n",
14 | "%matplotlib notebook\n",
15 | "\n",
16 | "sys.path.insert(0,os.path.join(os.path.abspath(sys.path[0]),'..'))\n",
17 | "\n",
18 | "from shared_code import check_pershombox_availability\n",
19 | "\n",
20 | "check_pershombox_availability\n",
21 | "from pershombox import cubical_complex_persistence_diagrams"
22 | ]
23 | },
24 | {
25 | "cell_type": "markdown",
26 | "metadata": {},
27 | "source": [
28 | "`cubical_complex_persistence_diagrams` receives a `n` dimensional array which is interpreted as a filtrated cubical simpicial complex. "
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 3,
34 | "metadata": {},
35 | "outputs": [
36 | {
37 | "data": {
38 | "text/plain": [
39 | "array([[0, 2, 2],\n",
40 | " [1, 3, 2],\n",
41 | " [1, 1, 0]])"
42 | ]
43 | },
44 | "execution_count": 3,
45 | "metadata": {},
46 | "output_type": "execute_result"
47 | }
48 | ],
49 | "source": [
50 | "cubical_complex = np.array([[0, 2, 2],\n",
51 | " [1, 3, 2],\n",
52 | " [1, 1, 0]])\n",
53 | "\n",
54 | "cubical_complex"
55 | ]
56 | },
57 | {
58 | "cell_type": "markdown",
59 | "metadata": {},
60 | "source": [
61 | "The values at the array positions reflect the filtration value. If we would evolve our cubical complex it would look like ..."
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 4,
67 | "metadata": {
68 | "scrolled": true
69 | },
70 | "outputs": [
71 | {
72 | "data": {
73 | "application/javascript": [
74 | "/* Put everything inside the global mpl namespace */\n",
75 | "window.mpl = {};\n",
76 | "\n",
77 | "\n",
78 | "mpl.get_websocket_type = function() {\n",
79 | " if (typeof(WebSocket) !== 'undefined') {\n",
80 | " return WebSocket;\n",
81 | " } else if (typeof(MozWebSocket) !== 'undefined') {\n",
82 | " return MozWebSocket;\n",
83 | " } else {\n",
84 | " alert('Your browser does not have WebSocket support. ' +\n",
85 | " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n",
86 | " 'Firefox 4 and 5 are also supported but you ' +\n",
87 | " 'have to enable WebSockets in about:config.');\n",
88 | " };\n",
89 | "}\n",
90 | "\n",
91 | "mpl.figure = function(figure_id, websocket, ondownload, parent_element) {\n",
92 | " this.id = figure_id;\n",
93 | "\n",
94 | " this.ws = websocket;\n",
95 | "\n",
96 | " this.supports_binary = (this.ws.binaryType != undefined);\n",
97 | "\n",
98 | " if (!this.supports_binary) {\n",
99 | " var warnings = document.getElementById(\"mpl-warnings\");\n",
100 | " if (warnings) {\n",
101 | " warnings.style.display = 'block';\n",
102 | " warnings.textContent = (\n",
103 | " \"This browser does not support binary websocket messages. \" +\n",
104 | " \"Performance may be slow.\");\n",
105 | " }\n",
106 | " }\n",
107 | "\n",
108 | " this.imageObj = new Image();\n",
109 | "\n",
110 | " this.context = undefined;\n",
111 | " this.message = undefined;\n",
112 | " this.canvas = undefined;\n",
113 | " this.rubberband_canvas = undefined;\n",
114 | " this.rubberband_context = undefined;\n",
115 | " this.format_dropdown = undefined;\n",
116 | "\n",
117 | " this.image_mode = 'full';\n",
118 | "\n",
119 | " this.root = $('');\n",
120 | " this._root_extra_style(this.root)\n",
121 | " this.root.attr('style', 'display: inline-block');\n",
122 | "\n",
123 | " $(parent_element).append(this.root);\n",
124 | "\n",
125 | " this._init_header(this);\n",
126 | " this._init_canvas(this);\n",
127 | " this._init_toolbar(this);\n",
128 | "\n",
129 | " var fig = this;\n",
130 | "\n",
131 | " this.waiting = false;\n",
132 | "\n",
133 | " this.ws.onopen = function () {\n",
134 | " fig.send_message(\"supports_binary\", {value: fig.supports_binary});\n",
135 | " fig.send_message(\"send_image_mode\", {});\n",
136 | " if (mpl.ratio != 1) {\n",
137 | " fig.send_message(\"set_dpi_ratio\", {'dpi_ratio': mpl.ratio});\n",
138 | " }\n",
139 | " fig.send_message(\"refresh\", {});\n",
140 | " }\n",
141 | "\n",
142 | " this.imageObj.onload = function() {\n",
143 | " if (fig.image_mode == 'full') {\n",
144 | " // Full images could contain transparency (where diff images\n",
145 | " // almost always do), so we need to clear the canvas so that\n",
146 | " // there is no ghosting.\n",
147 | " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n",
148 | " }\n",
149 | " fig.context.drawImage(fig.imageObj, 0, 0);\n",
150 | " };\n",
151 | "\n",
152 | " this.imageObj.onunload = function() {\n",
153 | " fig.ws.close();\n",
154 | " }\n",
155 | "\n",
156 | " this.ws.onmessage = this._make_on_message_function(this);\n",
157 | "\n",
158 | " this.ondownload = ondownload;\n",
159 | "}\n",
160 | "\n",
161 | "mpl.figure.prototype._init_header = function() {\n",
162 | " var titlebar = $(\n",
163 | " '');\n",
165 | " var titletext = $(\n",
166 | " '');\n",
168 | " titlebar.append(titletext)\n",
169 | " this.root.append(titlebar);\n",
170 | " this.header = titletext[0];\n",
171 | "}\n",
172 | "\n",
173 | "\n",
174 | "\n",
175 | "mpl.figure.prototype._canvas_extra_style = function(canvas_div) {\n",
176 | "\n",
177 | "}\n",
178 | "\n",
179 | "\n",
180 | "mpl.figure.prototype._root_extra_style = function(canvas_div) {\n",
181 | "\n",
182 | "}\n",
183 | "\n",
184 | "mpl.figure.prototype._init_canvas = function() {\n",
185 | " var fig = this;\n",
186 | "\n",
187 | " var canvas_div = $('');\n",
188 | "\n",
189 | " canvas_div.attr('style', 'position: relative; clear: both; outline: 0');\n",
190 | "\n",
191 | " function canvas_keyboard_event(event) {\n",
192 | " return fig.key_event(event, event['data']);\n",
193 | " }\n",
194 | "\n",
195 | " canvas_div.keydown('key_press', canvas_keyboard_event);\n",
196 | " canvas_div.keyup('key_release', canvas_keyboard_event);\n",
197 | " this.canvas_div = canvas_div\n",
198 | " this._canvas_extra_style(canvas_div)\n",
199 | " this.root.append(canvas_div);\n",
200 | "\n",
201 | " var canvas = $('');\n",
202 | " canvas.addClass('mpl-canvas');\n",
203 | " canvas.attr('style', \"left: 0; top: 0; z-index: 0; outline: 0\")\n",
204 | "\n",
205 | " this.canvas = canvas[0];\n",
206 | " this.context = canvas[0].getContext(\"2d\");\n",
207 | "\n",
208 | " var backingStore = this.context.backingStorePixelRatio ||\n",
209 | "\tthis.context.webkitBackingStorePixelRatio ||\n",
210 | "\tthis.context.mozBackingStorePixelRatio ||\n",
211 | "\tthis.context.msBackingStorePixelRatio ||\n",
212 | "\tthis.context.oBackingStorePixelRatio ||\n",
213 | "\tthis.context.backingStorePixelRatio || 1;\n",
214 | "\n",
215 | " mpl.ratio = (window.devicePixelRatio || 1) / backingStore;\n",
216 | "\n",
217 | " var rubberband = $('');\n",
218 | " rubberband.attr('style', \"position: absolute; left: 0; top: 0; z-index: 1;\")\n",
219 | "\n",
220 | " var pass_mouse_events = true;\n",
221 | "\n",
222 | " canvas_div.resizable({\n",
223 | " start: function(event, ui) {\n",
224 | " pass_mouse_events = false;\n",
225 | " },\n",
226 | " resize: function(event, ui) {\n",
227 | " fig.request_resize(ui.size.width, ui.size.height);\n",
228 | " },\n",
229 | " stop: function(event, ui) {\n",
230 | " pass_mouse_events = true;\n",
231 | " fig.request_resize(ui.size.width, ui.size.height);\n",
232 | " },\n",
233 | " });\n",
234 | "\n",
235 | " function mouse_event_fn(event) {\n",
236 | " if (pass_mouse_events)\n",
237 | " return fig.mouse_event(event, event['data']);\n",
238 | " }\n",
239 | "\n",
240 | " rubberband.mousedown('button_press', mouse_event_fn);\n",
241 | " rubberband.mouseup('button_release', mouse_event_fn);\n",
242 | " // Throttle sequential mouse events to 1 every 20ms.\n",
243 | " rubberband.mousemove('motion_notify', mouse_event_fn);\n",
244 | "\n",
245 | " rubberband.mouseenter('figure_enter', mouse_event_fn);\n",
246 | " rubberband.mouseleave('figure_leave', mouse_event_fn);\n",
247 | "\n",
248 | " canvas_div.on(\"wheel\", function (event) {\n",
249 | " event = event.originalEvent;\n",
250 | " event['data'] = 'scroll'\n",
251 | " if (event.deltaY < 0) {\n",
252 | " event.step = 1;\n",
253 | " } else {\n",
254 | " event.step = -1;\n",
255 | " }\n",
256 | " mouse_event_fn(event);\n",
257 | " });\n",
258 | "\n",
259 | " canvas_div.append(canvas);\n",
260 | " canvas_div.append(rubberband);\n",
261 | "\n",
262 | " this.rubberband = rubberband;\n",
263 | " this.rubberband_canvas = rubberband[0];\n",
264 | " this.rubberband_context = rubberband[0].getContext(\"2d\");\n",
265 | " this.rubberband_context.strokeStyle = \"#000000\";\n",
266 | "\n",
267 | " this._resize_canvas = function(width, height) {\n",
268 | " // Keep the size of the canvas, canvas container, and rubber band\n",
269 | " // canvas in synch.\n",
270 | " canvas_div.css('width', width)\n",
271 | " canvas_div.css('height', height)\n",
272 | "\n",
273 | " canvas.attr('width', width * mpl.ratio);\n",
274 | " canvas.attr('height', height * mpl.ratio);\n",
275 | " canvas.attr('style', 'width: ' + width + 'px; height: ' + height + 'px;');\n",
276 | "\n",
277 | " rubberband.attr('width', width);\n",
278 | " rubberband.attr('height', height);\n",
279 | " }\n",
280 | "\n",
281 | " // Set the figure to an initial 600x600px, this will subsequently be updated\n",
282 | " // upon first draw.\n",
283 | " this._resize_canvas(600, 600);\n",
284 | "\n",
285 | " // Disable right mouse context menu.\n",
286 | " $(this.rubberband_canvas).bind(\"contextmenu\",function(e){\n",
287 | " return false;\n",
288 | " });\n",
289 | "\n",
290 | " function set_focus () {\n",
291 | " canvas.focus();\n",
292 | " canvas_div.focus();\n",
293 | " }\n",
294 | "\n",
295 | " window.setTimeout(set_focus, 100);\n",
296 | "}\n",
297 | "\n",
298 | "mpl.figure.prototype._init_toolbar = function() {\n",
299 | " var fig = this;\n",
300 | "\n",
301 | " var nav_element = $('');\n",
302 | " nav_element.attr('style', 'width: 100%');\n",
303 | " this.root.append(nav_element);\n",
304 | "\n",
305 | " // Define a callback function for later on.\n",
306 | " function toolbar_event(event) {\n",
307 | " return fig.toolbar_button_onclick(event['data']);\n",
308 | " }\n",
309 | " function toolbar_mouse_event(event) {\n",
310 | " return fig.toolbar_button_onmouseover(event['data']);\n",
311 | " }\n",
312 | "\n",
313 | " for(var toolbar_ind in mpl.toolbar_items) {\n",
314 | " var name = mpl.toolbar_items[toolbar_ind][0];\n",
315 | " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n",
316 | " var image = mpl.toolbar_items[toolbar_ind][2];\n",
317 | " var method_name = mpl.toolbar_items[toolbar_ind][3];\n",
318 | "\n",
319 | " if (!name) {\n",
320 | " // put a spacer in here.\n",
321 | " continue;\n",
322 | " }\n",
323 | " var button = $('');\n",
324 | " button.addClass('ui-button ui-widget ui-state-default ui-corner-all ' +\n",
325 | " 'ui-button-icon-only');\n",
326 | " button.attr('role', 'button');\n",
327 | " button.attr('aria-disabled', 'false');\n",
328 | " button.click(method_name, toolbar_event);\n",
329 | " button.mouseover(tooltip, toolbar_mouse_event);\n",
330 | "\n",
331 | " var icon_img = $('');\n",
332 | " icon_img.addClass('ui-button-icon-primary ui-icon');\n",
333 | " icon_img.addClass(image);\n",
334 | " icon_img.addClass('ui-corner-all');\n",
335 | "\n",
336 | " var tooltip_span = $('');\n",
337 | " tooltip_span.addClass('ui-button-text');\n",
338 | " tooltip_span.html(tooltip);\n",
339 | "\n",
340 | " button.append(icon_img);\n",
341 | " button.append(tooltip_span);\n",
342 | "\n",
343 | " nav_element.append(button);\n",
344 | " }\n",
345 | "\n",
346 | " var fmt_picker_span = $('');\n",
347 | "\n",
348 | " var fmt_picker = $('');\n",
349 | " fmt_picker.addClass('mpl-toolbar-option ui-widget ui-widget-content');\n",
350 | " fmt_picker_span.append(fmt_picker);\n",
351 | " nav_element.append(fmt_picker_span);\n",
352 | " this.format_dropdown = fmt_picker[0];\n",
353 | "\n",
354 | " for (var ind in mpl.extensions) {\n",
355 | " var fmt = mpl.extensions[ind];\n",
356 | " var option = $(\n",
357 | " '', {selected: fmt === mpl.default_extension}).html(fmt);\n",
358 | " fmt_picker.append(option);\n",
359 | " }\n",
360 | "\n",
361 | " // Add hover states to the ui-buttons\n",
362 | " $( \".ui-button\" ).hover(\n",
363 | " function() { $(this).addClass(\"ui-state-hover\");},\n",
364 | " function() { $(this).removeClass(\"ui-state-hover\");}\n",
365 | " );\n",
366 | "\n",
367 | " var status_bar = $('