├── .github
│   └── workflows
│       ├── publish-to-pypi.yml
│       └── publish-to-testpypi.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── LICENSE
├── MANIFEST.in
├── README.md
├── docs
│   ├── Makefile
│   ├── debug.gif
│   ├── make.bat
│   ├── ontor-logo.svg
│   ├── source
│   │   ├── conf.py
│   │   └── index.rst
│   └── visualize.png
├── example
│   ├── Makefile
│   ├── __init__.py
│   ├── data
│   │   ├── class_axioms.csv
│   │   ├── complex_axioms.json
│   │   ├── gcas.json
│   │   ├── props.json
│   │   └── taxo.csv
│   └── example.py
├── pyproject.toml
├── requirements.txt
├── setup.py
├── src
│   └── ontor
│       ├── __init__.py
│       ├── _about.py
│       ├── config
│       │   ├── __init__.py
│       │   └── network_visualization.config
│       ├── ontor.py
│       └── queries
│           ├── __init__.py
│           ├── class_axioms.sparql
│           ├── dp_axioms.sparql
│           ├── op_axioms.sparql
│           └── prefixes.sparql
└── tests
    ├── __init__.py
    ├── data
    │   ├── gold_visu.html
    │   └── ins.csv
    └── test_core.py
/.github/workflows/publish-to-pypi.yml:
--------------------------------------------------------------------------------
1 | name: pypi-release
2 |
3 | on:
4 |   release:
5 |     types: [created]
6 |
7 | jobs:
8 |   build-n-publish:
9 |     name: Build and publish Python distributions to PyPI
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |
13 |     - uses: actions/checkout@master
14 |
15 |     - name: Install pypa/build
16 |       run: python -m pip install build --user
17 |
18 |     - name: Build a binary wheel and a source tarball
19 |       run: python -m build --sdist --wheel --outdir dist/ .
20 |
21 |     - name: Publish distribution to PyPI
22 |       uses: pypa/gh-action-pypi-publish@release/v1
23 |       with:
24 |         password: ${{ secrets.PYPI_API_TOKEN }}
25 |
--------------------------------------------------------------------------------
/.github/workflows/publish-to-testpypi.yml:
--------------------------------------------------------------------------------
1 | name: test-and-build
2 |
3 | on:
4 |   push:
5 |     branches: [ main ]
6 |
7 | jobs:
8 |   build-n-publish:
9 |     name: Build, test, and publish Python distributions to TestPyPI
10 |     runs-on: ubuntu-latest
11 |     container:
12 |       image: felixocker/python3_java:latest
13 |     steps:
14 |
15 |     - uses: actions/checkout@master
16 |
17 |     - name: Install dependencies
18 |       run: |
19 |         python -m pip install -r requirements.txt --user
20 |         python -m pip install . --user
21 |
22 |     - name: Test
23 |       run: python -m unittest discover tests/
24 |
25 |     - name: Install pypa/build
26 |       run: python -m pip install build --user
27 |
28 |     - name: Build a binary wheel and a source tarball
29 |       run: python -m build --sdist --wheel --outdir dist/ .
30 |
31 |     - name: Publish distribution to Test PyPI
32 |       uses: pypa/gh-action-pypi-publish@release/v1
33 |       with:
34 |         password: ${{ secrets.TEST_PYPI_API_TOKEN }}
35 |         repository_url: https://test.pypi.org/legacy/
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # dirs
2 | .venv/
3 | .idea/
4 | __pycache__/
5 | *.egg-info/
6 | pip-wheel-metadata/
7 | docs/build/
8 | dist/
9 |
10 | # file types
11 | *.owl
12 | *.nt
13 | *.log
14 | *.html
15 |
16 | # individual files
17 | !tests/data/gold_visu.html
18 |
19 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 |   - repo: https://github.com/pre-commit/pre-commit-hooks
3 |     rev: v3.2.0
4 |     hooks:
5 |       - id: check-yaml
6 |       - id: end-of-file-fixer
7 |       - id: trailing-whitespace
8 |   - repo: https://github.com/psf/black
9 |     rev: 22.3.0
10 |     hooks:
11 |       - id: black
12 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 |   image: testing
5 |
6 | python:
7 |   version: "3.9"
8 |   install:
9 |     - method: pip
10 |       path: .
11 |
12 | sphinx:
13 |   fail_on_warning: true
14 |
15 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 | Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/philosophy/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include src/ontor/queries/*
2 | include src/ontor/config/*
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://github.com/felixocker/ontor/releases/)
2 | [](https://github.com/felixocker/ontor/actions/workflows/publish-to-testpypi.yml)
3 | [](https://felixocker-ontor.readthedocs.io/en/latest/?badge=latest)
4 | [](https://pypi.org/project/ontor/)
5 | [](https://www.gnu.org/licenses/gpl-3.0.html)
6 | [](https://github.com/psf/black)
7 |
8 |
9 |
10 | # ONTology editOR (ontor)
11 | ontology editor built on [Owlready2](https://pypi.org/project/Owlready2/)
12 |
13 | ## functionality
14 | each instance of the ontor class represents an individual ontology and provides support for:
15 | * creating new, loading existing, and saving ontologies
16 | * modifying ontologies:
17 |   * import other ontologies
18 |   * easily extract information such as axioms and class restrictions
19 |   * insert classes, properties, instances, relations, and restrictions
20 |   * insert general class axioms using a workaround for Owlready2
21 |   * delete classes, properties, instances, relations, and restrictions while preserving the ontology's structure by reassigning subclasses and instances appropriately
22 | * reasoning over ontologies and debugging by interactively deleting problematic axioms
23 | * visualizing the entire ontology or selected parts thereof
24 |
25 | ontor provides a tuple based syntax with JSON and CSV support for ontology editing to facilitate focusing on the ontology's content
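
e.g., classes are declared as plain *[class, superclass]* pairs - inline in Python as in *example/example.py*, or via CSV as in *example/data/taxo.csv*:
```
classes = [
    ["human", None],          # top-level class
    ["vegetarian", "human"],  # subclass of human
    ["pizza", "food"],
]
```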
26 |
27 | ## requirements and installation
28 | * Python 3.9+
29 | * install ontor using pip
30 |   * from PyPI: ```pip install ontor```
31 |   * from GitHub, in editable mode: ```pip install -e .```
32 | * generate documentation via sphinx using the makefile in *docs/*: ```make html```
33 |
34 | ## demo
35 |
36 | the directory *example/* includes a demo application inspired by [Protégé's pizza example](https://protegewiki.stanford.edu/wiki/Protege4Pizzas10Minutes)
37 |
38 | ### general class axioms
39 | in addition to class axioms, General Class Axioms (GCAs) can express more complex statements - the generic axioms are realized via equivalence to helper classes\
40 | in the example, a uniform price of 5 is set for all pizzas with seafood toppings without making use of an explicitly defined class for these pizzas:\
41 | ```
42 | [
43 | ["has_topping",null,"min",1,"seafood_topping",null,null,null,null,null,null,null,true],
44 | ["has_price",null,"value",null,null,"float",null,null,5,null,null,null,true]
45 | ]
46 | ```
47 | this allows a reasoner to infer that the price for all instances of *seafood_pizza* as well as for the instance *Another_pizza* is 5
48 |
49 | ### interactive debugging
50 | interactively debug an ontology\
51 | in the example: ```ontor3.debug_onto()```
52 |
53 |
54 |
55 | ### visualization
56 | visualize selected instances, classes, and properties in a given radius around a focus node; e.g., all nodes in a radius of two relations around the node "John"\
57 | in the example: ```ontor3.visualize(classes=["human", "pizza"], properties=["likes", "diameter_in_cm"], focusnode="John", radius=2)```
58 |
59 |
60 |
61 | ### workflow
62 |
63 | When creating ontologies from scratch, note that some functions have to be called in a specific order (see the sketch below):
64 | 1. *add_taxo* - the taxonomy has to be created first so that all classes required by the properties, axioms, and individuals are defined
65 | 2. *add_ops*, *add_dps* - properties must be defined before axioms can be specified
66 | 3. *add_axioms*, *add_gcas*, *add_instances* - axioms and instances can only be added when all the necessary classes and properties have been defined
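
a minimal sketch of this order, based on the demo in *example/example.py* - the `OntoEditor` constructor call and the variable names are assumptions for illustration; only the order of the *add_\** calls is the point:
```
# hypothetical sketch - class name and signatures assumed, call order as in the list above
onto = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
onto.add_taxo(classes)         # 1. taxonomy: [class, superclass] pairs
onto.add_ops(ops)              # 2. object properties ...
onto.add_dps(dps)              #    ... and data properties
onto.add_axioms(axioms)        # 3. class axioms,
onto.add_gcas(gcas)            #    general class axioms,
onto.add_instances(instances)  #    and instances last
```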
67 |
68 | ## license
69 | GPL v3.0
70 |
71 | ## contact
72 | Felix Ocker - [felix.ocker@googlemail.com](mailto:felix.ocker@googlemail.com)
73 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/debug.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felixocker/ontor/eb2088eccb2b25f129d75041684729555b947373/docs/debug.gif
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/ontor-logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
217 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | sys.path.insert(0, os.path.abspath("../../"))
17 |
18 |
19 | # -- Project information -----------------------------------------------------
20 |
21 | project = "ontor"
22 | copyright = "2021, Felix Ocker"
23 | author = "Felix Ocker"
24 |
25 |
26 | # -- General configuration ---------------------------------------------------
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
30 | # ones.
31 | extensions = [
32 | "sphinx.ext.autodoc",
33 | ]
34 |
35 | # Add any paths that contain templates here, relative to this directory.
36 | templates_path = ["_templates"]
37 |
38 | # List of patterns, relative to source directory, that match files and
39 | # directories to ignore when looking for source files.
40 | # This pattern also affects html_static_path and html_extra_path.
41 | exclude_patterns = []
42 |
43 |
44 | # -- Options for HTML output -------------------------------------------------
45 |
46 | # The theme to use for HTML and HTML Help pages. See the documentation for
47 | # a list of builtin themes.
48 | #
49 | html_theme = "alabaster"
50 |
51 | # Add any paths that contain custom static files (such as style sheets) here,
52 | # relative to this directory. They are copied after the builtin static files,
53 | # so a file named "default.css" will overwrite the builtin "default.css".
54 | html_static_path = []
55 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. ontor documentation master file, created by
2 |    sphinx-quickstart on Sat Jul 24 23:52:00 2021.
3 |    You can adapt this file completely to your liking, but it should at least
4 |    contain the root `toctree` directive.
5 |
6 | Welcome to ontor's documentation!
7 | =================================
8 |
9 | **ontor** (ONTology editOR) is a Python library built on `Owlready2
10 | <https://pypi.org/project/Owlready2/>`_ for creating, editing, extending,
11 | debugging, and visualizing OWL2 ontologies
12 |
13 | .. toctree::
14 |    :maxdepth: 2
15 |    :caption: Contents:
16 |
17 | .. automodule:: ontor.ontor
18 |    :members:
19 |
20 |
21 | Indices and tables
22 | ==================
23 |
24 | * :ref:`genindex`
25 | * :ref:`modindex`
26 | * :ref:`search`
27 |
--------------------------------------------------------------------------------
/docs/visualize.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felixocker/ontor/eb2088eccb2b25f129d75041684729555b947373/docs/visualize.png
--------------------------------------------------------------------------------
/example/Makefile:
--------------------------------------------------------------------------------
1 | ## all : Create all onto files.
2 | .PHONY : all
3 | all : example.py
4 | 	python $<
5 |
6 | ## clean : Remove all generated files.
7 | .PHONY : clean
8 | clean:
9 | 	rm -f *.owl
10 | 	rm -f *.nt
11 | 	find ../ -name "*.log" -type f -delete
12 | 	rm -f *.xml
13 | 	rm -f *.html
14 | 	rm -rf __pycache__/
15 | 	find ../ontor -name "__pycache__" -exec rm -rf {} \;
16 | 	rm -rf ../docs/build
17 |
18 | ## help : Return help info.
19 | .PHONY : help
20 | help : Makefile
21 | 	@sed -n 's/^##//p' $<
22 |
23 |
--------------------------------------------------------------------------------
/example/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felixocker/ontor/eb2088eccb2b25f129d75041684729555b947373/example/__init__.py
--------------------------------------------------------------------------------
/example/data/class_axioms.csv:
--------------------------------------------------------------------------------
1 | class,superclass,property,inverted,cardinalitytype,cardinality,opobject,dprange,dpminex,dpminin,dpexact,dpmaxin,dpmaxex,negated,equivalence
2 | pizza,,has_base,,exactly,1,pizza_base,,,,,,,,
3 | pizza,,has_topping,,some,,pizza_topping,,,,,,,,
4 | pizza,,diameter_in_cm,,some,,,integer,,,32,,,,
5 | pizza,,weight_in_grams,,only,,,float,0,,,,,,
6 | vegetarian_pizza,,has_topping,,min,1,vegetable_topping,,,,,,,,
7 | vegetarian_pizza,,has_topping,,exactly,0,meat_topping,,,,,,,,
8 | vegetarian_pizza,,has_topping,,exactly,0,seafood_topping,,,,,,,,
9 | meaty_pizza,,has_topping,,min,1,meat_topping,,,,,,,,True
10 | seafood_pizza,,has_topping,,min,1,seafood_topping,,,,,,,,
11 | seafood_pizza,,has_topping,,exactly,0,meat_topping,,,,,,,,
12 | cheesy_pizza,,has_topping,,min,2,cheese_topping,,,,,,,,True
13 | vegetarian,,likes,,value,,His_pizza,,,,,,,,
14 | vegetarian,,likes,,some,,meat_topping,,,,,,,True,
15 | vegetarian,,likes,True,some,,seafood_topping,,,,,,,,
16 | human,,likes,,some,,food,,,,,,,,
17 | margherita,vegetarian_pizza,has_topping,,some,,tomato_topping,,,,,,,,
18 | margherita,vegetarian_pizza,has_topping,,some,,basil_topping,,,,,,,,
19 | margherita,vegetarian_pizza,has_topping,,some,,mozzarella_topping,,,,,,,,
20 | margherita,vegetarian_pizza,has_topping,,some,,ham_topping,,,,,,,,
21 | quattro_stagioni,pizza,has_topping,,some,,tomato_topping,,,,,,,,
22 | quattro_stagioni,pizza,has_topping,,some,,basil_topping,,,,,,,,
23 | quattro_stagioni,pizza,has_topping,,some,,mozzarella_topping,,,,,,,,
24 | quattro_stagioni,pizza,has_topping,,some,,ham_topping,,,,,,,,
25 | quattro_stagioni,pizza,has_topping,,some,,pepperoni_topping,,,,,,,,
26 | quattro_stagioni,pizza,has_topping,,some,,olive_topping,,,,,,,,
27 | quattro_stagioni,pizza,has_topping,,some,,mushroom_topping,,,,,,,,
28 | quattro_stagioni,pizza,has_topping,,some,,artichoke_topping,,,,,,,,
29 | quattro_formaggi,vegetarian_pizza,has_topping,,some,,tomato_topping,,,,,,,,
30 | quattro_formaggi,vegetarian_pizza,has_topping,,some,,basil_topping,,,,,,,,
31 | quattro_formaggi,vegetarian_pizza,has_topping,,some,,mozzarella_topping,,,,,,,,
32 | quattro_formaggi,vegetarian_pizza,has_topping,,some,,parmesan_topping,,,,,,,,
33 | quattro_formaggi,vegetarian_pizza,has_topping,,some,,gorgonzola_topping,,,,,,,,
34 | quattro_formaggi,vegetarian_pizza,has_topping,,some,,fontina_topping,,,,,,,,
35 | pizza_pane,vegetarian_pizza,has_topping,,some,,tomato_topping,,,,,,,,
36 | pizza_pane,vegetarian_pizza,has_topping,,some,,basil_topping,,,,,,,,
37 | pizza_pane,vegetarian_pizza,has_topping,,some,,ham_topping,,,,,,,,
--------------------------------------------------------------------------------
/example/data/complex_axioms.json:
--------------------------------------------------------------------------------
1 | [
2 | {"or":
3 | [
4 | ["human", null, "likes", null, "some", null, "human", null, null, null, null, null, null, null, false],
5 | {"and":
6 | [
7 | ["human", null, "likes", null, "some", null, "pizza", null, null, null, null, null, null, null, false],
8 | ["human", null, "likes", null, "some", null, "drink", null, null, null, null, null, null, null, false]
9 | ]
10 | }
11 | ]
12 | }
13 | ]
--------------------------------------------------------------------------------
/example/data/gcas.json:
--------------------------------------------------------------------------------
1 | [
2 |   [
3 |     ["has_topping",null,"min",1,"seafood_topping",null,null,null,null,null,null,null,true],
4 |     ["has_price",null,"value",null,null,"float",null,null,5,null,null,null,true]
5 |   ]
6 | ]
--------------------------------------------------------------------------------
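Each inner pair is turned into a General Class Axiom workaround by OntoEditor.add_gcas: two randomly named helper classes (prefixed with an underscore) are created, each defined as equivalent to one of the axioms, and the two helpers are then declared equivalent to each other; this is why the last element of every axiom must be true. A minimal sketch, mirroring add_gcas_to_onto in example.py:

    import ontor

    editor = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
    editor.add_gcas(ontor.load_json("./data/gcas.json"))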
/example/data/props.json:
--------------------------------------------------------------------------------
1 | {
2 | "op": [
3 | ["part", null, null, null, false, false, false, false, false, false, false, null],
4 | ["has_base", "part", "pizza", "pizza_base", true, false, false, false, false, false, false, null],
5 | ["has_topping", "part", "pizza", "pizza_topping", false, false, false, false, false, false, false, null],
6 | ["produces", null, "company", null, false, false, false, false, false, false, false, null]
7 |
8 | ],
9 | "dp": [
10 | ["topping_weight_in_grams", null, true, "pizza", "float", 0, null, null, null, null],
11 | ["faulty_dp", "weight_in_grams", true, "pizza", "float", 10, null, 10, null, null]
12 | ]
13 | }
14 |
--------------------------------------------------------------------------------
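The "op" entries follow the 12-field tuple consumed by OntoEditor.add_ops (op, super-op, domain, range, functional, inverse functional, transitive, symmetric, asymmetric, reflexive, irreflexive, inverse_prop); the "dp" entries follow the 10-field tuple consumed by add_dps (dp, super-dp, functional, domain, range, minex, minin, exact, maxin, maxex). The "faulty_dp" entry mixes an exclusive minimum with an exact value and presumably exists to exercise the warning path in _dp_constraint. A minimal sketch, mirroring modify_onto in example.py:

    import ontor

    editor = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
    props = ontor.load_json("./data/props.json")
    editor.add_ops(props["op"])
    editor.add_dps(props["dp"])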
/example/data/taxo.csv:
--------------------------------------------------------------------------------
1 | class,superclass
2 | thin_crust,pizza_base
3 | thick_crust,pizza_base
4 | meat_topping,pizza_topping
5 | ham_topping,meat_topping
6 | seafood_topping,pizza_topping
7 | shrimp_topping,seafood_topping
8 | cheese_topping,pizza_topping
9 | mozzarella_topping,cheese_topping
10 | parmesan_topping,cheese_topping
11 | gorgonzola_topping,cheese_topping
12 | fontina_topping,cheese_topping
13 | vegetable_topping,pizza_topping
14 | tomato_topping,vegetable_topping
15 | pepperoni_topping,vegetable_topping
16 | olive_topping,vegetable_topping
17 | mushroom_topping,vegetable_topping
18 | basil_topping,vegetable_topping
19 | artichoke_topping,vegetable_topping
20 | cheesy_pizza,pizza
21 | meaty_pizza,pizza
22 | seafood_pizza,pizza
23 | margherita,vegetarian_pizza
24 | quattro_stagioni,pizza
25 | quattro_formaggi,vegetarian_pizza
26 | pizza_pane,vegetarian_pizza
--------------------------------------------------------------------------------
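Each row is a (class, superclass) pair for OntoEditor.add_taxo; the superclasses referenced here (pizza, pizza_base, pizza_topping, vegetarian_pizza) already exist once create_first_onto has run. A minimal sketch, mirroring modify_onto in example.py:

    import ontor

    editor = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
    editor.add_taxo(ontor.load_csv("./data/taxo.csv"))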
/example/example.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """minimal example for applying the ontor module"""
3 |
4 | #
5 | # This file is part of ontor (https://github.com/felixocker/ontor).
6 | # Copyright (c) 2021 Felix Ocker.
7 | #
8 | # ontor is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU General Public License as published by
10 | # the Free Software Foundation, either version 3 of the License, or
11 | # (at your option) any later version.
12 | #
13 | # ontor is distributed in the hope that it will be useful,
14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 | # GNU General Public License for more details.
17 | #
18 | # You should have received a copy of the GNU General Public License
19 | # along with ontor. If not, see <https://www.gnu.org/licenses/>.
20 | #
21 |
22 | from owlready2 import locstr
23 | import ontor
24 |
25 |
26 | def create_first_onto():
27 | iri = "http://example.org/onto-ex.owl"
28 | fname = "./onto-ex.owl"
29 | classes = [
30 | ["human", None],
31 | ["vegetarian", "human"],
32 | ["food", None],
33 | ["drink", None],
34 | ["pizza", "food"],
35 | ["pizza_base", "food"],
36 | ["pizza_topping", "food"],
37 | ["vegetarian_pizza", "pizza"],
38 | ["margherita", "vegetarian_pizza"],
39 | ]
40 | ops = [
41 | [
42 | "likes",
43 | None,
44 | "human",
45 | None,
46 | False,
47 | False,
48 | False,
49 | False,
50 | False,
51 | False,
52 | False,
53 | None,
54 | ]
55 | ]
56 | dps = [
57 | [
58 | "diameter_in_cm",
59 | None,
60 | True,
61 | "pizza",
62 | "integer",
63 | None,
64 | None,
65 | None,
66 | None,
67 | None,
68 | ],
69 | ["weight_in_grams", None, True, "pizza", "float", 0, None, None, None, None],
70 | ["description", None, False, "food", "string", None, None, None, None, None],
71 | ["has_price", None, True, None, "float", None, None, None, None, None],
72 | ]
73 | axs = [
74 | [
75 | "human",
76 | None,
77 | "likes",
78 | None,
79 | "some",
80 | None,
81 | "food",
82 | None,
83 | None,
84 | None,
85 | None,
86 | None,
87 | None,
88 | None,
89 | False,
90 | ],
91 | [
92 | "food",
93 | None,
94 | "weight_in_grams",
95 | None,
96 | "only",
97 | None,
98 | None,
99 | "float",
100 | 0,
101 | None,
102 | None,
103 | None,
104 | None,
105 | None,
106 | False,
107 | ],
108 | ]
109 | ins = [
110 | ["John", "vegetarian", None, None, None],
111 | ["His_pizza", "margherita", None, None, None],
112 | ["John", "vegetarian", "likes", "His_pizza", None],
113 | ]
114 | ontor1 = ontor.OntoEditor(iri, fname)
115 | ontor1.add_taxo(classes)
116 | ontor1.add_ops(ops)
117 | ontor1.add_dps(dps)
118 | ontor1.add_axioms(axs)
119 | ontor1.add_instances(ins)
120 |
121 |
122 | def create_second_onto():
123 | iri = "http://example.org/onto-ex-add.owl"
124 | fname = "./onto-ex-add.owl"
125 | classes = [["beverage", None], ["water", "beverage"]]
126 | ontor2 = ontor.OntoEditor(iri, fname)
127 | ontor2.add_taxo(classes)
128 |
129 |
130 | def modify_onto():
131 | classes = [
132 | ["company", None],
133 | ["pizza_company", "company"],
134 | ["margherita_company", "pizza_company"],
135 | [None, None],
136 | ["quattro_stagioni", "pizza"],
137 | ]
138 | ins = [
139 | ["Her_pizza", "quattro_stagioni", None, None, None],
140 | ["Jane", "human", "likes", "Her_pizza", None],
141 | ["Faulty_pizza", None, None, None, None],
142 | ["Her_pizza", "quattro_stagioni", "weight_in_grams", "430.0", "float"],
143 | ["Her_pizza", "quattro_stagioni", "diameter_in_cm", "32", "integer"],
144 | ["Her_pizza", "quattro_stagioni", "description", "jane's pizza", "string"],
145 | ["Another_pizza", "seafood_pizza", None, None, None],
146 | ]
147 | axs = [
148 | [
149 | "pizza_company",
150 | "company",
151 | "produces",
152 | None,
153 | "some",
154 | None,
155 | "pizza",
156 | None,
157 | None,
158 | None,
159 | None,
160 | None,
161 | None,
162 | None,
163 | False,
164 | ],
165 | [
166 | "pizza_company",
167 | "company",
168 | "likes",
169 | None,
170 | "some",
171 | None,
172 | "food",
173 | None,
174 | None,
175 | None,
176 | None,
177 | None,
178 | None,
179 | None,
180 | False,
181 | ],
182 | ]
183 | ontor3 = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
184 | ontor3.add_taxo(classes)
185 | ontor3.add_taxo(ontor.load_csv("./data/taxo.csv"))
186 | # print(list(elem for elem in ontor3.get_elems()[0]))
187 | ontor3.add_ops(ontor.load_json("./data/props.json")["op"])
188 | # print(list(elem for elem in ontor3.get_elems()[0]))
189 | ontor3.add_dps(ontor.load_json("./data/props.json")["dp"])
190 | # print(list(elem for elem in ontor3.get_elems()[0]))
191 | ontor3.add_instances(ins)
192 | # print(list(elem for elem in ontor3.get_elems()[0]))
193 | ontor3.add_axioms(ontor.load_csv("./data/class_axioms.csv"))
194 | # print(*ontor3.get_axioms()[0], sep="\n")
195 | ontor3.add_axioms(axs)
196 |
197 | ontor3.add_distinctions(
198 | [["classes", ["human", "pizza"]], ["classes", ["has_base", "has_topping"]]]
199 | )
200 |
201 | # print(*ontor3.get_axioms(), sep="\n")
202 | ontor3.add_import("file://./onto-ex-add.owl")
203 | # ontor3.save_as("test.owl")
204 |
205 | print("inconsistent classes")
206 | print(ontor3.reasoning("hermit", False))
207 | print("debugging")
208 | ontor3.debug_onto(assume_correct_taxo=False)
209 |
210 | # removing objects from the onto
211 | # removing restrictions by op - produces
212 | ontor3.remove_restrictions_including_prop("produces")
213 | _test_rm(
214 | ontor3.get_class_restrictions("pizza_company"),
215 | ["onto-ex.likes.some(onto-ex.food)"],
216 | "produces restrictions",
217 | )
218 | # removing restrictions by class - pizza_company
219 | ontor3.remove_restrictions_on_class("pizza_company")
220 | _test_rm(
221 | ontor3.get_class_restrictions("pizza_company"),
222 | [],
223 | "restrictions on pizza_company",
224 | )
225 | # removing entities - pizza_company
226 | ontor3.remove_from_taxo(["pizza_company"])
227 | _test_rm(
228 | ontor3.get_class_restrictions("margherita_company", res_only=False),
229 | ["onto-ex.company"],
230 | "pizza_company",
231 | )
232 | # removing relations - produces
233 | ontor3.remove_elements(["produces"])
234 | _test_rm(
235 | ontor3.get_elems()[1],
236 | ["onto-ex.likes", "onto-ex.part", "onto-ex.has_base", "onto-ex.has_topping"],
237 | "produces",
238 | )
239 |
240 | # labels for rendering by labels demo - set "bylabel" to True and "lang" to "en" in "visualize"
241 | ontor3.add_label("John", "John's English label", "en")
242 | ontor3.add_label("likes", "likes' label")
243 |
244 | ontor3.visualize(
245 | classes=["human", "pizza"],
246 | properties=["likes", "diameter_in_cm"],
247 | focusnode="John",
248 | radius=2,
249 | bylabel=False,
250 | lang=None,
251 | open_html=True,
252 | bgcolor="#FFFFFF",
253 | classcolor="#7303fc",
254 | instancecolor="#03dbfc",
255 | font_color="#222222",
256 | )
257 |
258 | ontor3.visualize(
259 | classes=["human", "food"],
260 | tbox_only=True,
261 | bylabel=False,
262 | lang=None,
263 | open_html=True,
264 | )
265 |
266 |
267 | def _test_rm(as_is: list, as_expected: list, elem: str) -> None:
268 | """check whether remove function worked as expected
269 |
270 | :param as_is: current elements
271 | :param as_expected: elements expected after modification
272 | """
273 | as_is = [str(e) for e in as_is]
274 | if set(as_is) == set(as_expected):
275 | print(
276 | f"successfully removed {elem} (reparented subclasses and instances if applicable)"
277 | )
278 | else:
279 | print(f"removing {elem} failed")
280 |
281 |
282 | def add_gcas_to_onto():
283 | gcas = ontor.load_json("./data/gcas.json")
284 | ontor4 = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
285 | ontor4.add_gcas(gcas)
286 |
287 |
288 | def add_complex_axioms():
289 | compl_axs = ontor.load_json("./data/complex_axioms.json")
290 | ontor5 = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
291 | ontor5.add_axioms(compl_axs)
292 |
293 |
294 | def check_import():
295 | ontor4 = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl", ["."])
296 | print("Imports are:")
297 | print(ontor4.onto.imported_ontologies)
298 |
299 |
300 | if __name__ == "__main__":
301 | ontor.cleanup(False, "log", "owl")
302 | create_first_onto()
303 | create_second_onto()
304 | modify_onto()
305 | check_import()
306 | add_gcas_to_onto()
307 | add_complex_axioms()
308 |
--------------------------------------------------------------------------------
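The functions build on each other: create_first_onto and create_second_onto populate ./onto-ex.owl and ./onto-ex-add.owl before modify_onto edits, debugs, and visualizes them, which is why __main__ first removes leftover .log and .owl files from earlier runs (keeping the current log). The relative ./data/... paths suggest the script is meant to be run from the example directory, e.g. with python3 example.py; note that the reasoning and debugging steps rely on Owlready2's bundled HermiT and Pellet reasoners, which require a local Java installation.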
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel"
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
8 |
--------------------------------------------------------------------------------
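This is a minimal PEP 517/518 configuration: it only pins the build requirements and delegates all packaging metadata to setup.py via the setuptools backend, so source and wheel distributions can be produced with any standards-compliant front end (for example, python -m build after installing the build package).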
/requirements.txt:
--------------------------------------------------------------------------------
1 | alabaster==0.7.12
2 | astroid==2.8.4
3 | asttokens==2.0.5
4 | Babel==2.9.1
5 | backcall==0.2.0
6 | black==22.10.0
7 | certifi==2020.12.5
8 | cfgv==3.3.1
9 | chardet==4.0.0
10 | click==8.1.3
11 | decorator==4.4.2
12 | distlib==0.3.6
13 | docutils==0.17.1
14 | executing==0.8.2
15 | filelock==3.8.0
16 | identify==2.5.8
17 | idna==2.10
18 | imagesize==1.2.0
19 | ipython==8.0.1
20 | ipython-genutils==0.2.0
21 | isodate==0.6.0
22 | isort==5.9.3
23 | jedi==0.18.0
24 | Jinja2==3.0.1
25 | jsonpickle==2.0.0
26 | lark==1.0.0
27 | lazy-object-proxy==1.6.0
28 | MarkupSafe==2.0.1
29 | matplotlib-inline==0.1.2
30 | mccabe==0.6.1
31 | mypy==0.910
32 | mypy-extensions==0.4.3
33 | networkx==2.5.1
34 | nodeenv==1.7.0
35 | numpy==1.23.0
36 | Owlready2==0.33
37 | packaging==21.0
38 | pandas==1.3.0
39 | parso==0.8.2
40 | pathspec==0.9.0
41 | pexpect==4.8.0
42 | pickleshare==0.7.5
43 | platformdirs==2.4.0
44 | pre-commit==2.20.0
45 | prompt-toolkit==3.0.19
46 | ptyprocess==0.7.0
47 | pure-eval==0.2.2
48 | Pygments==2.9.0
49 | pylint==2.11.1
50 | pyparsing==2.4.7
51 | python-dateutil==2.8.2
52 | pytz==2021.1
53 | pyvis==0.1.9
54 | PyYAML==6.0
55 | rdflib==5.0.0
56 | requests==2.25.1
57 | six==1.15.0
58 | snowballstemmer==2.1.0
59 | Sphinx==4.1.1
60 | sphinxcontrib-applehelp==1.0.2
61 | sphinxcontrib-devhelp==1.0.2
62 | sphinxcontrib-htmlhelp==2.0.0
63 | sphinxcontrib-jsmath==1.0.1
64 | sphinxcontrib-qthelp==1.0.3
65 | sphinxcontrib-serializinghtml==1.1.5
66 | stack-data==0.1.4
67 | toml==0.10.2
68 | tomli==2.0.1
69 | traitlets==5.0.5
70 | typing-extensions==3.10.0.2
71 | urllib3==1.26.6
72 | virtualenv==20.16.6
73 | wcwidth==0.2.5
74 | wrapt==1.13.2
75 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import setuptools
3 |
4 | with open("README.md", "r", encoding="utf-8") as fh:
5 | long_description = fh.read()
6 |
7 | with open("./src/ontor/_about.py", "r") as fa:
8 | about = {}
9 | exec(fa.read(), about)
10 |
11 | setuptools.setup(
12 | name=about["__name__"],
13 | version=about["__version__"],
14 | author=about["__author__"],
15 | author_email=about["__author_email__"],
16 | description="ontor - an ontology editor based on Owlready2",
17 | long_description=long_description,
18 | long_description_content_type="text/markdown",
19 | url=about["__url__"],
20 | project_urls={
21 | "Bug Tracker": "https://github.com/felixocker/ontor/issues",
22 | },
23 | download_url=about["__download_url__"],
24 | classifiers=[
25 | "Programming Language :: Python :: 3",
26 | "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
27 | "Operating System :: OS Independent",
28 | ],
29 | keywords=about["__keywords__"],
30 | include_package_data=True, # include non-code files during installation
31 | package_dir={"": "src"},
32 | packages=setuptools.find_packages(where="src"),
33 | python_requires=">=3.9",
34 | install_requires=[
35 | "networkx",
36 | "owlready2",
37 | "pandas",
38 | "pyvis==0.1.9",
39 | ],
40 | )
41 |
--------------------------------------------------------------------------------
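Package metadata (name, version, author, URLs, keywords) is read from src/ontor/_about.py at build time, so bumping the version there updates both the package version and the derived download URL; runtime dependencies are left unpinned except for pyvis, which is fixed to 0.1.9. For local development, an editable install (for example, pip install -e .) should resolve the src layout via package_dir.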
/src/ontor/__init__.py:
--------------------------------------------------------------------------------
1 | from .ontor import *
2 | from ._about import *
3 |
--------------------------------------------------------------------------------
/src/ontor/_about.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | __name__ = "ontor"
4 | __author__ = "Felix Ocker"
5 | __author_email__ = "felix.ocker@googlemail.com"
6 | __version__ = "0.4.11"
7 | __url__ = "https://github.com/felixocker/ontor"
8 | __download_url__ = (
9 | "https://github.com/felixocker/ontor/archive/refs/tags/v" + __version__ + ".tar.gz"
10 | )
11 | __keywords__ = ["ontology", "owl", "python", "knowledge graph"]
12 |
--------------------------------------------------------------------------------
/src/ontor/config/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felixocker/ontor/eb2088eccb2b25f129d75041684729555b947373/src/ontor/config/__init__.py
--------------------------------------------------------------------------------
/src/ontor/config/network_visualization.config:
--------------------------------------------------------------------------------
1 | var options = {
2 | "nodes": {
3 | "color": "rgba(153,153,153,1)",
4 | "font": {
5 | "color": "rgba(52,52,52,1)"
6 | }
7 | },
8 | "edges": {
9 | "color": {
10 | "inherit": true
11 | },
12 | "font": {
13 | "color": "rgba(158,158,158,1)",
14 | "strokeWidth": 0
15 | },
16 | "smooth": false
17 | },
18 | "physics": {
19 | "minVelocity": 0.75
20 | }
21 | }
--------------------------------------------------------------------------------
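Despite the .config extension this is the vis.js options object expected by pyvis: OntoEditor._plot_nxgraph reads it with importlib.resources and passes it verbatim to net.set_options, so adjusting colors or physics settings here affects every generated HTML visualization. A rough sketch of how it is consumed (assuming ontor is installed):

    import importlib.resources as pkg_resources
    from pyvis.network import Network
    from ontor import config

    net = Network(directed=True)
    net.set_options(pkg_resources.read_text(config, "network_visualization.config"))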
/src/ontor/ontor.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """ONTology editOR (ontor) module"""
3 |
4 | #
5 | # This file is part of ontor (https://github.com/felixocker/ontor).
6 | # Copyright (c) 2021 Felix Ocker.
7 | #
8 | # ontor is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU General Public License as published by
10 | # the Free Software Foundation, either version 3 of the License, or
11 | # (at your option) any later version.
12 | #
13 | # ontor is distributed in the hope that it will be useful,
14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 | # GNU General Public License for more details.
17 | #
18 | # You should have received a copy of the GNU General Public License
19 | # along with ontor. If not, see <https://www.gnu.org/licenses/>.
20 | #
21 |
22 | import csv
23 | import datetime
24 | import importlib.resources as pkg_resources
25 | import json
26 | import logging
27 | import os
28 | import random
29 | import re
30 | import string
31 | import sys
32 | import textwrap
33 | import traceback
34 | import typing
35 | from contextlib import contextmanager
36 | from io import StringIO
37 |
38 | import networkx as nx
39 | import pandas as pd
40 | from owlready2 import (
41 | destroy_entity,
42 | get_ontology,
43 | onto_path,
44 | types,
45 | sync_reasoner_hermit,
46 | sync_reasoner_pellet,
47 | Thing,
48 | Nothing,
49 | AllDisjoint,
50 | AllDifferent,
51 | DataProperty,
52 | ObjectProperty,
53 | World,
54 | Restriction,
55 | ConstrainedDatatype,
56 | FunctionalProperty,
57 | InverseFunctionalProperty,
58 | TransitiveProperty,
59 | SymmetricProperty,
60 | AsymmetricProperty,
61 | ReflexiveProperty,
62 | IrreflexiveProperty,
63 | ThingClass,
64 | Not,
65 | Inverse,
66 | base,
67 | locstr,
68 | And,
69 | Or,
70 | ClassConstruct,
71 | )
72 | from pyvis.network import Network
73 |
74 | from . import config
75 | from . import queries
76 |
77 |
78 | LOGFILE = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + "_ontor.log"
79 | logging.basicConfig(filename=LOGFILE, level=logging.DEBUG)
80 |
81 |
82 | def load_csv(csv_file: str, load_first_line: bool = False) -> list:
83 | """load data from CSV file
84 |
85 | :param csv_file: input CSV file
86 | :param load_first_line: if True, the first (header) row is included in the returned data
87 | :return: CSV contents as list of lists
88 | """
89 | with open(csv_file) as f:
90 | if load_first_line:
91 | data = list(csv.reader(f))
92 | else:
93 | data = list(csv.reader(f))[1:]
94 | return data
95 |
96 |
97 | def load_json(json_file: str) -> typing.Union[dict, list]:
98 | """load data from JSON file
99 |
100 | :param json_file: input JSON file
101 | :return: JSON contents as dictionary or list
102 | """
103 | with open(json_file) as f:
104 | data = json.load(f)
105 | return data
106 |
107 |
108 | def cleanup(complete: bool, *extensions: str) -> None:
109 | """delete all files in the current directory with the extensions specified
110 |
111 | :param complete: if False, the current log file is kept
112 | :param extensions: extensions of files to be deleted
113 | """
114 | this_dir = "./"
115 | for e in extensions:
116 | files = [f for f in os.listdir(this_dir) if f.endswith("." + e)]
117 | if not complete and LOGFILE in files:
118 | files.remove(LOGFILE)
119 | for f in files:
120 | os.remove(os.path.join(this_dir, f))
121 |
122 |
123 | class InfoException(Exception):
124 | """exception for invalid ontor inputs"""
125 |
126 | def __init__(self, **kwargs: str):
127 | Exception.__init__(self)
128 | self.info = kwargs
129 |
130 |
131 | class OntoEditor:
132 | """create, load, and edit ontologies"""
133 |
134 | # NOTE: _prop_types corresponds to owlready2.prop._TYPE_PROPS; defined here to ensure order
135 | _prop_types = [
136 | FunctionalProperty,
137 | InverseFunctionalProperty,
138 | TransitiveProperty,
139 | SymmetricProperty,
140 | AsymmetricProperty,
141 | ReflexiveProperty,
142 | IrreflexiveProperty,
143 | ]
144 | _dp_range_types = {
145 | "boolean": bool,
146 | "float": float,
147 | "integer": int,
148 | "string": str,
149 | "date": datetime.date,
150 | "time": datetime.time,
151 | "datetime": datetime.datetime,
152 | }
153 |
154 | def __init__(self, iri: str, path: str, import_paths: list = None) -> None:
155 | """tries to load onto from file specified, creates new file if none is available
156 |
157 | :param iri: ontology's IRI
158 | :param path: path to local ontology file or URL; local is checked first
159 | :param import_paths: list of local directories to be checked for imports
160 | """
161 | self.iri = iri
162 | self.path = path
163 | self.filename = path.split(sep="/")[-1]
164 | self.logger = logging.getLogger(self.filename.split(".")[0])
165 | self.query_prefixes = pkg_resources.read_text(queries, "prefixes.sparql")
166 | onto_path.extend(list({path.rsplit("/", 1)[0]} - set(onto_path)))
167 | if import_paths:
168 | onto_path.extend(list(set(import_paths) - set(onto_path)))
169 | self.onto_world = World()
170 | try:
171 | self.onto = self.onto_world.get_ontology(self.path).load()
172 | self.logger.info("successfully loaded ontology specified")
173 | except FileNotFoundError:
174 | self.onto = self.onto_world.get_ontology(self.iri)
175 | self.onto.save(file=self.path)
176 | self.logger.info("ontology file did not exist - created a new one")
177 |
178 | @contextmanager
179 | def _redirect_to_log(self) -> typing.Iterator[None]:
180 | with open(os.devnull, "w") as devnull:
181 | old_stdout = sys.stdout
182 | old_stderr = sys.stderr
183 | result_out = StringIO()
184 | result_err = StringIO()
185 | sys.stdout = result_out
186 | sys.stderr = result_err
187 | try:
188 | yield
189 | finally:
190 | sys.stdout = old_stdout
191 | sys.stderr = old_stderr
192 | if result_out.getvalue():
193 | self.logger.info(
194 | f"reasoner output redirect: \n{self._indent_log(result_out.getvalue())}"
195 | )
196 | if result_err.getvalue():
197 | self.logger.info(
198 | f"reasoner errors redirect: \n{self._indent_log(result_err.getvalue())}"
199 | )
200 |
201 | @staticmethod
202 | def _indent_log(info: str) -> str:
203 | return textwrap.indent(info, "> ")
204 |
205 | def _reload_from_file(self) -> None:
206 | try:
207 | self.onto_world = World()
208 | self.onto = self.onto_world.get_ontology(self.path).load()
209 | self.logger.info("successfully reloaded ontology from file")
210 | except FileNotFoundError:
211 | self.logger.error("ontology file did not exist")
212 | sys.exit(1)
213 |
214 | def _transform_to_dp_type(self, data_type: str, val):
215 | if data_type != "boolean":
216 | return self._dp_range_types[data_type](val)
217 | elif str(val).lower() == "false":
218 | return False
219 | elif str(val).lower() == "true":
220 | return True
221 |
222 | def add_import(self, other_path: str) -> None:
223 | """load an additional onto
224 |
225 | :param other_path: path to file of onto to be imported
226 | """
227 | if "file://" in other_path:
228 | onto_path.extend(
229 | list(
230 | {other_path.rsplit("/", 1)[0].removeprefix("file://")}
231 | - set(onto_path)
232 | )
233 | )
234 | onto_import = get_ontology(other_path).load()
235 | with self.onto:
236 | self.onto.imported_ontologies.append(onto_import)
237 | self.onto.save(file=self.path)
238 |
239 | def save_as(self, new_path: str) -> None:
240 | """safe ontology as new file
241 | helpful, e.g., if multiple ontos were loaded
242 |
243 | :param new_path: path including filename for saving the onto
244 | """
245 | self.onto.save(file=new_path)
246 | self.path = new_path
247 | self.filename = new_path.rsplit("/", 1)[1]
248 |
249 | def export_ntriples(self) -> None:
250 | """saves with same filename, but as ntriples"""
251 | ntpath = self.path.rsplit(".", 1)[0] + ".nt"
252 | self.onto.save(file=ntpath, format="ntriples")
253 |
254 | def get_elems(self) -> list:
255 | """get classes, object properties, datatype properties, and instances
256 |
257 | :return: generators for classes, object properties, data properties, and instances
258 | """
259 | with self.onto:
260 | cl = self.onto.classes()
261 | ops = self.onto.object_properties()
262 | dps = self.onto.data_properties()
263 | ins = self.onto.individuals()
264 | return [cl, ops, dps, ins]
265 |
266 | def _build_query(self, body: str) -> str:
267 | """use default prefixes to construct entire SPARQL query
268 |
269 | :param body: body of the SPARQL query, without prefixes
270 | :return: complete SPARQL query consisting of prefixes and body
271 | """
272 | gp = self.query_prefixes
273 | sp = "PREFIX : <" + self.iri + "#>"
274 | b = body
275 | return gp + sp + "\n\n" + b
276 |
277 | def query_onto(self, query: str) -> list:
278 | """query onto using SPARQL
279 | NOTE: use of query_owlready messes up ranges of dps
280 |
281 | :param query: SPARQL query
282 | :return: query results as list
283 | """
284 | with self.onto:
285 | graph = self.onto_world.as_rdflib_graph()
286 | return list(graph.query(query))
287 |
288 | def get_axioms(self) -> list:
289 | """identify all axioms included in the onto
290 |
291 | :return: list of class, op, and dp axioms
292 | """
293 | axioms = []
294 | for body in ["class_axioms.sparql", "op_axioms.sparql", "dp_axioms.sparql"]:
295 | query_ax = pkg_resources.read_text(queries, body)
296 | axioms.append(self.query_onto(self._build_query(query_ax)))
297 | return axioms
298 |
299 | def _create_notion(self, name, parent, elem_type) -> type:
300 | """load a notion from the ontology or create a new one if not yet available
301 | works for classes, object properties, and data properties
302 |
303 | :param name: name of the notion
304 | :param parent: name of the parent notion
305 | :param elem_type: indicates class, op, or dp
306 | :return: notion
307 | """
308 | type_dict = {
309 | "c": Thing,
310 | "o": ObjectProperty,
311 | "d": DataProperty,
312 | }
313 | with self.onto:
314 | if name and not parent:
315 | notion = types.new_class(name, (type_dict[elem_type],))
316 | elif name and parent and elem_type != "c":
317 | notion = types.new_class(
318 | name,
319 | (
320 | self.onto[parent],
321 | type_dict[elem_type],
322 | ),
323 | )
324 | elif name and parent and elem_type == "c":
325 | notion = types.new_class(name, (self.onto[parent],))
326 | else:
327 | self.logger.warning(f"unexpected info: {name, parent, elem_type}")
328 | raise InfoException
329 | return notion
330 |
331 | def add_taxo(self, class_tuples: list) -> None:
332 | """add taxonomy to onto
333 |
334 | :param class_tuples: list of 2-tuples of the form [class, superclass]
335 | """
336 | with self.onto:
337 | for clst in class_tuples:
338 | try:
339 | my_class = self._create_notion(clst[0], clst[1], "c")
340 | except (TypeError, InfoException):
341 | self.logger.warning(f"unexpected class info: {clst}")
342 | continue
343 | self.onto.save(file=self.path)
344 |
345 | @staticmethod
346 | def class_dict_to_tuple_list(cls_dict: dict) -> list:
347 | """helper function to convert dict with class definitions to list of
348 | tuples as required by add_taxo function
349 |
350 | :param cls_dict: dictionary for taxonomy definition of the form
351 | {superclass: [subclasses]}
352 | :return: list of class definition 2-tuples of the form [[subclass, superclass], ...]
353 | """
354 | return [
355 | [subcls, supercls]
356 | for supercls in cls_dict.keys()
357 | for subcls in cls_dict[supercls]
358 | ]
359 |
360 | def _combine_axioms(self, axs: dict) -> tuple:
361 | """define complex axioms, i.e., elementary axioms that are logically combined
362 |
363 | :param axs: input for axioms, either a simple list or of the form {"or": [ax1, {"and": [ax2, ax3]}]}
364 | :return: combined restriction, restriction type (equivalence or subclass), and class
365 | """
366 | res, comb, cls = [], None, None
367 | res_type_set, res_type = False, None
368 | res_type_vals = {
369 | None: False,
370 | False: False,
371 | True: True,
372 | }
373 | assert len(axs.keys()) == 1, f"more than one operator defined for axiom: {axs}"
374 | operator = list(axs.keys())[0]
375 | assert operator in [
376 | "and",
377 | "or",
378 | ], f"invalid key for axiom combination: {operator}"
379 | for axiom in axs[operator]:
380 | if isinstance(axiom, list):
381 | if not res_type_set:
382 | res_type_set, res_type = True, res_type_vals[axiom[-1]]
383 | else:
384 | assert (
385 | res_type_vals[axiom[-1]] == res_type
386 | ), f"restriction types (subsumption vs equivalence) do not match: {axs}"
387 | if not cls:
388 | cls = axiom[0]
389 | else:
390 | assert (
391 | axiom[0] == cls
392 | ), f"aggregated restriction does not always refer to same class: {axs}"
393 | res.append(
394 | self._tuple_to_res(
395 | axiom[1],
396 | [self.onto[axiom[2]], axiom[3], axiom[4], axiom[5], axiom[13]],
397 | [self.onto[axiom[6]]],
398 | axiom[7:13],
399 | axiom,
400 | )
401 | )
402 | elif isinstance(axiom, dict):
403 | res.append(self._combine_axioms(axiom)[0])
404 | if operator == "and":
405 | comb = And(res)
406 | elif operator == "or":
407 | comb = Or(res)
408 | return comb, res_type, cls
409 |
410 | def add_axioms(self, axioms: list) -> None:
411 | """add entire axioms to onto
412 | NOTE: only one axiom may be specified at once
413 | NOTE: no error handling implemented for input tuples
414 |
415 | :param axioms: list of tuples of the form [class, superclass, property,
416 | inverted(bool), cardinality type, cardinality, op-object, dp-range,
417 | dp-min-ex, dp-min-in, dp-exact, dp-max-in, dp-max-ex, negated(bool),
418 | equivalence(bool)]
419 | may also include dicts containing aggregate axioms of the form {"or": [ax1, {"and": [ax2, ax3]}]}
420 | """
421 | with self.onto:
422 | for axiom in axioms:
423 | if isinstance(axiom, list):
424 | my_class = self.onto[axiom[0]]
425 | if not any(axiom[i] for i in [1, 2, 4, 5, 6]) and not axiom[5] == 0:
426 | continue
427 | if (
428 | all(axiom[i] is not None for i in [0, 1, -1])
429 | or all(axiom[i] for i in [2, 4, 6])
430 | or all(axiom[i] for i in [2, 4, 7])
431 | ):
432 | if axiom[-1]:
433 | current_axioms = my_class.equivalent_to
434 | else:
435 | current_axioms = my_class.is_a
436 | res = self._tuple_to_res(
437 | axiom[1],
438 | [
439 | self.onto[axiom[2]],
440 | axiom[3],
441 | axiom[4],
442 | axiom[5],
443 | axiom[13],
444 | ],
445 | [self.onto[axiom[6]]],
446 | axiom[7:13],
447 | axiom,
448 | )
449 | if res:
450 | current_axioms.append(res)
451 | else:
452 | self.logger.warning(f"unexpected input: {axiom}")
453 | elif isinstance(axiom, dict):
454 | comb, res_type, cls = self._combine_axioms(axiom)
455 | my_class = self.onto[cls]
456 | if res_type:
457 | current_axioms = my_class.equivalent_to
458 | else:
459 | current_axioms = my_class.is_a
460 | if comb:
461 | current_axioms.append(comb)
462 | else:
463 | self.logger.warning(f"unexpected input: {axiom}")
464 | self.onto.save(file=self.path)
465 |
466 | def _tuple_to_res(
467 | self, supercls: str, resinfo: list, opinfo: list, dpinfo: list, axiom: list
468 | ) -> typing.Union[ClassConstruct, None]:
469 | """
470 | :param supercls: parent class or equivalent class, depending on equiv parameter
471 | :param resinfo: list with general restriction info [prop, inverted, p_type,
472 | cardin, negated]
473 | :param opinfo: list with op restriction info [op-object]
474 | :param dpinfo: list with dp restriction info [dprange, minex, minin,
475 | exact, maxin, maxex]
476 | :param axiom: list with complete axiom info
477 | """
478 | if supercls and not any(opinfo) and not any(dpinfo):
479 | return self.onto[supercls]
480 | elif any(opinfo) and not any(dpinfo):
481 | obj = opinfo[0]
482 | elif not any(opinfo) and any(dpinfo):
483 | obj = None
484 | if resinfo[1]:
485 | self.logger.warning(
486 | f"invalid dp constraint - dp may not be inverted: {axiom}"
487 | )
488 | return None
489 | if resinfo[2] in ["some", "only"]:
490 | obj = self._dp_constraint(dpinfo)
491 | elif resinfo[2] in ["value"] and dpinfo[3]:
492 | obj = self._transform_to_dp_type(dpinfo[0], dpinfo[3])
493 | if obj is None:
494 | self.logger.warning(f"invalid dp constraint: {axiom}")
495 | return None
496 | if resinfo[2] in ["exactly", "max", "min"]:
497 | # NOTE: this may be resolved in future versions of Owlready2
498 | self.logger.warning(
499 | "qualified cardinality restrictions currently not "
500 | f"supported for DPs: {axiom}"
501 | )
502 | return None
503 | else:
504 | self.logger.warning(f"restriction includes both op and dp: {axiom}")
505 | return None
506 | if resinfo[1]:
507 | resinfo[0] = Inverse(resinfo[0])
508 | if (
509 | resinfo[2] in ["some", "only", "value"]
510 | and not resinfo[3]
511 | and not resinfo[3] == 0
512 | ):
513 | res = getattr(resinfo[0], resinfo[2])(obj)
514 | elif resinfo[2] in ["exactly", "max", "min"] and (
515 | resinfo[3] or resinfo[3] == 0
516 | ):
517 | res = getattr(resinfo[0], resinfo[2])(resinfo[3], obj)
518 | else:
519 | self.logger.warning(f"unexpected cardinality definition: {axiom}")
520 | return None
521 | if resinfo[4]:
522 | res = Not(res)
523 | return res
524 |
525 | def _dp_constraint(self, dpres: list) -> typing.Optional[ConstrainedDatatype]:
526 | """
527 | :param dpres: DP restriction is list of the form [dprange, minex, minin,
528 | exact, maxin, maxex]
529 | :return: constrained datatype for DP, set to None if invalid
530 | """
531 | dp_range = None
532 | if dpres[0] not in list(self._dp_range_types.keys()):
533 | self.logger.warning(f"unexpected dp range: {dpres}")
534 | if self._check_available_vals(dpres, [0]):
535 | dp_range = self._dp_range_types[dpres[0]]
536 | elif self._check_available_vals(dpres, [0, 3]):
537 | dp_range = ConstrainedDatatype(
538 | self._dp_range_types[dpres[0]],
539 | min_inclusive=dpres[3],
540 | max_inclusive=dpres[3],
541 | )
542 | elif self._check_available_vals(dpres, [0, 1, 4]):
543 | dp_range = ConstrainedDatatype(
544 | self._dp_range_types[dpres[0]],
545 | min_exclusive=dpres[1],
546 | max_inclusive=dpres[4],
547 | )
548 | elif self._check_available_vals(dpres, [0, 1, 5]):
549 | dp_range = ConstrainedDatatype(
550 | self._dp_range_types[dpres[0]],
551 | min_exclusive=dpres[1],
552 | max_exclusive=dpres[5],
553 | )
554 | elif self._check_available_vals(dpres, [0, 2, 4]):
555 | dp_range = ConstrainedDatatype(
556 | self._dp_range_types[dpres[0]],
557 | min_inclusive=dpres[2],
558 | max_inclusive=dpres[4],
559 | )
560 | elif self._check_available_vals(dpres, [0, 2, 5]):
561 | dp_range = ConstrainedDatatype(
562 | self._dp_range_types[dpres[0]],
563 | min_inclusive=dpres[2],
564 | max_exclusive=dpres[5],
565 | )
566 | elif self._check_available_vals(dpres, [0, 1]):
567 | dp_range = ConstrainedDatatype(
568 | self._dp_range_types[dpres[0]], min_exclusive=dpres[1]
569 | )
570 | elif self._check_available_vals(dpres, [0, 2]):
571 | dp_range = ConstrainedDatatype(
572 | self._dp_range_types[dpres[0]], min_inclusive=dpres[2]
573 | )
574 | elif self._check_available_vals(dpres, [0, 4]):
575 | dp_range = ConstrainedDatatype(
576 | self._dp_range_types[dpres[0]], max_inclusive=dpres[4]
577 | )
578 | elif self._check_available_vals(dpres, [0, 5]):
579 | dp_range = ConstrainedDatatype(
580 | self._dp_range_types[dpres[0]], max_exclusive=dpres[5]
581 | )
582 | else:
583 | self.logger.warning(f"unexpected dp range restriction: {dpres}")
584 | return dp_range
585 |
586 | def _check_available_vals(self, values: list, expected_values: list) -> bool:
587 | """
588 | :param values: list with values
589 | :param expected_values: list with indices of expected values
590 | :return: True iff expected indices contain values
591 | """
592 | indices = [x for x, _ in enumerate(values)]
593 | assert all(x in indices for x in expected_values), "invalid expected_values"
594 | test = all(
595 | self._check_value_validity(values[i]) for i in expected_values
596 | ) and not any(
597 | self._check_value_validity(values[i])
598 | for i in [e for e in indices if e not in expected_values]
599 | )
600 | return test
601 |
602 | @staticmethod
603 | def _check_value_validity(value) -> bool:
604 | return value is not None and value != ""
605 |
606 | def add_gcas(self, gcas: list) -> None:
607 | """workaround for representing General Class Axioms
608 | adds two helper classes, each defined via an axiom, that are defined to be equivalent
609 | helper classes are denoted with an underscore
610 |
611 | :param gcas: list of two-tuples with axioms as defined by add_axioms()
612 | """
613 | with self.onto:
614 |
615 | class GcaHelper(Thing):
616 | comment = [
617 | "Helper class for workaround to represent General Class Axioms"
618 | ]
619 |
620 | for gca in gcas:
621 | for a in gca:
622 | gh_name = "_" + "".join(
623 | random.choices(string.ascii_letters + string.digits, k=16)
624 | )
625 | a.insert(0, gh_name)
626 | a.insert(1, "GcaHelper")
627 | assert (
628 | a[-1] is True
629 | ), "GCAs must be equivalented with auxiliary classes for inferences to work"
630 | self.add_taxo([a[:2] for a in gca])
631 | for a in gca:
632 | a[1] = None
633 | gca.append([gca[0][0], gca[1][0]] + [None] * 12 + [True])
634 | self.add_axioms(gca)
635 | self.onto.save(file=self.path)
636 |
637 | def add_ops(self, op_tuples: list) -> None:
638 | """add object properties including their axioms to onto
639 | NOTE: only one inverse_prop can be processed per tuple
640 |
641 | :param op_tuples: list of tuples of the form [op, super-op, domain, range,
642 | functional, inverse functional, transitive, symmetric,
643 | asymmetric, reflexive, irreflexive, inverse_prop]
644 | """
645 | with self.onto:
646 | for op in op_tuples:
647 | try:
648 | my_op = self._create_notion(op[0], op[1], "o")
649 | except (TypeError, InfoException):
650 | self.logger.warning(f"unexpected op info: {op}")
651 | continue
652 | if op[2]:
653 | my_op.domain.append(self.onto[op[2]])
654 | if op[3]:
655 | my_op.range.append(self.onto[op[3]])
656 | for count, charac in enumerate(op[4:11]):
657 | if charac:
658 | my_op.is_a.append(self._prop_types[count])
659 | if op[-1]:
660 | my_op.inverse_property = self.onto[op[11]]
661 | self.onto.save(file=self.path)
662 |
663 | def add_dps(self, dp_tuples: list) -> None:
664 | """add datatype properties including their axioms to onto
665 |
666 | :param dp_tuples: list of input tuples of the form [dp, super-dp, functional,
667 | domain, range, minex, minin, exact, maxin, maxex]
668 | """
669 | with self.onto:
670 | for dp in dp_tuples:
671 | try:
672 | my_dp = self._create_notion(dp[0], dp[1], "d")
673 | except (TypeError, InfoException):
674 | self.logger.warning(f"unexpected dp info: {dp}")
675 | continue
676 | if dp[2]:
677 | my_dp.is_a.append(FunctionalProperty)
678 | if dp[3]:
679 | try:
680 | my_dp.domain.append(self.onto[dp[3]])
681 | except Exception:
682 | self.logger.warning(f"unexpected dp domain: {dp}")
683 | if any(self._check_value_validity(d) for d in dp[4:]):
684 | dprange = self._dp_constraint(dp[4:])
685 | if dprange:
686 | my_dp.range = dprange
687 | else:
688 | self.logger.warning(f"unexpected dp range: {dp}")
689 | continue
690 | self.onto.save(file=self.path)
691 |
692 | def add_instances(self, instance_tuples: list) -> None:
693 | """add instances and their relations to onto
694 |
695 | :param instance_tuples: list of tuples of the form [instance, class,
696 | property, range, range-type]
697 | """
698 | with self.onto:
699 | for inst in instance_tuples:
700 | if inst[0] and inst[1]:
701 | my_instance = self.onto[inst[1]](inst[0])
702 | else:
703 | self.logger.warning(f"unexpected instance info: {inst}")
704 | if not any(inst[2:]):
705 | continue
706 | if inst[2] and self._check_value_validity(inst[3]):
707 | pred = self.onto[inst[2]]
708 | if DataProperty in pred.is_a:
709 | if inst[4] and not inst[4] in self._dp_range_types:
710 | self.logger.warning(f"unexpected DP range: {inst}")
711 | elif inst[4]:
712 | val = self._transform_to_dp_type(inst[4], inst[3])
713 | else:
714 | self.logger.warning(
715 | f"DP range undefined - defaulting to string: {inst}"
716 | )
717 | val = inst[3]
718 | elif ObjectProperty in pred.is_a and not inst[4]:
719 | val = self.onto[inst[3]]
720 | self._add_instance_relation(my_instance, pred, val)
721 | else:
722 | self.logger.warning(f"unexpected triple: {inst}")
723 | self.onto.save(file=self.path)
724 |
725 | @staticmethod
726 | def _add_instance_relation(subj, pred, obj) -> None:
727 | if FunctionalProperty in pred.is_a:
728 | setattr(subj, pred.name, obj)
729 | else:
730 | getattr(subj, pred.name).append(obj)
731 |
732 | def add_distinctions(self, distinct_sets: list) -> None:
733 | """make classes disjoint and instances distinct
734 | NOTE: distinctions may lead to inconsistencies reasoners cannot handle
735 |
736 | :param distinct_sets: list of lists with disjoint/ different elements
737 | """
738 | funcs = {"classes": AllDisjoint, "instances": AllDifferent}
739 | with self.onto:
740 | for ds in distinct_sets:
741 | try:
742 | func = funcs[ds[0]]
743 | func([self.onto[elem] for elem in ds[1]])
744 | except KeyError:
745 | self.logger.warning(f"unknown distinction type {ds[0]}")
746 | self.onto.save(file=self.path)
747 |
748 | def remove_elements(self, elem_list: list) -> None:
749 | """remove elements, all their descendents and (in case of classes) instances,
750 | and all references from axioms
751 |
752 | :param elem_list: list of elements to be removed from onto
753 | """
754 | with self.onto:
755 | for elem in elem_list:
756 | for desc in self.onto[elem].descendants():
757 | if Thing in desc.ancestors():
758 | for i in desc.instances():
759 | destroy_entity(i)
760 | if desc != self.onto[elem]:
761 | destroy_entity(desc)
762 | destroy_entity(self.onto[elem])
763 | self.onto.save(file=self.path)
764 |
765 | def add_label(self, name: str, label: str, lang: str = None) -> None:
766 | """add label in language specified as localized string, defaults to
767 | regular string if no language is specified
768 |
769 | :param name: entity name
770 | :param label: label to be appended
771 | :param lang: label's language (optional)
772 | """
773 | entity = self._get_entity_by_name(name)
774 | if not entity:
775 | return
776 | desc = entity.label
777 | self._add_description_generic(desc, label, lang)
778 |
779 | def add_annotation(self, name: str, comment: str, lang: str = None) -> None:
780 | """add annotation in language specified as localized string, defaults to
781 | regular string if no language is specified
782 |
783 | :param name: entity name
784 | :param comment: annotation to append
785 | :param lang: annotation's language (optional)
786 | """
787 | entity = self._get_entity_by_name(name)
788 | if not entity:
789 | return
790 | desc = entity.comment
791 | self._add_description_generic(desc, comment, lang)
792 |
793 | def _get_entity_by_name(self, name: str) -> typing.Optional[Thing]:
794 | entity = None
795 | try:
796 | entity = self.onto[name]
797 | except AttributeError:
798 | self.logger.info(
799 | f"unexpected entity: {name}, return None and continue anyways"
800 | )
801 | return entity
802 |
803 | def _add_description_generic(
804 | self, desc_list: list, description: str, lang: typing.Optional[str]
805 | ) -> None:
806 | """add description in language specified as localized string, defaults to
807 | regular string if no language is specified
808 |
809 | :param desc_list: list to which to append the description
810 | :param description: description to append
811 | :param lang: description's language (optional)
812 | """
813 | if lang:
814 | desc_list.append(locstr(description, lang=lang))
815 | else:
816 | desc_list.append(description)
817 | self.onto.save(file=self.path)
818 |
819 | def remove_from_taxo(self, elem_list: list, reassign: bool = True) -> None:
820 | """remove a class from the taxonomy, but keep all subclasses and instances
821 | by relating them to parent
822 | NOTE: elem is not replaced in axioms because this may be semantically incorrect
823 |
824 | :param elem_list: list of elements to be removed from onto
825 | :param reassign: add all restrictions to subclasses via is_a
826 | """
827 | with self.onto:
828 | for elem in elem_list:
829 | parents = list(
830 | set(self.onto[elem].ancestors()).intersection(self.onto[elem].is_a)
831 | )
832 | parent = [p for p in parents if not p in self._prop_types]
833 | if len(parent) > 1:
834 | self.logger.warning(f"unexpected parent classes: {parents}")
835 | descendants = list(self.onto[elem].descendants())
836 | descendants.remove(self.onto[elem])
837 | individuals = list(self.onto[elem].instances())
838 | if reassign:
839 | sc_res = self.get_class_restrictions(
840 | self.onto[elem].name, res_type="is_a"
841 | )
842 | eq_res = self.get_class_restrictions(
843 | self.onto[elem].name, res_type="equivalent_to"
844 | )
845 | for desc in descendants:
846 | desc.is_a.append(parent[0])
847 | if reassign:
848 | desc.is_a = desc.is_a + sc_res + eq_res
849 | for ind in individuals:
850 | ind.is_a.append(parent[0])
851 | destroy_entity(self.onto[elem])
852 | self.onto.save(file=self.path)
853 |
854 | def get_class_restrictions(
855 | self, class_name: str, res_type: str = "is_a", res_only: bool = True
856 | ) -> list:
857 | """retrieve restrictions on specific class by restriction type
858 |
859 | :param class_name: name of the class for which restrictions shall be returned
860 | :param res_type: restriction type, either is_a or equivalent_to
861 | :param res_only: if True, only Restrictions are returned; if False,
862 | parent class(es) are also included
863 | :return: list of restrictions on class
864 | """
865 | with self.onto:
866 | if res_type == "is_a":
867 | elems = self.onto[class_name].is_a
868 | elif res_type == "equivalent_to":
869 | elems = self.onto[class_name].equivalent_to
870 | else:
871 | self.logger.warning(f"unexpected res_type: {res_type}")
872 | sys.exit(1)
873 | if res_only:
874 | elems = [x for x in elems if isinstance(x, Restriction)]
875 | return elems
876 |
877 | def remove_restrictions_on_class(self, class_name: str) -> None:
878 | """remove all restrictions on a given class
879 |
880 | :param class_name: name of the class for which restrictions shall be removed
881 | """
882 | with self.onto:
883 | for lst in self.onto[class_name].is_a, self.onto[class_name].equivalent_to:
884 | self._remove_restr_from_class_def(lst)
885 | self.onto.save(file=self.path)
886 |
887 | def remove_restrictions_including_prop(self, prop_name: str) -> None:
888 | """remove class restrictions that include a certain property
889 |
890 | :param prop_name: name of the property for which all class restrictions
891 | shall be removed
892 | """
893 | with self.onto:
894 | for c in self.onto.classes():
895 | for lst in c.is_a, c.equivalent_to:
896 | self._remove_restr_from_class_def(lst, self.onto[prop_name])
897 | self.onto.save(file=self.path)
898 |
899 | @staticmethod
900 | def _remove_restr_from_class_def(cls_restrictions, prop=None) -> None:
901 | """remove all restrictions from list
902 |
903 | :param cls_restrictions: restrictions on a class, either is_a or equivalent_to
904 | :param prop: optional; limits results to restrictions including a certain property
905 | """
906 | for r in [r for r in cls_restrictions if isinstance(r, Restriction)]:
907 | if not prop or prop and r.property == prop:
908 | cls_restrictions.remove(r)
909 |
910 | def reasoning(
911 | self, reasoner: str = "hermit", save: bool = False, debug: bool = False
912 | ) -> list:
913 | """run reasoner to check consistency and infer new facts
914 |
915 | :param reasoner: reasoner can be either hermit or pellet
916 | :param save: bool - save inferences into original file
917 | :param debug: bool - log pellet explanations for inconsistencies; only
918 | works with Pellet
919 | :return: returns list of inconsistent classes if there are any
920 | """
921 | inconsistent_classes = []
922 | # add temporary world for inferences
923 | inferences = World()
924 | self._check_reasoner(reasoner)
925 | inf_onto = inferences.get_ontology(self.path).load()
926 | with inf_onto:
927 | try:
928 | with self._redirect_to_log():
929 | if reasoner == "hermit":
930 | sync_reasoner_hermit([inf_onto])
931 | elif reasoner == "pellet":
932 | # pellet explanations are generated if debug is set to >=2
933 | sync_reasoner_pellet(
934 | [inf_onto],
935 | infer_property_values=True,
936 | infer_data_property_values=True,
937 | debug=debug + 1,
938 | )
939 | inconsistent_classes = list(inf_onto.inconsistent_classes())
940 | except Exception as exc:
941 | if reasoner == "pellet" and debug:
942 | inconsistent_classes = self._analyze_pellet_results(str(exc))
943 | else:
944 | inconsistent_classes = self.reasoning("pellet", False, True)
945 | if inconsistent_classes:
946 | self.logger.warning(f"the ontology is inconsistent: {inconsistent_classes}")
947 | if Nothing in inconsistent_classes:
948 | inconsistent_classes.remove(Nothing)
949 | elif save and not inconsistent_classes:
950 | inf_onto.save(file=self.path)
951 | self._reload_from_file()
952 | return inconsistent_classes
953 |
954 | def _check_reasoner(self, reasoner: str) -> None:
955 | reasoners = ["hermit", "pellet"]
956 | if reasoner not in reasoners:
957 | self.logger.warning(
958 | f"unexpected reasoner: {reasoner} - available reasoners: {reasoners}"
959 | )
960 |
961 | def _analyze_pellet_results(self, exc: str) -> list:
962 | """analyze the explanation returned by Pellet, print it and return
963 | inconsistent classes
964 | IDEA: also consider restrictions on properties and facts about instances
965 |
966 | :param exc: string of exception thrown during reasoning process
967 | :return: list of classes identified as problematic
968 | """
969 | inconsistent_classes = []
970 | self.logger.error(repr(exc))
971 | expl = self._extract_pellet_explanation(traceback.format_exc())
972 | if expl[0]:
973 | print("Pellet provides the following explanation(s):")
974 | print(*expl[0], sep="\n")
975 | inconsistent_classes = [
976 | self.onto[ax[0]]
977 | for ex in expl[1]
978 | for ax in ex
979 | if self.onto[ax[0]] in self.onto.classes()
980 | ]
981 | else:
982 | print("There was a more complex issue, check log for traceback")
983 | self.logger.error(self._indent_log(traceback.format_exc()))
984 | return list(set(inconsistent_classes))
985 |
986 | @staticmethod
987 | def _extract_pellet_explanation(pellet_traceback: str) -> tuple:
988 | """extract reasoner explanation
989 |
990 | :param pellet_traceback: traceback created when running reasoner
991 | :return: tuple of entire explanation and list of axioms included in explanation
992 | """
993 | rex = re.compile(r"Explanation\(s\): \n(.*?)\n\n", re.DOTALL | re.MULTILINE)
994 | res = set(re.findall(rex, pellet_traceback))
995 | axioms: list = []
996 | if res:
997 | expls = [[l[5:] for l in expl.split("\n")] for expl in res]
998 | axioms = [[axiom.split() for axiom in block] for block in expls]
999 | return (res, axioms)
1000 |
1001 | def debug_onto(
1002 | self, reasoner: str = "hermit", assume_correct_taxo: bool = True
1003 | ) -> None:
1004 | """interactively (CLI) fix inconsistencies
1005 |
1006 | :param assume_correct_taxo: if True, the user interactions will be limited
1007 | to restrictions, i.e., options to delete taxonomical relations are
1008 | not included, e.g., A rdfs:subClassOf B
1009 | :param reasoner: reasoner to be used for inferences
1010 | """
1011 | self._check_reasoner(reasoner)
1012 | inconsistent_classes = self.reasoning(reasoner=reasoner, save=False)
1013 | if not inconsistent_classes:
1014 | print("No inconsistencies detected.")
1015 | elif inconsistent_classes:
1016 | print(f"Inconsistent classes are: {inconsistent_classes}")
1017 | if self._bool_user_interaction("Show further information?"):
1018 | debug = World()
1019 | debug_onto = debug.get_ontology(self.path).load()
1020 | with debug_onto:
1021 | try:
1022 | sync_reasoner_pellet(
1023 | [debug_onto],
1024 | infer_property_values=True,
1025 | infer_data_property_values=True,
1026 | debug=2,
1027 | )
1028 | except base.OwlReadyInconsistentOntologyError as err:
1029 | self.logger.error(repr(err))
1030 | self.logger.error(self._indent_log(traceback.format_exc()))
1031 | print(
1032 | "There was an issue with the input ontology; check the log for details."
1033 | )
1034 | self._analyze_pellet_results(traceback.format_exc())
1035 | # IDEA: further analyze reasoner results to pin down cause of inconsistency
1036 | if assume_correct_taxo:
1037 | pot_probl_ax = {
1038 | "is_a": self._get_incon_class_res("is_a", inconsistent_classes),
1039 | "equivalent_to": self._get_incon_class_res(
1040 | "equivalent_to", inconsistent_classes
1041 | ),
1042 | }
1043 | else:
1044 | pot_probl_ax = {
1045 | "is_a": [self.onto[ic.name].is_a for ic in inconsistent_classes],
1046 | "equivalent_to": [
1047 | self.onto[ic.name].equivalent_to for ic in inconsistent_classes
1048 | ],
1049 | }
1050 | ax_msg = "Potentially inconsistent axiom: "
1051 | for rel in "is_a", "equivalent_to":
1052 | self._interactively_delete_axs_by_rel(
1053 | rel, inconsistent_classes, pot_probl_ax, ax_msg
1054 | )
1055 | self.onto.save(file=self.path)
1056 | self.debug_onto(reasoner, assume_correct_taxo)
1057 |
1058 | def _get_incon_class_res(self, restype: str, inconsistent_classes: list) -> list:
1059 | """
1060 | :param restype: type of class restriction, either is_a or equivalent_to
1061 | :return: list of class restrictions for inconsistent_classes - does not return parent classes
1062 | """
1063 | return [
1064 | self.get_class_restrictions(ic.name, res_type=restype, res_only=True)
1065 | for ic in inconsistent_classes
1066 | ]
1067 |
1068 | def _interactively_delete_axs_by_rel(
1069 | self, rel: str, classes: list, axioms: dict, msg: str
1070 | ) -> None:
1071 | """
1072 | :param rel: relation between class and axioms - is_a or equivalent_to
1073 | :param classes: classes for which axioms are to be removed
1074 | :param axioms: axioms which should be checked for removal
1075 | :param msg: message to be displayed when prompting user
1076 | """
1077 | for count, ic in enumerate(classes):
1078 | for ax in axioms[rel][count]:
1079 | if self._bool_user_interaction(
1080 | "Delete " + rel + " axiom?",
1081 | msg + ic.name + " " + rel + " " + str(ax),
1082 | ):
1083 | if isinstance(ax, ThingClass):
1084 | getattr(self.onto[ic.name], rel).remove(self.onto[ax.name])
1085 | else:
1086 | getattr(self.onto[ic.name], rel).remove(ax)
1087 | # IDEA: instead of simply deleting axioms, also allow user to edit them
1088 |
1089 | @staticmethod
1090 | def _bool_user_interaction(question: str, info: str = None) -> bool:
1091 | """simple CLI for yes/ no/ quit interaction"""
1092 | answer = {"y": True, "n": False}
1093 | if info:
1094 | print(info)
1095 | print(question + " [y(es), n(o), q(uit)]")
1096 | user_input = input()
1097 | while user_input not in ["y", "n", "q"]:
1098 | print("invalid choice, please try again")
1099 | user_input = input()
1100 | if user_input == "q":
1101 | print("quitting - process needs to be restarted")
1102 | sys.exit(0)
1103 | else:
1104 | return answer[user_input]
1105 |
1106 | @staticmethod
1107 | def _remove_nt_brackets(triple: list) -> list:
1108 | for c, _ in enumerate(triple):
1109 | triple[c] = triple[c].replace("<", "")
1110 | triple[c] = triple[c].replace(">", "")
1111 | return triple
1112 |
1113 | @staticmethod
1114 | def _df_to_nx_incl_labels(df: pd.DataFrame, coloring: dict) -> nx.MultiDiGraph:
1115 | """turns a pandas dataframe into a networkx graph
1116 |
1117 | :param df: pandas df with spo-triples
1118 | :param coloring: dict with colors as keys and lists of nodes as values
1119 | :return: nxgraph for the ontology including labels and coloring
1120 | """
1121 | nxgraph = nx.from_pandas_edgelist(
1122 | df,
1123 | source="subject",
1124 | target="object",
1125 | edge_attr="predicate",
1126 | create_using=nx.MultiDiGraph(),
1127 | )
1128 | # manually set predicates as labels
1129 | for e in nxgraph.edges.items():
1130 | e[1]["label"] = e[1].pop("predicate")
1131 | # assert that a node may not have more than one color
1132 | assert not set(list(coloring.values())[0]).intersection(
1133 | *list(coloring.values())
1134 | ), "Several colors specified for one node"
1135 | for n in nxgraph.nodes.items():
1136 | for color in coloring.keys():
1137 | if n[0] in coloring[color]:
1138 | n[1]["color"] = color
1139 | return nxgraph
1140 |
1141 | def _ntriples_to_df(self) -> pd.DataFrame:
1142 | self.export_ntriples()
1143 | with open(self.path.rsplit(".", 1)[0] + ".nt", "r") as f:
1144 | lines = f.readlines()
1145 | df = pd.DataFrame(columns=["subject", "predicate", "object"])
1146 | for rownum, row in enumerate(lines):
1147 | df.loc[rownum] = self._remove_nt_brackets(
1148 | row.rsplit(".", 1)[0].split(" ")[:3]
1149 | )
1150 | return df
1151 |
1152 | @staticmethod
1153 | def _query_results_to_df(query_results: list) -> pd.DataFrame:
1154 | clean_data = [
1155 | [str(elem).rsplit("#", maxsplit=1)[-1] for elem in row]
1156 | for row in query_results
1157 | ]
1158 | df = pd.DataFrame(clean_data, columns=["subject", "predicate", "object"])
1159 | return df
1160 |
1161 | def _plot_nxgraph(
1162 | self,
1163 | nxgraph: nx.MultiDiGraph,
1164 | bgcolor: str = "#222222",
1165 | font_color: str = "#FFFFFF",
1166 | open_html: bool = False,
1167 | interactive: bool = False,
1168 | ) -> None:
1169 | """create html file for the network's plot
1170 |
1171 | :param nxgraph: networkx graph including the ontology's triples
1172 | :param bgcolor: background color as a hex code
1173 | :param font_color: font color for nodes as a hex code
1174 |         :param open_html: directly open the created html file using the system's default program
1175 | :param interactive: activates mode for changing network appearance
1176 | """
1177 | net = Network(
1178 | directed=True,
1179 | height="100%",
1180 | width="100%",
1181 | bgcolor=bgcolor,
1182 | font_color=font_color,
1183 | )
1184 | net.set_options(pkg_resources.read_text(config, "network_visualization.config"))
1185 | net.from_nx(nxgraph)
1186 | if interactive:
1187 | net.show_buttons()
1188 | html_name = self.path.rsplit(".", 1)[0] + ".html"
1189 | if open_html:
1190 | net.show(html_name)
1191 | else:
1192 | net.write_html(html_name)
1193 |
1194 | def _config_plot_query_body(
1195 | self,
1196 | classes: list = None,
1197 | properties: list = None,
1198 | focusnode: str = None,
1199 | radius: int = None,
1200 | tbox_only: bool = False,
1201 | include_class_res: bool = True,
1202 | show_class_descendants: bool = True,
1203 | ) -> str:
1204 | """configure body for SPARQL query that identifies triples for plot
1205 |
1206 | :param classes: classes to be returned including their instances
1207 | :param properties: properties to be returned
1208 | :param focusnode: node whose environment shall be displayed
1209 |         :param radius: maximum distance, i.e., number of relations, between a node and the focusnode
1210 | :param tbox_only: limit query to TBox if set to True
1211 | :param include_class_res: also return simplified spo-triples for class
1212 | restrictions if True
1213 | :param show_class_descendants: also explicitly include subclasses of the classes specified
1214 | :return: body for SPARQL query
1215 | """
1216 | max_radius = 5
1217 | nodes_to_be_ignored = [
1218 | "owl:Class",
1219 | "owl:Thing",
1220 | "owl:NamedIndividual",
1221 | "owl:Restriction",
1222 | ]
1223 |
1224 | if classes and show_class_descendants:
1225 |             descendant_lists = [
1226 |                 [desc.name for desc in self.onto[c].descendants()] for c in classes
1227 |             ]
1228 |             subclasses = list({c for sublist in descendant_lists for c in sublist})
1229 | elif classes and not show_class_descendants:
1230 | subclasses = classes
1231 |
1232 | def _sparql_set_values(node, values):
1233 | return (
1234 | "VALUES ?"
1235 | + node
1236 | + " {rdf:type rdfs:subClassOf "
1237 | + " ".join([":" + v for v in values])
1238 | + "} . "
1239 | )
1240 |
1241 | def _sparql_set_in(node, values, sep=None):
1242 | if not sep:
1243 | sep = ""
1244 | return (
1245 | "FILTER ( ?"
1246 | + node
1247 | + " IN ("
1248 | + ", ".join([sep + v for v in values])
1249 | + ") ) . "
1250 | )
1251 |
1252 | querypt_class_rels = (
1253 | "?s rdfs:subClassOf | owl:equivalentClass ?res . \n"
1254 | "?res a owl:Restriction . \n"
1255 | "?res owl:onProperty ?p . \n"
1256 | "?res owl:onClass | owl:someValuesFrom | owl:allValuesFrom | owl:hasValue ?o . "
1257 | )
1258 | querypt1 = "SELECT DISTINCT ?s ?p ?o WHERE {\n"
1259 |
1260 | if include_class_res:
1261 | # NOTE: only atomic axioms are currently supported
1262 | querypt1 += "{\n?s ?p ?o . \n} UNION {\n" + querypt_class_rels + "\n}"
1263 | else:
1264 | querypt1 += "?s ?p ?o . \n"
1265 | querypt2 = "}"
1266 | if properties:
1267 | querypt_rels = _sparql_set_values("p", properties)
1268 | else:
1269 | querypt_rels = ""
1270 | if classes:
1271 | query_nodes_dict: dict = {}
1272 | for node in ["s", "o"]:
1273 | querypt_classes = "?s ?p ?o . \n" + _sparql_set_in(
1274 | node, subclasses, ":"
1275 | )
1276 | querypt_class_res = (
1277 | querypt_class_rels + "\n" + _sparql_set_in(node, subclasses, ":")
1278 | )
1279 | querypt_instances = (
1280 | "{\n?"
1281 | + node
1282 | + " a/rdfs:subClassOf* ?"
1283 | + node
1284 | + "class . \n"
1285 | + _sparql_set_in(node + "class", classes, ":")
1286 | + "\n} UNION {\n?s ?p ?o . \nFILTER NOT EXISTS {?"
1287 | + node
1288 | + " a ?"
1289 | + node
1290 | + "p . }\nFILTER NOT EXISTS {?"
1291 | + node
1292 | + " rdfs:subClassOf ?"
1293 | + node
1294 | + "p . } \n}"
1295 | )
1296 | query_nodes_dict[node] = (
1297 | "{\n"
1298 | + querypt_classes
1299 | + "\n} UNION {\n"
1300 | + querypt_class_res
1301 | + "\n}"
1302 | )
1303 | if not tbox_only:
1304 | query_nodes_dict[node] += " UNION {\n"
1305 | query_nodes_dict[node] += querypt_instances
1306 | query_nodes_dict[node] += "\n}"
1307 | querypt_nodes = "\n".join(query_nodes_dict.values())
1308 | else:
1309 | querypt_nodes = ""
1310 | query_rel_lim = ""
1311 | if focusnode and radius:
1312 | assert radius <= max_radius, "max radius violated"
1313 | if properties:
1314 | rels = properties
1315 | else:
1316 | rels = [p.name for p in self.onto.properties()]
1317 | query_rel_lim = (
1318 | ":"
1319 | + focusnode
1320 | + " "
1321 | + "?/".join(
1322 | ["(rdf:type|rdfs:subClassOf|:" + "|:".join(rels) + ")"] * radius
1323 | )
1324 | + "? ?o . "
1325 | )
1326 |         elif (focusnode and not radius) or (not focusnode and radius):
1327 | self.logger.warning(
1328 | "focus: both a focusnode and a radius must be specified - ignoring the focus"
1329 | )
1330 | querypt_ignore = ""
1331 | for node in ["s", "o"]:
1332 | querypt_ignore += (
1333 | "\nMINUS {\n?s ?p ?o . \n"
1334 | + _sparql_set_in(node, nodes_to_be_ignored)
1335 | + "\n}"
1336 | )
1337 | querypt_ignore += "\nMINUS {\n?s ?p ?o . \n ?o a owl:Restriction . \n}"
1338 | query_body = "\n".join(
1339 | [
1340 | querypt1,
1341 | querypt_rels,
1342 | querypt_nodes,
1343 | query_rel_lim,
1344 | querypt_ignore,
1345 | querypt2,
1346 | ]
1347 | )
1348 | return query_body
1349 |
1350 | def _render_by_label(
1351 | self, graph: nx.MultiDiGraph, lang: str = None
1352 | ) -> nx.MultiDiGraph:
1353 | """relabel the networkx graph's nodes and edges using the labels specified
1354 | in the ontology (if there are labels available); defaults to first label
1355 |
1356 | :param graph: input graph w/ names
1357 | :param lang: desired label language
1358 | :return: graph w/ labels instead of names
1359 | """
1360 | mapping: dict = {}
1361 | for n in graph.nodes():
1362 | label = self._name_to_label(n, lang)
1363 | if label != n:
1364 | mapping[n] = label
1365 | graph = nx.relabel_nodes(graph, mapping)
1366 | for e in graph.edges.items():
1367 | label = self._name_to_label(e[1]["label"], lang)
1368 | if label != e[1]["label"]:
1369 | e[1]["label"] = label
1370 | return graph
1371 |
1372 | def _name_to_label(self, name: str, lang: str = None) -> str:
1373 | """return (first) label for an entity in the language specified
1374 |
1375 |         :param name: name of the ontology's element
1376 |         :param lang: desired label language; can be None to simply use the
1377 |             first label available
1378 |         :return: the element's (first) label; defaults to name if there is no
1379 |             label available in the language specified
1380 | """
1381 | try:
1382 | elem = self.onto[name]
1383 | if not lang and elem.label.first():
1384 | label = elem.label.first()
1385 | elif [l for l in elem.label if l.lang == lang]:
1386 | label = [l for l in elem.label if l.lang == lang][0]
1387 | else:
1388 | label = name
1389 | # catch literals
1390 | except AttributeError:
1391 | label = name
1392 | return label
1393 |
1394 | def visualize(
1395 | self,
1396 | classes: list = None,
1397 | properties: list = None,
1398 | focusnode: str = None,
1399 | radius: int = None,
1400 | bylabel: bool = False,
1401 | lang: str = None,
1402 | open_html: bool = False,
1403 | tbox_only: bool = False,
1404 | bgcolor: str = "#222222",
1405 | classcolor: str = "#0065bd",
1406 | instancecolor: str = "#98c6ea",
1407 | font_color: str = "#FFFFFF",
1408 | ) -> None:
1409 | """visualize onto as a graph; generates html
1410 |
1411 | :param classes: list of classes to be included in plot
1412 | :param properties: list of properties to be included in plot
1413 | :param focusnode: node around which a partial graph shall be displayed
1414 |         :param radius: maximum number of relations between a node and the
1415 |             focusnode specified
1416 | :param bylabel: render visualization by labels (if available)
1417 | :param lang: language of the labels to be displayed
1418 |         :param open_html: open the generated html file
1419 |         :param tbox_only: only visualize the TBox if set to True
1420 | :param bgcolor: background color for the plot as a hex code
1421 | :param classcolor: color of class nodes as a hex code
1422 | :param instancecolor: color of instance nodes as a hex code
1423 | :param font_color: font color for nodes as a hex code
1424 | :return: None
1425 | """
1426 | # graph coloring settings; note that literals default to grey
1427 | coloring = {
1428 | classcolor: [c.name for c in self.onto.classes()],
1429 | instancecolor: [i.name for i in self.onto.individuals()],
1430 | }
1431 |
1432 | if not classes and not properties and not focusnode and not radius:
1433 | graphdata = self._ntriples_to_df()
1434 | else:
1435 | query_body = self._config_plot_query_body(
1436 | classes, properties, focusnode, radius, tbox_only
1437 | )
1438 | query_results = self.query_onto(self._build_query(query_body))
1439 | graphdata = self._query_results_to_df(query_results)
1440 | nxgraph = self._df_to_nx_incl_labels(graphdata, coloring)
1441 | if bylabel:
1442 | nxgraph = self._render_by_label(nxgraph, lang)
1443 | self._plot_nxgraph(
1444 | nxgraph=nxgraph, open_html=open_html, bgcolor=bgcolor, font_color=font_color
1445 | )
1446 |
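A usage sketch for the visualize method above (not part of the source file): the OntoEditor class name and its constructor arguments (an IRI and a file path) are assumptions here, as are the ontology, class, property, and node names; only the keyword arguments are taken from the signature above.

# hedged usage sketch - OntoEditor and all ontology-specific names are assumptions
import ontor

editor = ontor.OntoEditor("http://example.org/onto-ex.owl", "./onto-ex.owl")
# plot only "person"-related triples connected via "likes", at most two hops
# around the node "john", rendered by label on a light background
editor.visualize(
    classes=["person"],
    properties=["likes"],
    focusnode="john",
    radius=2,
    bylabel=True,
    open_html=False,
    bgcolor="#FFFFFF",
    font_color="#000000",
)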
--------------------------------------------------------------------------------
/src/ontor/queries/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felixocker/ontor/eb2088eccb2b25f129d75041684729555b947373/src/ontor/queries/__init__.py
--------------------------------------------------------------------------------
/src/ontor/queries/class_axioms.sparql:
--------------------------------------------------------------------------------
1 | # body for query that extracts class axioms
2 |
3 | #
4 | # This file is part of ontor (https://github.com/felixocker/ontor).
5 | # Copyright (c) 2021 Felix Ocker.
6 | #
7 | # ontor is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # ontor is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with ontor. If not, see <https://www.gnu.org/licenses/>.
19 | #
20 |
21 | # NOTE: this query assumes DNF of axioms
22 | # NOTE: currently unsupported features: disjoint union, general class axiom, target for key
23 |
24 | SELECT DISTINCT ?class ?eq ?rel ?p ?o ?minex ?minin ?maxex ?maxin WHERE
25 | {
26 | # get axiomatized classes
27 | VALUES ?rel { owl:equivalentClass rdfs:subClassOf owl:disjointWith }
28 | ?class ?rel ?eq .
29 | # ignore general class axioms
30 | FILTER NOT EXISTS { ?class owl:intersectionOf ?x . }
31 | FILTER NOT EXISTS { ?class owl:unionOf ?x . }
32 | FILTER NOT EXISTS { ?class a owl:Restriction . }
33 | # axiom consisting of single class or pred-obj triple
34 | {
35 | {
36 | ?eq a owl:Class .
37 | BIND( ?eq as ?o )
38 | } UNION {
39 | ?eq a owl:Restriction ;
40 | owl:onProperty ?p .
41 | {
42 | ?eq owl:onClass ?o .
43 | } UNION {
44 | ?eq owl:someValuesFrom ?o .
45 | FILTER NOT EXISTS { ?o owl:onDatatype ?dt . }
46 | } UNION {
47 | ?eq owl:someValuesFrom/owl:onDatatype ?o .
48 | OPTIONAL { ?eq owl:someValuesFrom/owl:withRestrictions/rdf:first/xsd:minExclusive ?minex . }
49 | OPTIONAL { ?eq owl:someValuesFrom/owl:withRestrictions/rdf:first/xsd:minInclusive ?minin . }
50 | OPTIONAL { ?eq owl:someValuesFrom/owl:withRestrictions/rdf:rest/rdf:first/xsd:maxExclusive ?maxex . }
51 | OPTIONAL { ?eq owl:someValuesFrom/owl:withRestrictions/rdf:rest/rdf:first/xsd:maxInclusive ?maxin . }
52 | }
53 | }
54 | FILTER NOT EXISTS { ?eq owl:unionOf _:u . }
55 | FILTER NOT EXISTS { ?eq owl:intersectionOf _:i . }
56 | }
57 | UNION
58 | # axiom that is an intersection or union of at least two predicates and objects
59 | {
60 | {
61 | ?eq owl:unionOf/rdf:rest*/rdf:first ?lvl1 .
62 | FILTER NOT EXISTS { ?lvl1 owl:unionOf _:uu . }
63 | FILTER NOT EXISTS { ?lvl1 owl:intersectionOf _:ui . }
64 | } UNION {
65 | ?eq owl:intersectionOf/rdf:rest*/rdf:first ?lvl1 .
66 | FILTER NOT EXISTS { ?lvl1 owl:unionOf _:iu . }
67 | FILTER NOT EXISTS { ?lvl1 owl:intersectionOf _:ii . }
68 | }
69 | OPTIONAL {
70 | {
71 | ?lvl1 a owl:Class .
72 | BIND( ?lvl1 as ?o )
73 | } UNION {
74 | ?lvl1 a owl:Restriction ;
75 | owl:onProperty ?p .
76 | {
77 | ?lvl1 owl:onClass ?o .
78 | } UNION {
79 | ?lvl1 owl:someValuesFrom ?o .
80 | FILTER NOT EXISTS { ?o owl:onDatatype ?dt . }
81 | } UNION {
82 | ?lvl1 owl:someValuesFrom/owl:onDatatype ?o .
83 | OPTIONAL { ?lvl1 owl:someValuesFrom/owl:withRestrictions/rdf:first/xsd:minExclusive ?minex . }
84 | OPTIONAL { ?lvl1 owl:someValuesFrom/owl:withRestrictions/rdf:first/xsd:minInclusive ?minin . }
85 | OPTIONAL { ?lvl1 owl:someValuesFrom/owl:withRestrictions/rdf:rest/rdf:first/xsd:maxExclusive ?maxex . }
86 | OPTIONAL { ?lvl1 owl:someValuesFrom/owl:withRestrictions/rdf:rest/rdf:first/xsd:maxInclusive ?maxin . }
87 | }
88 | }
89 | }
90 | }
91 | UNION
92 | # axiom that is a nested intersection or union of at least two predicates and objects
93 | {
94 | # union of unions
95 | {
96 | ?eq owl:unionOf/rdf:rest*/rdf:first ?lvl1 .
97 | ?lvl1 owl:unionOf/rdf:rest*/rdf:first ?lvl2 .
98 | FILTER NOT EXISTS { ?lvl2 owl:unionOf _:uuu . }
99 | FILTER NOT EXISTS { ?lvl2 owl:intersectionOf _:uui . }
100 | } UNION
101 | # union of intersections (only relevant case if DNF assumption holds)
102 | {
103 | ?eq owl:unionOf/rdf:rest*/rdf:first ?lvl1 .
104 | ?lvl1 owl:intersectionOf/rdf:rest*/rdf:first ?lvl2 .
105 | FILTER NOT EXISTS { ?lvl2 owl:unionOf _:uiu . }
106 | FILTER NOT EXISTS { ?lvl2 owl:intersectionOf _:uii . }
107 | } UNION
108 | # intersection of unions
109 | {
110 | ?eq owl:intersectionOf/rdf:rest*/rdf:first ?lvl1 .
111 | ?lvl1 owl:unionOf/rdf:rest*/rdf:first ?lvl2 .
112 | FILTER NOT EXISTS { ?lvl2 owl:unionOf _:iuu . }
113 | FILTER NOT EXISTS { ?lvl2 owl:intersectionOf _:iui . }
114 | } UNION
115 | # intersection of intersections
116 | {
117 | ?eq owl:intersectionOf/rdf:rest*/rdf:first ?lvl1 .
118 | ?lvl1 owl:intersectionOf/rdf:rest*/rdf:first ?lvl2 .
119 | FILTER NOT EXISTS { ?lvl2 owl:unionOf _:iiu . }
120 | FILTER NOT EXISTS { ?lvl2 owl:intersectionOf _:iii . }
121 | }
122 | OPTIONAL {
123 | {
124 | ?lvl2 a owl:Class .
125 | BIND( ?lvl2 as ?o )
126 | } UNION {
127 | ?lvl2 a owl:Restriction ;
128 | owl:onProperty ?p .
129 | {
130 | ?lvl2 owl:onClass ?o .
131 | } UNION {
132 | ?lvl2 owl:someValuesFrom ?o .
133 | FILTER NOT EXISTS { ?o owl:onDatatype ?dt . }
134 | } UNION {
135 | ?lvl2 owl:someValuesFrom/owl:onDatatype ?o .
136 | OPTIONAL { ?lvl2 owl:someValuesFrom/owl:withRestrictions/rdf:first/xsd:minExclusive ?minex . }
137 | OPTIONAL { ?lvl2 owl:someValuesFrom/owl:withRestrictions/rdf:first/xsd:minInclusive ?minin . }
138 | OPTIONAL { ?lvl2 owl:someValuesFrom/owl:withRestrictions/rdf:rest/rdf:first/xsd:maxExclusive ?maxex . }
139 | OPTIONAL { ?lvl2 owl:someValuesFrom/owl:withRestrictions/rdf:rest/rdf:first/xsd:maxInclusive ?maxin . }
140 | }
141 | }
142 | }
143 | }
144 | }
145 | ORDER BY ?class ?eq ?p ?o
146 |
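For orientation (an illustration, not part of the query file): the DNF assumption above means a class axiom is expected to be a union of intersections. A minimal owlready2 sketch of such an axiom, with hypothetical class and property names, could look as follows; the query's union/intersection branches are meant to cover exactly these shapes.

# hedged sketch of a class axiom in DNF (union of intersections);
# all names are hypothetical
from owlready2 import Thing, ObjectProperty, get_ontology

onto = get_ontology("http://example.org/demo#")
with onto:
    class Part(Thing): pass
    class Widget(Thing): pass
    class has_part(ObjectProperty): pass
    class Assembly(Thing):
        # union of a named class and an intersection of two restrictions
        equivalent_to = [Widget | (has_part.some(Part) & has_part.max(3, Part))]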
--------------------------------------------------------------------------------
/src/ontor/queries/dp_axioms.sparql:
--------------------------------------------------------------------------------
1 | # body for query that extracts dp axioms
2 |
3 | #
4 | # This file is part of ontor (https://github.com/felixocker/ontor).
5 | # Copyright (c) 2021 Felix Ocker.
6 | #
7 | # ontor is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # ontor is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with ontor. If not, see <https://www.gnu.org/licenses/>.
19 | #
20 |
21 | SELECT DISTINCT ?dp ?domain ?range ?minex ?minin ?maxex ?maxin ?functional ?equivalent ?parent ?disjoint WHERE
22 | {
23 | ?dp a owl:DatatypeProperty .
24 | OPTIONAL { ?dp rdfs:domain ?domain . }
25 | OPTIONAL {
26 | ?dp rdfs:range ?range .
27 | FILTER NOT EXISTS { ?range owl:onDatatype ?dt . }
28 | }
29 | OPTIONAL { ?dp rdfs:range/owl:onDatatype ?range . }
30 | OPTIONAL { ?dp rdfs:range/owl:withRestrictions/rdf:rest*/rdf:first/xsd:minExclusive ?minex . }
31 | OPTIONAL { ?dp rdfs:range/owl:withRestrictions/rdf:rest*/rdf:first/xsd:minInclusive ?minin . }
32 | OPTIONAL { ?dp rdfs:range/owl:withRestrictions/rdf:rest*/rdf:first/xsd:maxExclusive ?maxex . }
33 | OPTIONAL { ?dp rdfs:range/owl:withRestrictions/rdf:rest*/rdf:first/xsd:maxInclusive ?maxin . }
34 | OPTIONAL { ?dp a owl:FunctionalProperty . BIND ( TRUE AS ?functional ) }
35 | OPTIONAL { ?dp owl:equivalentProperty ?equivalent . }
36 | OPTIONAL { ?dp rdfs:subPropertyOf ?parent . }
37 | OPTIONAL { ?dp owl:propertyDisjointWith ?disjoint . }
38 | }
39 | ORDER BY ?dp
40 |
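As an illustration (hypothetical names, assumed owlready2 usage): a functional datatype property with a constrained range such as the one sketched below would populate the ?domain, ?range, ?minin, ?maxex, and ?functional bindings of this query.

# hedged sketch - names are hypothetical
from owlready2 import (
    Thing,
    DataProperty,
    FunctionalProperty,
    ConstrainedDatatype,
    get_ontology,
)

onto = get_ontology("http://example.org/demo#")
with onto:
    class Pump(Thing): pass
    class has_pressure(DataProperty, FunctionalProperty):
        domain = [Pump]
        # serialized via owl:onDatatype and owl:withRestrictions
        range = [ConstrainedDatatype(float, min_inclusive=0.0, max_exclusive=16.0)]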
--------------------------------------------------------------------------------
/src/ontor/queries/op_axioms.sparql:
--------------------------------------------------------------------------------
1 | # body for query that extracts op axioms
2 |
3 | #
4 | # This file is part of ontor (https://github.com/felixocker/ontor).
5 | # Copyright (c) 2021 Felix Ocker.
6 | #
7 | # ontor is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # ontor is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with ontor. If not, see <https://www.gnu.org/licenses/>.
19 | #
20 |
21 | SELECT DISTINCT ?op ?domain ?range ?functional ?inversefunctional ?symmetric ?asymmetric ?transitive
22 | ?reflexive ?irreflexive ?parent ?inverseparent ?equivalent ?inverse ?disjoint ?propchain WHERE
23 | {
24 | ?op a owl:ObjectProperty .
25 | OPTIONAL { ?op rdfs:domain ?domain . }
26 | OPTIONAL { ?op rdfs:range ?range . }
27 | # properties
28 | # NOTE: more elegant, but currently unsupported expression
29 | # BIND ( EXISTS { ?op a owl:FunctionalProperty } AS ?functional )
30 | OPTIONAL { ?op a owl:FunctionalProperty . BIND ( TRUE AS ?functional ) }
31 | OPTIONAL { ?op a owl:InverseFunctionalProperty . BIND ( TRUE AS ?inversefunctional ) }
32 | OPTIONAL { ?op a owl:SymmetricProperty . BIND ( TRUE AS ?symmetric ) }
33 | OPTIONAL { ?op a owl:AsymmetricProperty . BIND ( TRUE AS ?asymmetric ) }
34 | OPTIONAL { ?op a owl:TransitiveProperty . BIND ( TRUE AS ?transitive ) }
35 | OPTIONAL { ?op a owl:ReflexiveProperty . BIND ( TRUE AS ?reflexive ) }
36 | OPTIONAL { ?op a owl:IrreflexiveProperty . BIND ( TRUE AS ?irreflexive ) }
37 | # additional info
38 | OPTIONAL {
39 | ?op rdfs:subPropertyOf ?parent .
40 | FILTER NOT EXISTS { ?parent owl:inverseOf ?placeholder . }
41 | }
42 | OPTIONAL { ?op rdfs:subPropertyOf/owl:inverseOf ?inverseparent . }
43 | OPTIONAL {
44 | ?op owl:equivalentProperty ?equivalent .
45 | FILTER NOT EXISTS { ?equivalent owl:inverseOf ?placeholder . }
46 | }
47 | OPTIONAL { ?op owl:equivalentProperty/owl:inverseOf ?inverse . }
48 | OPTIONAL { ?op owl:inverseOf ?inverse . }
49 | OPTIONAL { ?op owl:propertyDisjointWith ?disjoint . }
50 | OPTIONAL { ?op owl:propertyChainAxiom/rdf:first*/rdf:rest*/rdf:first+ ?propchain . }
51 | }
52 | ORDER BY ?op
53 |
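Similarly (hypothetical names, assumed owlready2 usage): an object property declared as sketched below would set the ?domain, ?range, ?transitive, and ?inverse bindings of this query.

# hedged sketch - names are hypothetical
from owlready2 import Thing, ObjectProperty, TransitiveProperty, get_ontology

onto = get_ontology("http://example.org/demo#")
with onto:
    class Site(Thing): pass
    class contains(ObjectProperty, TransitiveProperty):
        domain = [Site]
        range = [Site]
    class is_contained_in(ObjectProperty):
        inverse_property = contains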
--------------------------------------------------------------------------------
/src/ontor/queries/prefixes.sparql:
--------------------------------------------------------------------------------
1 | # generic prefixes
2 |
3 | #
4 | # This file is part of ontor (https://github.com/felixocker/ontor).
5 | # Copyright (c) 2021 Felix Ocker.
6 | #
7 | # ontor is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # ontor is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with ontor. If not, see <https://www.gnu.org/licenses/>.
19 | #
20 |
21 | PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
22 | PREFIX owl: <http://www.w3.org/2002/07/owl#>
23 | PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
24 | PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
25 |
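These generic prefixes are prepended to the query bodies above before execution. A sketch of how that assembly might be done outside of ontor.py (reading the packaged resources via importlib.resources mirrors the pkg_resources.read_text call used for the visualization config above; the exact assembly inside ontor.py, e.g. query_onto(self._build_query(query_body)), is only referenced here, not reproduced):

# hedged sketch: combine the generic prefixes with one of the query bodies
from importlib import resources

from ontor import queries  # package containing the .sparql files

prefixes = resources.read_text(queries, "prefixes.sparql")
body = resources.read_text(queries, "class_axioms.sparql")
full_query = prefixes + "\n" + body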
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felixocker/ontor/eb2088eccb2b25f129d75041684729555b947373/tests/__init__.py
--------------------------------------------------------------------------------
/tests/data/gold_visu.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |