Search
40 | 41 | 49 | 50 | 51 |52 | Searching for multiple words only shows matches that contain 53 | all words. 54 |
55 | 56 | 57 | 62 | 63 | 64 | 65 | 66 | 67 |├── germanetpy ├── __init__.py ├── compoundInfo.py ├── __pycache__ │ ├── frames.cpython-37.pyc │ ├── synset.cpython-37.pyc │ ├── utils.cpython-37.pyc │ ├── __init__.cpython-37.pyc │ ├── germanet.cpython-37.pyc │ ├── iliLoader.cpython-37.pyc │ ├── iliRecord.cpython-37.pyc │ ├── lexunit.cpython-37.pyc │ ├── compoundInfo.cpython-37.pyc │ ├── filterconfig.cpython-37.pyc │ ├── relationLoader.cpython-37.pyc │ ├── synsetLoader.cpython-37.pyc │ ├── wictionaryLoader.cpython-37.pyc │ ├── longest_shortest_path.cpython-37.pyc │ └── wictionaryparaphrase.cpython-37.pyc ├── semrel_measures.py ├── utils.py ├── wiktionaryparaphrase.py ├── wiktionaryLoader.py ├── iliLoader.py ├── iliRecord.py ├── relationLoader.py ├── longest_shortest_path.py └── frames.py ├── __init__.py ├── docs ├── requirements.txt ├── _build │ ├── html │ │ ├── _static │ │ │ ├── custom.css │ │ │ ├── file.png │ │ │ ├── minus.png │ │ │ ├── plus.png │ │ │ ├── documentation_options.js │ │ │ ├── pygments.css │ │ │ ├── doctools.js │ │ │ ├── language_data.js │ │ │ └── sphinx_highlight.js │ │ ├── objects.inv │ │ ├── _sources │ │ │ ├── modules.rst.txt │ │ │ ├── _autosummary │ │ │ │ ├── germanetpy.rst.txt │ │ │ │ ├── germanetpy.frames.rst.txt │ │ │ │ ├── germanetpy.germanet.rst.txt │ │ │ │ ├── germanetpy.iliRecord.rst.txt │ │ │ │ ├── germanetpy.filterconfig.rst.txt │ │ │ │ ├── germanetpy.utils.rst.txt │ │ │ │ ├── germanetpy.iliLoader.rst.txt │ │ │ │ ├── germanetpy.semrel_measures.rst.txt │ │ │ │ ├── germanetpy.lexunit.rst.txt │ │ │ │ ├── germanetpy.icbased_similarity.rst.txt │ │ │ │ ├── germanetpy.synset.rst.txt │ │ │ │ ├── germanetpy.wictionaryparaphrase.rst.txt │ │ │ │ ├── germanetpy.relationLoader.rst.txt │ │ │ │ ├── germanetpy.wictionaryLoader.rst.txt │ │ │ │ ├── germanetpy.compoundInfo.rst.txt │ │ │ │ ├── germanetpy.path_based_relatedness_measures.rst.txt │ │ │ │ ├── germanetpy.synsetLoader.rst.txt │ │ │ │ └── germanetpy.longest_shortest_path.rst.txt │ │ │ ├── index.rst.txt 
│ │ │ └── germanetpy.rst.txt │ │ ├── search.html │ │ ├── _autosummary │ │ │ ├── germanetpy.html │ │ │ ├── germanetpy.frames.html │ │ │ ├── germanetpy.iliRecord.html │ │ │ ├── germanetpy.semrel_measures.html │ │ │ ├── germanetpy.icbased_similarity.html │ │ │ ├── germanetpy.filterconfig.html │ │ │ ├── germanetpy.utils.html │ │ │ ├── germanetpy.germanet.html │ │ │ ├── germanetpy.iliLoader.html │ │ │ ├── germanetpy.wictionaryparaphrase.html │ │ │ ├── germanetpy.relationLoader.html │ │ │ ├── germanetpy.lexunit.html │ │ │ ├── germanetpy.wictionaryLoader.html │ │ │ ├── germanetpy.path_based_relatedness_measures.html │ │ │ ├── germanetpy.synset.html │ │ │ └── germanetpy.longest_shortest_path.html │ │ ├── _modules │ │ │ ├── germanetpy │ │ │ │ └── semrel_measures.html │ │ │ └── index.html │ │ └── index.html │ └── doctrees │ │ ├── index.doctree │ │ ├── modules.doctree │ │ ├── environment.pickle │ │ ├── germanetpy.doctree │ │ └── _autosummary │ │ ├── germanetpy.doctree │ │ ├── germanetpy.frames.doctree │ │ ├── germanetpy.synset.doctree │ │ ├── germanetpy.utils.doctree │ │ ├── germanetpy.germanet.doctree │ │ ├── germanetpy.iliLoader.doctree │ │ ├── germanetpy.iliRecord.doctree │ │ ├── germanetpy.lexunit.doctree │ │ ├── germanetpy.compoundInfo.doctree │ │ ├── germanetpy.filterconfig.doctree │ │ ├── germanetpy.relationLoader.doctree │ │ ├── germanetpy.synsetLoader.doctree │ │ ├── germanetpy.semrel_measures.doctree │ │ ├── germanetpy.wictionaryLoader.doctree │ │ ├── germanetpy.icbased_similarity.doctree │ │ ├── germanetpy.longest_shortest_path.doctree │ │ ├── germanetpy.wictionaryparaphrase.doctree │ │ └── germanetpy.path_based_relatedness_measures.doctree ├── modules.rst ├── index.rst ├── Makefile ├── make.bat ├── germanetpy.rst └── conf.py ├── tests ├── __pycache__ │ ├── __init__.cpython-37.pyc │ ├── test_filter.cpython-37.pyc │ ├── test_frames.cpython-37.pyc │ ├── test_synset.cpython-37.pyc │ ├── test_germanet.cpython-37.pyc │ ├── test_lexunit.cpython-37.pyc │ ├── 
test_filter.cpython-37-pytest-5.3.2.pyc │ ├── test_frames.cpython-37-pytest-5.3.2.pyc │ ├── test_synset.cpython-37-pytest-5.3.2.pyc │ ├── test_germanet.cpython-37-pytest-5.3.2.pyc │ ├── test_lexunit.cpython-37-pytest-5.3.2.pyc │ └── test_similarity.cpython-37-pytest-5.3.2.pyc ├── test_germanet.py ├── test_frames.py └── test_lexunit.py ├── .readthedocs.yaml ├── .gitignore ├── setup.py └── README.md /germanetpy/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.1.1" 2 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx-rtd-theme -------------------------------------------------------------------------------- /docs/_build/html/_static/custom.css: -------------------------------------------------------------------------------- 1 | /* This file intentionally left blank. */ 2 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | germanetpy 2 | ========== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | germanetpy 8 | -------------------------------------------------------------------------------- /germanetpy/compoundInfo.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/compoundInfo.py -------------------------------------------------------------------------------- /docs/_build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/html/objects.inv -------------------------------------------------------------------------------- /docs/_build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/_build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/html/_static/file.png -------------------------------------------------------------------------------- /docs/_build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/html/_static/minus.png -------------------------------------------------------------------------------- /docs/_build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/html/_static/plus.png -------------------------------------------------------------------------------- /docs/_build/doctrees/modules.doctree: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/modules.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/_build/doctrees/germanetpy.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/germanetpy.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/modules.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy 2 | ========== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | germanetpy 8 | -------------------------------------------------------------------------------- /tests/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/frames.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/frames.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/synset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/synset.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/utils.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/utils.cpython-37.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_filter.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_filter.cpython-37.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_frames.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_frames.cpython-37.pyc 
-------------------------------------------------------------------------------- /tests/__pycache__/test_synset.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_synset.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/germanet.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/germanet.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/iliLoader.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/iliLoader.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/iliRecord.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/iliRecord.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/lexunit.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/lexunit.cpython-37.pyc 
-------------------------------------------------------------------------------- /tests/__pycache__/test_germanet.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_germanet.cpython-37.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_lexunit.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_lexunit.cpython-37.pyc -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.doctree -------------------------------------------------------------------------------- /germanetpy/__pycache__/compoundInfo.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/compoundInfo.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/filterconfig.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/filterconfig.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/relationLoader.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/relationLoader.cpython-37.pyc 
-------------------------------------------------------------------------------- /germanetpy/__pycache__/synsetLoader.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/synsetLoader.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/wictionaryLoader.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/wictionaryLoader.cpython-37.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_filter.cpython-37-pytest-5.3.2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_filter.cpython-37-pytest-5.3.2.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_frames.cpython-37-pytest-5.3.2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_frames.cpython-37-pytest-5.3.2.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_synset.cpython-37-pytest-5.3.2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_synset.cpython-37-pytest-5.3.2.pyc -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.frames.doctree: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.frames.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.synset.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.synset.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.utils.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.utils.doctree -------------------------------------------------------------------------------- /germanetpy/__pycache__/longest_shortest_path.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/longest_shortest_path.cpython-37.pyc -------------------------------------------------------------------------------- /germanetpy/__pycache__/wictionaryparaphrase.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/germanetpy/__pycache__/wictionaryparaphrase.cpython-37.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_germanet.cpython-37-pytest-5.3.2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_germanet.cpython-37-pytest-5.3.2.pyc -------------------------------------------------------------------------------- /tests/__pycache__/test_lexunit.cpython-37-pytest-5.3.2.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_lexunit.cpython-37-pytest-5.3.2.pyc -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.germanet.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.germanet.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.iliLoader.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.iliLoader.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.iliRecord.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.iliRecord.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.lexunit.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.lexunit.doctree -------------------------------------------------------------------------------- /tests/__pycache__/test_similarity.cpython-37-pytest-5.3.2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/tests/__pycache__/test_similarity.cpython-37-pytest-5.3.2.pyc 
-------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.compoundInfo.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.compoundInfo.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.filterconfig.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.filterconfig.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.relationLoader.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.relationLoader.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.synsetLoader.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.synsetLoader.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.semrel_measures.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.semrel_measures.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.wictionaryLoader.doctree: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.wictionaryLoader.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.icbased_similarity.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.icbased_similarity.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.longest_shortest_path.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.longest_shortest_path.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.wictionaryparaphrase.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.wictionaryparaphrase.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy 2 | ========== 3 | 4 | .. 
automodule:: germanetpy 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /docs/_build/doctrees/_autosummary/germanetpy.path_based_relatedness_measures.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Germanet-sfs/germanetpy/HEAD/docs/_build/doctrees/_autosummary/germanetpy.path_based_relatedness_measures.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | Welcome to germanetpy's documentation! 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | germanetpy 8 | 9 | 10 | 11 | Indices and tables 12 | ------------------ 13 | 14 | * :ref:`modindex` 15 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to germanetpy's documentation! 2 | ====================================== 3 | 4 | API reference 5 | ============= 6 | 7 | .. autosummary:: 8 | :toctree: _autosummary 9 | :recursive: 10 | 11 | germanetpy 12 | 13 | 14 | Indices and tables 15 | ------------------ 16 | 17 | * :ref:`modindex` 18 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.frames.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.frames 2 | ================= 3 | 4 | .. automodule:: germanetpy.frames 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. 
autosummary:: 15 | 16 | Frames 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.germanet.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.germanet 2 | =================== 3 | 4 | .. automodule:: germanetpy.germanet 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | Germanet 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.iliRecord.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.iliRecord 2 | ==================== 3 | 4 | .. automodule:: germanetpy.iliRecord 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | IliRecord 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.filterconfig.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.filterconfig 2 | ======================= 3 | 4 | .. automodule:: germanetpy.filterconfig 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | Filterconfig 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.utils.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.utils 2 | ================ 3 | 4 | .. automodule:: germanetpy.utils 5 | 6 | 7 | 8 | .. rubric:: Functions 9 | 10 | .. 
autosummary:: 11 | 12 | convert_to_boolean 13 | parse_xml 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /germanetpy/semrel_measures.py: -------------------------------------------------------------------------------- 1 | import fastenum 2 | 3 | 4 | class SemRelMeasure(fastenum.Enum): 5 | """This Enum represents the semantic relatedness measures""" 6 | SimplePath = "SimplePath" 7 | LeacockAndChodorow = "LeacockAndChodorow" 8 | WuAndPalmer = "WuAndPalmer" 9 | Resnik = "Resnik" 10 | Lin = "Lin" 11 | JiangAndConrath = "JiangAndConrath" -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.iliLoader.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.iliLoader 2 | ==================== 3 | 4 | .. automodule:: germanetpy.iliLoader 5 | 6 | 7 | 8 | .. rubric:: Functions 9 | 10 | .. autosummary:: 11 | 12 | create_ili_record 13 | load_ili 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.semrel_measures.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.semrel\_measures 2 | =========================== 3 | 4 | .. automodule:: germanetpy.semrel_measures 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | SemRelMeasure 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.lexunit.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.lexunit 2 | ================== 3 | 4 | .. automodule:: germanetpy.lexunit 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. 
autosummary:: 15 | 16 | LexRel 17 | Lexunit 18 | OrthFormVariant 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.icbased_similarity.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.icbased\_similarity 2 | ============================== 3 | 4 | .. automodule:: germanetpy.icbased_similarity 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | ICBasedSimilarity 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.synset.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.synset 2 | ================= 3 | 4 | .. automodule:: germanetpy.synset 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | ConRel 17 | Synset 18 | WordCategory 19 | WordClass 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.12" 7 | 8 | sphinx: 9 | configuration: docs/conf.py # ← tells RTD your Sphinx config lives in docs/ 10 | 11 | python: 12 | install: 13 | - method: pip 14 | path: . # ← installs your package so autodoc can import it 15 | - requirements: docs/requirements.txt 16 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.wictionaryparaphrase.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.wiktionaryparaphrase 2 | =============================== 3 | 4 | .. 
automodule:: germanetpy.wiktionaryparaphrase 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | WiktionaryParaphrase 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.relationLoader.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.relationLoader 2 | ========================= 3 | 4 | .. automodule:: germanetpy.relationLoader 5 | 6 | 7 | 8 | .. rubric:: Functions 9 | 10 | .. autosummary:: 11 | 12 | get_relation_attributes 13 | load_relations 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.wictionaryLoader.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.wiktionaryLoader 2 | =========================== 3 | 4 | .. automodule:: germanetpy.wiktionaryLoader 5 | 6 | 7 | 8 | .. rubric:: Functions 9 | 10 | .. autosummary:: 11 | 12 | create_wiktionary 13 | load_wiktionary 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.compoundInfo.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.compoundInfo 2 | ======================= 3 | 4 | .. automodule:: germanetpy.compoundInfo 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. 
autosummary:: 15 | 16 | CompoundCategory 17 | CompoundInfo 18 | CompoundProperty 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docs/_build/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | const DOCUMENTATION_OPTIONS = { 2 | VERSION: '0.2.0', 3 | LANGUAGE: 'en', 4 | COLLAPSE_INDEX: false, 5 | BUILDER: 'html', 6 | FILE_SUFFIX: '.html', 7 | LINK_SUFFIX: '.html', 8 | HAS_SOURCE: true, 9 | SOURCELINK_SUFFIX: '.txt', 10 | NAVIGATION_WITH_KEYS: false, 11 | SHOW_SEARCH_SUMMARY: true, 12 | ENABLE_SEARCH_SHORTCUTS: true, 13 | }; -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.path_based_relatedness_measures.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.path\_based\_relatedness\_measures 2 | ============================================= 3 | 4 | .. automodule:: germanetpy.path_based_relatedness_measures 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | .. rubric:: Classes 13 | 14 | .. autosummary:: 15 | 16 | PathBasedRelatedness 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.synsetLoader.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.synsetLoader 2 | ======================= 3 | 4 | .. automodule:: germanetpy.synsetLoader 5 | 6 | 7 | 8 | .. rubric:: Functions 9 | 10 | .. 
autosummary:: 11 | 12 | add_orth_forms 13 | create_compound_info 14 | create_lexunit 15 | get_attribute_element 16 | load_lexunits 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/_autosummary/germanetpy.longest_shortest_path.rst.txt: -------------------------------------------------------------------------------- 1 | germanetpy.longest\_shortest\_path 2 | ================================== 3 | 4 | .. automodule:: germanetpy.longest_shortest_path 5 | 6 | 7 | 8 | .. rubric:: Functions 9 | 10 | .. autosummary:: 11 | 12 | get_greatest_depth 13 | get_longest_possible_shortest_distance 14 | get_overall_longest_shortest_distance 15 | print_longest_shortest_distances 16 | print_maximum_depths 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *pyc 2 | !/.gitignore 3 | germanetpy.egg-info 4 | dist 5 | /Levenshtein-0.25.1-cp312-cp312-macosx_10_9_x86_64.whl 6 | /fastenum-1.1.1-py3-none-any.whl 7 | /germanetpy-0.2.3-py3-none-any.whl 8 | /iniconfig-2.0.0-py3-none-any.whl 9 | /lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl 10 | /numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl 11 | /packaging-24.0-py3-none-any.whl 12 | /pluggy-1.5.0-py3-none-any.whl 13 | /pytest-8.2.0-py3-none-any.whl 14 | /python_Levenshtein-0.25.1-py3-none-any.whl 15 | /rapidfuzz-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl 16 | /tqdm-4.66.4-py3-none-any.whl 17 | .DS_Store 18 | .project 19 | .pydevproject 20 | .settings/ 21 | build/ 22 | docs/.DS_Store 23 | docs/_build/.DS_Store 24 | docs/_build/doctrees/.DS_Store 25 | docs/_build/html/.DS_Store 26 | docs/_build/html/.buildinfo 27 | docs/_build/html/_modules/.DS_Store 28 | docs/_build/html/_sources/.DS_Store 29 | docs/_build/man/ 30 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | import setuptools 3 | 4 | # The directory containing this file 5 | HERE = pathlib.Path(__file__).parent 6 | requirements = ["numpy>=1.18.1", "lxml>=4.4.2", "pytest>=5.3.2", "fastenum>=0.0.1", "tqdm>=4.14", 7 | "python-Levenshtein==0.27.3"] 8 | 9 | # The text of the README file 10 | long_description = (HERE / "README.md").read_text() 11 | setuptools.setup( 12 | name="germanetpy", 13 | version="0.2.5", 14 | author="Ben Campbell", 15 | author_email="ben.campbell@uni-tuebingen.de", 16 | description="Python API for GermaNet", 17 | long_description=long_description, 18 | long_description_content_type="text/markdown", 19 | url="https://github.com/Germanet-sfs/germanetpy.git", 20 | install_requires=requirements, 21 | packages=["germanetpy"], 22 | classifiers=[ 23 | "Programming Language :: Python :: 3", 24 | "Programming Language :: Python :: 3.12", 25 | "License :: OSI Approved :: Apache Software License", 26 | "Operating System :: OS Independent", 27 | ], 28 | ) 29 | -------------------------------------------------------------------------------- /germanetpy/utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from lxml import etree as ET 3 | 4 | 5 | def convert_to_boolean(attribute: str) -> bool: 6 | """ 7 | Converts the given String into a boolean. 
8 | 9 | :param attribute: The attribute that needs to be converted into a boolean 10 | :return: True, False or an Error message if the attribute doesn't have the right value 11 | """ 12 | assert attribute == "yes" or attribute == "no", "cannot be converted to boolean" 13 | if attribute == 'no': 14 | return False 15 | if attribute == 'yes': 16 | return True 17 | 18 | 19 | def parse_xml(datadir: str, f: str) -> ET: 20 | """ 21 | Parses an XML file and returns the XML tree 22 | 23 | :param datadir: The directory where the file is located 24 | :param f: the filename 25 | :return: The parsed XML tree 26 | """ 27 | d = '/'.join([datadir, f]) 28 | try: 29 | tree = ET.parse(d) 30 | except ET.ParseError: 31 | print("Unable to load GermaNet data at {0} . Aborting...".format(d)) 32 | sys.exit(0) 33 | else: 34 | return tree 35 | -------------------------------------------------------------------------------- /tests/test_germanet.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | from germanetpy.germanet import Germanet 4 | from lxml import etree as ET 5 | import numpy as np 6 | from pathlib import Path 7 | 8 | logger = logging.getLogger('logging_test_germanet') 9 | d = str(Path(__file__).parent.parent) + "/data" 10 | try: 11 | germanet_data = Germanet(d) 12 | except ET.ParseError: 13 | message = ("Unable to load GermaNet data at {0} . Aborting...").format(d) 14 | logger.error(message, 15 | ET.ParseError) 16 | sys.exit(0) 17 | except IOError: 18 | message = ("GermaNet data not found at {0} . 
Aborting...").format(d) 19 | logger.error(message, IOError) 20 | sys.exit(0) 21 | 22 | 23 | def test_number_of_instances(): 24 | """Test whether the total numbers of synsets, lexunits, compounds, ili records, wiktionary entries corresponds to 25 | the numbers for the current release""" 26 | number_synsets = len(germanet_data.synsets) 27 | number_lexunits = len(germanet_data.lexunits) 28 | number_compounds = len(germanet_data.compounds) 29 | number_ili_records = len(germanet_data.ili_records) 30 | number_wiktionary = len(germanet_data.wiktionary_entries) 31 | np.testing.assert_equal(number_synsets, 179438) 32 | np.testing.assert_equal(number_lexunits, 231500) 33 | np.testing.assert_equal(number_compounds, 134070) 34 | np.testing.assert_equal(number_ili_records, 28561) 35 | np.testing.assert_equal(number_wiktionary, 29539) -------------------------------------------------------------------------------- /germanetpy/wiktionaryparaphrase.py: -------------------------------------------------------------------------------- 1 | class WiktionaryParaphrase: 2 | 3 | def __init__(self, lexunit_id: str, wiktionary_id: str, wiktionary_sense_id: int, wiktionary_sense: str, 4 | edited: bool): 5 | """ 6 | This class holds the Wiktionary paraphrase object. A wiktionary paraphrase can be part of lexical units. The 7 | contain a definition of the lexical unit which helps to differentiate between different sense of a word. 8 | 9 | :param lexunit_id: The lexical unit id, this wiktionary entry belongs to 10 | :param wiktionary_id: The corresponding wiktionary identifier 11 | :param wiktionary_sense_id: The sense identifier 12 | :param wiktionary_sense: The sense definition 13 | :param edited: If this paraphrase was edited. 
14 | """ 15 | self._lexunit_id = lexunit_id 16 | self._wiktionary_id = wiktionary_id 17 | self._wiktionary_sense_id = wiktionary_sense_id 18 | self._wiktionary_sense = wiktionary_sense 19 | self._edited = edited 20 | 21 | def __repr__(self): 22 | return f'Wiktionary(LexUnit ID={self.lexunit_id}, definition={self.wiktionary_sense})' 23 | 24 | @property 25 | def lexunit_id(self): 26 | return self._lexunit_id 27 | 28 | @property 29 | def wiktionary_id(self): 30 | return self._wiktionary_id 31 | 32 | @property 33 | def wiktionary_sense_id(self): 34 | return self._wiktionary_sense_id 35 | 36 | @property 37 | def wiktionary_sense(self): 38 | return self._wiktionary_sense 39 | 40 | @property 41 | def edited(self): 42 | return self._edited 43 | -------------------------------------------------------------------------------- /germanetpy/wiktionaryLoader.py: -------------------------------------------------------------------------------- 1 | from germanetpy.wiktionaryparaphrase import WiktionaryParaphrase 2 | from germanetpy.utils import convert_to_boolean 3 | 4 | LEXID = 'lexUnitId' 5 | ID = 'wiktionaryId' 6 | SENSEID = 'wiktionarySenseId' 7 | SENSE = 'wiktionarySense' 8 | EDITED = 'edited' 9 | 10 | 11 | def create_wiktionary(attributes) -> WiktionaryParaphrase: 12 | """ 13 | Creates a wiktionary object given the XML attributes that contain the required information 14 | 15 | :param attributes: XML attributes that contain information about the wiktionary paraphrase 16 | :return: a wiktionary object 17 | """ 18 | lex_id = attributes[LEXID] 19 | wiktionary_id = attributes[ID] 20 | wiktionary_sense_id = int(attributes[SENSEID]) 21 | wiktionary_sense = attributes[SENSE] 22 | edited = convert_to_boolean(attributes[EDITED]) 23 | wiki = WiktionaryParaphrase(lexunit_id=lex_id, wiktionary_id=wiktionary_id, wiktionary_sense_id=wiktionary_sense_id, 24 | wiktionary_sense=wiktionary_sense, edited=edited) 25 | return wiki 26 | 27 | 28 | def load_wiktionary(germanet, tree): 29 | """ 30 | 
Given a XML tree this method initialized the wiktionary objects and adds them to the germanet object and the 31 | corresponding lexunits 32 | 33 | :type tree: etree 34 | :type germanet: Germanet 35 | :param germanet: The germane object 36 | :param tree: The XML tree of the wiktionary file 37 | """ 38 | root = tree.getroot() 39 | for child in root: 40 | attributes = child.attrib 41 | wiktionary = create_wiktionary(attributes) 42 | lexunit = germanet.get_lexunit_by_id(wiktionary.lexunit_id) 43 | lexunit.wiktionary_paraphrases.append(wiktionary) 44 | germanet.wiktionary_entries.append(wiktionary) 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # germanetpy 2 | Welcome. This is the Python API for the German wordnet GermaNet. GermaNet is a lexical-semantic net that relates German nouns, verbs, and adjectives semantically by grouping lexical units that express the same concept into synsets and by defining semantic relations between these synsets. 3 | This API can be used to extract structured information from the GermaNet with python. 4 | More information about GermaNet can be found on the following page: 5 | 6 | https://uni-tuebingen.de/en/142806 7 | 8 | ## Installation 9 | 10 | You can install germanetpy from PyPI (https://pypi.org/project/germanetpy/): 11 | 12 | pip install germanetpy 13 | 14 | Get the GermaNet data as XML files and put all files in a data directory. When you use the API to load the data, the path pointing to the directory containing the XML files needs to be specified. The API is supported with Python 3. 15 | 16 | ## How to use 17 | 18 | You can read the documentation [here](https://germanetpydocs.readthedocs.io/en/master/) 19 | 20 | The API provides functionality that can be used to load the data from GermaNet and query it. 
The data has to be retrieved 21 | via a license agreement from the University of Tübingen, Seminar für Sprachwissenschaften: 22 | 23 | https://uni-tuebingen.de/en/142828 24 | 25 | To use the data for queries you first have to create a Germanet object, which loads the data specified as an argument once. This takes a few seconds: 26 | 27 | from germanetpy import germanet 28 | 29 | germanet_object = germanet.Germanet(path_to_the_GermaNet_XML_files) 30 | 31 | This repository also provides a Tutorial [https://github.com/Blubberli/germanetpy/blob/master/germanetpy_tutorial.ipynb] that shows how to use the API to query GermaNet. 32 | If you want to use the tutorial as an interactive jupyter notebook, clone this repository: 33 | ``` 34 | git clone https://github.com/Germanet-sfs/germanetTutorials.git 35 | ``` 36 | and follow the instructions in the README https://github.com/Germanet-sfs/germanetTutorials 37 | -------------------------------------------------------------------------------- /germanetpy/iliLoader.py: -------------------------------------------------------------------------------- 1 | from germanetpy.iliRecord import IliRecord 2 | 3 | LEXID = "lexUnitId" 4 | EWNREL = 'ewnRelation' 5 | PWNWORD = 'pwnWord' 6 | PWN20ID = 'pwn20Id' 7 | PWN30ID = 'pwn30Id' 8 | SOURCE = 'source' 9 | PWN20PARAPHRASE = 'pwn20paraphrase' 10 | 11 | 12 | def create_ili_record(attributes, synonyms) -> IliRecord: 13 | """ 14 | Creates the ili record given the XML attributes. 15 | 16 | :type synonyms: list(String) 17 | :type attributes: xml attributes 18 | :param attributes: The XML attributes that contain the required information about the ili record. 19 | :param synonyms: A list of Strings, containing the synonyms of the ili record. 
20 | :return: The ili record object 21 | """ 22 | lex_id = attributes[LEXID] 23 | ewnrelation = attributes[EWNREL] 24 | pwnword = attributes[PWNWORD] 25 | pwn20Id = attributes[PWN20ID] 26 | pwn30Id = attributes[PWN30ID] 27 | source = attributes[SOURCE] 28 | pwn20paraphrase = attributes.get(PWN20PARAPHRASE) 29 | ili = IliRecord(lexunit_id=lex_id, ewnRelation=ewnrelation, pwnWord=pwnword, pwn20Id=pwn20Id, 30 | pwn30Id=pwn30Id, source=source, pwn20synonyms=synonyms, pwn20paraphrase=pwn20paraphrase) 31 | 32 | return ili 33 | 34 | 35 | def load_ili(germanet, tree): 36 | """ 37 | This method creates the ili record objects given a datafile and adds them to the GermaNet object and the 38 | corresponding lexical unit. 39 | 40 | :type tree: Element Tree 41 | :type germanet: Germanet 42 | :param germanet: The GermaNet object 43 | :param tree: The XML tree containing the data about the ili records 44 | """ 45 | root = tree.getroot() 46 | for child in root: 47 | attributes = child.attrib 48 | synonyms = [] 49 | for subchild in child: 50 | for subsubchild in subchild: 51 | synonyms.append(subsubchild.text) 52 | ili = create_ili_record(attributes, synonyms) 53 | lexunit = germanet.lexunits[ili.lexunit_id] 54 | lexunit.ili_records.append(ili) 55 | germanet.ili_records.append(ili) 56 | -------------------------------------------------------------------------------- /germanetpy/iliRecord.py: -------------------------------------------------------------------------------- 1 | class IliRecord: 2 | 3 | def __init__(self, lexunit_id: str, ewnRelation: str, pwnWord: str, pwn20Id: str, pwn30Id: str, source: str, 4 | pwn20synonyms: list, pwn20paraphrase: str = None): 5 | """ 6 | This class holds an ili record object. 
These store a mapping between a lexical unit and the correponding 7 | English lexical unit (from WordNet) 8 | 9 | :param lexunit_id: The lexical unit id this ili record belongs to 10 | :param ewnRelation: WordNet relation 11 | :param pwnWord: word (orth form) in WordNet 12 | :param pwn20Id: WordNet ID (WordNet 2.0) 13 | :param pwn30Id: WordNet ID (WordNet 3.0) 14 | :param source: source of this ili record 15 | :param pwn20synonyms: English synonyms 16 | :param pwn20paraphrase: English sense definition 17 | """ 18 | self._lexunit_id = lexunit_id 19 | self._relation = ewnRelation 20 | self._english_equivalent = pwnWord 21 | self._pwn20id = pwn20Id 22 | self._pwn30id = pwn30Id 23 | self._pwn20synonyms = pwn20synonyms 24 | self._pwn20paraphrase = pwn20paraphrase 25 | self._source = source 26 | 27 | def __repr__(self): 28 | return f'IliRecord(LexUnit ID={self.lexunit_id}, relation={self.relation}, english_equivalent={self.english_equivalent})' 29 | 30 | @property 31 | def lexunit_id(self): 32 | return self._lexunit_id 33 | 34 | @property 35 | def relation(self): 36 | return self._relation 37 | 38 | @property 39 | def english_equivalent(self): 40 | return self._english_equivalent 41 | 42 | @property 43 | def pwn20id(self): 44 | return self._pwn20id 45 | 46 | @property 47 | def pwn30id(self): 48 | return self._pwn30id 49 | 50 | @property 51 | def pwn20synonyms(self): 52 | return self._pwn20synonyms 53 | 54 | @property 55 | def pwn20paraphrase(self): 56 | return self._pwn20paraphrase 57 | 58 | @property 59 | def source(self): 60 | return self._source 61 | -------------------------------------------------------------------------------- /germanetpy/relationLoader.py: -------------------------------------------------------------------------------- 1 | from germanetpy.lexunit import LexRel 2 | from germanetpy.synset import ConRel 3 | 4 | NAME = 'name' 5 | FROM_NODE = 'from' 6 | TO_NODE = 'to' 7 | DIRECTION = 'dir' 8 | INVERSE = 'inv' 9 | LEX_REL = 'lex_rel' 10 | CON_REL = 
'con_rel' 11 | 12 | 13 | def get_relation_attributes(attributes) -> (str, str, str, str): 14 | """ 15 | 16 | :type attributes: XML attribute 17 | :param attributes: The XML attributes the information can be extracted from 18 | :return: The information as Strings or None if the information is not present. The name of the relation,the id of the start node, the id of the end node, the type of direction and if the relation is inverse 19 | """ 20 | relation = attributes[NAME] 21 | from_node = attributes[FROM_NODE] 22 | to_node = attributes[TO_NODE] 23 | direction = attributes[DIRECTION] 24 | inverse_relation = attributes.get(INVERSE) 25 | return relation, from_node, to_node, direction, inverse_relation 26 | 27 | 28 | def load_relations(germanet, tree): 29 | """ 30 | Loads the information about the related synsets ans lexunits from the data and adds the edges between the objects. 31 | 32 | :type tree: Element Tree 33 | :type germanet: Germanet 34 | :param germanet: The Germanet object that is populated with Synsets and Lexunits 35 | :param tree: The XML tree of the relation data. 
36 | """ 37 | root = tree.getroot() 38 | for child in root: 39 | tag = child.tag 40 | if tag == LEX_REL: 41 | relation, from_node, to_node, direction, inverse_relation = get_relation_attributes(child.attrib) 42 | lexunit1 = germanet.lexunits[from_node] 43 | lexunit2 = germanet.lexunits[to_node] 44 | lexunit1.relations[LexRel[relation]].add(lexunit2) 45 | lexunit2.incoming_relations[LexRel[relation]].add(lexunit1) 46 | if inverse_relation is not None: 47 | lexunit2.relations[LexRel[inverse_relation]].add(lexunit1) 48 | 49 | elif tag == CON_REL: 50 | relation, from_node, to_node, dir, inverse_relation = get_relation_attributes(child.attrib) 51 | synset1 = germanet.synsets[from_node] 52 | synset2 = germanet.synsets[to_node] 53 | synset1.relations[ConRel[relation]].add(synset2) 54 | synset2.incoming_relations[ConRel[relation]].add(synset1) 55 | if inverse_relation is not None: 56 | synset2.relations[ConRel[inverse_relation]].add(synset1) 57 | -------------------------------------------------------------------------------- /docs/_build/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 |
5 | 6 | 7 |52 | Searching for multiple words only shows matches that contain 53 | all words. 54 |
55 | 56 | 57 | 62 | 63 | 64 | 65 | 66 | 67 |
111 |
112 |
113 |
114 |
115 |
116 |
--------------------------------------------------------------------------------
/docs/germanetpy.rst:
--------------------------------------------------------------------------------
1 | germanetpy package
2 | ==================
3 |
4 | Submodules
5 | ----------
6 |
7 | germanetpy.compoundInfo module
8 | ------------------------------
9 |
10 | .. automodule:: germanetpy.compoundInfo
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | germanetpy.filterconfig module
16 | ------------------------------
17 |
18 | .. automodule:: germanetpy.filterconfig
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | germanetpy.frames module
24 | ------------------------
25 |
26 | .. automodule:: germanetpy.frames
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | germanetpy.germanet module
32 | --------------------------
33 |
34 | .. automodule:: germanetpy.germanet
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | germanetpy.icbased\_similarity module
40 | -------------------------------------
41 |
42 | .. automodule:: germanetpy.icbased_similarity
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | germanetpy.iliLoader module
48 | ---------------------------
49 |
50 | .. automodule:: germanetpy.iliLoader
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | germanetpy.iliRecord module
56 | ---------------------------
57 |
58 | .. automodule:: germanetpy.iliRecord
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
63 | germanetpy.lexunit module
64 | -------------------------
65 |
66 | .. automodule:: germanetpy.lexunit
67 | :members:
68 | :undoc-members:
69 | :show-inheritance:
70 |
71 | germanetpy.longest\_shortest\_path module
72 | -----------------------------------------
73 |
74 | .. automodule:: germanetpy.longest_shortest_path
75 | :members:
76 | :undoc-members:
77 | :show-inheritance:
78 |
79 | germanetpy.path\_based\_relatedness\_measures module
80 | ----------------------------------------------------
81 |
82 | .. automodule:: germanetpy.path_based_relatedness_measures
83 | :members:
84 | :undoc-members:
85 | :show-inheritance:
86 |
87 | germanetpy.relationLoader module
88 | --------------------------------
89 |
90 | .. automodule:: germanetpy.relationLoader
91 | :members:
92 | :undoc-members:
93 | :show-inheritance:
94 |
95 | germanetpy.semrel\_measures module
96 | ----------------------------------
97 |
98 | .. automodule:: germanetpy.semrel_measures
99 | :members:
100 | :undoc-members:
101 | :show-inheritance:
102 |
103 | germanetpy.synset module
104 | ------------------------
105 |
106 | .. automodule:: germanetpy.synset
107 | :members:
108 | :undoc-members:
109 | :show-inheritance:
110 |
111 | germanetpy.synsetLoader module
112 | ------------------------------
113 |
114 | .. automodule:: germanetpy.synsetLoader
115 | :members:
116 | :undoc-members:
117 | :show-inheritance:
118 |
119 | germanetpy.utils module
120 | -----------------------
121 |
122 | .. automodule:: germanetpy.utils
123 | :members:
124 | :undoc-members:
125 | :show-inheritance:
126 |
127 | germanetpy.wiktionaryLoader module
128 | ----------------------------------
129 |
130 | .. automodule:: germanetpy.wiktionaryLoader
131 | :members:
132 | :undoc-members:
133 | :show-inheritance:
134 |
135 | germanetpy.wiktionaryparaphrase module
136 | --------------------------------------
137 |
138 | .. automodule:: germanetpy.wiktionaryparaphrase
139 | :members:
140 | :undoc-members:
141 | :show-inheritance:
142 |
143 |
144 | Module contents
145 | ---------------
146 |
147 | .. automodule:: germanetpy
148 | :members:
149 | :undoc-members:
150 | :show-inheritance:
151 |
--------------------------------------------------------------------------------
/tests/test_frames.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import sys
3 | import logging
4 | from lxml import etree as ET
5 | import numpy as np
6 | from germanetpy.germanet import Germanet
7 |
# Module-level fixture: load GermaNet once so every test in this file can
# query the shared `germanet_data` object.
logger = logging.getLogger('logging_test_frames')

# Data directory is <repo_root>/data, resolved relative to this test file.
d = str(Path(__file__).parent.parent / "data")
try:
    germanet_data = Germanet(d)
except ET.ParseError:
    # Bug fix: the original called logger.error(message, ET.ParseError),
    # passing the exception class as a %-format argument for a message with
    # no placeholder — logging then reports a formatting error instead of
    # the message. Use lazy %-style formatting with the one real argument.
    logger.error("Unable to load GermaNet data at %s . Aborting...", d)
    # NOTE(review): exits with status 0 even though loading failed —
    # presumably deliberate so the suite "skips" without data; confirm.
    sys.exit(0)
except IOError:
    logger.error("GermaNet data not found at %s . Aborting...", d)
    sys.exit(0)
21 |
22 |
def test_accusative_complements():
    """Verify that "sehen" appears among the verbs taking an accusative complement."""
    verbs = germanet_data.frames.extract_accusative_complement()
    lexunit = germanet_data.get_lexunit_by_id('l82290')  # "sehen"
    np.testing.assert_equal(lexunit in verbs, True)
28 |
29 |
def test_dative_complements():
    """Verify that "schenken" appears among the verbs taking a dative complement."""
    verbs = germanet_data.frames.extract_dative_complement()
    lexunit = germanet_data.get_lexunit_by_id('l73802')  # "schenken"
    np.testing.assert_equal(lexunit in verbs, True)
35 |
36 |
def test_genitive_complements():
    """Verify that "berauben" appears among the verbs taking a genitive complement."""
    # "gentive" (sic) is the actual method name exposed by germanetpy.frames;
    # the spelling must follow the API.
    verbs = germanet_data.frames.extract_gentive_complement()
    lexunit = germanet_data.get_lexunit_by_id('l74138')  # "berauben"
    np.testing.assert_equal(lexunit in verbs, True)
42 |
43 |
def test_praepositional_complements():
    """Verify that "umfahren" appears among the verbs taking a prepositional complement."""
    verbs = germanet_data.frames.extract_prepositional_complement()
    lexunit = germanet_data.get_lexunit_by_id('l82560')  # "umfahren"
    np.testing.assert_equal(lexunit in verbs, True)
49 |
50 |
def test_adverbial_complements():
    """Verify that "wohnen" and "kommen" appear among the verbs that can form adverbials."""
    adverbial_verbs = germanet_data.frames.extract_adverbials()
    for lexunit_id in ('l73312', 'l73778'):  # "wohnen", "kommen"
        lexunit = germanet_data.get_lexunit_by_id(lexunit_id)
        np.testing.assert_equal(lexunit in adverbial_verbs, True)
58 |
59 |
def test_expletives():
    """Verify that "regnen" appears among the verbs usable with an expletive."""
    verbs = germanet_data.frames.extract_expletives()
    lexunit = germanet_data.get_lexunit_by_id('l82091')  # "regnen"
    np.testing.assert_equal(lexunit in verbs, True)
65 |
66 |
def test_reflexives():
    """Verify that "rächen" appears among the verbs that can be reflexive."""
    verbs = germanet_data.frames.extract_reflexives()
    lexunit = germanet_data.get_lexunit_by_id('l76169')  # "rächen"
    np.testing.assert_equal(lexunit in verbs, True)
72 |
73 |
def test_transitives():
    """Verify that "backen" appears in the list of transitive verbs."""
    verbs = germanet_data.frames.extract_transitives()
    lexunit = germanet_data.get_lexunit_by_id('l84453')  # "backen"
    np.testing.assert_equal(lexunit in verbs, True)
79 |
80 |
def test_intransitives():
    """Verify that "liegen" appears in the list of intransitive verbs."""
    verbs = germanet_data.frames.extract_intransitives()
    lexunit = germanet_data.get_lexunit_by_id('l80908')  # "liegen"
    np.testing.assert_equal(lexunit in verbs, True)
86 |
--------------------------------------------------------------------------------
/docs/_build/html/_sources/germanetpy.rst.txt:
--------------------------------------------------------------------------------
1 | germanetpy package
2 | ==================
3 |
4 | Submodules
5 | ----------
6 |
7 | germanetpy.compoundInfo module
8 | ------------------------------
9 |
10 | .. automodule:: germanetpy.compoundInfo
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | germanetpy.filterconfig module
16 | ------------------------------
17 |
18 | .. automodule:: germanetpy.filterconfig
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | germanetpy.frames module
24 | ------------------------
25 |
26 | .. automodule:: germanetpy.frames
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | germanetpy.germanet module
32 | --------------------------
33 |
34 | .. automodule:: germanetpy.germanet
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | germanetpy.icbased\_similarity module
40 | -------------------------------------
41 |
42 | .. automodule:: germanetpy.icbased_similarity
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | germanetpy.iliLoader module
48 | ---------------------------
49 |
50 | .. automodule:: germanetpy.iliLoader
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | germanetpy.iliRecord module
56 | ---------------------------
57 |
58 | .. automodule:: germanetpy.iliRecord
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
63 | germanetpy.lexunit module
64 | -------------------------
65 |
66 | .. automodule:: germanetpy.lexunit
67 | :members:
68 | :undoc-members:
69 | :show-inheritance:
70 |
71 | germanetpy.longest\_shortest\_path module
72 | -----------------------------------------
73 |
74 | .. automodule:: germanetpy.longest_shortest_path
75 | :members:
76 | :undoc-members:
77 | :show-inheritance:
78 |
79 | germanetpy.path\_based\_relatedness\_measures module
80 | ----------------------------------------------------
81 |
82 | .. automodule:: germanetpy.path_based_relatedness_measures
83 | :members:
84 | :undoc-members:
85 | :show-inheritance:
86 |
87 | germanetpy.relationLoader module
88 | --------------------------------
89 |
90 | .. automodule:: germanetpy.relationLoader
91 | :members:
92 | :undoc-members:
93 | :show-inheritance:
94 |
95 | germanetpy.semrel\_measures module
96 | ----------------------------------
97 |
98 | .. automodule:: germanetpy.semrel_measures
99 | :members:
100 | :undoc-members:
101 | :show-inheritance:
102 |
103 | germanetpy.synset module
104 | ------------------------
105 |
106 | .. automodule:: germanetpy.synset
107 | :members:
108 | :undoc-members:
109 | :show-inheritance:
110 |
111 | germanetpy.synsetLoader module
112 | ------------------------------
113 |
114 | .. automodule:: germanetpy.synsetLoader
115 | :members:
116 | :undoc-members:
117 | :show-inheritance:
118 |
119 | germanetpy.utils module
120 | -----------------------
121 |
122 | .. automodule:: germanetpy.utils
123 | :members:
124 | :undoc-members:
125 | :show-inheritance:
126 |
127 | germanetpy.wiktionaryLoader module
128 | ----------------------------------
129 |
130 | .. automodule:: germanetpy.wiktionaryLoader
131 | :members:
132 | :undoc-members:
133 | :show-inheritance:
134 |
135 | germanetpy.wiktionaryparaphrase module
136 | --------------------------------------
137 |
138 | .. automodule:: germanetpy.wiktionaryparaphrase
139 | :members:
140 | :undoc-members:
141 | :show-inheritance:
142 |
143 |
144 | Module contents
145 | ---------------
146 |
147 | .. automodule:: germanetpy
148 | :members:
149 | :undoc-members:
150 | :show-inheritance:
151 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
99 |
100 |
101 |
102 |
103 |
104 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Make the package root (one level above docs/) importable so autodoc can find germanetpy.
sys.path.insert(0, os.path.abspath('..'))


# -- Project information -----------------------------------------------------

project = 'germanetpy'
copyright = '2025, Ben Campbell'
author = 'Ben Campbell'

# The full version, including alpha/beta/rc tags
release = '0.2.0'


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
    'sphinx.ext.intersphinx',
    ## Include autosummary
    'sphinx.ext.autosummary',
]

## Include Python objects as they appear in source files
## Default: alphabetically ('alphabetical')
autodoc_member_order = 'bysource'
## Default flags used by autodoc directives
# Modern defaults (replaces deprecated autodoc_default_flags)
autodoc_default_options = {
    'members': True,
    'undoc-members': True,  # optional
    'show-inheritance': True,
    'inherited-members': True,  # optional
}
## Generate autodoc stubs with summaries from code
autosummary_generate = True
autosummary_imported_members = True

# If imports fail (C wheels, optional deps, or import-time I/O), mock them:
autodoc_mock_imports = [
    'numpy', 'lxml', 'Levenshtein', 'tqdm', 'fastenum'
]

source_suffix = '.rst'
# master_doc = 'index'
root_doc = 'index'  # keeps 'index' as the entry point; 'master_doc' is deprecated
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

pygments_style = 'sphinx'

# -- Extension configuration -------------------------------------------------

# Alabaster theme options: link the docs to the project's GitHub repository.
html_theme_options = {
    'github_button': True,  ## Use v2 button
    'github_user': 'bencampbell30',
    'github_repo': 'germanetpy',
    'github_banner': True,
}

html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'searchbox.html',
    ]
}

html_show_sourcelink = True

# Cross-reference targets in the Python standard library documentation.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
}
--------------------------------------------------------------------------------
/docs/_build/html/_modules/germanetpy/semrel_measures.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
33 | import fastenum
34 |
35 |
36 |
37 | [docs]
38 | class SemRelMeasure(fastenum.Enum):
39 | """This Enum represents the semantic relatedness measures"""
40 | SimplePath = "SimplePath"
41 | LeacockAndChodorow = "LeacockAndChodorow"
42 | WuAndPalmer = "WuAndPalmer"
43 | Resnik = "Resnik"
44 | Lin = "Lin"
45 | JiangAndConrath = "JiangAndConrath"
46 |
47 |
103 |
104 |
105 |
106 |
107 |
108 |
--------------------------------------------------------------------------------
/docs/_build/html/_modules/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
106 |
107 |
108 |
109 |
110 |
111 |
--------------------------------------------------------------------------------
/docs/_build/html/_static/pygments.css:
--------------------------------------------------------------------------------
1 | pre { line-height: 125%; }
2 | td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
3 | span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
4 | td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
6 | .highlight .hll { background-color: #ffffcc }
7 | .highlight { background: #eeffcc; }
8 | .highlight .c { color: #408090; font-style: italic } /* Comment */
9 | .highlight .err { border: 1px solid #FF0000 } /* Error */
10 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */
11 | .highlight .o { color: #666666 } /* Operator */
12 | .highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */
13 | .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
14 | .highlight .cp { color: #007020 } /* Comment.Preproc */
15 | .highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */
16 | .highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
17 | .highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
18 | .highlight .gd { color: #A00000 } /* Generic.Deleted */
19 | .highlight .ge { font-style: italic } /* Generic.Emph */
20 | .highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */
21 | .highlight .gr { color: #FF0000 } /* Generic.Error */
22 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
23 | .highlight .gi { color: #00A000 } /* Generic.Inserted */
24 | .highlight .go { color: #333333 } /* Generic.Output */
25 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
26 | .highlight .gs { font-weight: bold } /* Generic.Strong */
27 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
28 | .highlight .gt { color: #0044DD } /* Generic.Traceback */
29 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
30 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
31 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
32 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */
33 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
34 | .highlight .kt { color: #902000 } /* Keyword.Type */
35 | .highlight .m { color: #208050 } /* Literal.Number */
36 | .highlight .s { color: #4070a0 } /* Literal.String */
37 | .highlight .na { color: #4070a0 } /* Name.Attribute */
38 | .highlight .nb { color: #007020 } /* Name.Builtin */
39 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
40 | .highlight .no { color: #60add5 } /* Name.Constant */
41 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
42 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
43 | .highlight .ne { color: #007020 } /* Name.Exception */
44 | .highlight .nf { color: #06287e } /* Name.Function */
45 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
46 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
47 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
48 | .highlight .nv { color: #bb60d5 } /* Name.Variable */
49 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
50 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */
51 | .highlight .mb { color: #208050 } /* Literal.Number.Bin */
52 | .highlight .mf { color: #208050 } /* Literal.Number.Float */
53 | .highlight .mh { color: #208050 } /* Literal.Number.Hex */
54 | .highlight .mi { color: #208050 } /* Literal.Number.Integer */
55 | .highlight .mo { color: #208050 } /* Literal.Number.Oct */
56 | .highlight .sa { color: #4070a0 } /* Literal.String.Affix */
57 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
58 | .highlight .sc { color: #4070a0 } /* Literal.String.Char */
59 | .highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */
60 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
61 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */
62 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
63 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
64 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
65 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */
66 | .highlight .sr { color: #235388 } /* Literal.String.Regex */
67 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */
68 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */
69 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
70 | .highlight .fm { color: #06287e } /* Name.Function.Magic */
71 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
72 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
73 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
74 | .highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */
75 | .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
--------------------------------------------------------------------------------
/docs/_build/html/_static/doctools.js:
--------------------------------------------------------------------------------
1 | /*
2 | * doctools.js
3 | * ~~~~~~~~~~~
4 | *
5 | * Base JavaScript utilities for all Sphinx HTML documentation.
6 | *
7 | * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 | "use strict";
12 |
13 | const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
14 | "TEXTAREA",
15 | "INPUT",
16 | "SELECT",
17 | "BUTTON",
18 | ]);
19 |
20 | const _ready = (callback) => {
21 | if (document.readyState !== "loading") {
22 | callback();
23 | } else {
24 | document.addEventListener("DOMContentLoaded", callback);
25 | }
26 | };
27 |
28 | /**
29 | * Small JavaScript module for the documentation.
30 | */
31 | const Documentation = {
32 | init: () => {
33 | Documentation.initDomainIndexTable();
34 | Documentation.initOnKeyListeners();
35 | },
36 |
37 | /**
38 | * i18n support
39 | */
40 | TRANSLATIONS: {},
41 | PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
42 | LOCALE: "unknown",
43 |
44 | // gettext and ngettext don't access this so that the functions
45 | // can safely bound to a different name (_ = Documentation.gettext)
46 | gettext: (string) => {
47 | const translated = Documentation.TRANSLATIONS[string];
48 | switch (typeof translated) {
49 | case "undefined":
50 | return string; // no translation
51 | case "string":
52 | return translated; // translation exists
53 | default:
54 | return translated[0]; // (singular, plural) translation tuple exists
55 | }
56 | },
57 |
58 | ngettext: (singular, plural, n) => {
59 | const translated = Documentation.TRANSLATIONS[singular];
60 | if (typeof translated !== "undefined")
61 | return translated[Documentation.PLURAL_EXPR(n)];
62 | return n === 1 ? singular : plural;
63 | },
64 |
65 | addTranslations: (catalog) => {
66 | Object.assign(Documentation.TRANSLATIONS, catalog.messages);
67 | Documentation.PLURAL_EXPR = new Function(
68 | "n",
69 | `return (${catalog.plural_expr})`
70 | );
71 | Documentation.LOCALE = catalog.locale;
72 | },
73 |
74 | /**
75 | * helper function to focus on search bar
76 | */
77 | focusSearchBar: () => {
78 | document.querySelectorAll("input[name=q]")[0]?.focus();
79 | },
80 |
81 | /**
82 | * Initialise the domain index toggle buttons
83 | */
84 | initDomainIndexTable: () => {
85 | const toggler = (el) => {
86 | const idNumber = el.id.substr(7);
87 | const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
88 | if (el.src.substr(-9) === "minus.png") {
89 | el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
90 | toggledRows.forEach((el) => (el.style.display = "none"));
91 | } else {
92 | el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
93 | toggledRows.forEach((el) => (el.style.display = ""));
94 | }
95 | };
96 |
97 | const togglerElements = document.querySelectorAll("img.toggler");
98 | togglerElements.forEach((el) =>
99 | el.addEventListener("click", (event) => toggler(event.currentTarget))
100 | );
101 | togglerElements.forEach((el) => (el.style.display = ""));
102 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
103 | },
104 |
105 | initOnKeyListeners: () => {
106 | // only install a listener if it is really needed
107 | if (
108 | !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
109 | !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
110 | )
111 | return;
112 |
113 | document.addEventListener("keydown", (event) => {
114 | // bail for input elements
115 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
116 | // bail with special keys
117 | if (event.altKey || event.ctrlKey || event.metaKey) return;
118 |
119 | if (!event.shiftKey) {
120 | switch (event.key) {
121 | case "ArrowLeft":
122 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
123 |
124 | const prevLink = document.querySelector('link[rel="prev"]');
125 | if (prevLink && prevLink.href) {
126 | window.location.href = prevLink.href;
127 | event.preventDefault();
128 | }
129 | break;
130 | case "ArrowRight":
131 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
132 |
133 | const nextLink = document.querySelector('link[rel="next"]');
134 | if (nextLink && nextLink.href) {
135 | window.location.href = nextLink.href;
136 | event.preventDefault();
137 | }
138 | break;
139 | }
140 | }
141 |
142 | // some keyboard layouts may need Shift to get /
143 | switch (event.key) {
144 | case "/":
145 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
146 | Documentation.focusSearchBar();
147 | event.preventDefault();
148 | }
149 | });
150 | },
151 | };
152 |
153 | // quick alias for translations
154 | const _ = Documentation.gettext;
155 |
156 | _ready(Documentation.init);
157 |
--------------------------------------------------------------------------------
/germanetpy/longest_shortest_path.py:
--------------------------------------------------------------------------------
def get_overall_longest_shortest_distance(germanet, category) -> (list, int):
    """
    Iterate through the synsets of a given word category. For each synset, extract all possible hypernyms and
    compute the shortest possible distance to each hypernym. From these per-hypernym distances, keep the longest
    possible shortest distance per synset.

    :type category: WordCategory
    :type germanet: Germanet
    :param germanet: the germanet graph
    :param category: the wordcategory
    :return: a list of (synset, longest shortest distance) pairs, sorted by distance in descending order, and the
        overall longest shortest distance. Returns ([], 0) if the category contains no synsets.
    """
    synsets = germanet.get_synsets_by_wordcategory(category)
    longest_shortest_distances = []
    for synset in synsets:
        distances = synset.get_distances_hypernym_dic()
        longest_shortest_dist = max(distances.values())
        longest_shortest_distances.append(longest_shortest_dist)

    # default=0 prevents a ValueError when the word category has no synsets
    overall_maxlen = max(longest_shortest_distances, default=0)
    dist_dic = dict(zip(synsets, longest_shortest_distances))
    # Note: despite the historical variable name, this is a sorted list of (synset, distance) tuples, not a dict.
    sorted_dist_dic = sorted(dist_dic.items(), key=lambda kv: kv[1], reverse=True)
    return sorted_dist_dic, overall_maxlen
24 |
25 |
def get_greatest_depth(germanet, category) -> int:
    """
    Iterate through the synsets of a given word category and return the greatest depth that has been seen.

    The depth of a synset is defined by the shortest path length between the synset and the root node.

    :type category: WordCategory
    :type germanet: Germanet
    :param germanet: the germanet graph
    :param category: the wordcategory
    :return: the greatest depth for the given word category; 0 if the category contains no synsets
    """
    synsets = germanet.get_synsets_by_wordcategory(category)
    # max() with default=0 replaces the manual comparison loop and tolerates an empty category
    return max((synset.min_depth() for synset in synsets), default=0)
44 |
45 |
def get_longest_possible_shortest_distance(germanet, wordcategory):
    """
    Compute the longest possible shortest distance between any two synsets of a word category.

    The naive computation is quadratic in the number of synsets, so the search prunes pairs:
    candidates are visited in descending order of each synset's longest shortest distance to a
    hypernym (from get_overall_longest_shortest_distance), and a pair is skipped whenever the
    sum of the two per-synset values cannot exceed the best actual path distance found so far.
    NOTE(review): the pruning presumably relies on any shortest path running through a common
    hypernym, so that the sum of the two per-synset hypernym distances upper-bounds the pair
    distance — confirm before changing the skip conditions or the iteration order.

    :rtype: (int, int, tuple(Synset, Synset)
    :type wordcategory: WordCategory
    :type germanet: Germanet
    :param wordcategory: the wordcategory for which this maxlen should be computed
    :param germanet: the germanet graph
    :return: the longest possible shortest distance between two synsets of a specified wordcategory, the maximum depth
        of any synset (length to the root) and a tuple with two synsets that have the longest shortest distance
    """
    sorted_dist_dic, overall_maxlen = get_overall_longest_shortest_distance(germanet=germanet, category=wordcategory)
    longest_possible_shortest_distance = 0
    # Start from (root, root) so the returned pair is well-formed even if no pair beats distance 0.
    synset_pair_longest_distance = (germanet.root, germanet.root)

    for synset, longest_shortest_dist in sorted_dist_dic:
        # Upper bound: nothing paired with this synset can beat the current best -> skip the inner loop.
        if longest_shortest_dist + overall_maxlen <= longest_possible_shortest_distance:
            continue
        for current_synset, current_shortest_dist in sorted_dist_dic:
            # Per-pair bound: only compute the expensive actual path distance when the pair could still win.
            if current_shortest_dist + longest_shortest_dist <= longest_possible_shortest_distance:
                continue
            pathdist = current_synset.shortest_path_distance(synset)
            if pathdist > longest_possible_shortest_distance:
                longest_possible_shortest_distance = pathdist
                synset_pair_longest_distance = (synset, current_synset)
    return longest_possible_shortest_distance, overall_maxlen, synset_pair_longest_distance
83 |
84 |
def print_longest_shortest_distances(germanet, word_category):
    """Computes and prints the longest shortest distances for the given word category.

    :type word_category: WordCategory
    :type germanet: Germanet
    :param germanet: the germanet graph
    :param word_category: the word category to report on
    """
    longest_possible_shortest_distance, overall_maxlen, synset_pair_longest_distance = \
        get_longest_possible_shortest_distance(
            germanet=germanet, wordcategory=word_category)
    # Fixed the report template: it used to emit a stray ", " right after the
    # "maximum depth" line break, garbling the output.
    print(
        "retrieved the following information {}: \n"
        "longest shortest distance : {:5d} \n"
        "maximum depth : {:5d} \n"
        "between the following synsets {}".format(
            str(word_category), longest_possible_shortest_distance,
            overall_maxlen, synset_pair_longest_distance))
101 |
102 |
def print_maximum_depths(germanet, word_category):
    """Computes and prints the maximum depth for the given word_category.

    :type word_category: WordCategory
    :type germanet: Germanet
    :param germanet: the germanet graph
    :param word_category: the word category to report on
    """
    # Fixed the label: the value printed is the greatest depth, not the
    # longest shortest distance (the old message was copy-pasted from
    # print_longest_shortest_distances).
    print(
        "retrieved the following information {}: \n"
        "maximum depth : {:5d}".format(
            str(word_category), get_greatest_depth(germanet=germanet, category=word_category)))
113 |
--------------------------------------------------------------------------------
/docs/_build/html/_static/language_data.js:
--------------------------------------------------------------------------------
1 | /*
2 | * language_data.js
3 | * ~~~~~~~~~~~~~~~~
4 | *
5 | * This script contains the language-specific data used by searchtools.js,
6 | * namely the list of stopwords, stemmer, scorer and splitter.
7 | *
8 | * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
9 | * :license: BSD, see LICENSE for details.
10 | *
11 | */
12 |
13 | var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"];
14 |
15 |
16 | /* Non-minified version is copied as a separate JS file, if available */
17 |
18 | /**
19 | * Porter Stemmer
20 | */
21 | var Stemmer = function() {
22 |
23 | var step2list = {
24 | ational: 'ate',
25 | tional: 'tion',
26 | enci: 'ence',
27 | anci: 'ance',
28 | izer: 'ize',
29 | bli: 'ble',
30 | alli: 'al',
31 | entli: 'ent',
32 | eli: 'e',
33 | ousli: 'ous',
34 | ization: 'ize',
35 | ation: 'ate',
36 | ator: 'ate',
37 | alism: 'al',
38 | iveness: 'ive',
39 | fulness: 'ful',
40 | ousness: 'ous',
41 | aliti: 'al',
42 | iviti: 'ive',
43 | biliti: 'ble',
44 | logi: 'log'
45 | };
46 |
47 | var step3list = {
48 | icate: 'ic',
49 | ative: '',
50 | alize: 'al',
51 | iciti: 'ic',
52 | ical: 'ic',
53 | ful: '',
54 | ness: ''
55 | };
56 |
57 | var c = "[^aeiou]"; // consonant
58 | var v = "[aeiouy]"; // vowel
59 | var C = c + "[^aeiouy]*"; // consonant sequence
60 | var V = v + "[aeiou]*"; // vowel sequence
61 |
62 | var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
63 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
64 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
65 | var s_v = "^(" + C + ")?" + v; // vowel in stem
66 |
67 | this.stemWord = function (w) {
68 | var stem;
69 | var suffix;
70 | var firstch;
71 | var origword = w;
72 |
73 | if (w.length < 3)
74 | return w;
75 |
76 | var re;
77 | var re2;
78 | var re3;
79 | var re4;
80 |
81 | firstch = w.substr(0,1);
82 | if (firstch == "y")
83 | w = firstch.toUpperCase() + w.substr(1);
84 |
85 | // Step 1a
86 | re = /^(.+?)(ss|i)es$/;
87 | re2 = /^(.+?)([^s])s$/;
88 |
89 | if (re.test(w))
90 | w = w.replace(re,"$1$2");
91 | else if (re2.test(w))
92 | w = w.replace(re2,"$1$2");
93 |
94 | // Step 1b
95 | re = /^(.+?)eed$/;
96 | re2 = /^(.+?)(ed|ing)$/;
97 | if (re.test(w)) {
98 | var fp = re.exec(w);
99 | re = new RegExp(mgr0);
100 | if (re.test(fp[1])) {
101 | re = /.$/;
102 | w = w.replace(re,"");
103 | }
104 | }
105 | else if (re2.test(w)) {
106 | var fp = re2.exec(w);
107 | stem = fp[1];
108 | re2 = new RegExp(s_v);
109 | if (re2.test(stem)) {
110 | w = stem;
111 | re2 = /(at|bl|iz)$/;
112 | re3 = new RegExp("([^aeiouylsz])\\1$");
113 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
114 | if (re2.test(w))
115 | w = w + "e";
116 | else if (re3.test(w)) {
117 | re = /.$/;
118 | w = w.replace(re,"");
119 | }
120 | else if (re4.test(w))
121 | w = w + "e";
122 | }
123 | }
124 |
125 | // Step 1c
126 | re = /^(.+?)y$/;
127 | if (re.test(w)) {
128 | var fp = re.exec(w);
129 | stem = fp[1];
130 | re = new RegExp(s_v);
131 | if (re.test(stem))
132 | w = stem + "i";
133 | }
134 |
135 | // Step 2
136 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
137 | if (re.test(w)) {
138 | var fp = re.exec(w);
139 | stem = fp[1];
140 | suffix = fp[2];
141 | re = new RegExp(mgr0);
142 | if (re.test(stem))
143 | w = stem + step2list[suffix];
144 | }
145 |
146 | // Step 3
147 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
148 | if (re.test(w)) {
149 | var fp = re.exec(w);
150 | stem = fp[1];
151 | suffix = fp[2];
152 | re = new RegExp(mgr0);
153 | if (re.test(stem))
154 | w = stem + step3list[suffix];
155 | }
156 |
157 | // Step 4
158 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
159 | re2 = /^(.+?)(s|t)(ion)$/;
160 | if (re.test(w)) {
161 | var fp = re.exec(w);
162 | stem = fp[1];
163 | re = new RegExp(mgr1);
164 | if (re.test(stem))
165 | w = stem;
166 | }
167 | else if (re2.test(w)) {
168 | var fp = re2.exec(w);
169 | stem = fp[1] + fp[2];
170 | re2 = new RegExp(mgr1);
171 | if (re2.test(stem))
172 | w = stem;
173 | }
174 |
175 | // Step 5
176 | re = /^(.+?)e$/;
177 | if (re.test(w)) {
178 | var fp = re.exec(w);
179 | stem = fp[1];
180 | re = new RegExp(mgr1);
181 | re2 = new RegExp(meq1);
182 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
183 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
184 | w = stem;
185 | }
186 | re = /ll$/;
187 | re2 = new RegExp(mgr1);
188 | if (re.test(w) && re2.test(w)) {
189 | re = /.$/;
190 | w = w.replace(re,"");
191 | }
192 |
193 | // and turn initial Y back to y
194 | if (firstch == "y")
195 | w = firstch.toLowerCase() + w.substr(1);
196 | return w;
197 | }
198 | }
199 |
200 |
--------------------------------------------------------------------------------
/docs/_build/html/_static/sphinx_highlight.js:
--------------------------------------------------------------------------------
1 | /* Highlighting utilities for Sphinx HTML documentation. */
2 | "use strict";
3 |
4 | const SPHINX_HIGHLIGHT_ENABLED = true
5 |
6 | /**
7 | * highlight a given string on a node by wrapping it in
8 | * span elements with the given class name.
9 | */
10 | const _highlight = (node, addItems, text, className) => {
11 | if (node.nodeType === Node.TEXT_NODE) {
12 | const val = node.nodeValue;
13 | const parent = node.parentNode;
14 | const pos = val.toLowerCase().indexOf(text);
15 | if (
16 | pos >= 0 &&
17 | !parent.classList.contains(className) &&
18 | !parent.classList.contains("nohighlight")
19 | ) {
20 | let span;
21 |
22 | const closestNode = parent.closest("body, svg, foreignObject");
23 | const isInSVG = closestNode && closestNode.matches("svg");
24 | if (isInSVG) {
25 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
26 | } else {
27 | span = document.createElement("span");
28 | span.classList.add(className);
29 | }
30 |
31 | span.appendChild(document.createTextNode(val.substr(pos, text.length)));
32 | const rest = document.createTextNode(val.substr(pos + text.length));
33 | parent.insertBefore(
34 | span,
35 | parent.insertBefore(
36 | rest,
37 | node.nextSibling
38 | )
39 | );
40 | node.nodeValue = val.substr(0, pos);
41 | /* There may be more occurrences of search term in this node. So call this
42 | * function recursively on the remaining fragment.
43 | */
44 | _highlight(rest, addItems, text, className);
45 |
46 | if (isInSVG) {
47 | const rect = document.createElementNS(
48 | "http://www.w3.org/2000/svg",
49 | "rect"
50 | );
51 | const bbox = parent.getBBox();
52 | rect.x.baseVal.value = bbox.x;
53 | rect.y.baseVal.value = bbox.y;
54 | rect.width.baseVal.value = bbox.width;
55 | rect.height.baseVal.value = bbox.height;
56 | rect.setAttribute("class", className);
57 | addItems.push({ parent: parent, target: rect });
58 | }
59 | }
60 | } else if (node.matches && !node.matches("button, select, textarea")) {
61 | node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
62 | }
63 | };
64 | const _highlightText = (thisNode, text, className) => {
65 | let addItems = [];
66 | _highlight(thisNode, addItems, text, className);
67 | addItems.forEach((obj) =>
68 | obj.parent.insertAdjacentElement("beforebegin", obj.target)
69 | );
70 | };
71 |
72 | /**
73 | * Small JavaScript module for the documentation.
74 | */
75 | const SphinxHighlight = {
76 |
77 | /**
78 | * highlight the search words provided in localstorage in the text
79 | */
80 | highlightSearchWords: () => {
81 | if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
82 |
83 | // get and clear terms from localstorage
84 | const url = new URL(window.location);
85 | const highlight =
86 | localStorage.getItem("sphinx_highlight_terms")
87 | || url.searchParams.get("highlight")
88 | || "";
89 | localStorage.removeItem("sphinx_highlight_terms")
90 | url.searchParams.delete("highlight");
91 | window.history.replaceState({}, "", url);
92 |
93 | // get individual terms from highlight string
94 | const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
95 | if (terms.length === 0) return; // nothing to do
96 |
97 | // There should never be more than one element matching "div.body"
98 | const divBody = document.querySelectorAll("div.body");
99 | const body = divBody.length ? divBody[0] : document.querySelector("body");
100 | window.setTimeout(() => {
101 | terms.forEach((term) => _highlightText(body, term, "highlighted"));
102 | }, 10);
103 |
104 | const searchBox = document.getElementById("searchbox");
105 | if (searchBox === null) return;
106 | searchBox.appendChild(
107 | document
108 | .createRange()
109 | .createContextualFragment(
110 | '' + 111 | '' + 112 | _("Hide Search Matches") + 113 | "
" 114 | ) 115 | ); 116 | }, 117 | 118 | /** 119 | * helper function to hide the search marks again 120 | */ 121 | hideSearchWords: () => { 122 | document 123 | .querySelectorAll("#searchbox .highlight-link") 124 | .forEach((el) => el.remove()); 125 | document 126 | .querySelectorAll("span.highlighted") 127 | .forEach((el) => el.classList.remove("highlighted")); 128 | localStorage.removeItem("sphinx_highlight_terms") 129 | }, 130 | 131 | initEscapeListener: () => { 132 | // only install a listener if it is really needed 133 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; 134 | 135 | document.addEventListener("keydown", (event) => { 136 | // bail for input elements 137 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 138 | // bail with special keys 139 | if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; 140 | if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { 141 | SphinxHighlight.hideSearchWords(); 142 | event.preventDefault(); 143 | } 144 | }); 145 | }, 146 | }; 147 | 148 | _ready(() => { 149 | /* Do not call highlightSearchWords() when we are on the search page. 150 | * It will highlight words from the *previous* search query. 151 | */ 152 | if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); 153 | SphinxHighlight.initEscapeListener(); 154 | }); 155 | -------------------------------------------------------------------------------- /docs/_build/html/_autosummary/germanetpy.frames.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 |Classes
38 |
|
45 | 46 | |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.iliRecord.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | 46 | |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.semrel_measures.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | This Enum represents the semantic relatedness measures |
46 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.icbased_similarity.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | The IC-based measures are computed based on relative frequencies of words in a large corpus. |
46 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.filterconfig.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | This class is a configuration object that helps to filter GermaNet's lexical units and Synsets to extract the ones with certain interesting properties. |
46 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/germanetpy/frames.py:
--------------------------------------------------------------------------------
1 | from functools import reduce
2 |
3 |
class Frames:
    # Frame labels used in GermaNet subcategorisation patterns.
    EXPLETIVE = 'NE'
    SUBJECT = 'NN'
    ACCOBJ = 'AN'
    DATOBJ = 'DN'
    GENOBJ = 'GN'
    PREPOBJ = 'PP'
    LOC = 'BL'
    DIR = 'BD'
    TEMP = 'BT'
    MAN = 'BM'
    INST = 'BS'
    CAUSE = 'BC'
    ROLE = 'BR'
    COM = 'BO'
    # Dative ('DR') and accusative ('AR') reflexive frame labels.
    reflexives = ['DR', 'AR']

    def __init__(self, frames2lexunits: dict):
        """
        This class holds functionality to extract verbs with specific frame types. These subcategorisation patterns
        can help to disambiguate verbs in specific contexts and how many arguments a verb can take.

        :param frames2lexunits: A dictionary that stores the frames as keys and the corresponding lexunits as values.
        """

        self._frames2verbs = frames2lexunits

    def _union_of_complements(self, *complements: str) -> set:
        """
        Helper that unions the verb sets extracted for several complement labels.

        :param complements: one or more complement labels (class constants of this class)
        :return: A set of lexical units whose frames contain at least one of the given complements.
        """
        return set().union(*(self.extract_specific_complements(c) for c in complements))

    def extract_expletives(self) -> set:
        """
        This method extracts all verbs that can take expletives as an argument. Example: "[Es] regnet."

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self.extract_specific_complements(self.EXPLETIVE)

    def extract_accusative_complement(self) -> set:
        """
        This method returns all verbs that can take an accusative complement. Example: "Sie sieht [ihn]"

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self.extract_specific_complements(self.ACCOBJ)

    def extract_dative_complement(self) -> set:
        """
        This method returns all verbs that can take a dative complement. Example: "Sie schenkt [ihm] einen Hund."

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self.extract_specific_complements(self.DATOBJ)

    def extract_gentive_complement(self) -> set:
        """
        This method returns all verbs that can take a genitive complement. Example: "Ihre Eltern berauben sie [ihrer
        Freiheit]."

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self.extract_specific_complements(self.GENOBJ)

    def extract_prepositional_complement(self) -> set:
        """
        This method returns all verbs that can take a prepositional complement. Example: "Die Kugel klackte [an die
        Fensterscheibe]."

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self.extract_specific_complements(self.PREPOBJ)

    def extract_reflexives(self) -> set:
        """
        This method returns all verbs that can take a reflexive complement. Example: "Sie wird [sich] rächen."

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self._union_of_complements(*self.reflexives)

    def extract_adverbials(self) -> set:
        """
        This method returns all verbs that can take an adverbial complement. Example: "Sie wohnt [in einem Haus]."

        :return: A set of lexical units that stores all verbs as Lexunits that have the specified frame.
        """
        return self._union_of_complements(self.LOC, self.DIR, self.TEMP, self.MAN,
                                          self.INST, self.CAUSE, self.ROLE, self.COM)

    def extract_transitives(self) -> set:
        """
        This method returns all transitive verbs. A transitive verb is any verb that can have objects.

        :return: A set of lexical units that stores all transitive verbs as Lexunits.
        """
        return self._union_of_complements(self.ACCOBJ, self.DATOBJ, self.GENOBJ, self.PREPOBJ)

    def extract_intransitives(self) -> set:
        """
        This method returns all intransitive verbs. An intransitive verb is any verb that does not have objects.

        :return: A set of lexical units that stores all intransitive verbs as Lexunits.
        """
        transitives = self.extract_transitives()
        # set().union(*...) handles an empty frame dictionary gracefully, whereas
        # reduce(set.union, ...) without an initializer would raise a TypeError.
        all_verbs = set().union(*self.frames2verbs.values())
        return all_verbs.difference(transitives)

    def extract_specific_complements(self, complement: str) -> set:
        """
        This method returns all verbs that can take a given complement. This is specified in the frames of a verb.

        :param: complement : a syntactic complement (e.g NN for subject), the complements are specified as class variables of this class
        :return: A set of lexical units that stores all verbs as Lexunits that can take the specified complement.
        """
        return {unit
                for frame, units in self._frames2verbs.items()
                if complement in frame
                for unit in units}

    @property
    def frames2verbs(self):
        # Read-only access to the underlying frame-to-lexunit mapping.
        return self._frames2verbs
135 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.utils.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Functions
38 |
|
45 | Converts the given String into a boolean. |
46 |
|
48 | Parses an XML file and returns the XML tree :param datadir: The directory where the file is located :param f: the filename :return: The parsed XML tree |
49 |
129 |
130 |
131 |
132 |
133 |
134 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.germanet.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | 46 | |
129 |
130 |
131 |
132 |
133 |
134 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.iliLoader.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Functions
38 |
|
45 | Creates the ili record given the XML attributes. |
46 |
|
48 | This method creates the ili record objects given a datafile and adds them to the GermaNet object and the corresponding lexical unit. |
49 |
129 |
130 |
131 |
132 |
133 |
134 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.wictionaryparaphrase.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
37 |
|
44 | 45 | |
128 |
129 |
130 |
131 |
132 |
133 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.relationLoader.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Functions
38 |
|
45 |
|
51 |
|
53 | Loads the information about the related synsets and lexunits from the data and adds the edges between the objects. |
54 |
134 |
135 |
136 |
137 |
138 |
139 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.lexunit.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | This enum represents the lexical relation (short: LexRel) that a Lexunit can have in GermaNet. |
46 |
|
48 | This class holds the lexical unit object of GermaNet. |
49 |
|
51 | This enum represents the four possible orthographical variations |
52 |
132 |
133 |
134 |
135 |
136 |
137 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.wictionaryLoader.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Functions
37 |
|
44 | Creates a wiktionary object given the XML attributes that contain the required information :param attributes: XML attributes that contain information about the wiktionary paraphrase :return: a wiktionary object |
45 |
|
47 | Given an XML tree this method initializes the wiktionary objects and adds them to the germanet object and the corresponding lexunits :type tree: etree :type germanet: Germanet :param germanet: The germanet object :param tree: The XML tree of the wiktionary file |
48 |
128 |
129 |
130 |
131 |
132 |
133 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.path_based_relatedness_measures.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
129 |
130 |
131 |
132 |
133 |
134 |
--------------------------------------------------------------------------------
/tests/test_lexunit.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import sys
3 | import logging
4 | import pytest
5 | from germanetpy.germanet import Germanet
6 | import numpy as np
7 | from lxml import etree as ET
8 | from germanetpy.lexunit import LexRel
9 | from germanetpy.compoundInfo import CompoundProperty, CompoundCategory
10 |
logger = logging.getLogger('logging_test_lexunit')
# GermaNet XML data is expected in <repo root>/data; loaded once for all tests.
d = str(Path(__file__).parent.parent) + "/data"
try:
    germanet_data = Germanet(d)
except ET.ParseError:
    message = ("Unable to load GermaNet data at {0} . Aborting...").format(d)
    # The message is already fully formatted: passing extra positional
    # arguments (e.g. the exception class) to logger.error would be treated
    # as %-format args and raise a string-formatting error at emit time.
    logger.error(message)
    sys.exit(0)
except IOError:
    message = ("GermaNet data not found at {0} . Aborting...").format(d)
    logger.error(message)
    sys.exit(0)

# lexunit id, lexical relation, expected related lexunit ids
lexical_relations = [
    ('l66160', LexRel.has_part, ['l9112']),
    ('l81451', LexRel.has_synonym, ['l81448', 'l81449', 'l81450']),
    ('l12419', LexRel.has_synonym, ['l12418', 'l12424', 'l12426', 'l126607', 'l140807', ]),
    ('l2825', LexRel.has_antonym, ['l2847'])
]

# lexunit id, incoming lexical relation, expected source lexunit ids
lexical_incoming_relations = [
    ('l66183', LexRel.is_part_of, ['l66955', 'l66960']),
    ('l66183', LexRel.has_habitat, ['l71123']),
    ('l66183', LexRel.has_topic, ['l25890']),
    ('l62003', LexRel.has_member, ['l124229']),

]

# Relation, Equivalent, pwn20Id, pwn30Id, synonyms, pwn20paraphrase, source
iliRecords = [
    ('l37670', 'synonym', 'hearing', 'ENG20-05331473-n', 'ENG30-05657718-n',
     ['audition', 'auditory sense', 'sense of hearing', 'auditory modality'],
     'the ability to hear; the auditory faculty; "his hearing was impaired"', 'extension2'),
    ('l41587', 'synonym', 'newsletter', 'ENG20-06270913-n', 'ENG30-06681976-n', ['newssheet'],
     'report or open letter giving informal or confidential news of interest to a special group', 'extension2')
]

# modifier, head, modifier1Property, modifier1Category, modifier2, modifier2Property, modifier2Category, headProperty
compound_info = [
    ('l66936', 'Apfel', 'Baum', None, CompoundCategory.Nomen, None, None, None, None),
    ('l57979', 'Him', 'Beere', CompoundProperty.opaquesMorphem, None, None, None, None, None),
    ('l23978', 'Kegel', 'Tour', None, CompoundCategory.Nomen, 'kegeln', None, CompoundCategory.Verb, None),
    ('l23312', 'Spiel', 'Ausgang', None, CompoundCategory.Nomen, 'spielen', None, CompoundCategory.Verb, None),
    ('l17119', 'mikro', 'Chip', CompoundProperty.Konfix, None, None, None, None, None)

]

# lexunit, wiktionaryId, wiktionarySenseId, wiktionarySense, edited
wiktionary = [("l76274", "w74102", 0,
               "wird im Vergleich zu benutzen regional verschieden gewertet, von synonym bis in Nuancen entwertenden "
               "Touch: verwenden, aus etwas Nutzen ziehen, ausnutzen, zum Vorteil anwenden, brauchen, gebrauchen",
               False),
              ('l101788', 'w29023', 2, 'der Unterhaltung dienendes Gebäude, Etablissement', False),
              ('l173', 'w136562', 2, 'großartig, unglaublich (Ausdruck der Bewunderung), sagenhaft', False)]
66 |
67 |
@pytest.mark.parametrize('id,lexrel,expected_ids', lexical_relations)
def test_lexical_relations(id, lexrel, expected_ids):
    """Check that a lexunit exposes exactly the expected outgoing lexical relations."""
    unit = germanet_data.lexunits[id]
    actual_ids = sorted(rel.id for rel in unit.relations[lexrel])
    np.testing.assert_equal(actual_ids, sorted(expected_ids))
74 |
75 |
@pytest.mark.parametrize('id,lexrel,expected_ids', lexical_incoming_relations)
def test_incoming_lexical_relations(id, lexrel, expected_ids):
    """Check that a lexunit exposes exactly the expected incoming lexical relations."""
    unit = germanet_data.lexunits[id]
    actual_ids = sorted(rel.id for rel in unit.incoming_relations[lexrel])
    np.testing.assert_equal(actual_ids, sorted(expected_ids))
82 |
83 |
@pytest.mark.parametrize(
    'id,modifier, head, modifier1Property, modifier1Category, modifier2, modifier2Property, modifier2Category, '
    'headProperty',
    compound_info)
def test_compoundInfo(id, modifier, head, modifier1Property, modifier1Category, modifier2, modifier2Property,
                      modifier2Category,
                      headProperty):
    """Test if a compound info is stored correctly.

    Asserts each compound-info attribute directly (instead of comparing
    `x == y` against True) so a failure reports the actual differing values.
    """
    lexunit = germanet_data.lexunits[id]
    compoundinfo = lexunit.compound_info
    np.testing.assert_equal(compoundinfo.modifier1, modifier)
    np.testing.assert_equal(compoundinfo.head, head)
    np.testing.assert_equal(compoundinfo.modifier1_property, modifier1Property)
    np.testing.assert_equal(compoundinfo.modifier1_category, modifier1Category)
    np.testing.assert_equal(compoundinfo.modifier2, modifier2)
    np.testing.assert_equal(compoundinfo.modifier2_property, modifier2Property)
    np.testing.assert_equal(compoundinfo.modifier2_category, modifier2Category)
    np.testing.assert_equal(compoundinfo.head_property, headProperty)
102 |
103 |
@pytest.mark.parametrize('id, relation, english_equivalent, pwn20Id, pwn30Id, pwn20synonyms, pwn20paraphrase, source',
                         iliRecords)
def test_iliRecords(id, relation, english_equivalent, pwn20Id, pwn30Id, pwn20synonyms, pwn20paraphrase, source):
    """Test if an ili record is stored correctly."""
    record = germanet_data.lexunits[id].ili_records[0]
    # Pair each stored attribute with its expected value and assert them in turn.
    checks = [
        (record.relation, relation),
        (record.english_equivalent, english_equivalent),
        (record.pwn20id, pwn20Id),
        (record.pwn30id, pwn30Id),
        (record.pwn20synonyms, pwn20synonyms),
        (record.pwn20paraphrase, pwn20paraphrase),
        (record.source, source),
    ]
    for actual, expected in checks:
        np.testing.assert_equal(actual, expected)
117 |
118 |
@pytest.mark.parametrize('id, wiktionaryId, wiktionarySenseId, wiktionarySense, edited', wiktionary)
def test_wiktionary(id, wiktionaryId, wiktionarySenseId, wiktionarySense, edited):
    """Test if a wiktionary paraphrase is stored correctly."""
    lexunit = germanet_data.lexunits[id]
    # Named 'paraphrase' to avoid shadowing the module-level 'wiktionary'
    # fixture list used by the parametrize decorator.
    paraphrase = lexunit.wiktionary_paraphrases[0]
    np.testing.assert_equal(paraphrase.wiktionary_id, wiktionaryId)
    np.testing.assert_equal(paraphrase.wiktionary_sense_id, wiktionarySenseId)
    np.testing.assert_equal(paraphrase.wiktionary_sense, wiktionarySense)
    np.testing.assert_equal(paraphrase.edited, edited)
128 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.synset.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Classes
38 |
|
45 | This Enum class contains the conceptual relations (short: ConRel) that synsets can have to other synsets. |
46 |
|
48 | This class holds a Synset object. |
49 |
|
51 | This Enum class contains the three part-of-speech tags (WortCategory), a Synset can have in GermaNet. |
52 |
|
54 | This Enum class contains the semantic wordclasses / semantic fields a Synset can have in GermaNet. |
55 |
135 |
136 |
137 |
138 |
139 |
140 |
--------------------------------------------------------------------------------
/docs/_build/html/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
128 |
129 |
130 |
131 |
132 |
133 |
--------------------------------------------------------------------------------
/docs/_build/html/_autosummary/germanetpy.longest_shortest_path.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Functions
38 |
|
45 | Iterate through the synsets of a given word category. |
46 |
|
48 | set a maxdistcounter = 0 for each synset: get the corresponding longest shortest distance. |
49 |
|
51 | Iterate through the synsets of a given word category. |
52 |
|
54 | Computes and prints the longest shortest distances for the given word category. |
55 |
|
57 | Computes and prints the maximum depth for the given word_category. |
58 |
138 |
139 |
140 |
141 |
142 |
143 |
--------------------------------------------------------------------------------