├── test
│ ├── __init__.py
│ ├── resources
│ │ ├── scripts
│ │ │ ├── odml_convert
│ │ │ │ ├── test_broken
│ │ │ │ │ ├── empty.xml
│ │ │ │ │ └── invalid_xml.xml
│ │ │ │ ├── test_recursive
│ │ │ │ │ ├── conversion_example_sub_root.xml
│ │ │ │ │ └── sub
│ │ │ │ │ │ └── conversion_example_sub_sub.xml
│ │ │ │ ├── conversion_example_A.xml
│ │ │ │ └── conversion_example_B.xml
│ │ │ └── odml_to_rdf
│ │ │ │ └── test_invalid
│ │ │ │ │ ├── not_xml.md
│ │ │ │ │ ├── not_odml.xml
│ │ │ │ │ └── invalid.xml
│ │ ├── missing_root.yaml
│ │ ├── missing_root.json
│ │ ├── missing_version.yaml
│ │ ├── missing_version.xml
│ │ ├── invalid_root.xml
│ │ ├── invalid_version.xml
│ │ ├── invalid_version.yaml
│ │ ├── local_repository_file_v1.0.xml
│ │ ├── local_repository_file_v1.1.xml
│ │ ├── missing_version.json
│ │ ├── invalid_version.json
│ │ ├── validation_section.yaml
│ │ ├── version_conversion.yaml
│ │ ├── validation_section.xml
│ │ ├── validation_section.json
│ │ ├── version_conversion.xml
│ │ ├── version_conversion.json
│ │ ├── ignore_errors.xml
│ │ ├── version_conversion_int.yaml
│ │ └── version_conversion_int.xml
│ ├── test_bugs.py
│ ├── util.py
│ ├── test_find_section.py
│ ├── test_fileio.py
│ ├── test_parser_xml.py
│ ├── test_links.py
│ ├── test_dumper.py
│ ├── test_util.py
│ ├── test_version_converter_integration.py
│ ├── test_iterators.py
│ ├── test_xml_writer.py
│ ├── test_script_odml_convert.py
│ ├── test_script_odml_to_rdf.py
│ ├── test_infer_type.py
│ ├── test_terminology.py
│ ├── test_validation_integration.py
│ ├── test_format_converter.py
│ └── test_rdf_reader.py
├── odml
│ ├── rdf
│ │ └── __init__.py
│ ├── scripts
│ │ ├── __init__.py
│ │ ├── odml_view.py
│ │ ├── odml_convert.py
│ │ └── odml_to_rdf.py
│ ├── tools
│ │ ├── converters
│ │ │ └── __init__.py
│ │ ├── __init__.py
│ │ ├── version_converter.py
│ │ ├── doc_inherit.py
│ │ ├── parser_utils.py
│ │ └── dumper.py
│ ├── info.py
│ ├── info.json
│ ├── fileio.py
│ ├── util.py
│ ├── resources
│ │ └── section_subclasses.yaml
│ ├── terminology.py
│ ├── __init__.py
│ └── format.py
├── requirements-test.txt
├── scripts
│ ├── release_tests
│ │ ├── resources
│ │ │ ├── test_convert_script
│ │ │ │ └── test_broken
│ │ │ │ │ ├── 5.xml
│ │ │ │ │ └── 29.xml
│ │ │ ├── test_rdf_export_script
│ │ │ │ └── test_recursive
│ │ │ │ │ ├── nonxml.md
│ │ │ │ │ └── nonodml.xml
│ │ │ ├── test_odmltools
│ │ │ │ └── datacite_namespace
│ │ │ │ │ └── DataCitePreviousNS.xml
│ │ │ └── scripts
│ │ │ │ └── odml_basics.py
│ │ ├── run_test_local_odml.sh
│ │ ├── run_test_pypi_odmltools.sh
│ │ ├── run_test_pypi_nixodmlconverter.sh
│ │ ├── run_test_matrix.sh
│ │ ├── run_pypi_odml.sh
│ │ ├── run_test_pypi_odml.sh
│ │ └── README.md
│ └── README.md
├── docs
│ ├── images
│ │ ├── erModel.png
│ │ └── odMLLogo.png
│ ├── assets
│ │ └── images
│ │ │ ├── background.png
│ │ │ ├── g-node-logo.png
│ │ │ └── g-node_logo2.png
│ ├── _config.yml
│ └── data_model.md
├── doc
│ ├── rdf
│ │ ├── RDF_example_graph.png
│ │ ├── rdf_generator.py
│ │ └── sparql_example_queries.py
│ ├── reference.rst
│ ├── index.rst
│ ├── support-classes.rst
│ ├── base-classes.rst
│ ├── example_odMLs
│ │ ├── sample_odml.odml
│ │ ├── sample_odml.rdf
│ │ └── sample_odml.yaml
│ ├── tools.rst
│ └── Makefile
├── MANIFEST.in
├── .gitignore
├── GSoC.md
├── LICENSE
├── appveyor.yml
├── setup.py
├── CONTRIBUTING.md
├── .github
│ └── workflows
│ │ └── run-tests.yml
└── README.md
/test/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/odml/rdf/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/odml/scripts/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/requirements-test.txt:
--------------------------------------------------------------------------------
1 | owlrl
2 | pytest
3 | requests
4 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_convert/test_broken/empty.xml:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/resources/missing_root.yaml:
--------------------------------------------------------------------------------
1 | Root:
2 | some: value
3 |
--------------------------------------------------------------------------------
/scripts/release_tests/resources/test_convert_script/test_broken/5.xml:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_to_rdf/test_invalid/not_xml.md:
--------------------------------------------------------------------------------
1 | some text
2 |
--------------------------------------------------------------------------------
/scripts/release_tests/resources/test_rdf_export_script/test_recursive/nonxml.md:
--------------------------------------------------------------------------------
1 | some text
2 |
--------------------------------------------------------------------------------
/docs/images/erModel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/G-Node/python-odml/HEAD/docs/images/erModel.png
--------------------------------------------------------------------------------
/docs/images/odMLLogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/G-Node/python-odml/HEAD/docs/images/odMLLogo.png
--------------------------------------------------------------------------------
/doc/rdf/RDF_example_graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/G-Node/python-odml/HEAD/doc/rdf/RDF_example_graph.png
--------------------------------------------------------------------------------
/docs/assets/images/background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/G-Node/python-odml/HEAD/docs/assets/images/background.png
--------------------------------------------------------------------------------
/docs/assets/images/g-node-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/G-Node/python-odml/HEAD/docs/assets/images/g-node-logo.png
--------------------------------------------------------------------------------
/docs/assets/images/g-node_logo2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/G-Node/python-odml/HEAD/docs/assets/images/g-node_logo2.png
--------------------------------------------------------------------------------
/test/resources/missing_root.json:
--------------------------------------------------------------------------------
1 | {
2 | "odml-version": "1.1",
3 | "sometag": {
4 | "Document": "somecontent"
5 | }
6 | }
--------------------------------------------------------------------------------
/test/resources/missing_version.yaml:
--------------------------------------------------------------------------------
1 | Document:
2 | author: Test author
3 | date: '2018-02-27'
4 | sections: []
5 | version: '1.0'
6 |
--------------------------------------------------------------------------------
/test/resources/missing_version.xml:
--------------------------------------------------------------------------------
1 |
2 | 2018-02-27
3 | 1.0
4 | Test author
5 |
--------------------------------------------------------------------------------
/test/resources/invalid_root.xml:
--------------------------------------------------------------------------------
1 |
2 | 2018-02-27
3 | 1.0
4 | Test author
5 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE README.md CHANGELOG.md
2 | include odml/info.json
3 | recursive-include odml/resources *
4 | recursive-include test/resources *
5 |
--------------------------------------------------------------------------------
/test/resources/invalid_version.xml:
--------------------------------------------------------------------------------
1 |
2 | 2018-02-27
3 | 1.0
4 | Test author
5 |
--------------------------------------------------------------------------------
/test/resources/invalid_version.yaml:
--------------------------------------------------------------------------------
1 | Document:
2 | author: Test author
3 | date: '2018-02-27'
4 | sections: []
5 | version: '1.0'
6 | odml-version: 'totallyUnsupported'
--------------------------------------------------------------------------------
/test/resources/scripts/odml_to_rdf/test_invalid/not_odml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | somevalue
4 |
5 |
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-leap-day
2 | title: Open metadata markup language - odML -
3 | description: data model for storing arbitrary metadata
4 | show_downloads: "false"
5 |
--------------------------------------------------------------------------------
/odml/tools/converters/__init__.py:
--------------------------------------------------------------------------------
1 | from .version_converter import VersionConverter
2 | # FormatConverter depends on VersionConverter
3 | from .format_converter import FormatConverter
4 |
--------------------------------------------------------------------------------
/doc/reference.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Class-Reference
3 | ===============
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | base-classes
9 | support-classes
10 | tools
11 |
--------------------------------------------------------------------------------
/scripts/release_tests/resources/test_rdf_export_script/test_recursive/nonodml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | somevalue
4 |
5 |
--------------------------------------------------------------------------------
/test/resources/local_repository_file_v1.0.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Repository test
4 | odML 1.0 document
5 |
6 |
7 |
--------------------------------------------------------------------------------
/test/resources/local_repository_file_v1.1.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Repository test
4 | odML 1.1 document
5 |
6 |
7 |
--------------------------------------------------------------------------------
/test/resources/missing_version.json:
--------------------------------------------------------------------------------
1 | {
2 | "Document": {
3 | "date": "2018-02-27",
4 | "version": "1.0",
5 | "sections": [],
6 | "author": "Test author"
7 | }
8 | }
--------------------------------------------------------------------------------
/odml/tools/__init__.py:
--------------------------------------------------------------------------------
1 | from .dict_parser import DictReader, DictWriter
2 | from .odmlparser import ODMLReader, ODMLWriter
3 | from .rdf_converter import RDFReader, RDFWriter
4 | from .xmlparser import XMLReader, XMLWriter
5 |
--------------------------------------------------------------------------------
/test/resources/invalid_version.json:
--------------------------------------------------------------------------------
1 | {
2 | "odml-version": "invalid",
3 | "Document": {
4 | "date": "2018-02-27",
5 | "version": "1.0",
6 | "sections": [],
7 | "author": "Test author"
8 | }
9 | }
--------------------------------------------------------------------------------
/scripts/release_tests/resources/test_convert_script/test_broken/29.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | http://portal.g-node.org/odml/terminologies/v1.0/terminologies.xml
5 |
6 |
--------------------------------------------------------------------------------
/test/resources/validation_section.yaml:
--------------------------------------------------------------------------------
1 | Document:
2 | id: 468dfc92-d794-4ae9-9ffe-a0944a957d36
3 | sections:
4 | - id: 5279b30a-3dfd-4c44-ac18-2a80e7647449
5 | name: sec_type_undefined
6 | properties: []
7 | sections: []
8 | - id: 2033d777-fc2b-4b73-a8f0-ae164bb1ea10
9 | name: sec_type_empty
10 | properties: []
11 | sections: []
12 | type: ''
13 | odml-version: '1.1'
14 |
--------------------------------------------------------------------------------
/scripts/release_tests/resources/test_odmltools/datacite_namespace/DataCitePreviousNS.xml:
--------------------------------------------------------------------------------
1 |
2 | 10.5072/example-full
3 |
4 |
--------------------------------------------------------------------------------
/odml/info.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 |
4 | INSTALL_PATH = os.path.dirname(__file__)
5 |
6 | with open(os.path.join(INSTALL_PATH, "info.json")) as infofile:
7 | infodict = json.load(infofile)
8 |
9 | VERSION = infodict["VERSION"]
10 | FORMAT_VERSION = infodict["FORMAT_VERSION"]
11 | AUTHOR = infodict["AUTHOR"]
12 | COPYRIGHT = infodict["COPYRIGHT"]
13 | CONTACT = infodict["CONTACT"]
14 | HOMEPAGE = infodict["HOMEPAGE"]
15 | CLASSIFIERS = infodict["CLASSIFIERS"]
16 |
--------------------------------------------------------------------------------
/test/resources/version_conversion.yaml:
--------------------------------------------------------------------------------
1 | Document:
2 | version: v1.13
3 | author: author
4 | date: '2008-07-07'
5 | sections:
6 | - name: sec_one
7 | type: mainsec
8 | sections:
9 | - name: subsec_one
10 | type: subsec
11 | properties:
12 | - name: prop_name
13 | values:
14 | - dtype:
15 | - string
16 | value: '[''one'', ''two'']'
17 | - dtype:
18 | - int
19 | value: '1'
20 | odml-version: '1'
21 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # python files and dirs
2 | *.pyc
3 | *.pyo
4 | *.egg
5 | *.egg-info
6 | dist
7 | build
8 | eggs
9 | .eggs
10 | parts
11 |
12 | # odml files
13 | # *.odml
14 |
15 | # Include a sample file
16 | !THGTTG.odml
17 |
18 | # installer logs
19 | pip-log.txt
20 |
21 | # Unit test / coverage reports
22 | .coverage
23 | .tox
24 | .cache
25 |
26 | # temp files
27 | *~
28 | *.log
29 |
30 | # idea / pycharm files
31 | .idea
32 | *.iml
33 |
34 | # doc/_build files
35 | doc/_build/
36 |
--------------------------------------------------------------------------------
/test/resources/validation_section.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 69585a6e-bdfd-4a49-b227-d4f00e42723e
5 |
6 | e50cfe1b-1c0b-4f47-b76b-5500508b661a
7 | sec_type_undefined
8 |
9 |
10 | d2fd6a82-2568-4ebc-9bd4-93e386a980b7
11 |
12 | sec_type_empty
13 |
14 |
15 |
--------------------------------------------------------------------------------
/test/test_bugs.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import odml
4 |
5 | from odml.terminology import REPOSITORY
6 |
7 |
8 | class BugTests(unittest.TestCase):
9 |
10 | def test_doc_repository_attribute_init(self):
11 | doc = odml.Document(repository=REPOSITORY)
12 | self.assertEqual(doc._repository, REPOSITORY,
13 | "Document needs to init its baseclass first, "
14 | "as it overwrites the repository attribute")
15 | self.assertEqual(doc.repository, REPOSITORY)
16 |
--------------------------------------------------------------------------------
/odml/tools/version_converter.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides backwards compatibility for the VersionConverter class.
3 | It is deprecated and will be removed in future versions.
4 | """
5 | import warnings
6 |
7 | from .converters import VersionConverter
8 |
9 | _MSG = "The VersionConverter file has been moved to "\
10 | "'odml.tools.converters' and will be removed from 'odml.tools' in future "\
11 | "odML releases. Please update the imports in your code accordingly."
12 | warnings.warn(_MSG, category=DeprecationWarning, stacklevel=2)
13 |
--------------------------------------------------------------------------------
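As a quick illustration of the deprecation notice above, this is the import change it asks for; the file name passed to the constructor is only a placeholder:

```python
# Old, deprecated import location; using it triggers the DeprecationWarning above.
# from odml.tools.version_converter import VersionConverter

# Recommended import location going forward.
from odml.tools.converters import VersionConverter

# The converter is created with the path of the odML v1.0 file to convert
# (placeholder file name).
converter = VersionConverter("my_old_v1.0_file.xml")
```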
/test/resources/scripts/odml_to_rdf/test_invalid/invalid.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | http://portal.g-node.org/odml/terminologies/v1.0/terminologies.xml
5 |
6 | Recording
7 | Recording-some-date-ah
8 |
9 | Temperature
10 | 24floatC
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_convert/test_broken/invalid_xml.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | http://portal.g-node.org/odml/terminologies/v1.0/terminologies.xml
5 |
6 | Recording
7 | Recording-some-date-ah
8 |
9 | Temperature
10 | 24floatC
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | .. python-odml documentation master file, created by
2 | sphinx-quickstart on Wed Mar 2 14:54:09 2011.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to python-odml's documentation!
7 | =======================================
8 |
9 | Contents:
10 |
11 | .. toctree::
12 | :maxdepth: 2
13 |
14 | tutorial
15 | advanced_features
16 | odmltordf
17 | reference
18 |
19 | Indices and tables
20 | ==================
21 |
22 | * :ref:`genindex`
23 | * :ref:`modindex`
24 | * :ref:`search`
25 |
26 |
--------------------------------------------------------------------------------
/scripts/README.md:
--------------------------------------------------------------------------------
1 | ## python-odml convenience scripts
2 |
3 | This folder contains convenience scripts for build and deployment tests and should not be part of any release. Check the README files in the specific folders for details.
4 |
5 | The `release_tests` folder contains scripts and resources to test the odML library and the libraries that depend on it, such as odmltools, odmlui, odmlconverter and nix-odml-converter, from a local odML installation as well as from Test-PyPI and PyPI packages.
6 | The local version tests the installation via `pip install .` and `python setup.py install`. The Test-PyPI and PyPI package tests use conda environments to test the installation with all Python versions >= 3.5.
7 |
--------------------------------------------------------------------------------
/test/resources/validation_section.json:
--------------------------------------------------------------------------------
1 | {
2 | "Document": {
3 | "id": "318c4323-f83c-4abc-adbd-daf47816fe87",
4 | "sections": [
5 | {
6 | "id": "552c620d-ac5f-46c4-869e-f2b8170e7a1e",
7 | "name": "sec_type_undefined",
8 | "sections": [],
9 | "properties": []
10 | },
11 | {
12 | "id": "52540350-3533-42bc-b438-f031f5ac3641",
13 | "type": "",
14 | "name": "sec_type_empty",
15 | "sections": [],
16 | "properties": []
17 | }
18 | ]
19 | },
20 | "odml-version": "1.1"
21 | }
--------------------------------------------------------------------------------
/test/resources/version_conversion.xml:
--------------------------------------------------------------------------------
1 |
2 | 2008-07-07
3 |
4 | sec_one
5 | mainsec
6 |
7 | subsec_one
8 | subsec
9 |
10 |
11 | prop_name
12 |
13 | ['one', 'two']
14 | string
15 |
16 |
17 | 1
18 | int
19 |
20 |
21 |
22 | author
23 |
24 |
--------------------------------------------------------------------------------
/odml/info.json:
--------------------------------------------------------------------------------
1 | {
2 | "VERSION": "1.5.4",
3 | "FORMAT_VERSION": "1.1",
4 | "AUTHOR": "Hagen Fritsch, Jan Grewe, Christian Kellner, Achilleas Koutsou, Michael Sonntag, Lyuba Zehl",
5 | "COPYRIGHT": "(c) 2011-2023, German Neuroinformatics Node",
6 | "CONTACT": "dev@g-node.org",
7 | "HOMEPAGE": "https://github.com/G-Node/python-odml",
8 | "CLASSIFIERS": [
9 | "Development Status :: 5 - Production/Stable",
10 | "Programming Language :: Python",
11 | "Programming Language :: Python :: 3.7",
12 | "Programming Language :: Python :: 3.8",
13 | "Programming Language :: Python :: 3.9",
14 | "Programming Language :: Python :: 3.10",
15 | "Topic :: Scientific/Engineering",
16 | "Intended Audience :: Science/Research",
17 | "License :: OSI Approved :: BSD License"
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
/doc/support-classes.rst:
--------------------------------------------------------------------------------
1 | .. _support_classes:
2 |
3 | odML-Support Classes
4 | ====================
5 |
6 | These classes support the odML base classes; they cover document validation, templates and terminology handling.
7 |
8 |
9 | Validation
10 | ----------
11 | .. autoclass:: odml.validation.Validation
12 | :members:
13 | :inherited-members:
14 | :undoc-members:
15 |
16 | IssueID
17 | -------
18 | .. autoclass:: odml.validation.IssueID
19 | :members:
20 | :inherited-members:
21 | :undoc-members:
22 |
23 | ValidationError
24 | ---------------
25 | .. autoclass:: odml.validation.ValidationError
26 | :members:
27 | :inherited-members:
28 | :undoc-members:
29 |
30 | TemplateHandler
31 | ---------------
32 | .. autoclass:: odml.templates.TemplateHandler
33 | :members:
34 | :inherited-members:
35 | :undoc-members:
36 |
37 | Terminologies
38 | -------------
39 | .. autoclass:: odml.terminology.Terminologies
40 | :members:
41 | :inherited-members:
42 | :undoc-members:
43 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_convert/test_recursive/conversion_example_sub_root.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 0.0um
8 | Depth
9 |
10 |
11 | P-unit
12 | CellType
13 |
14 |
15 | Nerve
16 | Structure
17 |
18 |
19 | 0.1
20 | CV
21 |
22 |
23 | 12.8Hz
24 | Baseline rate
25 |
26 | Cell
27 | Cell
28 |
29 | tester
30 |
31 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_convert/test_recursive/sub/conversion_example_sub_sub.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 0.0um
8 | Depth
9 |
10 |
11 | P-unit
12 | CellType
13 |
14 |
15 | Nerve
16 | Structure
17 |
18 |
19 | 0.1
20 | CV
21 |
22 |
23 | 12.8Hz
24 | Baseline rate
25 |
26 | Cell
27 | Cell
28 |
29 | tester
30 |
31 |
--------------------------------------------------------------------------------
/test/util.py:
--------------------------------------------------------------------------------
1 | """
2 | Utilities for the odml test package.
3 | """
4 | import os
5 | import tempfile
6 |
7 | ODML_CACHE_DIR = os.path.join(tempfile.gettempdir(), "odml.cache")
8 |
9 | TEST_RESOURCES_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),
10 | "resources")
11 |
12 | TEST_TEMP_DIR = os.path.join(tempfile.gettempdir(), "odml_test")
13 | if not os.path.exists(TEST_TEMP_DIR):
14 | os.mkdir(TEST_TEMP_DIR)
15 |
16 |
17 | def create_test_dir(script_name):
18 | """
19 | Takes the name of a test script and creates a uniquely named temporary
20 | directory, suffixed with the script name, inside the main test directory.
21 |
22 | :param script_name: String that will be used to create the test directory
23 |
24 | :return: Path to the test directory.
25 | """
26 | dir_name = "_%s" % os.path.basename(os.path.splitext(script_name)[0])
27 |
28 | return tempfile.mkdtemp(suffix=dir_name, dir=TEST_TEMP_DIR)
29 |
--------------------------------------------------------------------------------
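A short sketch of how a test module could use the helper above; the script name and the cleanup step are illustrative, not taken from the repository:

```python
import shutil

from test.util import create_test_dir

# Create a unique temporary directory suffixed with the (hypothetical) script name,
# e.g. /tmp/odml_test/xxxxxxxx_test_example
tmp_dir = create_test_dir("test_example.py")

# ... write temporary test output into tmp_dir ...

# Clean up once the test run is finished.
shutil.rmtree(tmp_dir)
```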
/docs/data_model.md:
--------------------------------------------------------------------------------
1 | # The odML data model
2 |
3 | Data exchange requires that the annotations, i.e. the metadata, are
4 | shared as well. Interoperability requires both a common (meta)data
5 | model, the format in which the metadata are exchanged, and a common
6 | terminology.
7 |
8 | Here, we briefly describe the *odML* data model. It is based on
9 | the idea of key-value pairs like ``temperature = 26°C``.
10 |
11 | The model is as simple as possible while being flexible, allowing
12 | interoperability, and being customizable. The model defines three
13 | entities (Property, Section, Document) whose relations and
14 | elements are shown in the figure below.
15 |
16 | 
17 |
18 | Property and Section are the core entities. A Section contains
19 | Properties and can further have subsections, thus building a tree-like
20 | structure. The model does not constrain the content, which offers the
21 | flexibility essential for comprehensive annotation of neuroscience
22 | data.
23 |
--------------------------------------------------------------------------------
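A minimal python-odml sketch of the three entities described in data_model.md above; the names and values are illustrative:

```python
import odml

# Document: the root of the metadata tree.
doc = odml.Document(author="D. N. Adams", version="1.0")

# Section: groups related metadata and may contain further subsections.
recording = odml.Section(name="Recording", type="recording")
doc.append(recording)

# Property: the actual key-value pair, here with a unit and data type.
temperature = odml.Property(name="temperature", values=26, unit="°C", dtype="float")
recording.append(temperature)
```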
/doc/base-classes.rst:
--------------------------------------------------------------------------------
1 | .. _class-reference:
2 |
3 | odML-Base Classes
4 | =================
5 |
6 | These classes are the core data-structures of odML.
7 | To sum things up, an odML-Document consists of several Sections.
8 | Each Section may contain other Sections and Properties.
9 | Again each Property can have multiple Values.
10 |
11 | The odml module contains wrappers that are shortcuts for creating the main objects::
12 |
13 | >>> from odml import Document, Section, Property
14 | >>> Document(version=0.9, author="Kermit")
15 |
16 |
17 | Several modules exist to extend the implementation.
18 | The following ones are included in the library:
19 |
20 | Document
21 | --------
22 | .. autoclass:: odml.doc.BaseDocument
23 | :members:
24 | :inherited-members:
25 | :undoc-members:
26 |
27 | Section
28 | -------
29 | .. autoclass:: odml.section.BaseSection
30 | :members:
31 | :inherited-members:
32 | :undoc-members:
33 |
34 | Property
35 | --------
36 | .. autoclass:: odml.property.BaseProperty
37 | :members:
38 | :inherited-members:
39 | :undoc-members:
40 |
--------------------------------------------------------------------------------
/test/resources/version_conversion.json:
--------------------------------------------------------------------------------
1 | {
2 | "Document": {
3 | "version": "v1.13",
4 | "date": "2018-07-07",
5 | "author": "author",
6 | "sections": [
7 | {
8 | "name": "sec_one",
9 | "type" : "mainsec",
10 | "properties": [
11 | {
12 | "name": "prop_one",
13 | "values": [
14 | {
15 | "value": "['one', 'two']",
16 | "dtype": "string"
17 | },
18 | {
19 | "value": "1",
20 | "dtype": "int"
21 | }
22 | ]
23 | }
24 | ],
25 | "sections": [
26 | {
27 | "name": "subsec_one",
28 | "sections": [],
29 | "type": "subsec"
30 | }
31 | ]
32 | }
33 | ]
34 | },
35 | "odml-version": "1"
36 | }
37 |
--------------------------------------------------------------------------------
/doc/rdf/rdf_generator.py:
--------------------------------------------------------------------------------
1 | from rdflib import Graph, BNode, Literal
2 | from rdflib.namespace import XSD
3 |
4 | from odml.tools.rdf_converter import ODML_NS
5 |
6 | g = Graph()
7 |
8 | doc = BNode("d1")
9 | s1 = BNode("s1")
10 | p12 = BNode("p1")
11 |
12 | g.add((doc, ODML_NS.version, Literal(1.1)))
13 | g.add((doc, ODML_NS.docversion, Literal(42)))
14 | g.add((doc, ODML_NS.author, Literal('D. N. Adams')))
15 | g.add((doc, ODML_NS.date, Literal('1979-10-12', datatype=XSD.date)))
16 | g.add((doc, ODML_NS.hasSection, s1))
17 |
18 | g.add((s1, ODML_NS.property, p12))
19 | g.add((s1, ODML_NS.type, Literal('crew')))
20 | g.add((s1, ODML_NS.description, Literal('Information on the crew')))
21 | g.add((s1, ODML_NS.name, Literal('TheCrew')))
22 |
23 | content = '[Arthur Philip Dent,Zaphod Beeblebrox,Tricia Marie McMillan,Ford Prefect]'
24 | g.add((p12, ODML_NS.hasValue, Literal(content)))
25 | g.add((p12, ODML_NS.description, Literal('List of crew members names')))
26 | g.add((p12, ODML_NS.dtype, Literal('person')))
27 | g.add((p12, ODML_NS.name, Literal('NameCrewMembers')))
28 |
29 | res = g.serialize(format='application/rdf+xml').decode("utf-8")
30 | print(res)
31 |
32 | with open("generated_odml_rdf.xml", "w") as f:
33 | f.write(res)
34 |
--------------------------------------------------------------------------------
/test/test_find_section.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from odml import Section
4 |
5 |
6 | class TestFindSections(unittest.TestCase):
7 |
8 | def setUp(self):
9 | self.root_section = Section("Type test", "test")
10 | self.root_section.append(Section("sub_1", "sub_1"))
11 | self.root_section.append(Section("sub_2", "sub_2"))
12 | self.root_section.append(Section("sub_2_b", "sub_2"))
13 | self.root_section.append(Section("sub_3", "sub_2/sub_3"))
14 |
15 | def test_find_by_name(self):
16 | ret = self.root_section.find("sub_1")
17 | self.assertTrue(ret.name == "sub_1")
18 |
19 | ret = self.root_section.find("unknown_type")
20 | self.assertIsNone(ret)
21 |
22 | def test_find_by_type(self):
23 | ret = self.root_section.find(type="sub_1")
24 | self.assertTrue(ret is not None and ret.type == "sub_1")
25 |
26 | ret = self.root_section.find(type="sub_2", findAll=True)
27 | self.assertTrue(len(ret) == 2)
28 |
29 | ret = self.root_section.find(key=None, type="sub_2", findAll=True,
30 | include_subtype=True)
31 |
32 | self.assertTrue(len(ret) == 3)
33 |
34 | def test_find_by_name_and_type(self):
35 | ret = self.root_section.find(key="sub_1", type="sub_1")
36 | self.assertTrue(ret.name == "sub_1")
37 |
38 | ret = self.root_section.find(key="sub_1", type="sub_2")
39 | self.assertIsNone(ret)
40 |
--------------------------------------------------------------------------------
/odml/tools/doc_inherit.py:
--------------------------------------------------------------------------------
1 | """
2 | This is a working hack to provide inherited docstrings.
3 | The only other working way I tried would involve metaclasses.
4 |
5 | Each method to inherit a docstring is flagged using the @inherit_docstring
6 | decorator.
7 |
8 | The actual inheritance is done in the class decorator @allow_inherit_docstring,
9 | which uses the classes base-classes and its mro and copies the first docstring
10 | it finds.
11 | """
12 |
13 |
14 | def allow_inherit_docstring(cls):
15 | """
16 | For every method flagged with @inherit_docstring, the first docstring found
17 | in the base classes (following their mro) is copied to that method.
18 |
19 | :param cls: class the decorator function will be used on to inherit the docstring
20 | from its base classes.
21 | :returns: class with the inherited docstring.
22 | """
23 | bases = cls.__bases__
24 | for attr, attribute in cls.__dict__.items():
25 | if hasattr(attribute, "inherit_docstring"):
26 | if not attribute.__doc__:
27 | for mro_cls in (mro_cls for base in bases
28 | for mro_cls in base.mro()
29 | if hasattr(mro_cls, attr)):
30 | doc = getattr(getattr(mro_cls, attr), '__doc__')
31 | if doc:
32 | attribute.__doc__ = doc
33 | break
34 | return cls
35 |
36 |
37 | def inherit_docstring(obj):
38 | """
39 | Sets the inherit_docstring attribute of an object to True and returns the object.
40 | """
41 | obj.inherit_docstring = True
42 |
43 | return obj
44 |
--------------------------------------------------------------------------------
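A small sketch showing how the two decorators above work together; the example classes are made up for illustration:

```python
from odml.tools.doc_inherit import allow_inherit_docstring, inherit_docstring


class Base:
    def clean(self):
        """Remove any merged state from this object."""


@allow_inherit_docstring
class Derived(Base):
    @inherit_docstring
    def clean(self):
        pass  # no docstring here; it is copied from Base.clean by the class decorator


# The flagged method now carries the docstring of Base.clean.
print(Derived.clean.__doc__)
```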
/GSoC.md:
--------------------------------------------------------------------------------
1 | Google Summer of Code contributions
2 | ===================================
3 |
4 |
5 | General guidelines
6 | ------------------
7 |
8 | Google Summer of Code candidates should follow the [general contribution guidelines](CONTRIBUTING.md) before beginning work on an issue and submitting pull requests.
9 | Students interested in working on python-odml as part of GSoC 2017 should read the guidelines described in the [GSoC student guide](http://write.flossmanuals.net/gsocstudentguide/making-first-contact/) regarding making first contact.
10 | They're quite useful for general open source contributions as well.
11 |
12 |
13 | Open communication
14 | ------------------
15 |
16 | The GSoC programme encourages open communication and so do we.
17 | While directly contacting the mentors may get a response, please refrain from doing so unless discussing personal matters.
18 | For all topics regarding the project, issues, patches, preparing proposals, please use the [discussion thread on Trellis](https://www.trelliscience.com/#/discussions-about/13798/0), or comment directly on a relevant issue or pull request, whichever is more appropriate.
19 |
20 | There is a #gnode IRC channel on Freenode which you may join for more casual discussions with the team.
21 |
22 |
23 | Discussion venues
24 | -----------------
25 |
26 | Please keep discussion topics in their relevant venue.
27 | Thoughts and concerns regarding python-odml should be discussed in GitHub issues.
28 | Project ideas should be discussed on Trellis.
29 | Less formal discussions can be had in the IRC chatroom.
30 | If you are new to IRC, this [etiquette guide](https://github.com/fizerkhan/irc-etiquette) may be useful.
31 |
32 |
--------------------------------------------------------------------------------
/odml/tools/parser_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Utility file to provide constants, exceptions and functions
3 | commonly used by the odML tools parsers and converters.
4 | """
5 |
6 | SUPPORTED_PARSERS = ['XML', 'YAML', 'JSON', 'RDF']
7 |
8 |
9 | RDF_CONVERSION_FORMATS = {
10 | # rdflib version "4.2.2" serialization formats
11 | 'xml': '.rdf',
12 | 'pretty-xml': '.rdf',
13 | 'trix': '.rdf',
14 | 'n3': '.n3',
15 | 'turtle': '.ttl',
16 | 'ttl': '.ttl',
17 | 'ntriples': '.nt',
18 | 'nt': '.nt',
19 | 'nt11': '.nt',
20 | 'trig': '.trig',
21 | 'json-ld': '.jsonld'
22 | }
23 |
24 |
25 | class ParserException(Exception):
26 | """
27 | Exception wrapper used by various odML parsers.
28 | """
29 |
30 |
31 | class InvalidVersionException(ParserException):
32 | """
33 | Exception wrapper to indicate a non-compatible odML version.
34 | """
35 |
36 |
37 | def odml_tuple_export(odml_tuples):
38 | """
39 | Converts odml style tuples to a parsable string representation.
40 | Every tuple is represented by brackets '()'. The individual elements of a tuple are
41 | separated by a semicolon ';'. The individual tuples are separated by a comma ','.
42 | An odml 3-tuple list of 2 tuples would be serialized to: "[(11;12;13),(21;22;23)]".
43 |
44 | :param odml_tuples: List of odml style tuples.
45 | :return: string
46 | """
47 | str_tuples = ""
48 | for val in odml_tuples:
49 | str_val = ";".join(val)
50 | if str_tuples:
51 | str_tuples = "%s,(%s)" % (str_tuples, str_val)
52 | else:
53 | str_tuples = "(%s)" % str_val
54 |
55 | return "[%s]" % str_tuples
56 |
--------------------------------------------------------------------------------
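A quick example of the tuple export helper defined above, following the serialization described in its docstring:

```python
from odml.tools.parser_utils import odml_tuple_export

# Two 3-tuples of strings are joined with ';' inside brackets and separated by ','.
tuples = [("11", "12", "13"), ("21", "22", "23")]
print(odml_tuple_export(tuples))  # prints: [(11;12;13),(21;22;23)]
```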
/test/test_fileio.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import unittest
4 |
5 | try:
6 | from StringIO import StringIO
7 | except ImportError:
8 | from io import StringIO
9 |
10 | import odml
11 |
12 | from .util import TEST_RESOURCES_DIR as RES_DIR
13 |
14 |
15 | class TestTypes(unittest.TestCase):
16 |
17 | def setUp(self):
18 | self.file = os.path.join(RES_DIR, "example.odml")
19 | # Do not allow anything to be printed on STDOUT
20 | self.captured_stdout = StringIO()
21 | sys.stdout = self.captured_stdout
22 |
23 | def test_load_save(self):
24 | doc = odml.load(self.file)
25 | self.assertTrue(isinstance(doc, odml.doc.BaseDocument))
26 | file_name = "%s_copy" % self.file
27 | odml.save(doc, file_name)
28 | os.remove(file_name)
29 |
30 | def test_save_kwargs(self):
31 | doc = odml.load(self.file)
32 | file_name = "%s_copy" % self.file
33 |
34 | # Test unsupported kwarg does not raise an exception
35 | odml.save(doc, file_name, unsupported_kwarg="I do not matter")
36 | os.remove(file_name)
37 |
38 | def test_display(self):
39 | doc = odml.load(self.file)
40 | odml.display(doc)
41 |
42 | def test_invalid_parser(self):
43 | with self.assertRaises(NotImplementedError):
44 | odml.load(self.file, "html")
45 |
46 | doc = odml.load(self.file)
47 | with self.assertRaises(NotImplementedError):
48 | file_name = "%s_copy_html" % self.file
49 | odml.save(doc, file_name, "html")
50 |
51 | with self.assertRaises(NotImplementedError):
52 | odml.display(doc, "html")
53 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2011-2021, German Neuroinformatics Node (G-Node)
2 |
3 | All rights reserved.
4 |
5 | Redistribution and use in source and binary forms, with or without
6 | modification, are permitted provided that the following conditions are met:
7 | 1. Redistributions of source code must retain the above copyright
8 | notice, this list of conditions and the following disclaimer.
9 | 2. Redistributions in binary form must reproduce the above copyright
10 | notice, this list of conditions and the following disclaimer in the
11 | documentation and/or other materials provided with the distribution.
12 | 3. All advertising materials mentioning features or use of this software
13 | must display the following acknowledgement:
14 | This product includes software developed by the G-Node.
15 | 4. Neither the name of the G-Node nor the
16 | names of its contributors may be used to endorse or promote products
17 | derived from this software without specific prior written permission.
18 |
19 | THIS SOFTWARE IS PROVIDED BY G-NODE ''AS IS'' AND ANY
20 | EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 | DISCLAIMED. IN NO EVENT SHALL G-NODE BE LIABLE FOR ANY
23 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
28 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 |
--------------------------------------------------------------------------------
/test/test_parser_xml.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 |
4 | from odml.tools import xmlparser
5 | from odml.tools.parser_utils import ParserException, InvalidVersionException
6 | from .util import TEST_RESOURCES_DIR as RES_DIR
7 |
8 |
9 | class TestXMLParser(unittest.TestCase):
10 |
11 | def setUp(self):
12 | self.base_path = RES_DIR
13 |
14 | self.xml_reader = xmlparser.XMLReader()
15 | self.xml_reader_ignore = xmlparser.XMLReader(ignore_errors=True)
16 |
17 | def test_invalid_root(self):
18 | filename = "invalid_root.xml"
19 | message = "Expecting "
20 |
21 | with self.assertRaises(ParserException) as exc:
22 | _ = self.xml_reader.from_file(os.path.join(self.base_path, filename))
23 |
24 | self.assertIn(message, str(exc.exception))
25 |
26 | def test_missing_version(self):
27 | filename = "missing_version.xml"
28 | message = "Could not find format version attribute"
29 |
30 | with self.assertRaises(ParserException) as exc:
31 | _ = self.xml_reader.from_file(os.path.join(self.base_path, filename))
32 |
33 | self.assertIn(message, str(exc.exception))
34 |
35 | def test_invalid_version(self):
36 | filename = "invalid_version.xml"
37 |
38 | with self.assertRaises(InvalidVersionException):
39 | _ = self.xml_reader.from_file(os.path.join(self.base_path, filename))
40 |
41 | def test_ignore_errors(self):
42 | filename = "ignore_errors.xml"
43 |
44 | with self.assertRaises(ParserException):
45 | _ = self.xml_reader.from_file(os.path.join(self.base_path, filename))
46 |
47 | doc = self.xml_reader_ignore.from_file(os.path.join(self.base_path, filename))
48 | doc.pprint()
49 |
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | version: 1.4.{build}
2 |
3 | image: Visual Studio 2017
4 |
5 | environment:
6 | matrix:
7 | - PYTHON: "C:\\Python37"
8 | PYVER: 3
9 | BITS: 32
10 | - PYTHON: "C:\\Python38"
11 | PYVER: 3
12 | BITS: 32
13 | - PYTHON: "C:\\Python39"
14 | APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
15 | PYVER: 3
16 | BITS: 32
17 | - PYTHON: "C:\\Python310"
18 | APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
19 | PYVER: 3
20 | BITS: 32
21 | - PYTHON: "C:\\Python311"
22 | APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
23 | PYVER: 3
24 | BITS: 32
25 | - PYTHON: "C:\\Python37-x64"
26 | PYVER: 3
27 | BITS: 64
28 | - PYTHON: "C:\\Python38-x64"
29 | PYVER: 3
30 | BITS: 64
31 | - PYTHON: "C:\\Python39-x64"
32 | APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
33 | PYVER: 3
34 | BITS: 64
35 | - PYTHON: "C:\\Python310-x64"
36 | APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
37 | PYVER: 3
38 | BITS: 64
39 | - PYTHON: "C:\\Python311-x64"
40 | APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
41 | PYVER: 3
42 | BITS: 64
43 |
44 | init:
45 | - "ECHO %PYTHON% %vcvars% (%bits%)"
46 | - ps: $env:PATH = "$env:PYTHON;$env:PATH;"
47 | - python -c "import sys;print('Python version is {}'.format(sys.version))"
48 |
49 | build: false
50 |
51 | install:
52 | - python -m pip install --upgrade pip setuptools wheel
53 | - python -m pip install -r requirements-test.txt
54 | - python -m pip install .
55 |
56 | test_script:
57 | - python --version
58 | - python -m pytest -v -k "not handle_include and not handle_repository"
59 | # appveyor issues with SSL certificates; deactivating the affected tests,
60 | # since they run fine on GH actions Windows builds.
61 | # - python -m pytest -v
--------------------------------------------------------------------------------
/test/test_links.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from . import test_samplefile as samplefile
4 |
5 |
6 | class TestLinks(unittest.TestCase):
7 |
8 | def setUp(self):
9 | self.doc = samplefile.SampleFileCreator().create_document()
10 | # for s in self.doc: xmlparser.dump_section(s)
11 |
12 | def test_link_creation(self):
13 | obj = self.doc.sections[0].sections[0]
14 | dst = self.doc.sections[1].sections[1]
15 |
16 | self.assertNotEqual(obj, dst)
17 | obj.link = "/sec 1/sec 1,1"
18 | self.assertIsNot(obj, dst)
19 | self.assertEqual(obj.sections, dst.sections)
20 | self.assertEqual(obj.properties, dst.properties)
21 |
22 | obj.clean()
23 | self.assertNotEqual(obj, dst)
24 |
25 | def no_test_circles(self):
26 | # TODO this currently just works, although, maybe it shouldn't?
27 |
28 | # we cannot allow self-referencing links
29 | obj = self.doc.sections[0].sections[0]
30 | dst = self.doc.sections[0]
31 |
32 | samplefile.dumper.dump_section(dst)
33 |
34 | obj.link = "/sec 0"
35 | # self.assertEqual(obj.sections, dst.sections) # this will FAIL
36 | # self.assertEqual(obj.properties, dst.properties)
37 | obj.clean()
38 |
39 | samplefile.dumper.dump_section(dst)
40 |
41 | def test_merge(self):
42 | obj = self.doc.sections[0].sections[0] # must be an empty section
43 | dst = self.doc.sections[1] # .sections[1]
44 | org = obj.clone()
45 |
46 | obj.link = '/sec 1'
47 | self.assertEqual(obj.sections, dst.sections)
48 | self.assertEqual(obj.properties, dst.properties)
49 | self.assertEqual(obj._merged, dst)
50 |
51 | obj.clean()
52 | self.assertIsNone(obj._merged)
53 | self.assertEqual(obj.sections, org.sections)
54 | self.assertEqual(obj.properties, org.properties)
55 |
--------------------------------------------------------------------------------
/doc/example_odMLs/sample_odml.odml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | D. N. Adams
6 | https://terminologies.g-node.org/v1.1/terminologies.xml
7 |
8 |
9 | person
10 | [Arthur Philip Dent,Zaphod Beeblebrox,Tricia Marie McMillan,Ford Prefect]
11 | 7b0572c1-fcfc-4010-aebf-730397ac29af
12 | NameCrewMembers
13 | List of crew members names
14 |
15 |
16 | int
17 | 1
18 | [4]
19 | The Hitchhiker's guide to the Galaxy (novel)
20 | 298938a6-0996-4a92-982b-837a08a1a220
21 | NoCrewMembers
22 | Number of crew members
23 |
24 | crew
25 | Information on the crew
26 | 0f96a050-2d9b-498f-a532-fbfcc6aae55e
27 | TheCrew
28 |
29 |
30 | string
31 | [Human]
32 | 635035cc-1c05-4e44-a4b7-0fb3d6295abf
33 | Species
34 | Species to which subject belongs to
35 |
36 |
37 | string
38 | []
39 | 0cec018b-20e3-4b13-bb95-b41de12db97f
40 | Nickname
41 | Nickname(s) of the subject
42 |
43 | crew/person
44 | Information on Arthur Dent
45 | 1d6469b5-5322-408b-916e-3224b7a22008
46 | Arthur Philip Dent
47 |
48 |
49 | 1979-10-12
50 | 79b613eb-a256-46bf-84f6-207df465b8f7
51 | 42
52 |
53 |
--------------------------------------------------------------------------------
/test/resources/ignore_errors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | D. N. Adams
6 | hello
7 |
8 |
9 | int
10 | 123
11 |
12 |
13 | int
14 | [zergrve sdic, 1]
15 | ["string 1", "string 2"]
16 | The Hitchhiker's guide to the Galaxy (novel)
17 | wrongid
18 | NoCrewMembers
19 | Number of crew members
20 |
21 |
22 | NoBones
23 |
24 | 235
25 | wrongid4
26 | Number of bones within the crew member.
27 |
28 | crew
29 | Information on the crew
30 | 0f96a050-2d9b-498f-a532-fbfcc6aae55e
31 | TheCrew
32 |
33 |
34 | string
35 | [1, 2]
36 | wrongid2
37 | Nickname
38 | Nickname(s) of the subject
39 |
40 |
41 | int
42 | 535
43 | wrongid3
44 | Nickname(s) of the subject
45 |
46 | ADent
47 | Information on Arthur Dent
48 |
49 |
50 | ztuz
51 | 79b613eb-a256-46bf-84f6-207df465b8f7
52 | new version
53 |
54 |
--------------------------------------------------------------------------------
/doc/tools.rst:
--------------------------------------------------------------------------------
1 | .. _datatypes:
2 |
3 | Data Types
4 | ==========
5 |
6 | .. automodule:: odml.dtypes
7 | :members:
8 | :inherited-members:
9 | :undoc-members:
10 |
11 | .. _tools:
12 |
13 | Tools
14 | =====
15 | Several tools are provided with the :py:mod:`odml.tools` package.
16 |
17 | DictParser
18 | ----------
19 | .. automodule:: odml.tools.dict_parser
20 | :members:
21 | :inherited-members:
22 | :undoc-members:
23 |
24 | ODMLParser
25 | ----------
26 | .. automodule:: odml.tools.odmlparser
27 | :members:
28 | :inherited-members:
29 | :undoc-members:
30 |
31 | RDFConverter
32 | ----------------
33 | .. automodule:: odml.tools.rdf_converter
34 | :members:
35 | :inherited-members:
36 | :undoc-members:
37 |
38 | XMLParser
39 | ---------
40 | .. automodule:: odml.tools.xmlparser
41 | :members:
42 | :inherited-members:
43 | :undoc-members:
44 |
45 | .. _converters:
46 |
47 | Convenience converters
48 | ======================
49 | Several convenience converters are provided with the :py:mod:`odml.tools.converters` package.
50 |
51 | FormatConverter
52 | ----------------
53 | .. automodule:: odml.tools.converters.format_converter
54 | :members:
55 | :inherited-members:
56 | :undoc-members:
57 |
58 | VersionConverter
59 | ----------------
60 | .. automodule:: odml.tools.converters.version_converter
61 | :members:
62 | :inherited-members:
63 | :undoc-members:
64 |
65 | .. _cli_scripts:
66 |
67 | Command line scripts
68 | ====================
69 | Several cli convenience scripts are automatically installed and are available from the command line.
70 |
71 | odML conversion script
72 | ----------------------
73 | .. automodule:: odml.scripts.odml_convert
74 | :members:
75 | :inherited-members:
76 | :undoc-members:
77 |
78 | odML to RDF script
79 | ------------------
80 | .. automodule:: odml.scripts.odml_to_rdf
81 | :members:
82 | :inherited-members:
83 | :undoc-members:
84 |
85 | odML view (browse odml files locally)
86 | -------------------------------------
87 | .. automodule:: odml.scripts.odml_view
88 | :members:
89 | :inherited-members:
90 | :undoc-members:
91 |
--------------------------------------------------------------------------------
/scripts/release_tests/resources/scripts/odml_basics.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import unittest
3 |
4 | try:
5 | # Test possible imports of all parsers without importing the full odML package
6 | from odml.tools import ODMLReader, ODMLWriter, RDFReader, RDFWriter
7 | from odml.tools.converters import FormatConverter, VersionConverter
8 | from odml.tools import XMLReader, XMLWriter, DictReader, DictWriter
9 |
10 | import odml
11 | except Exception as exc:
12 | print("-- Failed on an import: %s" % exc)
13 | sys.exit(-1)
14 |
15 |
16 | class TestODMLBasics(unittest.TestCase):
17 |
18 | def test_load(self):
19 | print("-- Load odml xml file")
20 | xdoc = odml.load('./load.odml.xml')
21 | print(xdoc.pprint())
22 |
23 | print("-- Load odml json file")
24 | jdoc = odml.load('./load.odml.json', 'JSON')
25 | self.assertEqual(xdoc, jdoc)
26 |
27 | print("-- Load odml yaml file")
28 | ydoc = odml.load('./load.odml.yaml', 'YAML')
29 | self.assertEqual(xdoc, ydoc)
30 |
31 | print("-- Document loading tests success")
32 |
33 | def test_version_load(self):
34 | print("-- Test invalid version exception xml file load")
35 | with self.assertRaises(odml.tools.parser_utils.InvalidVersionException):
36 | _ = odml.load('./load_v1.odml.xml')
37 | print("-- Invalid version loading test success")
38 |
39 | def test_tools_init(self):
40 | _ = ODMLReader()
41 | _ = ODMLWriter()
42 | _ = RDFReader()
43 | _ = RDFWriter([odml.Document()])
44 | _ = FormatConverter()
45 | _ = VersionConverter("/I/do/not/exist.txt")
46 | _ = XMLReader()
47 | _ = XMLWriter(odml.Document())
48 | _ = DictReader()
49 | _ = DictWriter()
50 |
51 |
52 | if __name__ == "__main__":
53 | try:
54 | svi = sys.version_info
55 | print("-- Using Python '%s.%s.%s'" % (svi.major, svi.minor, svi.micro))
56 | print("-- Testing odml Version: '%s'" % odml.VERSION)
57 |
58 | unittest.main()
59 |
60 | except Exception as exc:
61 | print("-- Failed on a test: %s" % exc)
62 | sys.exit(-1)
63 |
--------------------------------------------------------------------------------
/odml/fileio.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides convenience functions for saving and loading of odML files.
3 | """
4 |
5 | import os
6 | from .tools.odmlparser import ODMLReader, ODMLWriter
7 |
8 |
9 | def load(filename, backend="xml", show_warnings=True):
10 | """
11 | Load an odML document from file.
12 | :param filename: Path and filename from where the odML document
13 | is to be loaded and parsed.
14 | :param backend: File format of the file containing the odML document.
15 | The default format is XML.
16 | :param show_warnings: Toggle whether to print warnings to the command line.
17 | :return: The parsed odML document.
18 | """
19 | if not os.path.exists(filename):
20 | msg = "File \'%s\' was not found!" % \
21 | (filename if len(filename) < 20 else "...%s" % filename[19:])
22 | raise FileNotFoundError(msg)
23 |
24 | reader = ODMLReader(backend, show_warnings)
25 | return reader.from_file(filename)
26 |
27 |
28 | def save(obj, filename, backend="xml", **kwargs):
29 | """
30 | Save an open odML document to file of a specified format.
31 | :param obj: odML document to be saved.
32 | :param filename: Filename and path where the odML document
33 | should be saved.
34 | :param backend: Format in which the odML document is to be saved.
35 | The default format is XML.
36 | :param kwargs: Writer backend keyword arguments e.g. for adding specific
37 | stylesheets for xml documents or specifying an RDF format.
38 | Refer to the documentation of the available parsers to check
39 | which arguments are supported.
40 | """
41 | writer = ODMLWriter(backend)
42 | if "." not in os.path.basename(filename):
43 | filename = filename + ".%s" % backend
44 | return writer.write_file(obj, filename, **kwargs)
45 |
46 |
47 | def display(obj, backend="xml"):
48 | """
49 | Print an open odML document to the command line, formatted in the
50 | specified format.
51 | :param obj: odML document to be displayed.
52 | :param backend: Format in which the odML document is to be displayed.
53 | The default format is XML.
54 | """
55 | writer = ODMLWriter(backend)
56 | print(writer.to_string(obj))
57 |
--------------------------------------------------------------------------------
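A minimal usage sketch of the three helpers above; the file names are placeholders. The functions are also available as odml.load, odml.save and odml.display, as used in test/test_fileio.py:

```python
import odml

# Load an odML document from an XML file (the default backend).
doc = odml.load("example.odml")

# Save the document as JSON; the extension is appended if the file name has none.
odml.save(doc, "example_copy", backend="json")

# Print the document to the command line in YAML format.
odml.display(doc, backend="yaml")
```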
/test/test_dumper.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import sys
3 | try:
4 | from StringIO import StringIO
5 | except ImportError:
6 | from io import StringIO
7 |
8 | import odml
9 |
10 | from odml.tools.dumper import dump_doc
11 |
12 |
13 | class TestTypes(unittest.TestCase):
14 |
15 | def setUp(self):
16 | s_type = "type"
17 |
18 | self.doc = odml.Document(author='Rave', version='1.0')
19 | sec1 = odml.Section(name='Cell', type=s_type)
20 | prop1 = odml.Property(name='Type', values='Rechargeable')
21 | sec1.append(prop1)
22 |
23 | sec2 = odml.Section(name='Electrolyte', type=s_type)
24 | prop2 = odml.Property(name='Composition', values='Ni-Cd')
25 | sec2.append(prop2)
26 | sec1.append(sec2)
27 |
28 | sec3 = odml.Section(name='Electrode', type=s_type)
29 | prop3 = odml.Property(name='Material', values='Nickel')
30 | prop4 = odml.Property(name='Models', values=['AA', 'AAA'])
31 | sec3.append(prop3)
32 | sec3.append(prop4)
33 | sec2.append(sec3)
34 |
35 | self.doc.append(sec1)
36 |
37 | def test_dump_doc(self):
38 | # Capture the output printed by the functions to STDOUT, and use it for
39 | # testing purposes. It needs to be reset after the capture.
40 | captured_stdout = StringIO()
41 | sys.stdout = captured_stdout
42 |
43 | # This test dumps the whole document and checks it word by word.
44 | # If possible, maybe some better way of testing this?
45 | dump_doc(self.doc)
46 | output = [x.strip() for x in captured_stdout.getvalue().split('\n') if x]
47 |
48 | # Reset stdout
49 | sys.stdout = sys.__stdout__
50 |
51 | expected_output = ["*Cell (type='type')",
52 | ":Type (values=Rechargeable, dtype='string')",
53 | "*Electrolyte (type='type')",
54 | ":Composition (values=Ni-Cd, dtype='string')",
55 | "*Electrode (type='type')",
56 | ":Material (values=Nickel, dtype='string')",
57 | ":Models (values=[AA,AAA], dtype='string')"]
58 |
59 | self.assertEqual(len(output), len(expected_output))
60 | for i, _ in enumerate(output):
61 | self.assertEqual(output[i], expected_output[i])
62 |
--------------------------------------------------------------------------------
/odml/tools/dumper.py:
--------------------------------------------------------------------------------
1 | """
2 | The dumper module provides functions to dump odML objects
3 | (Document, Section, Property) to the command line.
4 | """
5 | from .xmlparser import to_csv
6 |
7 |
8 | def get_props(obj, props):
9 | """
10 | Retrieves the values of a list of provided properties
11 | from an object and returns all values as a concatenated string.
12 |
13 | :param obj: odml object from which to retrieve specific property values.
14 | :param props: list of properties
15 | :returns: the obj property values as a concatenated string
16 | """
17 | out = []
18 | for prop in props:
19 | if hasattr(obj, prop):
20 | curr = getattr(obj, prop)
21 | if curr is not None:
22 | if isinstance(curr, (list, tuple)):
23 | out.append("%s=%s" % (prop, to_csv(curr)))
24 | else:
25 | out.append("%s=%s" % (prop, repr(curr)))
26 |
27 | return ", ".join(out)
28 |
29 |
30 | def dump_property(prop, indent=1):
31 | """
32 | Prints the content of an odml.Property.
33 |
34 | :param prop: odml.Property
35 | :param indent: number of prepended whitespaces. Default is 1.
36 | """
37 | prop_list = ["definition", "values", "uncertainty", "unit", "dtype",
38 | "value_reference", "dependency", "dependencyValue"]
39 | prop_string = get_props(prop, prop_list)
40 | print("%*s:%s (%s)" % (indent, " ", prop.name, prop_string))
41 |
42 |
43 | def dump_section(section, indent=1):
44 | """
45 | Prints the content of an odml.Section including any subsections
46 | and odml.Properties.
47 |
48 | :param section: odml.Section
49 | :param indent: number of prepended whitespaces. Default is 1.
50 | """
51 | if section is None:
52 | return
53 |
54 | prop_list = ["type", "definition", "link", "include", "repository"]
55 | prop_string = get_props(section, prop_list)
56 | print("%*s*%s (%s)" % (indent, " ", section.name, prop_string))
57 |
58 | for prop in section.properties:
59 | dump_property(prop, indent + 1)
60 |
61 | for sub in section.sections:
62 | dump_section(sub, indent * 2)
63 |
64 |
65 | def dump_doc(doc):
66 | """
67 | Prints the content of an odml.Document including any subsections
68 | and odml.Properties.
69 |
70 |     :param doc: odml.Document
71 | """
72 | for sec in doc:
73 | dump_section(sec)
74 |
--------------------------------------------------------------------------------
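A minimal usage sketch (not part of the repository) showing the output format produced by dump_doc; it mirrors the document built in test_dumper.py above, and the expected lines are taken from that test.

import odml
from odml.tools.dumper import dump_doc

doc = odml.Document(author='Rave', version='1.0')
sec = odml.Section(name='Cell', type='type')
sec.append(odml.Property(name='Type', values='Rechargeable'))
doc.append(sec)

dump_doc(doc)
# Prints (leading whitespace trimmed):
# *Cell (type='type')
# :Type (values=Rechargeable, dtype='string')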
/odml/util.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Module containing general utility functions.
4 | """
5 |
6 |
7 | def format_cardinality(in_val):
8 | """
9 |     Checks an input value and converts it to the custom tuple format
10 |     used for odml Section, Property and Values cardinality.
11 |
12 | The following cases are supported:
13 | (n, n) - default, no restriction
14 | (d, n) - minimally d entries, no maximum
15 | (n, d) - maximally d entries, no minimum
16 | (d, d) - minimally d entries, maximally d entries
17 |
18 | Only positive integers are supported. 'None' is used to denote
19 | no restrictions on a maximum or minimum.
20 |
21 | :param in_val: Can either be 'None', a positive integer, which will set
22 | the maximum or an integer 2-tuple of the format '(min, max)'.
23 |
24 | :returns: None or the value as tuple. A ValueError is raised, if the
25 | provided value was not in an acceptable format.
26 | """
27 | exc_msg = "Can only assign positive single int or int-tuples of the format '(min, max)'"
28 |
29 | # Empty values reset the cardinality to None.
30 | if not in_val:
31 | return None
32 |
33 | # Catch tuple edge cases (0, 0); (None, None); (0, None); (None, 0)
34 | if isinstance(in_val, (tuple, list)) and len(in_val) == 2 and not in_val[0] and not in_val[1]:
35 | return None
36 |
37 | # Providing a single integer sets the maximum value in a tuple.
38 | if isinstance(in_val, int) and in_val > 0:
39 | return None, in_val
40 |
41 | # Integer 2-tuples of the format '(min, max)' are supported to set the cardinality.
42 | # Also support lists with a length of 2 without advertising it.
43 | if isinstance(in_val, (tuple, list)) and len(in_val) == 2:
44 | v_min = in_val[0]
45 | v_max = in_val[1]
46 |
47 | min_int = isinstance(v_min, int) and v_min >= 0
48 | max_int = isinstance(v_max, int) and v_max >= 0
49 |
50 | if max_int and min_int and v_max >= v_min:
51 | return v_min, v_max
52 |
53 | if max_int and not v_min:
54 | return None, v_max
55 |
56 | if min_int and not v_max:
57 | return v_min, None
58 |
59 | # Use helpful exception message in the following case:
60 | if max_int and min_int and v_max < v_min:
61 | exc_msg = "Minimum larger than maximum (min=%s, max=%s)" % (v_min, v_max)
62 |
63 | raise ValueError(exc_msg)
64 |
--------------------------------------------------------------------------------
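A few illustrative calls, shown here as a sketch; the return values match the behaviour asserted in test_util.py further below.

from odml.util import format_cardinality

format_cardinality(None)       # -> None, resets the cardinality
format_cardinality(10)         # -> (None, 10), a single int sets the maximum
format_cardinality((2, None))  # -> (2, None), minimum only
format_cardinality((2, 3))     # -> (2, 3)
format_cardinality((5, 1))     # raises ValueError: minimum larger than maximum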
/odml/resources/section_subclasses.yaml:
--------------------------------------------------------------------------------
1 | analysis: Analysis
2 | analysis/power_spectrum: PowerSpectrum
3 | analysis/psth: PSTH
4 | cell: Cell
5 | datacite/alternate_identifier: AlternateIdentifier
6 | datacite/contributor: Contributer
7 | datacite/contributor/affiliation: Affiliation
8 | datacite/contributor/named_identifier: NamedIdentifier
9 | datacite/creator: Creator
10 | datacite/creator/affiliation: Affiliation
11 | datacite/creator/named_identifier: NamedIdentifier
12 | datacite/date: Date
13 | datacite/description: Description
14 | datacite/format: Format
15 | datacite/funding_reference: FundingReference
16 | datacite/geo_location: GeoLocation
17 | datacite/identifier: Identifier
18 | datacite/related_identifier: RelatedIdentifier
19 | datacite/resource_type: ResourceType
20 | datacite/rights: Rights
21 | datacite/size: Size
22 | datacite/subject: Subject
23 | datacite/title: Title
24 | dataset: Dataset
25 | data_reference: DataReference
26 | blackrock: Blackrock
27 | electrode: Electrode
28 | event: Event
29 | event_list: EventList
30 | experiment: Experiment
31 | experiment/behavior: Behavior
32 | experiment/electrophysiology: Electrophysiology
33 | experiment/imaging: Imaging
34 | experiment/psychophysics: Psychophysics
35 | hardware_properties: HardwareProperties
36 | hardware_settings: HardwareSettings
37 | hardware: Hardware
38 | hardware/amplifier: Amplifier
39 | hardware/attenuator: Attenuator
40 | hardware/camera_objective: CameraObjective
41 | hardware/daq: DataAcquisition
42 | hardware/eyetracker: Eyetracker
43 | hardware/filter: Filter
44 | hardware/filter_set: Filterset
45 | hardware/iaq: ImageAcquisition
46 | hardware/light_source: Lightsource
47 | hardware/microscope: Microscope
48 | hardware/microscope_objective: MicroscopeObjective
49 | hardware/scanner: Scanner
50 | hardware/stimulus_isolator: StimulusIsolator
51 | model/lif: LeakyIntegrateAndFire
52 | model/pif: PerfectIntegrateAndFire
53 | model/multi_compartment: MultiCompartmentModel
54 | model/single_compartment: SingleCompartmentModel
55 | person: Person
56 | preparation: Preparation
57 | project: Project
58 | protocol: Protocol
59 | recording: Recording
60 | setup: Setup
61 | stimulus: Stimulus
62 | stimulus/dc: DC
63 | stimulus/gabor: Gabor
64 | stimulus/grating: Grating
65 | stimulus/pulse: Pulse
66 | stimulus/movie: Movie
67 | stimulus/ramp: Ramp
68 | stimulus/random_dot: RandomDot
69 | stimulus/sawtooth: Sawtooth
70 | stimulus/sine_wave: Sinewave
71 | stimulus/square_wave: Squarewave
72 | stimulus/white_noise: Whitenoise
73 | subject: Subject
74 |
--------------------------------------------------------------------------------
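The file above is a plain YAML mapping from section types to subclass names. Below is a hedged sketch of reading such a mapping with PyYAML; the lookup shown is illustrative only and not a documented odml API.

import yaml

# Illustrative only: load the type-to-subclass mapping shipped with the package.
with open("odml/resources/section_subclasses.yaml") as fh:
    subclasses = yaml.safe_load(fh)

subclasses.get("stimulus/gabor")  # -> 'Gabor'
subclasses.get("no_such_type")    # -> None, no dedicated subclass defined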
/scripts/release_tests/run_test_local_odml.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo
4 | echo "-- MAKE SURE TO RUN THIS SCRIPT IN INTERACTIVE MODE '-i' --"
5 |
6 | PY_VER_ARRAY=("|3.5|3.6|3.7|3.8|3.9|3.10|3.11|")
7 |
8 | if [[ $# != 1 ]]; then
9 | echo
10 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
11 | exit 1
12 | fi
13 |
14 | PYVER=$1
15 |
16 | if [[ ! "${PY_VER_ARRAY}" =~ "|${PYVER}|" ]]; then
17 | echo
18 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
19 | exit 1
20 | fi
21 |
22 | echo
23 | echo "-- Using Python version ${PYVER}"
24 |
25 | SCRIPT_DIR=$(pwd)
26 | cd ../..
27 | ROOT_DIR=$(pwd)
28 |
29 | echo
30 | echo "-- Running directory check: ${ROOT_DIR}"
31 | CHECK_DIR=$(basename ${ROOT_DIR})
32 | if [[ ! "$CHECK_DIR" = "python-odml" ]]; then
33 | echo "-- [FAILED] In wrong directory ${ROOT_DIR}"
34 | exit 1
35 | fi
36 |
37 | echo
38 | echo "-- Running active conda env check: ${CONDA_PREFIX}"
39 | if [[ ! -z "${CONDA_PREFIX}" ]]; then
40 | echo "-- Deactivating conda env: ${CONDA_PREFIX}"
41 | conda deactivate
42 | fi
43 |
44 | CONDA_ENV_SETUP=odmlsetup${PYVER}
45 | CONDA_ENV_PIP=odmlpip${PYVER}
46 |
47 | echo
48 | echo "-- Testing local pip installation"
49 | echo "-- Cleanup previous conda environment and create new one"
50 | echo
51 | conda remove -q -n ${CONDA_ENV_PIP} --all -y
52 | conda create -q -n ${CONDA_ENV_PIP} python=${PYVER} -y
53 |
54 | conda activate ${CONDA_ENV_PIP}
55 | pip install -q --upgrade pip
56 |
57 | echo
58 | echo "-- Local installation (pip)"
59 | echo
60 | pip install .
61 |
62 | echo
63 | echo "-- Installing test dependencies"
64 | pip install -r requirements-test.txt
65 |
66 | echo
67 | echo "-- Running tests"
68 | pytest -v
69 |
70 | conda deactivate
71 |
72 | echo
73 | echo "-- Testing local setup installation"
74 | echo "-- Cleanup previous conda environment and create new one"
75 | echo
76 | conda remove -q -n ${CONDA_ENV_SETUP} --all -y
77 | conda create -q -n ${CONDA_ENV_SETUP} python=${PYVER} -y
78 |
79 | conda activate ${CONDA_ENV_SETUP}
80 | pip install -q --upgrade pip
81 |
82 | echo
83 | echo "-- Local installation (setup.py)"
84 | echo
85 | python setup.py install
86 |
87 | echo
88 | echo "-- Installing test dependencies"
89 | pip install -r requirements-test.txt
90 |
91 | echo
92 | echo "-- Running tests"
93 | pytest -v
94 |
95 | conda deactivate
96 |
97 | echo
98 | echo "-- Returning to script directory ${SCRIPT_DIR}"
99 | echo
100 | cd ${SCRIPT_DIR}
101 |
102 | echo "-- Done"
103 | echo
104 |
--------------------------------------------------------------------------------
/test/test_util.py:
--------------------------------------------------------------------------------
1 | """
2 | This file tests odml util functions.
3 | """
4 |
5 | import unittest
6 |
7 | from odml.util import format_cardinality
8 |
9 |
10 | class TestUtil(unittest.TestCase):
11 |
12 | def test_format_cardinality(self):
13 | # Test empty set
14 | self.assertIsNone(format_cardinality(None))
15 | self.assertIsNone(format_cardinality([]))
16 | self.assertIsNone(format_cardinality({}))
17 | self.assertIsNone(format_cardinality(""))
18 | self.assertIsNone(format_cardinality(()))
19 |
20 | # Test empty tuple edge cases
21 | self.assertIsNone(format_cardinality((None, None)))
22 | self.assertIsNone(format_cardinality((0, 0)))
23 | self.assertIsNone(format_cardinality((None, 0)))
24 | self.assertIsNone(format_cardinality((0, None)))
25 |
26 | # Test single int max set
27 | self.assertEqual(format_cardinality(10), (None, 10))
28 |
29 | # Test tuple set
30 | set_val = (2, None)
31 | self.assertEqual(format_cardinality(set_val), set_val)
32 | set_val = (None, 2)
33 | self.assertEqual(format_cardinality(set_val), set_val)
34 | set_val = (2, 3)
35 | self.assertEqual(format_cardinality(set_val), set_val)
36 |
37 | # Test list simple list set
38 | set_val = [2, None]
39 | self.assertEqual(format_cardinality(set_val), tuple(set_val))
40 | set_val = [None, 2]
41 | self.assertEqual(format_cardinality(set_val), tuple(set_val))
42 | set_val = [2, 3]
43 | self.assertEqual(format_cardinality(set_val), tuple(set_val))
44 |
45 | # Test exact value tuple set
46 | set_val = (5, 5)
47 | self.assertEqual(format_cardinality(set_val), set_val)
48 |
49 | # Test set failures
50 | with self.assertRaises(ValueError):
51 | format_cardinality("a")
52 |
53 | with self.assertRaises(ValueError):
54 | format_cardinality([1])
55 |
56 | with self.assertRaises(ValueError):
57 | format_cardinality([1, 2, 3])
58 |
59 | with self.assertRaises(ValueError):
60 | format_cardinality({1: 2, 3: 4})
61 |
62 | with self.assertRaises(ValueError):
63 | format_cardinality(-1)
64 |
65 | with self.assertRaises(ValueError):
66 | format_cardinality((1, "b"))
67 |
68 | with self.assertRaises(ValueError):
69 | format_cardinality((1, 2, 3))
70 |
71 | with self.assertRaises(ValueError):
72 | format_cardinality((-1, 1))
73 |
74 | with self.assertRaises(ValueError):
75 | format_cardinality((1, -5))
76 |
77 | with self.assertRaises(ValueError) as exc:
78 | format_cardinality((5, 1))
79 | self.assertIn("Minimum larger than maximum ", str(exc))
80 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 | from sys import version_info as _python_version
5 |
6 | try:
7 | from setuptools import setup
8 | except ImportError as ex:
9 | from distutils.core import setup
10 |
11 | with open(os.path.join("odml", "info.json")) as infofile:
12 | infodict = json.load(infofile)
13 |
14 | VERSION = infodict["VERSION"]
15 | FORMAT_VERSION = infodict["FORMAT_VERSION"]
16 | AUTHOR = infodict["AUTHOR"]
17 | COPYRIGHT = infodict["COPYRIGHT"]
18 | CONTACT = infodict["CONTACT"]
19 | HOMEPAGE = infodict["HOMEPAGE"]
20 | CLASSIFIERS = infodict["CLASSIFIERS"]
21 |
22 |
23 | packages = [
24 | 'odml',
25 | 'odml.rdf',
26 | 'odml.resources',
27 | 'odml.scripts',
28 | 'odml.tools',
29 | 'odml.tools.converters'
30 | ]
31 |
32 | with open('README.md') as f:
33 | description_text = f.read()
34 |
35 | install_req = ["docopt", "lxml", "pyyaml>=5.1", "rdflib>=6.0.0"]
36 | # owlrl depends on rdflib; update any changes in requirements-test.txt as well.
37 | tests_req = ["owlrl", "pytest", "requests"]
38 |
39 | # Keep support for Python versions below 3.7; relevant for the
40 | # rdflib usage; rdflib >= 6 does not support Python versions below 3.7.
41 | if _python_version.minor <= 6:
42 | # pyparsing needs to be pinned to 2.4.7 due to issues with the rdflib 5.0.0 library.
43 | install_req = ["docopt", "lxml", "pyyaml>=5.1", "rdflib==5.0.0", "pyparsing==2.4.7"]
44 |
45 | # owlrl depends on rdflib and needs to be pinned to a corresponding version.
46 | tests_req = ["owlrl==5.2.3", "pytest", "requests"]
47 |
48 | setup(
49 | name='odML',
50 | version=VERSION,
51 | description='open metadata Markup Language',
52 | author=AUTHOR,
53 | author_email=CONTACT,
54 | url=HOMEPAGE,
55 | packages=packages,
56 | test_suite='test',
57 | install_requires=install_req,
58 | tests_require=tests_req,
59 | include_package_data=True,
60 | long_description=description_text,
61 | long_description_content_type="text/markdown",
62 | classifiers=CLASSIFIERS,
63 | license="BSD",
64 | entry_points={'console_scripts': ['odmltordf=odml.scripts.odml_to_rdf:main',
65 | 'odmlconversion=odml.scripts.odml_convert:dep_note',
66 | 'odmlconvert=odml.scripts.odml_convert:main',
67 | 'odmlview=odml.scripts.odml_view:main']}
68 | )
69 |
70 | # Make this the last thing people read after a setup.py install
71 | if _python_version.major < 3:
72 |     msg = "Python 2 has reached end of life."
73 | msg += "\n\todML support for Python 2 has been dropped."
74 | print(msg)
75 | elif _python_version.major == 3 and _python_version.minor < 7:
76 | msg = "\n\nThis package is not tested with your Python version. "
77 | msg += "\n\tPlease consider upgrading to the latest Python distribution."
78 | print(msg)
79 |
--------------------------------------------------------------------------------
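The console_scripts entry points above expose the converter scripts as shell commands after installation. A roughly equivalent Python call, with placeholder paths, based on the invocation used in test_script_odml_convert.py further below:

from odml.scripts import odml_convert

# 'odmlconvert -o <output_dir> <input_dir>' maps to (paths are placeholders):
odml_convert.main(["-o", "/tmp/odml_out", "path/to/odml_v1.0_files"])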
/scripts/release_tests/run_test_pypi_odmltools.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo
4 | echo "-- MAKE SURE TO RUN THIS SCRIPT IN INTERACTIVE MODE '-i' --"
5 |
6 | PY_VER_ARRAY=("|3.5|3.6|3.7|3.8|3.9|3.10|3.11|")
7 |
8 | if [[ $# != 1 ]]; then
9 | echo
10 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
11 | exit 1
12 | fi
13 |
14 | PYVER=$1
15 |
16 | if [[ ! "${PY_VER_ARRAY}" =~ "|${PYVER}|" ]]; then
17 | echo
18 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
19 | exit 1
20 | fi
21 |
22 | if [[ "${PYVER}" == "3.5" ]]; then
23 | echo
24 | echo "-- Ignoring unsupported Python version 3.5"
25 | exit 0
26 | fi
27 |
28 | echo
29 | echo "-- Using Python version ${PYVER}"
30 |
31 | ROOT_DIR=$(pwd)
32 | CONDA_ENV=odmlpip${PYVER}
33 |
34 | echo
35 | echo "-- Running directory check: ${ROOT_DIR}"
36 | CHECK_DIR=$(basename ${ROOT_DIR})
37 | if [[ ! "$CHECK_DIR" = "release_tests" ]]; then
38 | echo "-- [FAILED] In wrong directory ${ROOT_DIR}"
39 | exit 1
40 | fi
41 |
42 | echo
43 | echo "-- Running active conda env check: ${CONDA_PREFIX}"
44 | if [[ ! -z "${CONDA_PREFIX}" ]]; then
45 | echo "-- Deactivating conda env: ${CONDA_PREFIX}"
46 | conda deactivate
47 | fi
48 |
49 | echo
50 | echo "-- Cleanup previous conda environment and create new one"
51 | echo
52 | conda remove -q -n ${CONDA_ENV} --all -y
53 |
54 | conda create -q -n ${CONDA_ENV} python=${PYVER} -y
55 |
56 | conda activate ${CONDA_ENV}
57 | pip install -q --upgrade pip
58 | pip install -q ipython
59 |
60 | echo
61 | echo "-- Installing odmltools from PyPI test"
62 |
63 | pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -I odmltools
64 |
65 | if ! [[ -x "$(command -v odmlimportdatacite)" ]]; then
66 | conda deactivate
67 | cd ${ROOT_DIR}
68 | echo
69 | echo "-- [FAILED] odmlimportdatacite not installed"
70 | exit
71 | fi
72 |
73 | OUT_DIR=/tmp/odml/out/${PYVER}/odmltools
74 | mkdir -vp ${OUT_DIR}
75 | cd ${ROOT_DIR}/resources/test_odmltools
76 |
77 | echo
78 | echo "-- checking odml version"
79 | python -c "import odml; print('-- Testing against odml version v%s' % odml.VERSION)"
80 |
81 | echo
82 | echo "-- running odmltools conversion tests"
83 | odmlimportdatacite -o ${OUT_DIR} -r ./datacite
84 | odmlimportdatacite -o ${OUT_DIR} -r -f RDF ./datacite
85 | odmlimportdatacite -o ${OUT_DIR} -r -f YAML ./datacite
86 | odmlimportdatacite -o ${OUT_DIR} -r -f JSON ./datacite
87 |
88 | echo
89 | echo "-- checking namespace test success"
90 | odmlimportdatacite -o ${OUT_DIR} ./datacite_namespace/fullDataCiteSchemaNS.xml
91 |
92 | echo
93 | echo "-- checking namespace test fail"
94 | odmlimportdatacite -o ${OUT_DIR} ./datacite_namespace/DataCitePreviousNS.xml
95 |
96 | echo
97 | echo "-- checking namespace escape test success"
98 | odmlimportdatacite -o ${OUT_DIR} -n http://datacite.org/schema/kernel-2 ./datacite_namespace/DataCitePreviousNS.xml
99 |
100 | echo
101 | echo "-- Returning to root"
102 | cd ${ROOT_DIR}
103 |
104 | conda deactivate
105 |
106 | echo "-- Done"
107 | echo
108 |
--------------------------------------------------------------------------------
/test/test_version_converter_integration.py:
--------------------------------------------------------------------------------
1 | """
2 | This file tests proper conversion of complex v1.0 odML files
3 | to v1.1 with all supported file formats.
4 | """
5 |
6 | import os
7 | import shutil
8 | import unittest
9 |
10 | from odml import load
11 | from odml.tools.converters import VersionConverter as VC
12 | from .util import create_test_dir, TEST_RESOURCES_DIR as RES_DIR
13 |
14 |
15 | class TestVersionConverterIntegration(unittest.TestCase):
16 | def setUp(self):
17 | # Set up test environment
18 | self.jsonfile = os.path.join(RES_DIR, "version_conversion_int.json")
19 | self.xmlfile = os.path.join(RES_DIR, "version_conversion_int.xml")
20 | self.yamlfile = os.path.join(RES_DIR, "version_conversion_int.yaml")
21 |
22 | self.tmp_dir = create_test_dir(__file__)
23 | self.outfile = os.path.join(self.tmp_dir, "version_conversion.xml")
24 |
25 | def tearDown(self):
26 | if self.tmp_dir and os.path.exists(self.tmp_dir):
27 | shutil.rmtree(self.tmp_dir)
28 |
29 | def test_convert_xml(self):
30 | VC(self.xmlfile).write_to_file(self.outfile)
31 | self.assertTrue(os.path.exists(self.outfile))
32 |
33 | self.check_result()
34 |
35 | def test_convert_yaml(self):
36 | with self.assertRaises(Exception):
37 | VC(self.yamlfile).write_to_file(self.outfile)
38 |
39 | VC(self.yamlfile).write_to_file(self.outfile, "YAML")
40 | self.assertTrue(os.path.exists(self.outfile))
41 |
42 | self.check_result()
43 |
44 | def test_convert_json(self):
45 | with self.assertRaises(Exception):
46 | VC(self.jsonfile).write_to_file(self.outfile)
47 |
48 | VC(self.jsonfile).write_to_file(self.outfile, "JSON")
49 | self.assertTrue(os.path.exists(self.outfile))
50 |
51 | self.check_result()
52 |
53 | def check_result(self):
54 | doc = load(self.outfile)
55 |
56 | # Test document attribute export
57 | self.assertEqual(doc.author, "author")
58 | self.assertEqual(doc.version, "v1.13")
59 | repo = "https://terminologies.g-node.org/v1.1/terminologies.xml"
60 | self.assertEqual(doc.repository, repo)
61 | self.assertEqual(len(doc.sections), 3)
62 |
63 | sec = doc.sections["sec_one"]
64 | self.assertEqual(sec.definition, "def s1")
65 | self.assertEqual(sec.reference, "ref s1")
66 | self.assertEqual(sec.type, "mainsec")
67 | self.assertEqual(len(sec.sections), 1)
68 | self.assertEqual(len(sec.properties), 3)
69 |
70 | prop = sec.properties["prop_one"]
71 | self.assertEqual(prop.definition, "def prop1")
72 | self.assertEqual(prop.dependency, "dep p1")
73 | self.assertEqual(prop.dependency_value, "dep val p1")
74 | self.assertEqual(prop.reference, "ref val 1")
75 | self.assertEqual(prop.uncertainty, "11")
76 | self.assertEqual(prop.unit, "arbitrary")
77 | self.assertEqual(prop.value_origin, "filename val 1")
78 | self.assertEqual(prop.dtype, "string")
79 | self.assertEqual(len(prop.values), 3)
80 |
81 | prop = sec.properties["prop_two"]
82 | self.assertEqual(len(prop.values), 8)
83 |
84 | prop = sec.properties["prop_three"]
85 | self.assertEqual(len(prop.values), 0)
86 |
--------------------------------------------------------------------------------
/doc/rdf/sparql_example_queries.py:
--------------------------------------------------------------------------------
1 | from rdflib import Graph, RDF
2 | from rdflib.plugins.sparql import prepareQuery
3 |
4 | from odml.tools.rdf_converter import ODML_NS
5 |
6 | rdf_namespace = {"odml": ODML_NS, "rdf": RDF}
7 |
8 | resource = "./odml_RDF_example_A.ttl"
9 |
10 | g = Graph()
11 | g.parse(resource, format='turtle')
12 | # select d.* from dataset d, stimulus s where s.contrast = '20%'
13 | q1 = prepareQuery("""SELECT *
14 | WHERE {
15 | ?d rdf:type odml:Document .
16 | ?d odml:hasSection ?s .
17 | ?s rdf:type odml:Section .
18 | ?s odml:hasName "Stimulus" .
19 | ?s odml:hasProperty ?p .
20 | ?p odml:hasName "Contrast" .
21 | ?p odml:hasValue ?v .
22 | ?p odml:hasUnit "%" .
23 | ?v rdf:type rdf:Bag .
24 | ?v rdf:li "20.0" .
25 | }""", initNs=rdf_namespace)
26 |
27 | g = Graph()
28 | g.parse(resource, format='turtle')
29 | # select d.* from dataset d, stimulus s, cell c where s.contrast = '20%' and c.celltype='P-unit'
30 | q2 = prepareQuery("""SELECT *
31 | WHERE {
32 | ?d rdf:type odml:Document .
33 | ?d odml:hasSection ?s .
34 | ?s rdf:type odml:Section .
35 | ?s odml:hasName "Stimulus" .
36 | ?s odml:hasProperty ?p .
37 |
38 | ?p odml:hasName "Contrast" .
39 | ?p odml:hasValue ?v .
40 | ?p odml:hasUnit "%" .
41 | ?v rdf:type rdf:Bag .
42 | ?v rdf:li "20.0" .
43 |
44 |
45 | ?d odml:hasSection ?s1 .
46 | ?s1 odml:hasName "Cell" .
47 | ?s1 odml:hasProperty ?p1 .
48 |
49 | ?p1 odml:hasName "CellType" .
50 | ?p1 odml:hasValue ?v1 .
51 | ?v1 rdf:li "P-unit" .
52 | }""", initNs=rdf_namespace)
53 |
54 | # select d.* from dataset d, CellProperties s, EOD Frequency c where c.unit = 'Hz'
55 | g = Graph()
56 | g.parse(resource, format='turtle')
57 | q3 = prepareQuery("""SELECT *
58 | WHERE {
59 | ?d rdf:type odml:Document .
60 | ?d odml:hasSection ?s .
61 | ?s rdf:type odml:CellProperties .
62 | ?s odml:hasProperty ?p .
63 |
64 | ?p odml:hasName "EOD Frequency" .
65 | ?p odml:hasValue ?v .
66 | ?p odml:hasUnit "Hz" .
67 | ?v rdf:type rdf:Bag .
68 | ?v rdf:li ?value .
69 | }""", initNs=rdf_namespace)
70 |
71 | print("q1")
72 | for row in g.query(q1):
73 | print("Doc: {0}, Sec: {1}, \n"
74 | "Prop: {2}, Bag: {3}".format(row.d, row.s, row.p, row.v))
75 |
76 | print("q2")
77 | for row in g.query(q2):
78 | print("Doc: {0}, Sec: {1}, \n"
79 | "Prop: {2}, Bag: {3}".format(row.d, row.s, row.p, row.v))
80 | print("Doc: {0}, Sec: {1}, \n"
81 | "Prop: {2}, Bag: {3}".format(row.d, row.s1, row.p1, row.v1))
82 |
83 | print("q3")
84 | for row in g.query(q3):
85 | print("Doc: {0}, Sec: {1}, \n"
86 | "Prop: {2}, Bag: {3} \n"
87 | "Value: {4}".format(row.d, row.s, row.p, row.v, row.value))
88 |
--------------------------------------------------------------------------------
/scripts/release_tests/run_test_pypi_nixodmlconverter.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo
4 | echo "-- MAKE SURE TO RUN THIS SCRIPT IN INTERACTIVE MODE '-i' --"
5 |
6 | PY_VER_ARRAY=("|3.5|3.6|3.7|3.8|3.9|3.10|3.11|")
7 |
8 | if [[ $# != 1 ]]; then
9 | echo
10 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
11 | exit 1
12 | fi
13 |
14 | PYVER=$1
15 |
16 | if [[ ! "${PY_VER_ARRAY}" =~ "|${PYVER}|" ]]; then
17 | echo
18 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
19 | exit 1
20 | fi
21 |
22 | if [[ "${PYVER}" == "3.5" ]]; then
23 | echo
24 | echo "-- Ignoring unsupported Python version 3.5"
25 | exit 0
26 | fi
27 |
28 | echo
29 | echo "-- Using Python version ${PYVER}"
30 |
31 | ROOT_DIR=$(pwd)
32 | CONDA_ENV=odmlpip${PYVER}
33 |
34 | echo
35 | echo "-- Running directory check: ${ROOT_DIR}"
36 | CHECK_DIR=$(basename ${ROOT_DIR})
37 | if [[ ! "$CHECK_DIR" = "release_tests" ]]; then
38 | echo "-- [FAILED] In wrong directory ${ROOT_DIR}"
39 | exit 1
40 | fi
41 |
42 | echo
43 | echo "-- Running active conda env check: ${CONDA_PREFIX}"
44 | if [[ ! -z "${CONDA_PREFIX}" ]]; then
45 | echo "-- Deactivating conda env: ${CONDA_PREFIX}"
46 | conda deactivate
47 | fi
48 |
49 | echo
50 | echo "-- Cleanup previous conda environment and create new one"
51 | echo
52 | conda remove -q -n ${CONDA_ENV} --all -y
53 |
54 | conda create -q -n ${CONDA_ENV} python=${PYVER} -y
55 |
56 | conda activate ${CONDA_ENV}
57 | pip install -q --upgrade pip
58 | pip install -q ipython
59 |
60 | echo
61 | echo "-- Installing nixodmlconverter from PyPI test"
62 |
63 | pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -I nixodmlconverter
64 |
65 | if ! [[ -x "$(command -v nixodmlconverter)" ]]; then
66 | conda deactivate
67 | cd ${ROOT_DIR}
68 | echo
69 | echo "-- [FAILED] nixodmlconverter not installed"
70 | exit
71 | fi
72 |
73 |
74 | OUT_DIR=/tmp/odml/out/${PYVER}/nixodmlconverter
75 | mkdir -vp ${OUT_DIR}
76 | cp ${ROOT_DIR}/resources/test_nixodmlconv/example.odml.xml ${OUT_DIR}/
77 |
78 | cd ${OUT_DIR}
79 |
80 | echo
81 | echo "-- checking odml version"
82 | python -c "import odml; print('-- Testing against odml version v%s' % odml.VERSION)"
83 |
84 | echo
85 | echo "-- running nixodmlconverter help"
86 | nixodmlconverter -h
87 |
88 | echo
89 | echo "-- running nixodmlconversion odml->nix"
90 | if [[ -f example.odml.nix ]]; then
91 | rm example.odml.nix
92 | fi
93 | nixodmlconverter example.odml.xml
94 |
95 | if ! [[ -f example.odml.nix ]]; then
96 | conda deactivate
97 | cd ${ROOT_DIR}
98 | echo
99 | echo "-- [FAILED] nixodmlconverter conversion odml->nix"
100 | exit
101 | fi
102 |
103 | cp example.odml.nix export.odml.nix
104 |
105 | echo
106 | echo "-- running nixodmlconversion nix->odml"
107 | if [[ -f export.odml.xml ]]; then
108 | rm export.odml.xml
109 | fi
110 | nixodmlconverter export.odml.nix
111 |
112 | if ! [[ -f export.odml.xml ]]; then
113 | conda deactivate
114 | cd ${ROOT_DIR}
115 | echo
116 | echo "-- [FAILED] nixodmlconverter conversion nix->odml"
117 | exit
118 | fi
119 |
120 | echo
121 | echo "-- Returning to root"
122 | cd ${ROOT_DIR}
123 |
124 | conda deactivate
125 |
126 | echo "-- Done"
127 | echo
128 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 |
15 | .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
16 |
17 | help:
18 | 	@echo "Please use \`make <target>' where <target> is one of"
19 | @echo " html to make standalone HTML files"
20 | @echo " dirhtml to make HTML files named index.html in directories"
21 | @echo " pickle to make pickle files"
22 | @echo " json to make JSON files"
23 | @echo " htmlhelp to make HTML files and a HTML help project"
24 | @echo " qthelp to make HTML files and a qthelp project"
25 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
26 | @echo " changes to make an overview of all changed/added/deprecated items"
27 | @echo " linkcheck to check all external links for integrity"
28 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
29 |
30 | clean:
31 | -rm -rf $(BUILDDIR)/*
32 |
33 | html:
34 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
35 | @echo
36 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
37 |
38 | dirhtml:
39 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
40 | @echo
41 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
42 |
43 | pickle:
44 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
45 | @echo
46 | @echo "Build finished; now you can process the pickle files."
47 |
48 | json:
49 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
50 | @echo
51 | @echo "Build finished; now you can process the JSON files."
52 |
53 | htmlhelp:
54 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
55 | @echo
56 | @echo "Build finished; now you can run HTML Help Workshop with the" \
57 | ".hhp project file in $(BUILDDIR)/htmlhelp."
58 |
59 | qthelp:
60 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
61 | @echo
62 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
63 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
64 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-odml.qhcp"
65 | @echo "To view the help file:"
66 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-odml.qhc"
67 |
68 | latex:
69 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
70 | @echo
71 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
72 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
73 | "run these through (pdf)latex."
74 |
75 | changes:
76 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
77 | @echo
78 | @echo "The overview file is in $(BUILDDIR)/changes."
79 |
80 | linkcheck:
81 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
82 | @echo
83 | @echo "Link check complete; look for any errors in the above output " \
84 | "or in $(BUILDDIR)/linkcheck/output.txt."
85 |
86 | doctest:
87 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
88 | @echo "Testing of doctests in the sources finished, look at the " \
89 | "results in $(BUILDDIR)/doctest/output.txt."
90 |
--------------------------------------------------------------------------------
/scripts/release_tests/run_test_matrix.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo
4 | echo "-- Running python-odml package test installation matrix"
5 |
6 | print_options () {
7 | echo
8 | echo "-- Missing or invalid test script selection letter (A|B|C|D|E), please provide one of the following"
9 | echo " A: local install test (odml)"
10 | echo " B: PyPI TEST install test (odml)"
11 | echo " C: PyPI TEST install test (odmltools)"
12 | echo " D: PyPI TEST install test (nixodmlconverter)"
13 | echo " E: PyPI LIVE install test (odml)"
14 | }
15 |
16 | if [[ $# != 1 ]]; then
17 | print_options
18 | exit 1
19 | fi
20 |
21 | TEST_ARRAY=("|A|B|C|D|E|")
22 | TEST=$1
23 |
24 | if [[ ! "${TEST_ARRAY}" =~ "|${TEST}|" ]]; then
25 | print_options
26 | exit 1
27 | fi
28 |
29 | if [[ "${TEST}" == "A" ]]; then
30 | echo
31 | echo "-- Running local test odml installations"
32 | LOG_DIR=/tmp/odml/local_install_odml
33 | SCRIPT=./run_test_local_odml.sh
34 | echo "${SCRIPT}"
35 | fi
36 |
37 | if [[ "${TEST}" == "B" ]]; then
38 | echo
39 | echo "-- Running PyPI TEST server odml installations"
40 | LOG_DIR=/tmp/odml/pypi_test_install_odml
41 | SCRIPT=./run_test_pypi_odml.sh
42 | fi
43 |
44 | if [[ "${TEST}" == "C" ]]; then
45 | echo
46 | echo "-- Running PyPI TEST server odmltools installations"
47 | LOG_DIR=/tmp/odml/pypi_test_install_odmltools
48 | SCRIPT=./run_test_pypi_odmltools.sh
49 | fi
50 |
51 | if [[ "${TEST}" == "D" ]]; then
52 | echo
53 | echo "-- Running PyPI TEST server nixodmlconverter installations"
54 | LOG_DIR=/tmp/odml/pypi_test_install_nixodmlconverter
55 | SCRIPT=./run_test_pypi_nixodmlconverter.sh
56 | fi
57 |
58 | if [[ "${TEST}" == "E" ]]; then
59 | echo
60 | echo "-- Running PyPI LIVE server odml installations"
61 | LOG_DIR=/tmp/odml/pypi_install_odml
62 | SCRIPT=./run_pypi_odml.sh
63 | fi
64 |
65 | ROOT_DIR=$(pwd)
66 |
67 | echo
68 | echo "-- Running directory check: ${ROOT_DIR}"
69 | CHECK_DIR=$(basename ${ROOT_DIR})
70 | if [[ ! "${CHECK_DIR}" = "release_tests" ]]; then
71 | echo
72 | echo "-- In wrong directory ${ROOT_DIR}"
73 | exit 1
74 | fi
75 |
76 | echo
77 | echo "-- Creating log directory ${LOG_DIR}"
78 | mkdir -vp ${LOG_DIR}
79 | if [[ ! -d "${LOG_DIR}" ]]; then
80 | echo
81 | echo "-- Cannot find ${LOG_DIR} output directory"
82 | exit 1
83 | fi
84 |
85 | echo
86 | echo "-- Log files of all tests can be found in ${LOG_DIR}"
87 |
88 | function run_script () {
89 | echo
90 | echo "-- Running script for Python version ${PYVER}"
91 | bash -i ${SCRIPT} ${PYVER} > ${LOG_DIR}/${PYVER}_testrun.log 2>&1
92 | FAIL_COUNT=$(cat ${LOG_DIR}/${PYVER}_testrun.log | grep -c FAILED)
93 | if [[ "${FAIL_COUNT}" -gt 0 ]]; then
94 | echo "-- Test fail in Python ${PYVER} tests. Check ${LOG_DIR}/${PYVER}_testrun.log"
95 | fi
96 | PY_ERR_COUNT=$(cat ${LOG_DIR}/${PYVER}_testrun.log | grep -c Traceback)
97 | if [[ "${PY_ERR_COUNT}" -gt 0 ]]; then
98 | echo "-- Runtime error in Python ${PYVER} tests. Check ${LOG_DIR}/${PYVER}_testrun.log"
99 | fi
100 | }
101 |
102 | PYVER=3.11
103 | run_script
104 |
105 | PYVER=3.10
106 | run_script
107 |
108 | PYVER=3.9
109 | run_script
110 |
111 | PYVER=3.8
112 | run_script
113 |
114 | PYVER=3.7
115 | run_script
116 |
117 | PYVER=3.6
118 | run_script
119 |
120 | PYVER=3.5
121 | run_script
122 |
123 | echo
124 | echo "-- Done"
125 |
--------------------------------------------------------------------------------
/test/test_iterators.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from odml import Property, Section, Document
4 |
5 |
6 | class TestValidation(unittest.TestCase):
7 |
8 | def setUp(self):
9 | """
10 |         doc -- sec_main -- sub_main
11 |                    \
12 |                     -- sec_branch -- sub_branch
13 | """
14 | doc = Document("author")
15 |
16 | sec_main = Section("sec_main", "maintype")
17 | doc.append(sec_main)
18 | sec_main.append(Property("strprop", "somestring"))
19 | sec_main.append(Property("txtprop", "some\ntext"))
20 |
21 | sub_main = Section("sub_main", "maintype")
22 | sec_main.append(sub_main)
23 | sub_main.append(Property("strprop", "somestring"))
24 | sub_main.append(Property("txtprop", "some\ntext"))
25 |
26 | sec_branch = Section("sec_branch", "branchtype")
27 | sec_main.append(sec_branch)
28 | sec_branch.append(Property("strprop", "otherstring"))
29 | sec_branch.append(Property("txtprop", "other\ntext"))
30 |
31 | sub_branch = Section("sub_branch", "branchtype")
32 | sec_branch.append(sub_branch)
33 | sub_branch.append(Property("strprop", "otherstring"))
34 | sub_branch.append(Property("txtprop", "other\ntext"))
35 |
36 | self.doc = doc
37 |
38 | def test_itersections(self):
39 | sec_all = list(self.doc.itersections())
40 | self.assertEqual(len(sec_all), 4)
41 |
42 | filter_func = lambda x: getattr(x, "name") == "sec_main"
43 | sec_filtered = list(self.doc.itersections(filter_func=filter_func))
44 | self.assertEqual(len(sec_filtered), 1)
45 |
46 | filter_func = lambda x: getattr(x, "type").find("branch") > -1
47 | sec_filtered = list(self.doc.itersections(filter_func=filter_func))
48 | self.assertEqual(len(sec_filtered), 2)
49 |
50 | sec_filtered = list(self.doc.itersections(max_depth=2))
51 | self.assertEqual(len(sec_filtered), 3)
52 |
53 | sec_filtered = list(self.doc.itersections(max_depth=1))
54 | self.assertEqual(len(sec_filtered), 1)
55 |
56 | sec_filtered = list(self.doc.itersections(max_depth=0))
57 | self.assertEqual(len(sec_filtered), 0)
58 |
59 | def test_iterproperties(self):
60 | prop_all = list(self.doc.iterproperties())
61 | self.assertEqual(len(prop_all), 8)
62 |
63 | filter_func = lambda x: getattr(x, "name").find("strprop") > -1
64 | prop_filtered = list(self.doc.iterproperties(filter_func=filter_func))
65 | self.assertEqual(len(prop_filtered), 4)
66 |
67 | prop_filtered = list(self.doc.iterproperties(filter_func=filter_func, max_depth=2))
68 | self.assertEqual(len(prop_filtered), 3)
69 |
70 | prop_filtered = list(self.doc.iterproperties(filter_func=filter_func, max_depth=1))
71 | self.assertEqual(len(prop_filtered), 1)
72 |
73 | def test_itervalues(self):
74 | val_all = list(self.doc.itervalues())
75 | self.assertEqual(len(val_all), 8)
76 |
77 | filter_func = lambda x: str(x).find("text") > -1
78 | val_filtered = list(self.doc.itervalues(filter_func=filter_func))
79 | self.assertEqual(len(val_filtered), 4)
80 |
81 | val_filtered = list(self.doc.itervalues(filter_func=filter_func, max_depth=2))
82 | self.assertEqual(len(val_filtered), 3)
83 |
84 | val_filtered = list(self.doc.itervalues(filter_func=filter_func, max_depth=1))
85 | self.assertEqual(len(val_filtered), 1)
86 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_convert/conversion_example_A.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 0.0um
8 | Depth
9 |
10 |
11 | P-unit
12 | CellType
13 |
14 |
15 | Nerve
16 | Structure
17 |
18 |
19 | 0.1
20 | CV
21 |
22 |
23 | 12.8Hz
24 | Baseline rate
25 |
26 | Cell
27 | Cell
28 |
29 |
30 |
31 | weak response
32 | Comment
33 |
34 |
35 | 2020-06-24-ab
36 | Name
37 |
38 |
39 | Good
40 | Recording quality
41 |
42 |
43 | 12.8.7 (20/06/24)
44 | Software version
45 |
46 |
47 | Ex Perimenter
48 | Experimenter
49 |
50 |
51 | Acquisition
52 | Mode
53 |
54 |
55 | conversion_example.xml
56 | File
57 |
58 |
59 | 16:35:51
60 | Time
61 |
62 |
63 | 2020-06-24
64 | Date
65 |
66 |
67 | 100m
68 | Recording duration
69 |
70 |
71 | ./odml_convert
72 | Folder
73 |
74 |
75 | SomeEphysSoftware
76 | Software
77 |
78 | Recording
79 | Recording
80 |
81 |
82 |
83 | 12.0Hz
84 | Firing Rate1
85 |
86 |
87 | 12.0Hz
88 | EOD Frequency
89 |
90 |
91 | 0.1
92 | P-Value1
93 |
94 | Cell properties
95 | Cell properties
96 |
97 |
98 |
99 | unknown
100 | Gender
101 |
102 |
103 | Xen Omorph
104 | Species
105 |
106 |
107 | 10.0cm
108 | Size
109 |
110 |
111 | 1.0mV
112 | Transdermal amplitude
113 |
114 |
115 | 1.4Hz
116 | EOD Frequency
117 |
118 | Subject
119 | Subject
120 |
121 | tester
122 |
123 |
--------------------------------------------------------------------------------
/test/resources/scripts/odml_convert/conversion_example_B.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 0.0um
8 | Depth
9 |
10 |
11 | P-unit
12 | CellType
13 |
14 |
15 | Nerve
16 | Structure
17 |
18 |
19 | 0.1
20 | CV
21 |
22 |
23 | 12.8Hz
24 | Baseline rate
25 |
26 | Cell
27 | Cell
28 |
29 |
30 |
31 | weak response
32 | Comment
33 |
34 |
35 | 2020-06-24-ab
36 | Name
37 |
38 |
39 | Good
40 | Recording quality
41 |
42 |
43 | 12.8.7 (20/06/24)
44 | Software version
45 |
46 |
47 | Ex Perimenter
48 | Experimenter
49 |
50 |
51 | Acquisition
52 | Mode
53 |
54 |
55 | conversion_example.xml
56 | File
57 |
58 |
59 | 16:35:51
60 | Time
61 |
62 |
63 | 2020-06-24
64 | Date
65 |
66 |
67 | 100m
68 | Recording duration
69 |
70 |
71 | ./odml_convert
72 | Folder
73 |
74 |
75 | SomeEphysSoftware
76 | Software
77 |
78 | Recording
79 | Recording
80 |
81 |
82 |
83 | 12.0Hz
84 | Firing Rate1
85 |
86 |
87 | 12.0Hz
88 | EOD Frequency
89 |
90 |
91 | 0.1
92 | P-Value1
93 |
94 | Cell properties
95 | Cell properties
96 |
97 |
98 |
99 | unknown
100 | Gender
101 |
102 |
103 | Xen Omorph
104 | Species
105 |
106 |
107 | 10.0cm
108 | Size
109 |
110 |
111 | 1.0mV
112 | Transdermal amplitude
113 |
114 |
115 | 1.4Hz
116 | EOD Frequency
117 |
118 | Subject
119 | Subject
120 |
121 | tester
122 |
123 |
--------------------------------------------------------------------------------
/test/test_xml_writer.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import unittest
4 |
5 | import odml
6 |
7 | from odml.tools.xmlparser import XML_HEADER, EXTERNAL_STYLE_HEADER, \
8 | INFILE_STYLE_HEADER, INFILE_STYLE_TEMPLATE
9 | from odml.tools import XMLWriter
10 | from .util import create_test_dir, TEST_RESOURCES_DIR as RES_DIR
11 |
12 |
13 | class TestXMLWriter(unittest.TestCase):
14 | def setUp(self):
15 | # Set up test environment
16 | self.xmlfile = os.path.join(RES_DIR, "version_conversion_int.xml")
17 |
18 | self.tmp_dir = create_test_dir(__file__)
19 | self.outfile = os.path.join(self.tmp_dir, "xml_writer.xml")
20 |
21 | doc = odml.Document()
22 | sec = doc.create_section(name="sec", type="test")
23 | _ = sec.create_property(name="prop", value=['a', 'b', 'c', 'μ'])
24 |
25 | self.doc = doc
26 | self.writer = XMLWriter(doc)
27 |
28 | def tearDown(self):
29 | if self.tmp_dir and os.path.exists(self.tmp_dir):
30 | shutil.rmtree(self.tmp_dir)
31 |
32 | def test_write_default(self):
33 | self.writer.write_file(self.outfile)
34 |
35 | # make sure the file can be read again without errors
36 | doc = odml.load(self.outfile)
37 | self.assertEqual(doc, self.doc)
38 |
39 | # test style content in saved file
40 | with open(self.outfile) as test_file:
41 | content = test_file.read()
42 |
43 | self.assertIn(XML_HEADER, content)
44 | self.assertIn(EXTERNAL_STYLE_HEADER, content)
45 |
46 | def test_write_style_default(self):
47 | self.writer.write_file(self.outfile, local_style=True)
48 |
49 | # make sure the file can be read again without errors
50 | doc = odml.load(self.outfile)
51 | self.assertEqual(doc, self.doc)
52 |
53 | # test style content in saved file
54 | with open(self.outfile) as test_file:
55 | content = test_file.read()
56 |
57 | self.assertIn(XML_HEADER, content)
58 | self.assertIn(INFILE_STYLE_HEADER, content)
59 | self.assertIn(INFILE_STYLE_TEMPLATE, content)
60 |
61 | def test_write_style_custom(self):
62 |         # template stub just to see if it's written properly; will not render anything
63 | cust_tmpl = ""
64 |
65 | self.writer.write_file(self.outfile, local_style=True, custom_template=cust_tmpl)
66 |
67 | # make sure the file can be read again without errors
68 | doc = odml.load(self.outfile)
69 | self.assertEqual(doc, self.doc)
70 |
71 | # test style content in saved file
72 | with open(self.outfile) as test_file:
73 | content = test_file.read()
74 |
75 | self.assertIn(XML_HEADER, content)
76 | self.assertIn(INFILE_STYLE_HEADER, content)
77 | self.assertNotIn(INFILE_STYLE_TEMPLATE, content)
78 | self.assertIn(cust_tmpl, content)
79 |
80 | # --- test second possible way to save
81 | self.writer.write_file(self.outfile, local_style=False, custom_template=cust_tmpl)
82 |
83 | # make sure the file can be read again without errors
84 | doc = odml.load(self.outfile)
85 | self.assertEqual(doc, self.doc)
86 |
87 | # test style content in saved file
88 | with open(self.outfile) as test_file:
89 | content = test_file.read()
90 |
91 | self.assertIn(XML_HEADER, content)
92 | self.assertIn(INFILE_STYLE_HEADER, content)
93 | self.assertNotIn(INFILE_STYLE_TEMPLATE, content)
94 | self.assertIn(cust_tmpl, content)
95 |
--------------------------------------------------------------------------------
/test/test_script_odml_convert.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import unittest
4 |
5 | from docopt import DocoptExit
6 |
7 | from odml import load as odml_load
8 | from odml.scripts import odml_convert
9 | from . import util
10 |
11 |
12 | class TestScriptOdmlConvert(unittest.TestCase):
13 |
14 | def setUp(self):
15 | self.tmp_dir = util.create_test_dir(__file__)
16 | self.dir_files = os.path.join(util.TEST_RESOURCES_DIR, "scripts", "odml_convert")
17 | self.dir_broken = os.path.join(self.dir_files, "test_broken")
18 | self.dir_recursive = os.path.join(self.dir_files, "test_recursive")
19 |
20 | def tearDown(self):
21 | if self.tmp_dir and os.path.exists(self.tmp_dir):
22 | shutil.rmtree(self.tmp_dir)
23 |
24 | def test_script_exit(self):
25 | with self.assertRaises(DocoptExit):
26 | odml_convert.main([])
27 |
28 | with self.assertRaises(DocoptExit):
29 | odml_convert.main(["-o", self.tmp_dir])
30 |
31 | with self.assertRaises(SystemExit):
32 | odml_convert.main(["-h"])
33 |
34 | with self.assertRaises(SystemExit):
35 | odml_convert.main(["--version"])
36 |
37 | def test_broken(self):
38 | # make sure temp dir is empty
39 | self.assertListEqual(os.listdir(self.tmp_dir), [])
40 |
41 | # run converter on directory with invalid files
42 | odml_convert.main(["-o", self.tmp_dir, self.dir_broken])
43 |
44 | # make sure an output directory has been created
45 | out_dir_lst = os.listdir(self.tmp_dir)
46 | self.assertEqual(len(out_dir_lst), 1)
47 | out_dir = os.path.join(self.tmp_dir, out_dir_lst[0])
48 | self.assertTrue(os.path.isdir(out_dir))
49 |
50 | # make sure no file has been created
51 | self.assertListEqual(os.listdir(out_dir), [])
52 |
53 | def test_valid_conversion(self):
54 | # make sure temp dir is empty
55 | self.assertListEqual(os.listdir(self.tmp_dir), [])
56 |
57 | # run converter on root directory containing two files
58 | odml_convert.main(["-o", self.tmp_dir, self.dir_files])
59 |
60 | # make sure an output directory has been created
61 | out_dir_lst = os.listdir(self.tmp_dir)
62 | self.assertEqual(len(out_dir_lst), 1)
63 | out_dir = os.path.join(self.tmp_dir, out_dir_lst[0])
64 | self.assertTrue(os.path.isdir(out_dir))
65 |
66 | # make sure two files have been created
67 | file_lst = os.listdir(out_dir)
68 | self.assertEqual(len(file_lst), 2)
69 |
70 | # make sure the files are valid odml files
71 | _ = odml_load(os.path.join(out_dir, file_lst[0]))
72 | _ = odml_load(os.path.join(out_dir, file_lst[1]))
73 |
74 | def test_recursive_conversion(self):
75 | # make sure temp dir is empty
76 | self.assertListEqual(os.listdir(self.tmp_dir), [])
77 |
78 | # run converter on root directory containing two files
79 | odml_convert.main(["-r", "-o", self.tmp_dir, self.dir_recursive])
80 |
81 | # make sure an output directory has been created
82 | out_dir_lst = os.listdir(self.tmp_dir)
83 | self.assertEqual(len(out_dir_lst), 1)
84 | out_dir = os.path.join(self.tmp_dir, out_dir_lst[0])
85 | self.assertTrue(os.path.isdir(out_dir))
86 |
87 | # make sure two files have been created
88 | file_lst = os.listdir(out_dir)
89 | self.assertEqual(len(file_lst), 2)
90 |
91 | # make sure the files are valid odml files
92 | _ = odml_load(os.path.join(out_dir, file_lst[0]))
93 | _ = odml_load(os.path.join(out_dir, file_lst[1]))
94 |
--------------------------------------------------------------------------------
/scripts/release_tests/run_pypi_odml.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo
4 | echo "-- MAKE SURE TO RUN THIS SCRIPT IN INTERACTIVE MODE '-i' --"
5 |
6 | PY_VER_ARRAY=("|3.5|3.6|3.7|3.8|3.9|3.10|3.11|")
7 |
8 | if [[ $# != 1 ]]; then
9 | echo
10 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
11 | exit 1
12 | fi
13 |
14 | PYVER=$1
15 |
16 | if [[ ! "${PY_VER_ARRAY}" =~ "|${PYVER}|" ]]; then
17 | echo
18 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
19 | exit 1
20 | fi
21 |
22 | echo
23 | echo "-- Using Python version ${PYVER}"
24 |
25 | ROOT_DIR=$(pwd)
26 | CONDA_ENV=odmlpip${PYVER}
27 |
28 | echo
29 | echo "-- Running directory check: ${ROOT_DIR}"
30 | CHECK_DIR=$(basename ${ROOT_DIR})
31 | if [[ ! "$CHECK_DIR" = "release_tests" ]]; then
32 | echo "-- [FAILED] In wrong directory ${ROOT_DIR}"
33 | exit 1
34 | fi
35 |
36 | echo
37 | echo "-- Running active conda env check: ${CONDA_PREFIX}"
38 | if [[ ! -z "${CONDA_PREFIX}" ]]; then
39 | echo "-- Deactivating conda env: ${CONDA_PREFIX}"
40 | conda deactivate
41 | fi
42 |
43 | echo
44 | echo "-- Cleanup previous conda environment and create new one"
45 | echo
46 | conda remove -q -n ${CONDA_ENV} --all -y
47 |
48 | conda create -q -n ${CONDA_ENV} python=${PYVER} -y
49 |
50 | conda activate ${CONDA_ENV}
51 | pip install -q --upgrade pip
52 | pip install -q ipython
53 |
54 | echo
55 | echo "-- Installing odml from PyPI proper"
56 | echo
57 |
58 | pip install -q odml
59 |
60 | echo
61 | echo "-- Installing dependencies and odml-ui from PyPI proper"
62 | echo
63 |
64 | conda install -q -c pkgw/label/superseded gtk3 -y
65 | conda install -q -c conda-forge pygobject -y
66 | conda install -q -c conda-forge gdk-pixbuf -y
67 | conda install -q -c pkgw-forge adwaita-icon-theme -y
68 |
69 | pip install -q odml-ui
70 |
71 | echo
72 | echo "-- checking odml version"
73 | python -c "import odml; print('-- Testing against odml version v%s' % odml.VERSION)"
74 | python -c "import odmlui; print('-- Testing against odmlui version v%s' % odmlui.info.VERSION)"
75 |
76 | echo
77 | echo "-- Running basic tests"
78 | cd ${ROOT_DIR}/resources/test_load
79 | BASIC_SCRIPT=${ROOT_DIR}/resources/scripts/odml_basics.py
80 | python ${BASIC_SCRIPT}
81 |
82 | if [[ ! $? -eq 0 ]]; then
83 | cd ${ROOT_DIR}
84 | conda deactivate
85 | echo
86 | echo "-- [FAILED] Encountered error in script ${BASIC_SCRIPT}"
87 | exit
88 | fi
89 |
90 | echo
91 | echo "-- Returning to root"
92 | cd ${ROOT_DIR}
93 |
94 | echo
95 | echo "-- Creating convert output folder"
96 | OUT_DIR=/tmp/odml/out/${PYVER}/convert
97 | mkdir -vp ${OUT_DIR}
98 |
99 | echo
100 | echo "-- Running conversion script tests"
101 |
102 | if ! [[ -x "$(command -v odmlconvert)" ]]; then
103 | conda deactivate
104 | cd ${ROOT_DIR}
105 | echo
106 | echo "-- [FAILED] odmlconvert not installed"
107 | exit
108 | fi
109 |
110 | cd ${ROOT_DIR}/resources/test_convert_script
111 | odmlconvert -o ${OUT_DIR} -r .
112 |
113 | echo
114 | echo "-- Returning to root"
115 | cd ${ROOT_DIR}
116 |
117 | echo
118 | echo "-- Creating rdf output folder"
119 | OUT_DIR=/tmp/odml/out/${PYVER}/rdf
120 | mkdir -vp ${OUT_DIR}
121 |
122 | if ! [[ -x "$(command -v odmltordf)" ]]; then
123 | conda deactivate
124 | cd ${ROOT_DIR}
125 | echo
126 | echo "-- [FAILED] odmltordf not installed"
127 | exit
128 | fi
129 |
130 | echo
131 | echo "-- Running rdf conversion script test"
132 | cd ${ROOT_DIR}/resources/test_rdf_export_script
133 | odmltordf -o ${OUT_DIR} -r .
134 |
135 | echo
136 | echo "-- Returning to root"
137 | cd ${ROOT_DIR}
138 |
139 | conda deactivate
140 |
141 | echo "-- Done"
142 | echo
143 |
--------------------------------------------------------------------------------
/test/test_script_odml_to_rdf.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import unittest
4 |
5 | from docopt import DocoptExit
6 | from rdflib import Graph
7 |
8 | from odml.scripts import odml_to_rdf
9 |
10 | from . import util
11 |
12 |
13 | class TestScriptOdmlToRDF(unittest.TestCase):
14 |
15 | def setUp(self):
16 | self.tmp_dir = util.create_test_dir(__file__)
17 | self.dir_files = os.path.join(util.TEST_RESOURCES_DIR, "scripts", "odml_to_rdf")
18 | self.dir_invalid = os.path.join(self.dir_files, "test_invalid")
19 | self.dir_recursive = os.path.join(self.dir_files, "test_recursive")
20 |
21 | def tearDown(self):
22 | if self.tmp_dir and os.path.exists(self.tmp_dir):
23 | shutil.rmtree(self.tmp_dir)
24 |
25 | def test_script_exit(self):
26 | with self.assertRaises(DocoptExit):
27 | odml_to_rdf.main([])
28 |
29 | with self.assertRaises(DocoptExit):
30 | odml_to_rdf.main(["-o", self.tmp_dir])
31 |
32 | with self.assertRaises(SystemExit):
33 | odml_to_rdf.main(["-h"])
34 |
35 | with self.assertRaises(SystemExit):
36 | odml_to_rdf.main(["--version"])
37 |
38 | def _check_intermediate_dirs(self):
39 | # make sure an odml version conversion output directory has been created
40 | out_dir_lst = os.listdir(self.tmp_dir)
41 | self.assertEqual(len(out_dir_lst), 1)
42 | out_dir = os.path.join(self.tmp_dir, out_dir_lst[0])
43 | self.assertTrue(os.path.isdir(out_dir))
44 |
45 | # make sure an rdf conversion output directory has been created
46 | rdf_dir_lst = os.listdir(out_dir)
47 | self.assertEqual(len(rdf_dir_lst), 1)
48 | rdf_dir = os.path.join(out_dir, rdf_dir_lst[0])
49 | self.assertTrue(os.path.isdir(rdf_dir))
50 |
51 | return rdf_dir
52 |
53 | def test_valid_conversion(self):
54 | # make sure temp dir is empty
55 | self.assertListEqual(os.listdir(self.tmp_dir), [])
56 |
57 | # run converter on root directory containing two files
58 | odml_to_rdf.main(["-o", self.tmp_dir, self.dir_files])
59 |
60 | rdf_dir = self._check_intermediate_dirs()
61 |
62 | # make sure two files have been created
63 | file_lst = os.listdir(rdf_dir)
64 | self.assertEqual(len(file_lst), 2)
65 |
66 | # make sure the files are valid RDF files
67 | curr_graph = Graph()
68 | curr_graph.parse(os.path.join(rdf_dir, file_lst[0]))
69 | curr_graph.parse(os.path.join(rdf_dir, file_lst[1]))
70 |
71 | def test_invalid(self):
72 | # make sure temp dir is empty
73 | self.assertListEqual(os.listdir(self.tmp_dir), [])
74 |
75 | # run converter on directory with invalid files
76 | odml_to_rdf.main(["-o", self.tmp_dir, self.dir_invalid])
77 |
78 | rdf_dir = self._check_intermediate_dirs()
79 |
80 | # make sure no file has been created
81 | self.assertListEqual(os.listdir(rdf_dir), [])
82 |
83 | def test_recursive_conversion(self):
84 | # make sure temp dir is empty
85 | self.assertListEqual(os.listdir(self.tmp_dir), [])
86 |
87 | # run converter on root directory containing two files
88 | odml_to_rdf.main(["-r", "-o", self.tmp_dir, self.dir_recursive])
89 |
90 | rdf_dir = self._check_intermediate_dirs()
91 |
92 | # make sure two files have been created
93 | file_lst = os.listdir(rdf_dir)
94 | self.assertEqual(len(file_lst), 2)
95 |
96 | # make sure the files are valid RDF files
97 | curr_graph = Graph()
98 | curr_graph.parse(os.path.join(rdf_dir, file_lst[0]))
99 | curr_graph.parse(os.path.join(rdf_dir, file_lst[1]))
100 |
--------------------------------------------------------------------------------
/scripts/release_tests/run_test_pypi_odml.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo
4 | echo "-- MAKE SURE TO RUN THIS SCRIPT IN INTERACTIVE MODE '-i' --"
5 |
6 | PY_VER_ARRAY=("|3.5|3.6|3.7|3.8|3.9|3.10|3.11|")
7 |
8 | if [[ $# != 1 ]]; then
9 | echo
10 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
11 | exit 1
12 | fi
13 |
14 | PYVER=$1
15 |
16 | if [[ ! "${PY_VER_ARRAY}" =~ "|${PYVER}|" ]]; then
17 | echo
18 | echo "-- [FAILED] Please provide a valid Python version: ${PY_VER_ARRAY}"
19 | exit 1
20 | fi
21 |
22 | echo
23 | echo "-- Using Python version ${PYVER}"
24 |
25 | ROOT_DIR=$(pwd)
26 | CONDA_ENV=odmlpip${PYVER}
27 |
28 | echo
29 | echo "-- Running directory check: ${ROOT_DIR}"
30 | CHECK_DIR=$(basename ${ROOT_DIR})
31 | if [[ ! "$CHECK_DIR" = "release_tests" ]]; then
32 | echo "-- [FAILED] In wrong directory ${ROOT_DIR}"
33 | exit 1
34 | fi
35 |
36 | echo
37 | echo "-- Running active conda env check: ${CONDA_PREFIX}"
38 | if [[ ! -z "${CONDA_PREFIX}" ]]; then
39 | echo "-- Deactivating conda env: ${CONDA_PREFIX}"
40 | conda deactivate
41 | fi
42 |
43 | echo
44 | echo "-- Cleanup previous conda environment and create new one"
45 | echo
46 | conda remove -q -n ${CONDA_ENV} --all -y
47 |
48 | conda create -q -n ${CONDA_ENV} python=${PYVER} -y
49 |
50 | conda activate ${CONDA_ENV}
51 | pip install -q --upgrade pip
52 | pip install -q ipython
53 |
54 | echo
55 | echo "-- Installing odml from PyPI test"
56 | echo
57 |
58 | pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -I odml
59 |
60 | echo
61 | echo "-- Installing dependencies and odml-ui from PyPI test"
62 | echo
63 |
64 | conda install -q -c pkgw/label/superseded gtk3 -y
65 | conda install -q -c conda-forge pygobject -y
66 | conda install -q -c conda-forge gdk-pixbuf -y
67 | conda install -q -c pkgw-forge adwaita-icon-theme -y
68 |
69 | pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -I odml-ui
70 |
71 | echo
72 | echo "-- checking odml version"
73 | python -c "import odml; print('-- Testing against odml version v%s' % odml.VERSION)"
74 | python -c "import odmlui; print('-- Testing against odmlui version v%s' % odmlui.info.VERSION)"
75 |
76 | echo
77 | echo "-- Running basic tests"
78 | cd ${ROOT_DIR}/resources/test_load
79 | BASIC_SCRIPT=${ROOT_DIR}/resources/scripts/odml_basics.py
80 | python ${BASIC_SCRIPT}
81 |
82 | if [[ ! $? -eq 0 ]]; then
83 | cd ${ROOT_DIR}
84 | conda deactivate
85 | echo
86 | echo "-- [FAILED] Encountered error in script ${BASIC_SCRIPT}"
87 | exit 1
88 | fi
89 |
90 | echo
91 | echo "-- Returning to root"
92 | cd ${ROOT_DIR}
93 |
94 | echo
95 | echo "-- Creating convert output folder"
96 | OUT_DIR=/tmp/odml/out/${PYVER}/convert
97 | mkdir -vp ${OUT_DIR}
98 |
99 | echo
100 | echo "-- Running conversion script tests"
101 |
102 | if ! [[ -x "$(command -v odmlconvert)" ]]; then
103 | conda deactivate
104 | cd ${ROOT_DIR}
105 | echo
106 | echo "-- [FAILED] odmlconvert not installed"
107 | exit 1
108 | fi
109 |
110 | cd ${ROOT_DIR}/resources/test_convert_script
111 | odmlconvert -o ${OUT_DIR} -r .
112 |
113 | echo
114 | echo "-- Returning to root"
115 | cd ${ROOT_DIR}
116 |
117 | echo
118 | echo "-- Creating rdf output folder"
119 | OUT_DIR=/tmp/odml/out/${PYVER}/rdf
120 | mkdir -vp ${OUT_DIR}
121 |
122 | if ! [[ -x "$(command -v odmltordf)" ]]; then
123 | conda deactivate
124 | cd ${ROOT_DIR}
125 | echo
126 | echo "-- [FAILED] odmltordf not installed"
127 | exit 1
128 | fi
129 |
130 | echo
131 | echo "-- Running rdf conversion script test"
132 | cd ${ROOT_DIR}/resources/test_rdf_export_script
133 | odmltordf -o ${OUT_DIR} -r .
134 |
135 | echo
136 | echo "-- Returning to root"
137 | cd ${ROOT_DIR}
138 |
139 | conda deactivate
140 |
141 | echo "-- Done"
142 | echo
143 |
--------------------------------------------------------------------------------
/test/test_infer_type.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from datetime import datetime as dt, date, time
4 |
5 | from odml import Property, Section, Document
6 | from odml.tools.xmlparser import XMLReader, XMLWriter
7 |
8 |
9 | class TestInferType(unittest.TestCase):
10 |
11 | def test_string(self):
12 | prop = Property("test", value="some_string")
13 | self.assertEqual(prop.dtype, "string")
14 | self.assertIsInstance(prop.values[0], str)
15 |
16 | def test_text(self):
17 | prop = Property("test", value="some\nstring")
18 | self.assertEqual(prop.dtype, "text")
19 | self.assertIsInstance(prop.values[0], str)
20 |
21 | def test_int(self):
22 | prop = Property("test", value=111)
23 | self.assertEqual(prop.dtype, "int")
24 | self.assertIsInstance(prop.values[0], int)
25 |
26 | def test_float(self):
27 | prop = Property("test", value=3.14)
28 | self.assertEqual(prop.dtype, "float")
29 | self.assertIsInstance(prop.values[0], float)
30 |
31 | def test_datetime(self):
32 | prop = Property("test", value=dt.now())
33 | self.assertEqual(prop.dtype, "datetime")
34 | self.assertIsInstance(prop.values[0], dt)
35 |
36 | def test_date(self):
37 | prop = Property("test", dt.now().date())
38 | self.assertEqual(prop.dtype, "date")
39 | self.assertIsInstance(prop.values[0], date)
40 |
41 | def test_time(self):
42 | prop = Property("test", value=dt.now().time())
43 | self.assertEqual(prop.dtype, "time")
44 | self.assertIsInstance(prop.values[0], time)
45 |
46 | def test_boolean(self):
47 | prop = Property("test", True)
48 | self.assertEqual(prop.dtype, "boolean")
49 | self.assertIsInstance(prop.values[0], bool)
50 |
51 | prop = Property("test", False)
52 | self.assertEqual(prop.dtype, "boolean")
53 | self.assertIsInstance(prop.values[0], bool)
54 |
55 | def test_read_write(self):
56 | doc = Document("author")
57 | sec = Section("name", "type", parent=doc)
58 |
59 | sec.append(Property("strprop", "somestring"))
60 | sec.append(Property("txtprop", "some\ntext"))
61 | sec.append(Property("intprop", 200))
62 | sec.append(Property("floatprop", 2.00))
63 | sec.append(Property("datetimeprop", dt.now()))
64 | sec.append(Property("dateprop", dt.now().date()))
65 | sec.append(Property("timeprop", dt.now().time()))
66 | sec.append(Property("boolprop", True))
67 |
68 | str_doc = str(XMLWriter(doc))
69 |
70 | new_doc = XMLReader().from_string(str_doc)
71 | new_sec = new_doc.sections[0]
72 |
73 | prop = new_sec.properties["strprop"]
74 | self.assertEqual(prop.dtype, "string")
75 | self.assertIsInstance(prop.values[0], str)
76 |
77 | prop = new_sec.properties["txtprop"]
78 | self.assertEqual(prop.dtype, "text")
79 | self.assertIsInstance(prop.values[0], str)
80 |
81 | prop = new_sec.properties["intprop"]
82 | self.assertEqual(prop.dtype, "int")
83 | self.assertIsInstance(prop.values[0], int)
84 |
85 | prop = new_sec.properties["floatprop"]
86 | self.assertEqual(prop.dtype, "float")
87 | self.assertIsInstance(prop.values[0], float)
88 |
89 | prop = new_sec.properties["datetimeprop"]
90 | self.assertEqual(prop.dtype, "datetime")
91 | self.assertIsInstance(prop.values[0], dt)
92 |
93 | prop = new_sec.properties["dateprop"]
94 | self.assertEqual(prop.dtype, "date")
95 | self.assertIsInstance(prop.values[0], date)
96 |
97 | prop = new_sec.properties["timeprop"]
98 | self.assertEqual(prop.dtype, "time")
99 | self.assertIsInstance(prop.values[0], time)
100 |
101 | prop = new_sec.properties["boolprop"]
102 | self.assertEqual(prop.dtype, "boolean")
103 | self.assertIsInstance(prop.values[0], bool)
104 |
--------------------------------------------------------------------------------
/odml/scripts/odml_view.py:
--------------------------------------------------------------------------------
1 | """odmlview
2 |
3 | odmlview sets up a minimal webserver to view odml files saved in the
4 | XML format via the webbrowser. After it is started, the webserver will
5 | open a new tab in the default webbrowser and display the content of
6 | the directory the server was started from. odML files can then be
7 | viewed from there.
8 | To properly render XML, an odML file may contain the stylesheet processing
9 | instruction '<?xml-stylesheet type="text/xsl" href="odmlDocument.xsl"?>' where the
10 | 'odmlDocument.xsl' stylesheet should reside in the same directory as the
11 | odML file to be rendered. By using the '--fetch' flag the latest version
12 | of this stylesheet will be downloaded from 'templates.g-node.org' to
13 | the current directory when starting up the service.
14 |
15 | Usage: odmlview [-p PORT] [--fetch]
16 |
17 | Options:
18 | -p PORT Port the server will use. Default: 8000
19 | --fetch Fetch latest stylesheet from templates.g-node.org
20 | to current directory
21 | -h --help Show this screen
22 | --version Show version
23 | """
24 |
25 | import os
26 | try:
27 | import http.server as hs
28 | except ImportError:
29 | print("This script is only supported with Python 3")
30 | exit(-1)
31 |
32 | import socketserver
33 | import sys
34 | import urllib.request as urllib2
35 | import webbrowser
36 |
37 | from docopt import docopt
38 |
39 | PORT = 8000
40 | REPOSITORY = "https://templates.g-node.org/_resources/"
41 | STYLESHEET = "odmlDocument.xsl"
42 | CSS = "odml_style.css"
43 |
44 |
45 | def download_file(repo, filename):
46 | """
47 | download_file fetches 'filename' from url 'repo' and
48 | saves it in the current directory as file 'filename'.
49 | """
50 | try:
51 | data = urllib2.urlopen("%s%s" % (repo, filename)).read()
52 | data = data.decode("utf-8")
53 | except Exception as err:
54 | print("[Warning] Failed loading '%s%s': %s" % (repo, filename, err))
55 | return
56 |
57 | with open(filename, "w") as local_file:
58 | local_file.write(str(data))
59 |
60 |
61 | def run(port=PORT, extensions=None):
62 | """
63 | run starts a simple webserver on localhost serving the current directory.
64 | Once started, it will open a tab on the default webbrowser and will continue
65 | to serve until manually stopped.
66 |
67 | :param port: server port
68 | :param extensions: dictionary containing additional file extension - mime type
69 | mappings the server should be aware of.
70 | e.g. {'.xml': 'application/xml'}
71 | """
72 | handler = hs.SimpleHTTPRequestHandler
73 |
74 | if extensions:
75 | handler.extensions_map.update(extensions)
76 |
77 | server_address = ('', port)
78 |
79 | socketserver.TCPServer.allow_reuse_address = True
80 | with socketserver.TCPServer(server_address, handler) as httpd:
81 | webbrowser.open_new_tab('http://localhost:%s' % port)
82 | try:
83 | print("[Info] The server can be stopped by pressing Ctrl+C")
84 | httpd.serve_forever()
85 | except KeyboardInterrupt:
86 | print("[Info] Received Keyboard interrupt, shutting down")
87 | httpd.shutdown()
88 | httpd.server_close()
89 |
90 |
91 | def main(args=None):
92 | parser = docopt(__doc__, argv=args, version="0.1.0")
93 |
94 | # Fetch stylesheet
95 | if parser['--fetch'] and not os.path.exists(STYLESHEET):
96 | print("[Info] Downloading stylesheet '%s'" % STYLESHEET)
97 | download_file(REPOSITORY, STYLESHEET)
98 | print("[Info] Downloading stylesheet '%s'" % CSS)
99 | download_file(REPOSITORY, CSS)
100 |
101 | server_port = int(parser['-p']) if parser['-p'] else PORT
102 |
103 | # files with odML file extensions should be interpreted as XML
104 | extensions = {'.odml': 'application/xml'}
105 |
106 | run(server_port, extensions)
107 |
108 |
109 | if __name__ == "__main__":
110 | main(sys.argv[1:])
111 |
--------------------------------------------------------------------------------
/scripts/release_tests/README.md:
--------------------------------------------------------------------------------
1 | # odml and odml dependent libraries installation tests
2 |
3 | Documents the minimal automated installation tests for `python-odml` and `odmltools`, as well as the not fully automated installation tests for `odml-ui`, with a special focus on the execution of the command line scripts and the GUI for the different local installation methods.
4 |
5 | ## Automated odml and dependent library tests
6 |
7 | The tests include
8 | - basic odml import and file loading and saving
9 | - `odml` command line script execution using realistic example files
10 | - odmlview
11 | - odmltordf
12 | - odmlconversion
13 | - `odmltools` command line script execution
14 | - odmlimportdatacite
15 | - basic odml-ui installation
16 |
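The first of these checks, basic odml import plus file loading and saving, has roughly the following flavour (a minimal sketch for illustration only; the actual checks live in `resources/scripts/odml_basics.py` and may differ in detail):

    import odml

    # Create a small document, save it and load it back in.
    # The file name used here is illustrative.
    doc = odml.Document(author="release test")
    sec = odml.Section(name="sec", type="test", parent=doc)
    odml.Property(name="prop", values=[1, 2], parent=sec)

    odml.save(doc, "release_check.xml")
    reloaded = odml.load("release_check.xml")
    assert reloaded.sections[0].properties["prop"].values == [1, 2]
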
17 | ### Local installation tests
18 |
19 | To test the various local installations of odml, execute `run_test_matrix.sh` with option 'A'. odml will be installed into fresh conda environments using `pip install .` and `python setup.py install` for all Python versions >= 3.5.
20 |
21 | ### odml PyPI TEST installation tests
22 |
23 | To test the installation of the odml package from the PyPI TEST repository, execute `run_test_matrix.sh` with option 'B'. odml will be installed into fresh conda environments using `pip install odml` for all Python versions >= 3.5.
24 | The package `odml-ui` will be installed as well and all installable odml command line scripts will be tested after the odml installation.
25 |
26 | ### odmltools PyPI server installation tests
27 |
28 | When executing `run_test_matrix.sh` with option 'C', the odml dependent package `odmltools` will be pip installed into fresh conda environments for all Python versions >= 3.6 from the PyPI TEST repository and appropriate conversion tests will be run using the installed command line tool.
29 |
30 | ### nixodmlconverter PyPI TEST installation tests
31 |
32 | When executing `run_test_matrix.sh` with option 'D' the odml dependent package `nixodmlconverter` will be pip installed into fresh conda environments for all Python versions >= 3.6 from the PyPI TEST repository and appropriate conversion tests will be run using the installed command line tool.
33 |
34 | ### odml PyPI LIVE installation tests
35 |
36 | To test the installation of the odml package from PyPI proper, execute `run_test_matrix.sh` with option 'E'. odml will be installed into fresh conda environments using `pip install odml` for all Python versions >= 3.5.
37 | The package `odml-ui` will be installed as well and all installable odml command line scripts will be tested after the odml installation.
38 |
39 | ## Manual odml-ui tests
40 |
41 | To set up conda environments and run local or PyPI TEST installations, run the script `run_test_matrix.sh` with option `A` or `B` from the current directory.
42 | Once set up, the conda environments can be used to manually test `odml-ui` as well.
43 |
44 | Activate python installation environment
45 |
46 | CONDA_ENV_SETUP=pyinst
47 | CONDA_ENV_PIP=pipinst
48 | ROOT_DIR=$(pwd)
49 | cd $ROOT_DIR/resources/test_load
50 | conda activate ${CONDA_ENV_SETUP}
51 | odmlui
52 |
53 | Run the following manual tests:
54 | - open `test_load\load_v1.odml.xml`
55 | - check fail message
56 | - import `test_load\load_v1.odml.xml`
57 | - save as `pyi_conv.xml`
58 | - save as `pyi_conv.yaml`
59 | - save as `pyi_conv.json`
60 | - open `pyi_conv.xml`
61 | - open `pyi_conv.yaml`
62 | - open `pyi_conv.json`
63 | - check importing a terminology using the document wizard
64 |
65 | Exit and switch to pip environment
66 |
67 | conda deactivate
68 | conda activate ${CONDA_ENV_PIP}
69 | odmlui
70 |
71 | Run manual tests again
72 |
73 | Test odmltables plugin
74 |
75 | pip install odmltables
76 | pip install odmltables[gui]
77 | odmlui
78 |
79 | Run the following minimal tests
80 | - open `pyi_conv.xml`
81 | - use odmltables `convert` button, save as csv file
82 | - use odmltables `filter` button
83 |
84 | Exit, move back to the root directory and clean up
85 |
86 | cd $ROOT_DIR
87 | conda deactivate
88 | rm $ROOT_DIR/resources/test_load/load_v1.odml_converted.xml
89 | rm $ROOT_DIR/resources/test_load/pyi_conv.json
90 | rm $ROOT_DIR/resources/test_load/pyi_conv.xml
91 | rm $ROOT_DIR/resources/test_load/pyi_conv.yaml
92 |
--------------------------------------------------------------------------------
/doc/example_odMLs/sample_odml.rdf:
--------------------------------------------------------------------------------
1 | @prefix odml: .
2 | @prefix rdf: .
3 | @prefix rdfs: .
4 | @prefix xml: .
5 | @prefix xsd: .
6 |
7 | odml:Hub odml:hasDocument ,
8 | ;
9 | odml:hasTerminology .
10 |
11 | a odml:Property ;
12 | odml:hasDefinition "Nickname(s) of the subject" ;
13 | odml:hasDtype "string" ;
14 | odml:hasName "Nickname" .
15 |
16 | a odml:Section ;
17 | odml:hasDefinition "Information on the crew" ;
18 | odml:hasName "TheCrew" ;
19 | odml:hasProperty ,
20 | ;
21 | odml:hasSection ;
22 | odml:hasType "crew" .
23 |
24 | a odml:Section ;
25 | odml:hasDefinition "Information on Arthur Dent" ;
26 | odml:hasName "Arthur Philip Dent" ;
27 | odml:hasProperty ,
28 | ;
29 | odml:hasType "crew/person" .
30 |
31 | a odml:Property ;
32 | odml:hasDefinition "Number of crew members" ;
33 | odml:hasDtype "int" ;
34 | odml:hasName "NoCrewMembers" ;
35 | odml:hasUncertainty "1" ;
36 | odml:hasValue odml:ce5f326a-91c9-4c41-9425-d4b8495aa832 .
37 |
38 | a odml:Document ;
39 | odml:hasAuthor "D. N. Adams" ;
40 | odml:hasDate "1979-10-12"^^xsd:date ;
41 | odml:hasDocVersion "42" ;
42 | odml:hasSection odml:b44da455-8ee8-47b7-a011-016b9c3a6f9d .
43 |
44 | a rdf:Bag ;
45 | rdf:li "Human" .
46 |
47 | a odml:Property ;
48 | odml:hasDefinition "Species to which subject belongs to" ;
49 | odml:hasDtype "string" ;
50 | odml:hasName "Species" ;
51 | odml:hasValue .
52 |
53 | a odml:Document ;
54 | odml:hasAuthor "D. N. Adams" ;
55 | odml:hasDate "1979-10-12"^^xsd:date ;
56 | odml:hasDocVersion "42" ;
57 | odml:hasSection ;
58 | odml:hasTerminology .
59 |
60 | a odml:Property ;
61 | odml:hasDefinition "List of crew members names" ;
62 | odml:hasDtype "person" ;
63 | odml:hasName "NameCrewMembers" ;
64 | odml:hasValue odml:d98afe9b-3982-44bf-9373-12aaa4798628 .
65 |
66 | odml:b44da455-8ee8-47b7-a011-016b9c3a6f9d a odml:Section ;
67 | odml:hasDefinition "Information on the crew" ;
68 | odml:hasName "TheCrew" ;
69 | odml:hasProperty odml:bb02a18d-7de2-45aa-8be1-ca34b74f2360 ;
70 | odml:hasType "crew" .
71 |
72 | odml:bb02a18d-7de2-45aa-8be1-ca34b74f2360 a odml:Property ;
73 | odml:hasDefinition "Number of crew members" ;
74 | odml:hasDtype "int" ;
75 | odml:hasName "NoCrewMembers" .
76 |
77 | odml:ce5f326a-91c9-4c41-9425-d4b8495aa832 a rdf:Bag ;
78 | rdf:li 4 .
79 |
80 | odml:d98afe9b-3982-44bf-9373-12aaa4798628 a rdf:Bag ;
81 | rdf:li "Arthur Philip Dent",
82 | "Ford Prefect",
83 | "Tricia Marie McMillan",
84 | "Zaphod Beeblebrox" .
85 |
86 | a .
87 |
88 |
--------------------------------------------------------------------------------
/odml/terminology.py:
--------------------------------------------------------------------------------
1 | """
2 | Handles (deferred) loading of terminology data and access to it for odML documents.
3 | """
4 |
5 | import datetime
6 | import os
7 | import tempfile
8 | import threading
9 | try:
10 | import urllib.request as urllib2
11 | except ImportError:
12 | import urllib2
13 |
14 | from hashlib import md5
15 |
16 | from .tools.parser_utils import ParserException
17 | from .tools.xmlparser import XMLReader
18 |
19 |
20 | REPOSITORY_BASE = 'https://terminologies.g-node.org'
21 | REPOSITORY = '/'.join([REPOSITORY_BASE, 'v1.1', 'terminologies.xml'])
22 |
23 | CACHE_AGE = datetime.timedelta(days=1)
24 |
25 |
26 | def cache_load(url, replace_file=False):
27 | """
28 | Loads the url and stores it in a temporary cache directory;
29 | subsequent requests for this url will use the cached version.
30 |
31 | :param url: URL from where to load an odML terminology file from.
32 | :param replace_file: True, if file should be reloaded
33 | """
34 | filename = '.'.join([md5(url.encode()).hexdigest(), os.path.basename(url)])
35 | cache_dir = os.path.join(tempfile.gettempdir(), "odml.cache")
36 | if not os.path.exists(cache_dir):
37 | try:
38 | os.makedirs(cache_dir)
39 | except OSError: # might happen due to concurrency
40 | if not os.path.exists(cache_dir):
41 | raise
42 | cache_file = os.path.join(cache_dir, filename)
43 | if not os.path.exists(cache_file) \
44 | or replace_file \
45 | or datetime.datetime.fromtimestamp(os.path.getmtime(cache_file)) < \
46 | datetime.datetime.now() - CACHE_AGE:
47 | try:
48 | data = urllib2.urlopen(url).read()
49 | data = data.decode("utf-8")
50 | except Exception as exc:
51 | print("failed loading '%s': %s" % (url, exc))
52 | return
53 |
54 | file_obj = open(cache_file, "w")
55 | file_obj.write(str(data))
56 | file_obj.close()
57 |
58 | return open(cache_file)
59 |
60 |
61 | class Terminologies(dict):
62 | """
63 | Terminologies facilitates synchronous and deferred loading, caching,
64 | browsing and importing of full or partial odML terminologies.
65 | """
66 | loading = {}
67 | reload_cache = False
68 |
69 | def load(self, url):
70 | """
71 | Loads and caches an odML XML file from a URL.
72 |
73 | :param url: location of an odML XML file.
74 | :return: The odML document loaded from url.
75 | """
76 | if url in self:
77 | return self[url]
78 |
79 | if url in self.loading:
80 | self.loading[url].join()
81 | self.loading.pop(url, None)
82 | return self.load(url)
83 |
84 | return self._load(url)
85 |
86 | def _load(self, url):
87 | """
88 | Cache loads an odML XML file from a URL and returns
89 | the result as a parsed odML document.
90 |
91 | :param url: location of an odML XML file.
92 | :return: The odML document loaded from url.
93 | It will silently return None if any exception
94 | occurs, to enable loading of nested odML files.
95 | """
96 | file_obj = cache_load(url, self.reload_cache)
97 | if file_obj is None:
98 | print("did not successfully load '%s'" % url)
99 | return
100 | try:
101 | term = XMLReader(filename=url, ignore_errors=True).from_file(file_obj)
102 | term.finalize()
103 | except ParserException as exc:
104 | print("Failed to load %s due to parser errors" % url)
105 | print(' "%s"' % exc)
106 | term = None
107 | self[url] = term
108 | return term
109 |
110 | def deferred_load(self, url):
111 | """
112 | Starts a background thread to load an odML XML file from a URL.
113 |
114 | :param url: location of an odML XML file.
115 | """
116 | if url in self or url in self.loading:
117 | return
118 | self.loading[url] = threading.Thread(target=self._load, args=(url,))
119 | self.loading[url].start()
120 |
121 | def refresh(self, url):
122 | """
123 | Deletes and reloads all cached odML XML files given in the
124 | terminology file from a URL.
125 |
126 | :param url: location of an odML XML file.
127 | """
128 | self.reload_cache = True
129 | self.clear()
130 | self.load(url)
131 | self.reload_cache = False
132 |
133 |
134 | terminologies = Terminologies()
135 | load = terminologies.load
136 | deferred_load = terminologies.deferred_load
137 | refresh = terminologies.refresh
138 |
139 |
140 | if __name__ == "__main__":
141 | FILE_OBJECT = cache_load(REPOSITORY)
142 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | How to contribute to python-odml
2 | ================================
3 |
4 | This document gives some information about how to contribute to the odML project.
5 |
6 |
7 | Governance model
8 | ----------------
9 |
10 | The project has a core team of **maintainers** who are long-term contributors and responsible for coordinating the project. There is also a set of one-time or short-term **contributors**. We consider everyone a contributor who reports issues, opens pull requests, or takes part in issue discussions. Any such contribution is welcome. Maintainers have commit access to the repository and will review and merge pull requests. Becoming a maintainer is possible for everyone who wants to help maintain and shape the project and to take over more responsibility. Requests to become a maintainer need to be approved by the existing maintainers.
11 |
12 | Any addition to the code happens via pull requests (there are only very few exceptions in which someone pushes directly to master, see below for more information). Thus, any contribution will be reviewed before it is merged. Bug fixes and other contributions to the API will undergo this pragmatic approach. Format or API changes, especially those that would lead to breaking changes, will be discussed via the issue tracker and video meetings and need to be agreed on by the maintainers. We aim at consensus decisions, but where this is not possible decisions are made by majority vote among the maintainers.
13 |
14 |
15 | Contributing
16 | ------------
17 |
18 | If you want to contribute to the project please first create a fork of the repository on GitHub.
19 | When you are done with implementing a new feature or with fixing a bug, please send
20 | us a pull request.
21 |
22 | If you contribute to the project regularly, it would be very much appreciated if you
23 | would stick to the following development workflow:
24 |
25 | 1. Select an *issue* from the issue tracker that you want to work on and assign the issue to your account.
26 | If the *issue* is about a relatively complex matter or requires larger API changes, the description of the
27 | *issue* or its respective discussion should contain a brief concept of what the solution will look like.
28 |
29 | 2. During the implementation of the feature or bug-fix add your changes in small atomic commits.
30 | Commit messages should be short but expressive.
31 | The first line of the message should not exceed **50** characters and the 2nd line should be empty.
32 | If you want to add further text you can do so from the 3rd line on without limitations.
33 | If possible, reference fixed issues in the commit message (e.g. "fixes #101"); an example commit message is given after this list.
34 |
35 | 3. When done with the implementation, compile and test the code.
36 | If your work includes a new function or class please write a small unit test for it.
37 |
38 | 4. Send us a pull request with your changes.
39 | The pull request message should explain the changes and reference the *issue* addressed by your code.
40 | Your pull request will be reviewed by one of our team members.
41 | Pull requests should never be merged by the author of the contribution, but by another team member.
42 | Merge conflicts or errors reported by continuous integration tools should be resolved by the original author before the request is merged.
43 |
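A commit message following these conventions could, for example, look like the following (the message and the referenced issue number are purely illustrative):

    Fix loading of included subsections

    Resolves an error when section includes are read
    from relative file paths. Fixes #101.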
44 |
45 | Google Summer of Code contributors
46 | ---------------------
47 |
48 | Please see the corresponding [Google Summer of Code](GSoC.md) file if you are interested in contributing as part of the GSoC programme.
49 |
50 |
51 | The issue tracker
52 | -----------------
53 |
54 | Please try to avoid duplicates of issues. If you encounter duplicated issues, please close all of them except
55 | one, reference the closed issues in the one that is left open and add missing information from the closed issues
56 | (if necessary) to the remaining issue.
57 |
58 | Assign meaningful tags to newly created issues and, if possible, assign them to milestones.
59 |
60 |
61 | Reviewing pull requests
62 | -----------------------
63 |
64 | Every code change (even small contributions from core developers) should be added to the project via pull requests.
65 | Before a pull request is reviewed, it should pass all builds and tests on the continuous integration services.
66 | Each pull request that passes all builds and tests should be reviewed by at least one of the core developers.
67 | If a contribution is rather complex or leads to significant API changes, the respective pull request should be
68 | reviewed by two other developers.
69 | In such cases the first reviewer or the contributor should request a second review in a comment.
70 | To streamline the review process reviewers are encouraged to use the github "review" option.
71 |
72 | Testing
73 | -------
74 |
75 | * Unit tests can be found in the test sub directory. Currently, the test coverage is a bit low but we are working on improving it.
76 |
77 | * Provide a unit test for every class, method or function (a minimal example is given below).
78 |
79 | * Please make sure that all tests pass before merging/sending pull requests.
80 |
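Such a unit test could, for example, look like the following sketch (the test and property names are illustrative; the existing modules in the `test` directory follow this pattern):

    import unittest

    from odml import Property


    class TestExample(unittest.TestCase):

        def test_int_dtype_inference(self):
            # A Property created with an integer value should infer the "int" dtype.
            prop = Property("example", value=1)
            self.assertEqual(prop.dtype, "int")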
81 |
82 | Style guide
83 | -----------
84 |
85 | Always keep your code PEP8 compliant.
86 |
--------------------------------------------------------------------------------
/odml/__init__.py:
--------------------------------------------------------------------------------
1 | import warnings
2 |
3 | from sys import version_info as _python_version
4 |
5 | _property = property
6 |
7 | from . import doc
8 | from . import property
9 | from . import section
10 | from .dtypes import DType
11 | from .fileio import load, save, display
12 | from .info import VERSION
13 | from .tools.parser_utils import SUPPORTED_PARSERS as PARSERS
14 |
15 |
16 | def _format_warning(warn_msg, *args, **kwargs):
17 | """
18 | Used to provide users with deprecation warnings via the warnings module
19 | but without spamming them with full stack traces.
20 | """
21 | final_msg = "%s\n" % str(warn_msg)
22 | # If available add category name to the message
23 | if args and hasattr(args[0], "__name__"):
24 | final_msg = "%s: %s" % (args[0].__name__, final_msg)
25 |
26 | return final_msg
27 |
28 |
29 | # Monkey patch formatting 'warnings' messages for the whole module.
30 | warnings.formatwarning = _format_warning
31 |
32 | if _python_version.major < 3:
33 | msg = "Python 2 has reached end of live."
34 | msg += "\n\todML support for Python 2 has been dropped."
35 | warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
36 | elif _python_version.major == 3 and _python_version.minor < 7:
37 | msg = "The '%s' package is not tested with your Python version. " % __name__
38 | msg += "\n\tPlease consider upgrading to the latest Python distribution."
39 | warnings.warn(msg)
40 |
41 | __version__ = VERSION
42 |
43 |
44 | class odml_implementation(object):
45 | name = None
46 | provides = []
47 | Property = None
48 | Section = None
49 | Document = None
50 |
51 |
52 | class BasicImplementation(odml_implementation):
53 | name = 'basic'
54 | provides = ['basic']
55 |
56 | @_property
57 | def Section(self):
58 | return section.BaseSection
59 |
60 | @_property
61 | def Property(self):
62 | return property.BaseProperty
63 |
64 | @_property
65 | def Document(self):
66 | return doc.BaseDocument
67 |
68 | # here the available implementations are stored
69 | impls = {}
70 |
71 | # the default implementation
72 | current_implementation = BasicImplementation()
73 | minimum_implementation = current_implementation
74 |
75 |
76 | def addImplementation(implementation, make_minimum=False,
77 | make_default=False, key=None):
78 | """register a new available implementation"""
79 | impls[implementation.name] = implementation
80 | if make_minimum and key is not None:
81 | setMinimumImplementation(key)
82 | if make_default and key is not None:
83 | setDefaultImplementation(key)
84 |
85 |
86 | def getImplementation(key=None):
87 | """retrieve a implementation named *key*"""
88 | if key is None:
89 | return current_implementation
90 | implementation = impls[key]
91 | return implementation
92 |
93 |
94 | def setDefaultImplementation(key):
95 | """
96 | set a new default implementation
97 |
98 | if it does not fulfill the minimum requirements, a TypeError is raised
99 | """
100 | global current_implementation
101 | if minimum_implementation.name not in impls[key].provides:
102 | raise TypeError(
103 | "Cannot set default odml-implementation to '%s', "
104 | "because %s-capabilities are required which are not "
105 | "provided (provides: %s)" %
106 | (key, minimum_implementation.name, ', '.join(impls[key].provides)))
107 | current_implementation = impls[key]
108 |
109 |
110 | def setMinimumImplementation(key):
111 | """
112 | Set a new minimum requirement for a default implementation.
113 | This can only be increased, i.e. 'downgrades' are not possible.
114 | If the current_implementation does not provide the requested capability,
115 | make the minimum implementation the default.
116 | """
117 | global minimum_implementation
118 | if key in minimum_implementation.provides:
119 | return # the minimum implementation is already capable of this feature
120 | if minimum_implementation.name not in impls[key].provides:
121 | raise TypeError(
122 | "Cannot set new minimum odml-implementation to '%s', "
123 | "because %s-capabilities are already required which are "
124 | "not provided (provides: %s)" %
125 | (key, minimum_implementation.name, ', '.join(impls[key].provides)))
126 | if key not in current_implementation.provides:
127 | setDefaultImplementation(key)
128 | minimum_implementation = impls[key]
129 |
130 |
131 | addImplementation(current_implementation)
132 |
133 |
134 | def Property(*args, **kwargs):
135 | return current_implementation.Property(*args, **kwargs)
136 |
137 |
138 | def Section(*args, **kwargs):
139 | return current_implementation.Section(*args, **kwargs)
140 |
141 |
142 | def Document(*args, **kwargs):
143 | return current_implementation.Document(*args, **kwargs)
144 |
145 | # __all__ = [Property, Section, Document]
146 |
--------------------------------------------------------------------------------
/test/test_terminology.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests functions and classes from the odml terminology module.
3 | """
4 |
5 | import os
6 | import unittest
7 |
8 | from glob import glob
9 | from sys import platform
10 | from time import sleep
11 | try:
12 | from urllib.request import pathname2url
13 | except ImportError:
14 | from urllib import pathname2url
15 |
16 | from odml import Document, save, Section, terminology
17 | from .util import ODML_CACHE_DIR as CACHE_DIR, create_test_dir
18 |
19 |
20 | class TestTerminology(unittest.TestCase):
21 |
22 | def setUp(self):
23 | """
24 | Set up local temporary terminology files in a temporary folder
25 | """
26 | tmp_dir = create_test_dir(__file__)
27 | tmp_name = os.path.basename(tmp_dir)
28 |
29 | main_name = "%s_main.xml" % tmp_name
30 | main_file_path = os.path.join(tmp_dir, main_name)
31 | main_url = "file://%s" % pathname2url(main_file_path)
32 |
33 | include_name = "%s_include.xml" % tmp_name
34 | include_file_path = os.path.join(tmp_dir, include_name)
35 | include_url = "file://%s" % pathname2url(include_file_path)
36 |
37 | include_doc = Document()
38 | _ = Section(name="include_sec", type="test", parent=include_doc)
39 | save(include_doc, include_file_path)
40 |
41 | main_doc = Document()
42 | _ = Section(name="main_sec", type="test", include=include_url, parent=main_doc)
43 | save(main_doc, main_file_path)
44 |
45 | self.main_terminology_url = main_url
46 | self.temp_dir_base = tmp_name
47 |
48 | def tearDown(self):
49 | """
50 | Remove all created files from the odml.cache to not cross pollute other tests.
51 | The created tmp directory should be cleaned up automatically upon startup.
52 | """
53 | temp_file_glob = "*%s*" % self.temp_dir_base
54 | find_us = os.path.join(CACHE_DIR, temp_file_glob)
55 |
56 | for file_path in glob(find_us):
57 | os.remove(file_path)
58 |
59 | @staticmethod
60 | def _cache_files_map(file_filter="*"):
61 | """
62 | Returns a dict mapping the basefilenames of cached odml files
63 | to their md5 hash and mtime.
64 |
65 | :param file_filter: a valid glob to search for files in the odml cache directory.
66 | The cache directory is provided and must not be part of the glob.
67 | Default value is '*'.
68 |
69 | :return: dict of the format {filename: [md5_hash, mtime]}
70 | """
71 | temp_file_glob = os.path.join(CACHE_DIR, file_filter)
72 |
73 | curr_map = {}
74 | for file_path in glob(temp_file_glob):
75 | split_name = os.path.basename(file_path).split('.')
76 | file_mtime = os.path.getmtime(file_path)
77 | curr_map[split_name[1]] = [split_name[0], file_mtime]
78 |
79 | return curr_map
80 |
81 | def test_terminology_refresh(self):
82 | """
83 | Test terminology cache refresh using local files to detach
84 | loading and resolving from the live online terminology repository.
85 | """
86 | # Fetch current cache content specific to the two local terminologies
87 | # With the default file glob '*' all files in the odml cache directory would be
88 | # included in the test.
89 | file_filter = "*%s*" % self.temp_dir_base
90 | main_url = self.main_terminology_url
91 |
92 | # Initially load main and included file from temp directory into the odml cache directory
93 | terminology.load(main_url)
94 |
95 | orig_map = self._cache_files_map(file_filter)
96 |
97 | # Test cache content does not change
98 | terminology.load(main_url)
99 | load_map = self._cache_files_map(file_filter)
100 |
101 | self.assertEqual(len(orig_map), len(load_map))
102 | for curr_file in orig_map:
103 | self.assertIn(curr_file, load_map)
104 | self.assertEqual(orig_map[curr_file], load_map[curr_file])
105 |
106 | sleep_time = 0.5
107 | if platform == "darwin":
108 | sleep_time = 2
109 |
110 | # Sleep is needed since the tests might be too fast to result in a
111 | # different file mtime. Travis macOS seems to require sleep time > 1s.
112 | sleep(sleep_time)
113 |
114 | # Test refresh loads same cached files but changes them.
115 | # Different mtimes and id strings are sufficient.
116 | terminology.refresh(main_url)
117 | refresh_map = self._cache_files_map(file_filter)
118 | self.assertEqual(len(orig_map), len(refresh_map))
119 | for curr_file in orig_map:
120 | self.assertIn(curr_file, refresh_map)
121 | # Check identical md5 hash
122 | self.assertEqual(orig_map[curr_file][0], refresh_map[curr_file][0])
123 | # Check different mtime
124 | self.assertLess(orig_map[curr_file][1], refresh_map[curr_file][1])
125 |
--------------------------------------------------------------------------------
/test/test_validation_integration.py:
--------------------------------------------------------------------------------
1 | """
2 | This file tests built-in odml validations.
3 | """
4 |
5 | import sys
6 | import unittest
7 |
8 | try:
9 | from StringIO import StringIO
10 | except ImportError:
11 | from io import StringIO
12 |
13 | import odml
14 |
15 |
16 | class TestValidationIntegration(unittest.TestCase):
17 |
18 | def setUp(self):
19 | # Redirect stdout to test messages
20 | self.stdout_orig = sys.stdout
21 | self.capture = StringIO()
22 | sys.stdout = self.capture
23 |
24 | self.msg_base = "Property values cardinality violated"
25 |
26 | def tearDown(self):
27 | # Reset stdout; resetting using 'sys.__stdout__' fails on windows
28 | sys.stdout = self.stdout_orig
29 | self.capture.close()
30 |
31 | def _clear_output(self):
32 | self.capture.seek(0)
33 | self.capture.truncate()
34 |
35 | def _get_captured_output(self):
36 | out = [txt.strip() for txt in self.capture.getvalue().split('\n') if txt]
37 |
38 | # Buffer reset
39 | self.capture.seek(0)
40 | self.capture.truncate()
41 |
42 | return out
43 |
44 | def test_property_values_cardinality(self):
45 | # -- Test assignment validation warnings
46 | doc = odml.Document()
47 | sec = odml.Section(name="sec", type="sec_type", parent=doc)
48 |
49 | # Making sure only the required warnings are tested
50 | self._clear_output()
51 |
52 | # -- Test cardinality validation warnings on Property init
53 | # Test warning when setting invalid minimum
54 | _ = odml.Property(name="prop_card_min", values=[1], val_cardinality=(2, None), parent=sec)
55 | output = self._get_captured_output()
56 | test_msg = "%s (minimum %s values, %s found)" % (self.msg_base, 2, 1)
57 | self.assertEqual(len(output), 1)
58 | self.assertIn(test_msg, output[0])
59 |
60 | # Test warning when setting invalid maximum
61 | _ = odml.Property(name="prop_card_max", values=[1, 2, 3], val_cardinality=2, parent=sec)
62 | output = self._get_captured_output()
63 | test_msg = "%s (maximum %s values, %s found)" % (self.msg_base, 2, 3)
64 | self.assertEqual(len(output), 1)
65 | self.assertIn(test_msg, output[0])
66 |
67 | # Test no warning on valid init
68 | prop_card = odml.Property(name="prop_card", values=[1, 2],
69 | val_cardinality=(1, 5), parent=sec)
70 | output = self._get_captured_output()
71 | self.assertEqual(output, [])
72 |
73 | # -- Test cardinality validation warnings on cardinality updates
74 | # Test warning when setting minimally required values cardinality
75 | prop_card.val_cardinality = (3, None)
76 | output = self._get_captured_output()
77 | test_msg = "%s (minimum %s values, %s found)" % (self.msg_base, 3, 2)
78 | self.assertEqual(len(output), 1)
79 | self.assertIn(test_msg, output[0])
80 |
81 | # Test warning when setting maximally required values cardinality
82 | prop_card.values = [1, 2, 3]
83 | prop_card.val_cardinality = 2
84 | output = self._get_captured_output()
85 | test_msg = "%s (maximum %s values, %s found)" % (self.msg_base, 2, 3)
86 | self.assertEqual(len(output), 1)
87 | self.assertIn(test_msg, output[0])
88 |
89 | # Test no warning on valid cardinality
90 | prop_card.val_cardinality = (1, 10)
91 | output = self._get_captured_output()
92 | self.assertEqual(output, [])
93 |
94 | # Test no warning when setting cardinality to None
95 | prop_card.val_cardinality = None
96 | output = self._get_captured_output()
97 | self.assertEqual(output, [])
98 |
99 | # -- Test cardinality validation warnings on values updates
100 | # Test warning when violating minimally required values cardinality
101 | prop_card.val_cardinality = (3, None)
102 | prop_card.values = [1, 2]
103 | output = self._get_captured_output()
104 | test_msg = "%s (minimum %s values, %s found)" % (self.msg_base, 3, 2)
105 | self.assertEqual(len(output), 1)
106 | self.assertIn(test_msg, output[0])
107 |
108 | # Test warning when violating maximally required values cardinality
109 | prop_card.val_cardinality = (None, 2)
110 | prop_card.values = [1, 2, 3]
111 | output = self._get_captured_output()
112 | test_msg = "%s (maximum %s values, %s found)" % (self.msg_base, 2, 3)
113 | self.assertEqual(len(output), 1)
114 | self.assertIn(test_msg, output[0])
115 |
116 | # Test no warning when setting correct number of values
117 | prop_card.values = [1, 2]
118 | output = self._get_captured_output()
119 | self.assertEqual(output, [])
120 |
121 | # Test no warning when setting values to None
122 | prop_card.values = None
123 | output = self._get_captured_output()
124 | self.assertEqual(output, [])
125 |
--------------------------------------------------------------------------------
/odml/scripts/odml_convert.py:
--------------------------------------------------------------------------------
1 | """odmlConvert
2 |
3 | odmlConvert searches for odML files within a provided SEARCHDIR
4 | and converts them to the newest odML format version.
5 | Original files will never be overwritten. New files will be
6 | written either to a new directory at the current or a specified
7 | location.
8 |
9 | Usage: odmlconvert [-r] [-o OUT] SEARCHDIR
10 |
11 | Arguments:
12 | SEARCHDIR Directory to search for odML files.
13 |
14 | Options:
15 | -o OUT Output directory. Must exist if specified.
16 | If not specified, output files will be
17 | written to the current directory.
18 | -r Search recursively. Directory structures
19 | will not be retained.
20 | -h --help Show this screen.
21 | --version Show version.
22 | """
23 |
24 | import os
25 | import pathlib
26 | import sys
27 | import tempfile
28 |
29 | from docopt import docopt
30 |
31 | try:
32 | from StringIO import StringIO
33 | except ImportError:
34 | from io import StringIO
35 |
36 | import odml
37 |
38 | from odml.tools.converters import VersionConverter as VerConf
39 |
40 |
41 | def run_conversion(file_list, output_dir, report, source_format="XML"):
42 | """
43 | Convert a list of odML files to the latest odML version.
44 | :param file_list: list of files to be converted.
45 | :param output_dir: Directory where odML files converted to
46 | the latest odML version will be saved.
47 | :param report: Reporting StringIO.
48 | :param source_format: Original file format of the odML source files.
49 | XML, JSON and YAML are supported, default is XML.
50 | """
51 | # Exceptions are kept as broad as possible to ignore any non-odML or
52 | # invalid odML files and to ensure that everything that can be converted will be.
53 | for curr_file in file_list:
54 | file_path = str(curr_file.absolute())
55 | report.write("[Info] Handling file '%s'\n" % file_path)
56 | # When loading the current file succeeds, it is
57 | # a recent odML format file and can be ignored.
58 | try:
59 | odml.load(file_path, source_format)
60 | report.write("[Info] Skip recent version file '%s'" % file_path)
61 | except Exception as exc:
62 | out_name = os.path.splitext(os.path.basename(file_path))[0]
63 | outfile = os.path.join(output_dir, "%s_conv.xml" % out_name)
64 | try:
65 | VerConf(file_path).write_to_file(outfile, source_format)
66 | except Exception as exc:
67 | # Ignore files we cannot parse or convert
68 | report.write("[Error] version converting file '%s': '%s'\n" %
69 | (file_path, exc))
70 |
71 |
72 | def main(args=None):
73 | """
74 | Convenience script to automatically convert odML files
75 | within a directory (tree) to the newest file version.
76 | Check the cli help for details.
77 | :param args: Command line arguments
78 | """
79 | parser = docopt(__doc__, argv=args, version="0.1.0")
80 |
81 | root = parser['SEARCHDIR']
82 | if not os.path.isdir(root):
83 | print(docopt(__doc__, "-h"))
84 | exit(1)
85 |
86 | # Handle all supported odML file formats.
87 | if parser['-r']:
88 | xfiles = list(pathlib.Path(root).rglob('*.odml'))
89 | xfiles.extend(list(pathlib.Path(root).rglob('*.xml')))
90 | jfiles = list(pathlib.Path(root).rglob('*.json'))
91 | yfiles = list(pathlib.Path(root).rglob('*.yaml'))
92 | else:
93 | xfiles = list(pathlib.Path(root).glob('*.odml'))
94 | xfiles.extend(list(pathlib.Path(root).glob('*.xml')))
95 | jfiles = list(pathlib.Path(root).glob('*.json'))
96 | yfiles = list(pathlib.Path(root).glob('*.yaml'))
97 |
98 | out_root = os.getcwd()
99 | if parser["-o"]:
100 | if not os.path.isdir(parser["-o"]):
101 | print("[Error] Could not find output directory '%s'" % parser["-o"])
102 | exit(1)
103 |
104 | out_root = parser["-o"]
105 |
106 | out_dir = tempfile.mkdtemp(prefix="odmlconv_", dir=out_root)
107 |
108 | # Use this monkeypatch reporter until there is a way
109 | # to run the converters silently.
110 | report = StringIO()
111 | report.write("[Info] Files will be saved to '%s'\n" % out_dir)
112 |
113 | run_conversion(xfiles, out_dir, report)
114 | run_conversion(jfiles, out_dir, report, "JSON")
115 | run_conversion(yfiles, out_dir, report, "YAML")
116 |
117 | print(report.getvalue())
118 | report.close()
119 |
120 |
121 | def dep_note(args=None):
122 | """
123 | Print deprecation warning and call main function.
124 |
125 | :param args: Command line arguments
126 | """
127 |
128 | print("\n[DEPRECATION WARNING] 'odmlconversion' will be removed with \n"
129 | "the next version release. Please use 'odmlconvert' instead.\n")
130 | main(args)
131 |
132 |
133 | if __name__ == "__main__":
134 | main(sys.argv[1:])
135 |
--------------------------------------------------------------------------------
/.github/workflows/run-tests.yml:
--------------------------------------------------------------------------------
1 | name: run-tests
2 | on:
3 | push:
4 | branches:
5 | - master
6 | - dev
7 | pull_request:
8 | branches:
9 | - master
10 | jobs:
11 | linux_tests:
12 | runs-on: ${{ matrix.os }}
13 | strategy:
14 | matrix:
15 | os: [ubuntu-latest]
16 | python-version: ["3.8", "3.9", "3.10", "3.11"]
17 | steps:
18 | - uses: actions/checkout@v2
19 | - name: Setup Python ${{ matrix.python-version }}
20 | uses: actions/setup-python@v2
21 | with:
22 | python-version: ${{ matrix.python-version }}
23 | - name: Display Python version
24 | run: python -c "import sys; print(sys.version)"
25 | - name: Install dependencies
26 | run: |
27 | pip install .
28 | pip install -r requirements-test.txt
29 | - name: Run tests
30 | run: pytest
31 | # The default Linux Ubuntu 22+ does not support Python < 3.8 any longer
32 | # To keep testing Python < 3.8, use Ubuntu 20
33 | linux_legacy_tests:
34 | runs-on: ${{ matrix.os }}
35 | strategy:
36 | matrix:
37 | os: [ubuntu-20.04]
38 | python-version: ["3.7"]
39 | steps:
40 | - uses: actions/checkout@v2
41 | - name: Setup Python ${{ matrix.python-version }}
42 | uses: actions/setup-python@v2
43 | with:
44 | python-version: ${{ matrix.python-version }}
45 | - name: Display Python version
46 | run: python -c "import sys; print(sys.version)"
47 | - name: Install dependencies
48 | run: |
49 | pip install .
50 | pip install -r requirements-test.txt
51 | - name: Run tests
52 | run: pytest
53 | mac_tests:
54 | runs-on: ${{ matrix.os }}
55 | strategy:
56 | matrix:
57 | os: [macos-latest]
58 | # github actions currently resolves the pyyaml pip install package
59 | # for python 3.9 not to the required macos package, but to the
60 | # default pyyaml 6.0.tar package, which is a linux distribution
61 | # and fails on install. MacOS Python version 3.9 build will stay
62 | # disabled until this is resolved.
63 | #python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
64 | python-version: ["3.7", "3.8", "3.10"]
65 | steps:
66 | - uses: actions/checkout@v2
67 | - name: Setup Python ${{ matrix.python-version }}
68 | uses: actions/setup-python@v2
69 | with:
70 | python-version: ${{ matrix.python-version }}
71 | - name: Display Python version
72 | run: python -c "import sys; print(sys.version)"
73 | - name: Install dependencies
74 | run: |
75 | pip install .
76 | pip install -r requirements-test.txt
77 | - name: Run tests
78 | run: pytest
79 | # The current rdflib dependency causes random issues on some of
80 | # the github actions windows builds; since the appveyor builds
81 | # are passing without any issue, keep most gh actions windows
82 | # builds deactivated until the rdflib dependency has been
84 | # updated.
84 | win_tests:
85 | continue-on-error: true
86 | runs-on: ${{ matrix.os }}
87 | strategy:
88 | matrix:
89 | os: [windows-latest]
90 | python-version: ["3.10"]
91 | steps:
92 | - uses: actions/checkout@v2
93 | - name: Setup Python ${{ matrix.python-version }}
94 | uses: actions/setup-python@v2
95 | with:
96 | python-version: ${{ matrix.python-version }}
97 | - name: Display Python version
98 | run: python -c "import sys; print(sys.version)"
99 | - name: Install dependencies
100 | run: |
101 | pip install .
102 | pip install -r requirements-test.txt
103 | - name: Run tests
104 | run: pytest
105 | run-coverall:
106 | runs-on: [ubuntu-latest]
107 | steps:
108 | - uses: actions/checkout@v2
109 | - name: Setup Python 3.10
110 | uses: actions/setup-python@v2
111 | with:
112 | python-version: "3.10"
113 | - name: Display Python version
114 | run: python -c "import sys; print(sys.version)"
115 | - name: Install dependencies
116 | run: |
117 | pip install .
118 | pip install -r requirements-test.txt
119 | pip install pytest coveralls
120 | - name: Create coverage
121 | run: |
122 | coverage run --source=odml -m pytest test/
123 | - name: Submit to coveralls
124 | env:
125 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
126 | run: coveralls --service=github
127 | run-codecov:
128 | runs-on: [ubuntu-latest]
129 | steps:
130 | - uses: actions/checkout@v2
131 | - name: Setup Python 3.10
132 | uses: actions/setup-python@v2
133 | with:
134 | python-version: "3.10"
135 | - name: Install dependencies
136 | run: |
137 | pip install .
138 | pip install -r requirements-test.txt
139 | pip install pytest pytest-cov
140 | - name: Create coverage
141 | run: |
142 | pytest --cov=./ --cov-report=xml
143 | - uses: codecov/codecov-action@v1
144 | with:
145 | name: Submit Codecov coverage
146 | files: ./coverage.xml
147 | verbose: true # optional (default = false)
148 |
--------------------------------------------------------------------------------
/test/resources/version_conversion_int.yaml:
--------------------------------------------------------------------------------
1 | Document:
2 | author: author
3 | date: '2018-02-02'
4 | repository: https://terminologies.g-node.org/v1.0/terminologies.xml
5 | sections:
6 | - definition: def s1
7 | include: url s1
8 | name: sec_one
9 | properties:
10 | - definition: def prop1
11 | dependency: dep p1
12 | dependency_value: dep val p1
13 | name: prop_one
14 | values:
15 | - dtype: string
16 | - definition: def val 1
17 | dtype:
18 | - string
19 | filename: filename val 1
20 | reference: ref val 1
21 | uncertainty: 11
22 | unit: arbitrary
23 | value: '1'
24 | - definition: def val 2
25 | dtype:
26 | - int
27 | filename: filename val 2
28 | reference: ref val 2
29 | uncertainty: 12
30 | unit: arbitrary 2
31 | value: '2'
32 | - definition: def val 3
33 | dtype:
34 | - string
35 | filename: filename val 3
36 | reference: ref val 3
37 | uncertainty: 13
38 | unit: arbitrary 3
39 | value: '3'
40 | - definition: def prop2
41 | dependency: dep p2
42 | dependency_value: dep val p2
43 | name: prop_two
44 | values:
45 | - dtype: string
46 | - definition: def val 1
47 | dtype:
48 | - int
49 | filename: filename val 1
50 | reference: ref val 1
51 | uncertainty: 12
52 | unit: arbitrary
53 | value: '1'
54 | - definition: def val 1
55 | dtype:
56 | - int
57 | filename: filename val 1
58 | reference: ref val 1
59 | uncertainty: 12
60 | unit: arbitrary
61 | value: '2'
62 | - definition: def val 1
63 | dtype:
64 | - int
65 | filename: filename val 1
66 | reference: ref val 1
67 | uncertainty: 12
68 | unit: arbitrary
69 | value: '3'
70 | - definition: def val 1
71 | dtype:
72 | - int
73 | filename: filename val 1
74 | reference: ref val 1
75 | uncertainty: 12
76 | unit: arbitrary
77 | value: '4'
78 | - definition: def val 1
79 | dtype:
80 | - int
81 | filename: filename val 1
82 | reference: ref val 1
83 | uncertainty: 12
84 | unit: arbitrary
85 | value: '4'
86 | - definition: def val 1
87 | dtype:
88 | - int
89 | filename: filename val 1
90 | reference: ref val 1
91 | uncertainty: 12
92 | unit: arbitrary
93 | value: '4'
94 | - definition: def val 1
95 | dtype:
96 | - int
97 | filename: filename val 1
98 | reference: ref val 1
99 | uncertainty: 12
100 | unit: arbitrary
101 | value: '4'
102 | - definition: def val 1
103 | dtype:
104 | - int
105 | filename: filename val 1
106 | reference: ref val 1
107 | uncertainty: 12
108 | unit: arbitrary
109 | value: '4'
110 | - definition: def prop3
111 | dependency: dep p3
112 | dependency_value: dep val p3
113 | name: prop_three
114 | values:
115 | - dtype: string
116 | reference: ref s1
117 | sections:
118 | - definition: def subs1
119 | link: somewhere subs1
120 | name: subsection one
121 | properties:
122 | - definition: def prop1
123 | dependency: dep p1
124 | dependency_value: dep val p1
125 | name: prop one
126 | values:
127 | - dtype: string
128 | - definition: def prop3
129 | dependency: dep p3
130 | dependency_value: dep val p3
131 | name: prop three
132 | values:
133 | - dtype: string
134 | - definition: def val 1
135 | dtype:
136 | - int
137 | filename: filename val 1
138 | reference: ref val 1
139 | uncertainty: 12
140 | unit: arbitrary
141 | value: '4'
142 | - definition: def val 1
143 | dtype:
144 | - int
145 | filename: filename val 1
146 | reference: ref val 1
147 | uncertainty: 12
148 | unit: arbitrary
149 | value: '4'
150 | - definition: def val 1
151 | dtype:
152 | - int
153 | filename: filename val 1
154 | reference: ref val 1
155 | uncertainty: 12
156 | unit: arbitrary
157 | value: '4'
158 | - definition: def val 1
159 | dtype:
160 | - int
161 | filename: filename val 1
162 | reference: ref val 1
163 | uncertainty: 12
164 | unit: arbitrary
165 | value: '2'
166 | - definition: def prop2
167 | dependency: dep p2
168 | dependency_value: dep val p2
169 | name: prop two
170 | values:
171 | - dtype: string
172 | reference: ref subs1
173 | sections: []
174 | type: subsec
175 | type: mainsec
176 | - definition: def s2
177 | include: url s2
178 | name: section two
179 | properties: []
180 | reference: ref s2
181 | sections: []
182 | type: mainsec
183 | - definition: def s3
184 | link: somewhere s3
185 | name: section three
186 | properties: []
187 | reference: ref s3
188 | sections: []
189 | type: mainsec
190 | version: v1.13
191 | odml-version: '1'
192 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://github.com/G-Node/python-odml/actions)
2 | [](https://ci.appveyor.com/project/G-Node/python-odml/branch/master)
3 | [](https://coveralls.io/github/G-Node/python-odml)
4 | [](https://pypi.org/project/odML/)
5 | [](https://python-odml.readthedocs.io/en/latest/)
6 |
7 |
8 | # odML (Open metaData Markup Language) core library
9 |
10 | The open metadata Markup Language is a file-based format (XML, JSON, YAML) for storing
11 | metadata in an organised human- and machine-readable way. odML is an initiative to define
12 | and establish an open, flexible, and easy-to-use format to transport metadata.
13 |
14 | The Python-odML library can be easily installed via ```pip```. The source code is freely
15 | available on [GitHub](https://github.com/G-Node/python-odml). If you are not familiar
16 | with the version control system **git**, but still want to use it, have a look at the
17 | documentation available on the [git-scm website](https://git-scm.com/).
18 |
19 |
20 | # odML Project page
21 |
22 | More information about the project including related projects as well as tutorials and
23 | examples can be found at our odML [project page](https://g-node.github.io/python-odml).
24 |
25 |
26 | # Getting started
27 |
28 | ## Installation
29 |
30 | *python-odml* is most conveniently installed via pip.
31 |
32 | ```
33 | pip install odml
34 | ```
35 |
36 | To install the latest development version of odml you can use the git installation option of pip:
37 |
38 | ```
39 | pip install git+https://github.com/G-Node/python-odml
40 | ```
41 |
42 | Please note that this version might not be stable.
43 |
44 | ## Tutorial and examples
45 |
46 | - We have assembled a set of
47 | [tutorials](https://python-odml.readthedocs.io/en/latest/tutorial.html "Python Tutorial").
48 |
49 | ## Python convenience scripts
50 |
51 | The Python installation features multiple convenience commandline scripts.
52 |
53 | - `odmlconvert`: Converts odML files of previous file versions into the current one.
54 | - `odmltordf`: Converts odML files to the supported RDF version of odML.
55 | - `odmlview`: Render and browse local XML odML files in the webbrowser.
56 |
57 | All scripts provide detailed usage descriptions by adding the `-h` or `--help` flag to the command.
58 |
59 | odmlconvert -h
60 | odmltordf -h
61 | odmlview -h
62 |
63 |
64 | # Breaking changes
65 |
66 | odML Version 1.4 introduced breaking format and API changes compared to the previous
67 | versions of odML. Files saved in the previous format versions can be converted to a 1.4
68 | compatible format using the version converter from the odml/tools package.
69 |
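A minimal conversion sketch using this converter could look like the following (the file names are illustrative; the converter writes a new file and never overwrites the original):

```
from odml.tools.converters import VersionConverter

# Convert a pre-1.4 odML XML file to the current format version.
# The second argument names the format of the source file
# ("XML", "JSON" or "YAML").
VersionConverter("metadata_v1.0.xml").write_to_file("metadata_v1.4.xml", "XML")
```

The `odmlconvert` command line script mentioned above wraps this converter for whole directories.
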
70 | Be aware that the value dtype ```binary``` has been removed. Incorporating actual binary
71 | data into odML files is discouraged; provide references to the original files using the
72 | ```URL``` dtype instead.
73 |
74 | For details regarding the introduced changes please check the [github
75 | release notes](https://github.com/G-Node/python-odml/releases).
76 |
77 |
78 | # Dependencies
79 |
80 | * Python 3.7+
81 | * Python packages:
82 |
83 | * lxml (version 3.7.2)
84 | * yaml (version >= 5.1)
85 | * rdflib (version >=4.2.2)
86 |
87 | * These packages will be downloaded and installed automatically if the ```pip```
88 | method is used to install odML. Alternatively, they can be installed from the OS
89 | package manager. On Ubuntu, they are available as:
90 |
91 | * python-lxml
92 | * python-yaml
93 | * python-rdflib
94 |
95 | * If you prefer to install via the Python package manager, the following packages are
96 | required to build the lxml Python package on Ubuntu 14.04:
97 |
98 | * libxml2-dev
99 | * libxslt1-dev
100 | * lib32z1-dev
101 |
102 | ## Previous Python versions
103 |
104 | Python 2 has reached end of life. Current and future versions of odml are not Python 2 compatible. We removed support
105 | for Python 2 in August 2020 with version 1.5.2. We also recommend using a Python version >= 3.7. If a
106 | Python version < 3.7 is a requirement, the following dependency needs to be installed as well:
107 |
108 | * pip install
109 |   * enum34 (version 0.4.4)
110 | * apt install
111 |   * python-enum
112 |
113 | # Building from source
114 |
115 | To download the Python-odML library, please either use git and clone
116 | the repository from GitHub:
117 |
118 | ```
119 | $ git clone https://github.com/G-Node/python-odml.git
120 | ```
121 |
122 | If you don't want to use git, download the ZIP file also provided on
123 | GitHub to your computer (e.g. as above, into a "toolbox" folder in your
124 | home directory).
125 |
126 | To install the Python-odML library, enter the corresponding directory and run:
127 |
128 | ```
129 | $ cd python-odml
130 | $ python setup.py install
131 | ```
132 |
133 | **Note** The master branch is our current development branch; not all features might
134 | work as expected. Use the release tags instead.
135 |
136 |
137 | # Contributing and Governance
138 |
139 | See the [CONTRIBUTING](https://github.com/G-Node/python-odml/blob/master/CONTRIBUTING.md) document
140 | for more information on this.
141 |
142 |
143 | # Bugs & Questions
144 |
145 | Should you find a behaviour that is likely a bug, please file a bug report at
146 | [the github bug tracker](https://github.com/G-Node/python-odml/issues).
147 |
148 | If you have questions regarding the use of the library, feel free to join the
149 | [#gnode](http://webchat.freenode.net?channels=%23gnode) IRC channel on freenode.
150 |
--------------------------------------------------------------------------------
/test/resources/version_conversion_int.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | def s1
4 |
5 | def subs1
6 | ref subs1
7 | url subs1
8 | subsec
9 | subsec_one
10 |
11 | def prop1
12 | dep p1
13 | string
14 | prop_one
15 | dep val p1
16 |
17 |
18 | def prop3
19 | dep p3
20 | string
21 | 412arbitraryintref val 1filename val 1def val 1
22 | 412arbitraryintref val 1filename val 1def val 1
23 | 412arbitraryintref val 1filename val 1def val 1
24 | 212arbitraryintref val 1filename val 1def val 1
25 | prop three
26 | dep val p3
27 |
28 |
29 | def prop2
30 | dep p2
31 | string
32 | prop two
33 | dep val p2
34 |
35 |
36 | ref s1
37 | url s1
38 | mainsec
39 | sec_one
40 |
41 | def prop1
42 | dep p1
43 | string
44 | '1'
45 | 11
46 | arbitrary
47 | string
48 | ref val 1
49 | filename val 1
50 | def val 1
51 |
52 | 2
53 | 12
54 | arbitrary 2
55 | int
56 | ref val 2
57 | filename val 2
58 | def val 2
59 |
60 | '3'
61 | 13
62 | arbitrary 3
63 | string
64 | ref val 3
65 | filename val 3
66 | def val 3
67 |
68 | prop_one
69 | dep val p1
70 |
71 |
72 | def prop2
73 | dep p2
74 | string
75 | 112arbitraryintref val 1filename val 1def val 1
76 | 212arbitraryintref val 1filename val 1def val 1
77 | 312arbitraryintref val 1filename val 1def val 1
78 | 412arbitraryintref val 1filename val 1def val 1
79 | 412arbitraryintref val 1filename val 1def val 1
80 | 412arbitraryintref val 1filename val 1def val 1
81 | 412arbitraryintref val 1filename val 1def val 1
82 | 412arbitraryintref val 1filename val 1def val 1
83 | prop_two
84 | dep val p2
85 |
86 |
87 | def prop3
88 | dep p3
89 | string
90 | prop_three
91 | dep val p3
92 |
93 |
94 |
95 | def s2
96 | ref s2
97 | mainsec
98 | somewhere s2
99 | section two
100 |
101 |
102 | def s3
103 | ref s3
104 | url s3
105 | mainsec
106 | section three
107 |
108 | author
109 | 2018-07-07
110 | https://terminologies.g-node.org/v1.0/terminologies.xml
111 | v1.13
112 |
113 |
--------------------------------------------------------------------------------
/odml/scripts/odml_to_rdf.py:
--------------------------------------------------------------------------------
1 | """odmlToRDF
2 |
3 | odmlToRDF searches for odML files within a provided SEARCHDIR,
4 | converts them to the newest odML format version if required, and
5 | exports all found and resulting odML files to XML-formatted RDF.
6 | Original files will never be overwritten. New files will be
7 | written either to a new directory at the current or a specified
8 | location.
9 |
10 | Usage: odmltordf [-r] [-o OUT] SEARCHDIR
11 |
12 | Arguments:
13 | SEARCHDIR Directory to search for odML files.
14 |
15 | Options:
16 | -o OUT Output directory. Must exist if specified.
17 | If not specified, output files will be
18 | written to the current directory.
19 | -r Search recursively. Directory structures
20 | will not be retained.
21 | -h --help Show this screen.
22 | --version Show version.
23 | """
24 |
25 | import os
26 | import pathlib
27 | import sys
28 | import tempfile
29 |
30 | from docopt import docopt
31 |
32 | try:
33 | from StringIO import StringIO
34 | except ImportError:
35 | from io import StringIO
36 |
37 | import odml
38 |
39 | from odml.tools.odmlparser import ODMLReader, ODMLWriter
40 | from odml.tools.converters import VersionConverter as VerConf
41 |
42 |
43 | def run_rdf_export(odml_file, export_dir):
44 | """
45 | Convert an odML file to an XML RDF file and
46 | export it to an export directory with the
47 | same name as the original file and a '.rdf' file
48 | ending.
49 | :param odml_file: odML file to be converted to RDF.
50 |     :param export_dir: Directory the RDF file will be exported to.
51 | """
52 | out_name = os.path.splitext(os.path.basename(odml_file))[0]
53 | out_file = os.path.join(export_dir, "%s.rdf" % out_name)
54 | doc = ODMLReader().from_file(odml_file)
55 | ODMLWriter("RDF").write_file(doc, out_file)
56 |
57 |
58 | def run_conversion(file_list, output_dir, rdf_dir, report, source_format="XML"):
59 | """
60 | Convert a list of odML files to the latest odML version if required
61 | and export all files to XML RDF files in a specified output directory.
62 | :param file_list: list of files to be exported to RDF.
63 | :param output_dir: Directory where odML files converted to
64 | the latest odML version will be saved.
65 | :param rdf_dir: Directory where exported RDF files will be saved.
66 | :param report: Reporting StringIO.
67 | :param source_format: Original file format of the odML source files.
68 | XML, JSON and YAML are supported, default is XML.
69 | """
70 | # Exceptions are kept as broad as possible to ignore any non-odML or
71 |     # invalid odML files and to ensure that everything that can be converted will be.
72 | for curr_file in file_list:
73 | file_path = str(curr_file.absolute())
74 | report.write("[Info] Handling file '%s'\n" % file_path)
75 | # When loading the current file succeeds, it is
76 | # a recent odML format file and can be exported
77 | # to RDF right away. Otherwise it needs to be
78 | # converted to the latest odML version first.
79 | try:
80 | odml.load(file_path, source_format)
81 | report.write("[Info] RDF conversion of '%s'\n" % file_path)
82 | run_rdf_export(file_path, rdf_dir)
83 | except Exception as exc:
84 | out_name = os.path.splitext(os.path.basename(file_path))[0]
85 | outfile = os.path.join(output_dir, "%s_conv.xml" % out_name)
86 | try:
87 | VerConf(file_path).write_to_file(outfile, source_format)
88 | try:
89 | report.write("[Info] RDF conversion of '%s'\n" % outfile)
90 | run_rdf_export(outfile, rdf_dir)
91 | except Exception as exc:
92 | report.write("[Error] converting '%s' to RDF: '%s'\n" %
93 | (file_path, exc))
94 | except Exception as exc:
95 | # Ignore files we cannot parse or convert
96 | report.write("[Error] version converting file '%s': '%s'\n" %
97 | (file_path, exc))
98 |
99 |
100 | def main(args=None):
101 | """
102 | Convenience script to automatically convert odML files
103 | within a directory (tree) to RDF. Check the cli help
104 | for details.
105 | :param args: Command line arguments
106 | """
107 | parser = docopt(__doc__, argv=args, version="0.1.0")
108 |
109 | root = parser['SEARCHDIR']
110 | if not os.path.isdir(root):
111 | print(docopt(__doc__, "-h"))
112 | exit(1)
113 |
114 | # Handle all supported odML file formats.
115 | if parser['-r']:
116 | xfiles = list(pathlib.Path(root).rglob('*.odml'))
117 | xfiles.extend(list(pathlib.Path(root).rglob('*.xml')))
118 | jfiles = list(pathlib.Path(root).rglob('*.json'))
119 | yfiles = list(pathlib.Path(root).rglob('*.yaml'))
120 | else:
121 | xfiles = list(pathlib.Path(root).glob('*.odml'))
122 | xfiles.extend(list(pathlib.Path(root).glob('*.xml')))
123 | jfiles = list(pathlib.Path(root).glob('*.json'))
124 | yfiles = list(pathlib.Path(root).glob('*.yaml'))
125 |
126 | out_root = os.getcwd()
127 | if parser["-o"]:
128 | if not os.path.isdir(parser["-o"]):
129 | print("[Error] Could not find output directory '%s'" % parser["-o"])
130 | exit(1)
131 |
132 | out_root = parser["-o"]
133 |
134 | out_dir = tempfile.mkdtemp(prefix="odmlconv_", dir=out_root)
135 | rdf_dir = tempfile.mkdtemp(prefix="odmlrdf_", dir=out_dir)
136 |
137 | # Use this monkeypatch reporter until there is a way
138 | # to run the converters silently.
139 | report = StringIO()
140 | report.write("[Info] Files will be saved to '%s'\n" % out_dir)
141 |
142 | run_conversion(xfiles, out_dir, rdf_dir, report)
143 | run_conversion(jfiles, out_dir, rdf_dir, report, "JSON")
144 | run_conversion(yfiles, out_dir, rdf_dir, report, "YAML")
145 |
146 | print(report.getvalue())
147 | report.close()
148 |
149 |
150 | if __name__ == "__main__":
151 | main(sys.argv[1:])
152 |
--------------------------------------------------------------------------------
/odml/format.py:
--------------------------------------------------------------------------------
1 | """
2 | The module provides general format information and mappings of
3 | XML and RDF attributes to their Python class equivalents.
4 | """
5 |
6 | from rdflib import Namespace
7 |
8 | import odml
9 |
10 |
11 | class Format(object):
12 | """
13 | Base format class for all odML object formats. The formats are required
14 | when the corresponding odML objects are serialized to or loaded from files.
15 | """
16 | _name = ""
17 | _args = {}
18 | _map = {}
19 | _rev_map = None
20 | _rdf_map = {}
21 | _rdf_type = None
22 | _ns = Namespace("https://g-node.org/odml-rdf#")
23 |
24 | @property
25 | def name(self):
26 | """Returns the name of the current odML format"""
27 | return self._name
28 |
29 | @property
30 | def arguments(self):
31 | """Returns all items in the current odML format argument dict"""
32 | return self._args.items()
33 |
34 | @property
35 | def arguments_keys(self):
36 | """Returns all keys of the current odML format argument dict"""
37 | return self._args.keys()
38 |
39 | def map(self, name):
40 | """ Maps an odml name to a python name """
41 | return self._map.get(name, name)
42 |
43 | @property
44 | def map_keys(self):
45 | """Returns all keys of the current odML format map dict"""
46 | return self._map.keys()
47 |
48 | def rdf_map(self, name):
49 |         """ Maps a python name to an odml rdf namespace """
50 | return self._rdf_map.get(name, name)
51 |
52 | @property
53 | def rdf_map_keys(self):
54 | """Returns all keys of the current odML format RDF map dict"""
55 | return self._rdf_map.keys()
56 |
57 | @property
58 | def rdf_map_items(self):
59 | """Returns all items of the current odML format RDF map dict"""
60 | return self._rdf_map.items()
61 |
62 | @property
63 | def rdf_type(self):
64 | """ Return rdf type of an object """
65 | return self._rdf_type
66 |
67 | @staticmethod
68 | def namespace():
69 |         """ Return the link to the current odml namespace """
70 | return Format._ns
71 |
72 | def revmap(self, name):
73 | """ Maps a python name to an odml name """
74 | if self._rev_map is None:
75 | # create the reverse map only if requested
76 | self._rev_map = {}
77 | for k, val in self._map.items():
78 | self._rev_map[val] = k
79 |
80 | return self._rev_map.get(name)
81 |
82 | def __iter__(self):
83 | """ Iterates each python property name """
84 | for k in self._args:
85 | yield self.map(k)
86 |
87 | def create(self, *args, **kargs):
88 | """
89 | This method will call the init method of the odML class implementation
90 | corresponding to the specific format odML class and return the initialised
91 | class instance. e.g. format.Document.create() will return an initialised
92 | odml.Document instance.
93 | """
94 | return getattr(odml, self.__class__.__name__)(*args, **kargs)
95 |
96 |
97 | class Property(Format):
98 | """
99 | The format class for the odml Property class.
100 | """
101 | _name = "property"
102 | _ns = Format._ns
103 | _rdf_type = _ns.Property
104 | _args = {
105 | 'id': 0,
106 | 'name': 1,
107 | 'value': 0,
108 | 'unit': 0,
109 | 'definition': 0,
110 | 'dependency': 0,
111 | 'dependencyvalue': 0,
112 | 'uncertainty': 0,
113 | 'reference': 0,
114 | 'type': 0,
115 | 'value_origin': 0,
116 | 'val_cardinality': 0
117 | }
118 | _map = {
119 | 'dependencyvalue': 'dependency_value',
120 | 'type': 'dtype',
121 | 'id': 'oid',
122 | 'value': 'values'
123 | }
124 | _rdf_map = {
125 | 'id': _ns.hasId,
126 | 'name': _ns.hasName,
127 | 'definition': _ns.hasDefinition,
128 | 'dtype': _ns.hasDtype,
129 | 'unit': _ns.hasUnit,
130 | 'uncertainty': _ns.hasUncertainty,
131 | 'reference': _ns.hasReference,
132 | 'value': _ns.hasValue,
133 | 'value_origin': _ns.hasValueOrigin
134 | }
135 |
136 |
137 | class Section(Format):
138 | """
139 | The format class for the odml Section class.
140 | """
141 | _name = "section"
142 | _ns = Format._ns
143 | _rdf_type = _ns.Section
144 | _args = {
145 | 'id': 0,
146 | 'type': 1,
147 | 'name': 1,
148 | 'definition': 0,
149 | 'reference': 0,
150 | 'link': 0,
151 | 'repository': 0,
152 | 'section': 0,
153 | 'include': 0,
154 | 'property': 0,
155 | 'sec_cardinality': 0,
156 | 'prop_cardinality': 0
157 | }
158 | _map = {
159 | 'section': 'sections',
160 | 'property': 'properties',
161 | 'id': 'oid'
162 | }
163 | _rdf_map = {
164 | 'id': _ns.hasId,
165 | 'name': _ns.hasName,
166 | 'definition': _ns.hasDefinition,
167 | 'type': _ns.hasType,
168 | 'repository': _ns.hasTerminology,
169 | 'reference': _ns.hasReference,
170 | 'sections': _ns.hasSection,
171 | 'properties': _ns.hasProperty,
172 | }
173 |
174 |
175 | class Document(Format):
176 | """
177 | The format class for the odml Document class.
178 | """
179 | _name = "odML"
180 | _ns = Format._ns
181 | _rdf_type = _ns.Document
182 | _args = {
183 | 'id': 0,
184 | 'version': 0,
185 | 'author': 0,
186 | 'date': 0,
187 | 'section': 0,
188 | 'repository': 0,
189 | }
190 | _map = {
191 | 'section': 'sections',
192 | 'id': 'oid'
193 | }
194 | _rdf_map = {
195 | 'id': _ns.hasId,
196 | 'author': _ns.hasAuthor,
197 | 'date': _ns.hasDate,
198 | 'version': _ns.hasDocVersion,
199 | 'repository': _ns.hasTerminology,
200 | 'sections': _ns.hasSection
201 | }
202 |
203 |
204 | Document = Document()
205 | Section = Section()
206 | Property = Property()
207 |
208 | __all__ = [Document, Section, Property]
209 |
--------------------------------------------------------------------------------
/test/test_format_converter.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import tempfile
4 | import unittest
5 |
6 | from contextlib import contextmanager
7 |
8 | from odml.tools.converters import FormatConverter
9 | from .util import create_test_dir
10 |
11 | FC = FormatConverter
12 |
13 | # TODO The used NamedTemporaryFile does not play nice with Windows;
14 | # deactivating all affected tests for Windows until this has been fixed.
15 |
16 |
17 | class TestFormatConverter(unittest.TestCase):
18 | def setUp(self):
19 | self.doc = """
20 |
24 |
25 | """
26 | self.tmp_dir = None
27 |
28 | def tearDown(self):
29 | if self.tmp_dir and os.path.exists(self.tmp_dir):
30 | shutil.rmtree(self.tmp_dir)
31 |
32 | @contextmanager
33 | def assertNotRaises(self, exc_type):
34 | try:
35 | yield None
36 | except exc_type:
37 | raise self.failureException('{} raised'.format(exc_type.__name__))
38 |
39 | def _create_open_file(self, in_dir):
40 | in_file = tempfile.NamedTemporaryFile(mode='a+', suffix=".xml", dir=in_dir)
41 | in_file.write(self.doc)
42 | in_file.seek(0)
43 |
44 | return in_file
45 |
46 | def test_convert(self):
47 | if os.name == 'nt':
48 | raise unittest.SkipTest("Skipping test on Windows")
49 |
50 | self.test_convert_dir_no_output_dir(False, FC.convert)
51 | self.test_convert_dir_no_output_dir(True, FC.convert)
52 | self.test_convert_dir_with_output_dir_specified(FC.convert)
53 |
54 | def test_convert_dir(self):
55 | if os.name == 'nt':
56 | raise unittest.SkipTest("Skipping test on Windows")
57 |
58 | with self.assertRaises(ValueError):
59 | FC.convert_dir(None, None, False, "not valid format")
60 |
61 |         # Testing recursive part of the converter for a non-nested input dir
62 | self.test_convert_dir_no_output_dir(True)
63 |
64 | def test_convert_dir_no_output_dir(self, recursive=False, func=None):
65 | if os.name == 'nt':
66 | raise unittest.SkipTest("Skipping test on Windows")
67 |
68 | self.tmp_dir = create_test_dir(__file__)
69 | in_dir = tempfile.mkdtemp(dir=self.tmp_dir)
70 | in_file = self._create_open_file(in_dir)
71 | in_file2 = self._create_open_file(in_dir)
72 |
73 | if not func:
74 | FC.convert_dir(in_dir, None, recursive, "odml")
75 | else:
76 | if recursive:
77 | func([in_dir, "odml", "-r"])
78 | else:
79 | func([in_dir, "odml"])
80 |
81 | files = []
82 | for dir_path, dir_names, file_names in os.walk(self.tmp_dir):
83 | for file_name in file_names:
84 | files.append(os.path.join(dir_path, file_name))
85 |
86 |         # check that the input files are in the correct directory
87 | self.assertIn(os.path.join(in_dir, in_file.name), files)
88 | self.assertIn(os.path.join(in_dir, in_file2.name), files)
89 |
90 |         # check that the output files are in the correct directory and have the intended names
91 | in_files = [in_file.name, in_file2.name]
92 | for file in in_files:
93 | odml_file_name = os.path.basename(file)
94 | odml_file_name = odml_file_name.replace(".xml", ".odml")
95 | root, odml_dir_name = os.path.split(in_dir)
96 | odml_dir_name = odml_dir_name + "_odml"
97 | self.assertIn(os.path.join(root, odml_dir_name, odml_file_name), files)
98 |
99 | in_file.close()
100 | in_file2.close()
101 |
102 | def test_convert_dir_with_output_dir_specified(self, func=None):
103 | if os.name == 'nt':
104 | raise unittest.SkipTest("Skipping test on Windows")
105 |
106 | # Testing FC.convert_dir(in_dir, out_dir, False, "odml")
107 | self.tmp_dir = create_test_dir(__file__)
108 | in_dir = tempfile.mkdtemp(dir=self.tmp_dir)
109 | out_dir = tempfile.mkdtemp(dir=self.tmp_dir)
110 | in_file = self._create_open_file(in_dir)
111 | in_file2 = self._create_open_file(in_dir)
112 |
113 | if not func:
114 | FC.convert_dir(in_dir, out_dir, False, "odml")
115 | else:
116 | func([in_dir, "odml", "-out", out_dir])
117 |
118 | in_files = []
119 | out_files = []
120 | for dir_path, dir_names, file_names in os.walk(in_dir):
121 | for file_name in file_names:
122 | in_files.append(os.path.join(dir_path, file_name))
123 |
124 | for dir_path, dir_names, file_names in os.walk(out_dir):
125 | for file_name in file_names:
126 | out_files.append(os.path.join(dir_path, file_name))
127 |
128 |         # check that the input files are in the correct directory
129 | self.assertIn(os.path.join(in_dir, in_file.name), in_files)
130 | self.assertIn(os.path.join(in_dir, in_file2.name), in_files)
131 |
132 |         # check that the output files are in the correct directory and have the intended names
133 | check_in_files = [in_file.name, in_file2.name]
134 | for file in check_in_files:
135 | _, out_file_name = os.path.split(file)
136 | pre, ext = os.path.splitext(out_file_name)
137 | out_file_name = out_file_name.replace(out_file_name, pre + ".odml")
138 | self.assertIn(os.path.join(out_dir, out_file_name), out_files)
139 |
140 | in_file.close()
141 | in_file2.close()
142 |
143 | def test_check_io_directory(self):
144 | self.tmp_dir = create_test_dir(__file__)
145 | out_dir = tempfile.mkdtemp(dir=self.tmp_dir)
146 | in_dir = tempfile.mkdtemp(dir=self.tmp_dir)
147 | with self.assertRaises(ValueError):
148 | FC._check_input_output_directory(None, None)
149 | with self.assertRaises(ValueError):
150 | FC._check_input_output_directory("/not_valid_path", None)
151 | with self.assertRaises(ValueError):
152 | FC._check_input_output_directory(in_dir, "/not_valid_path")
153 |         with self.assertNotRaises(ValueError):
154 |             FC._check_input_output_directory(in_dir, None)
155 |         with self.assertNotRaises(ValueError):
156 |             FC._check_input_output_directory(in_dir, out_dir)
157 | 
--------------------------------------------------------------------------------
/doc/example_odMLs/sample_odml.yaml:
--------------------------------------------------------------------------------
1 | Document:
2 | author: D. N. Adams
3 | date: '1979-10-12'
4 | sections:
5 | - definition: Information on the crew
6 | name: TheCrew
7 | properties:
8 | - definition: List of crew members names
9 | dtype: person
10 | name: NameCrewMembers
11 | value:
12 | - Arthur Philip Dent
13 | - Zaphod Beeblebrox
14 | - Tricia Marie McMillan
15 | - Ford Prefect
16 | - definition: Number of crew members
17 | dtype: int
18 | name: NoCrewMembers
19 | value:
20 | - 4
21 | sections:
22 | - definition: Information on Arthur Dent
23 | name: Arthur Philip Dent
24 | properties:
25 | - definition: Species to which subject belongs to
26 | dtype: string
27 | name: Species
28 | value:
29 | - Human
30 | - definition: Nickname(s) of the subject
31 | dtype: string
32 | name: Nickname
33 | value:
34 | - The sandwich-maker
35 | - definition: Occupation of the subject
36 | dtype: string
37 | name: Occupation
38 | value:
39 | - '-'
40 | - definition: Sex of the subject
41 | dtype: string
42 | name: Gender
43 | value:
44 | - male
45 | - definition: Home planet of the subject
46 | dtype: string
47 | name: HomePlanet
48 | value:
49 | - Earth
50 | sections: []
51 | type: crew/person
52 | - definition: Information on Zaphod Beeblebrox
53 | name: Zaphod Beeblebrox
54 | properties:
55 | - definition: Species to which subject belongs to
56 | dtype: string
57 | name: Species
58 | value:
59 | - Betelgeusian
60 | - definition: Nickname(s) of the subject
61 | dtype: string
62 | name: Nickname
63 | value:
64 | - '-'
65 | - definition: Occupation of the subject
66 | dtype: string
67 | name: Occupation
68 | value:
69 | - Ex-Galactic President
70 | - definition: Sex of the subject
71 | dtype: string
72 | name: Gender
73 | value:
74 | - male
75 | - definition: Home planet of the subject
76 | dtype: string
77 | name: HomePlanet
78 | value:
79 | - A planet in the vicinity of Betelgeuse
80 | sections: []
81 | type: crew/person
82 | - definition: Information on Trillian Astra
83 | name: Tricia Marie McMillan
84 | properties:
85 | - definition: Species to which subject belongs to
86 | dtype: string
87 | name: Species
88 | value:
89 | - Betelgeusian
90 | - definition: Nickname(s) of the subject
91 | dtype: string
92 | name: Nickname
93 | value:
94 | - Trillian Astra
95 | - definition: Occupation of the subject
96 | dtype: string
97 | name: Occupation
98 | value:
99 | - '-'
100 | - definition: Sex of the subject
101 | dtype: string
102 | name: Gender
103 | value:
104 | - female
105 | - definition: Home planet of the subject
106 | dtype: string
107 | name: HomePlanet
108 | value:
109 | - Earth
110 | sections: []
111 | type: crew/person
112 | - definition: Information on Ford Prefect
113 | name: Ford Prefect
114 | properties:
115 | - definition: Species to which subject belongs to
116 | dtype: string
117 | name: Species
118 | value:
119 | - Betelgeusian
120 | - definition: Nickname(s) of the subject
121 | dtype: string
122 | name: Nickname
123 | value:
124 | - Ix
125 | - definition: Occupation of the subject
126 | dtype: string
127 | name: Occupation
128 | value:
129 | - Researcher for the Hitchhiker's Guide to the Galaxy
130 | - definition: Sex of the subject
131 | dtype: string
132 | name: Gender
133 | value:
134 | - male
135 | - definition: Home planet of the subject
136 | dtype: string
137 | name: HomePlanet
138 | value:
139 | - A planet in the vicinity of Betelgeuse
140 | sections: []
141 | type: crew/person
142 | type: crew
143 | - definition: Information on the crew
144 | name: TheStarship
145 | properties:
146 | - definition: Name of person/device
147 | dtype: string
148 | name: Name
149 | value:
150 | - Heart of Gold
151 | - definition: Owner status of device
152 | dtype: string
153 | name: OwnerStatus
154 | value:
155 | - stolen
156 | - definition: Type of drive
157 | dtype: string
158 | name: DriveType
159 | value:
160 | - Infinite Propability Drive
161 | - definition: Technology used to built device
162 | dtype: string
163 | name: Technology
164 | value:
165 | - secret
166 | - definition: Length of device
167 | dtype: float
168 | name: Length
169 | unit: m
170 | value:
171 | - 150.0
172 | - definition: Shape of device
173 | dtype: string
174 | name: Shape
175 | value:
176 | - various
177 | - definition: Planet where device was constructed
178 | dtype: string
179 | name: FactoryPlanet
180 | value:
181 | - Damogran
182 | sections:
183 | - definition: Information on cybernetics present on the ship
184 | name: Cybernetics
185 | properties:
186 | - definition: Type of robots
187 | dtype: string
188 | name: RobotType
189 | value:
190 | - Genuine People Personalities
191 | - definition: Manufacturer of robots
192 | dtype: string
193 | name: Manufacturer
194 | value:
195 | - Sirius Cybernetics Corporation
196 | - definition: Number of cybernetic robots on the ship
197 | dtype: int
198 | name: NoOfCybernetics
199 | value:
200 | - 2
201 | sections:
202 | - definition: Information on Marvin
203 | name: Marvin
204 | properties: []
205 | sections: []
206 | type: starship/cybernetics
207 | - definition: Information on Eddie
208 | name: Eddie
209 | properties: []
210 | sections: []
211 | type: starship/cybernetics
212 | type: starship/cybernetics
213 | type: crew
214 | version: '42'
215 | odml-version: '1.1'
216 |
--------------------------------------------------------------------------------
/test/test_rdf_reader.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 |
4 | from rdflib import Literal
5 |
6 | from odml import Property, Section, Document
7 | from odml.format import Format
8 | from odml.tools.rdf_converter import RDFWriter, RDFReader, rdflib_version_major
9 | from odml.tools.parser_utils import ParserException
10 |
11 | ODMLNS = Format.namespace()
12 |
13 |
14 | class TestRDFReader(unittest.TestCase):
15 |
16 | def setUp(self):
17 | doc = Document()
18 | sec = Section(name="sec1", type="test", parent=doc)
19 | Section(name="sec2", type="test", parent=sec)
20 | Property(name="prop1", values=[1.3], parent=sec)
21 |
22 | self.doc = doc
23 |
24 | def test_rdf_formats(self):
25 | """
26 |         Test if a document is correctly converted to RDF and back to odML for the turtle, xml and n3 formats.
27 | """
28 | rdf_writer = RDFWriter(self.doc).get_rdf_str()
29 | rdf_reader = RDFReader().from_string(rdf_writer, "turtle")
30 | self.assertEqual(len(rdf_reader[0].sections), 1)
31 | self.assertEqual(len(rdf_reader[0].sections[0].sections), 1)
32 | self.assertEqual(len(rdf_reader[0].sections[0].properties), 1)
33 |
34 | rdf_writer = RDFWriter(self.doc).get_rdf_str("xml")
35 | rdf_reader = RDFReader().from_string(rdf_writer, "xml")
36 | self.assertEqual(len(rdf_reader[0].sections), 1)
37 | self.assertEqual(len(rdf_reader[0].sections[0].sections), 1)
38 | self.assertEqual(len(rdf_reader[0].sections[0].properties), 1)
39 |
40 | rdf_writer = RDFWriter(self.doc).get_rdf_str("n3")
41 | rdf_reader = RDFReader().from_string(rdf_writer, "n3")
42 | self.assertEqual(len(rdf_reader[0].sections), 1)
43 | self.assertEqual(len(rdf_reader[0].sections[0].sections), 1)
44 | self.assertEqual(len(rdf_reader[0].sections[0].properties), 1)
45 |
46 | def test_doc(self):
47 | """
48 | Test if a document and its attributes get converted correctly from rdf to odml.
49 | """
50 | doc = Document()
51 | doc.author = "D. N. Adams"
52 | doc.version = 42
53 | doc.date = datetime.date(1979, 10, 12)
54 |
55 | rdf_writer = RDFWriter(doc).get_rdf_str()
56 | rdf_reader = RDFReader().from_string(rdf_writer, "turtle")
57 |
58 | self.assertEqual(rdf_reader[0].author, "D. N. Adams")
59 | self.assertEqual(rdf_reader[0].version, "42")
60 | self.assertEqual(rdf_reader[0].date, datetime.date(1979, 10, 12))
61 |
62 | def test_section(self):
63 | """
64 | Test if a section and its attributes get converted correctly from rdf to odml.
65 | """
66 | doc = Document()
67 | sec1 = Section(name="sec1", type="test", parent=doc, definition="Interesting stuff.",
68 | reference="The Journal")
69 | Section(name="sec2", type="test", parent=sec1)
70 |
71 | rdf_writer = RDFWriter(doc).get_rdf_str()
72 | rdf_reader = RDFReader().from_string(rdf_writer, "turtle")
73 |
74 | self.assertEqual(rdf_reader[0].sections[0].name, "sec1")
75 | self.assertEqual(rdf_reader[0].sections[0].type, "test")
76 | self.assertEqual(rdf_reader[0].sections[0].id, sec1.id)
77 | self.assertEqual(rdf_reader[0].sections[0].definition, "Interesting stuff.")
78 | self.assertEqual(rdf_reader[0].sections[0].reference, "The Journal")
79 | self.assertEqual(rdf_reader[0].sections[0].parent, rdf_reader[0])
80 | self.assertEqual(len(rdf_reader[0].sections[0].sections), 1)
81 |
82 | def test_property(self):
83 | """
84 | Test if a property and its attributes get converted correctly from rdf to odml.
85 | """
86 | doc = Document()
87 | sec1 = Section(name="sec1", type="test", parent=doc)
88 | prop2 = Property(name="numbers", definition="any number", dtype="float", parent=sec1,
89 | values=[1, 3.4, 67.8, -12], unit="meter", uncertainty=0.8,
90 | value_origin="force", reference="Experiment 1")
91 |
92 | rdf_writer = RDFWriter(doc).get_rdf_str()
93 | rdf_reader = RDFReader().from_string(rdf_writer, "turtle")
94 |
95 | prop = rdf_reader[0].sections[0].properties["numbers"]
96 |
97 | self.assertEqual(prop.name, "numbers")
98 | self.assertEqual(prop.dtype, "float")
99 | self.assertEqual(prop.id, prop2.id)
100 | self.assertEqual(prop.parent, rdf_reader[0].sections[0])
101 | self.assertEqual(len(prop.values), 4)
102 | self.assertEqual(prop.values, [1, 3.4, 67.8, -12])
103 | self.assertEqual(prop.definition, "any number")
104 | self.assertEqual(prop.unit, "meter")
105 | self.assertEqual(prop.uncertainty, "0.8")
106 | self.assertEqual(prop.value_origin, "force")
107 | self.assertEqual(prop.reference, "Experiment 1")
108 |
109 | def test_mandatory_attrs_section(self):
110 | """
111 |         Test if a ParserException is raised if mandatory attributes are missing for a section.
112 | """
113 | rdf_writer = RDFWriter([self.doc])
114 | rdf_writer.convert_to_rdf()
115 | for rdf_sec in rdf_writer.graph.subjects(predicate=ODMLNS.hasName, object=Literal("sec1")):
116 | rdf_writer.graph.remove((rdf_sec, ODMLNS.hasName, Literal("sec1")))
117 |
118 | # support both >=6.0.0 and <6.0.0 versions of rdflib for the time being
119 | if rdflib_version_major() < 6:
120 | new_graph = rdf_writer.graph.serialize(format="turtle").decode("utf-8")
121 | else:
122 | new_graph = rdf_writer.graph.serialize(format="turtle")
123 |
124 | with self.assertRaises(ParserException):
125 | RDFReader().from_string(new_graph, "turtle")
126 |
127 | def test_mandatory_attrs_property(self):
128 | """
129 |         Test if a ParserException is raised if mandatory attributes are missing for a property.
130 | """
131 | rdf_writer = RDFWriter([self.doc])
132 | rdf_writer.convert_to_rdf()
133 | for rdf_sec in rdf_writer.graph.subjects(predicate=ODMLNS.hasName, object=Literal("prop1")):
134 | rdf_writer.graph.remove((rdf_sec, ODMLNS.hasName, Literal("prop1")))
135 |
136 | # support both >=6.0.0 and <6.0.0 versions of rdflib for the time being
137 | if rdflib_version_major() < 6:
138 | new_graph = rdf_writer.graph.serialize(format="turtle").decode("utf-8")
139 | else:
140 | new_graph = rdf_writer.graph.serialize(format="turtle")
141 |
142 | with self.assertRaises(ParserException):
143 | RDFReader().from_string(new_graph, "turtle")
144 |
--------------------------------------------------------------------------------