├── tests
├── __init__.py
├── data
│ └── apps
│ │ ├── example_app
│ │ ├── input
│ │ │ ├── config.json
│ │ │ └── manifest.json
│ │ └── twine.json
│ │ ├── simple_app
│ │ ├── input
│ │ │ └── config.json
│ │ └── twine.json
│ │ └── empty_app
│ │ └── twine.json
├── test_monitors.py
├── test_utils.py
├── base.py
├── test_credentials.py
├── test_schema_strands.py
├── test_children.py
├── test_twine.py
└── test_manifest_strands.py
├── twined
├── schema
│ ├── __init__.py
│ └── twine_schema.json
├── utils
│ ├── __init__.py
│ ├── strings.py
│ ├── encoders.py
│ └── load_json.py
├── migrations
│ └── __init__.py
├── __init__.py
└── exceptions.py
├── docs
├── .gitignore
├── source
│ ├── _ext
│ │ ├── sphinx_accordion
│ │ │ ├── __init__.py
│ │ │ ├── semantic-ui-2.4.2
│ │ │ │ ├── .versions
│ │ │ │ ├── accordion.min.css
│ │ │ │ ├── accordion.min.js
│ │ │ │ └── accordion.css
│ │ │ ├── accordion.css
│ │ │ ├── README.md
│ │ │ ├── accordion.js
│ │ │ └── accordion.py
│ │ └── googleanalytics.py
│ ├── favicon.ico
│ ├── images
│ │ ├── schema_form_example.png
│ │ └── digital_twin_hierarchy.svg
│ ├── anatomy_children.rst
│ ├── quick_start.rst
│ ├── about.rst
│ ├── quick_start_installation.rst
│ ├── version_history.rst
│ ├── about_requirements.rst
│ ├── lifecycle.rst
│ ├── anatomy_monitors.rst
│ ├── about_digital_twins.rst
│ ├── anatomy_credentials.rst
│ ├── deployment.rst
│ ├── license.rst
│ ├── quick_start_create_your_first_twine.rst
│ ├── anatomy.rst
│ ├── about_other_considerations.rst
│ ├── index.rst
│ ├── anatomy_values.rst
│ ├── about_introducing_json_schema.rst
│ ├── examples.rst
│ └── conf.py
└── requirements.txt
├── setup.cfg
├── .coveragerc
├── .readthedocs.yaml
├── examples
├── damage_classifier_service
│ ├── twine.json
│ └── data
│ │ └── configuration_manifest.json
├── met_mast_scada_service
│ ├── strands
│ │ ├── output_manifest.json
│ │ └── input_manifest.json
│ └── data
│ │ ├── output_manifest.json
│ │ └── input_manifest.json
└── wind_tunnel_datalogger_service
│ ├── strands
│ └── output_manifest.json
│ └── data
│ └── output_manifest.json
├── .github
└── workflows
│ ├── synced-add-issues-to-octue-board.yaml
│ ├── update-pull-request.yml
│ ├── python-ci.yml
│ ├── release.yml
│ └── codeql.yml
├── LICENSE
├── .pre-commit-config.yaml
├── pyproject.toml
├── .gitignore
└── README.md
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/twined/schema/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | doctrees
2 | html
3 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/data/apps/example_app/input/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "n_iterations": 16
3 | }
4 |
--------------------------------------------------------------------------------
/docs/source/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/octue/twined/HEAD/docs/source/favicon.ico
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [pydocstyle]
2 | ignore = D100, D101, D104, D105, D107, D203, D205, D213, D301, D400, D415
3 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions:
--------------------------------------------------------------------------------
1 | meteor@1.1.6
2 | semantic:ui-accordion@2.1.3
3 | underscore@1.0.3
4 |
--------------------------------------------------------------------------------
/docs/source/images/schema_form_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/octue/twined/HEAD/docs/source/images/schema_form_example.png
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [paths]
2 | source =
3 | twined/
4 |
5 | [run]
6 | omit =
7 | env/*
8 | tests/*
9 | */tests/*
10 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | # Required by the python script for building documentation
2 | Sphinx
3 | sphinx-rtd-theme
4 | sphinx-tabs
5 | sphinx-charts
6 | jsonschema
7 |
--------------------------------------------------------------------------------
/docs/source/anatomy_children.rst:
--------------------------------------------------------------------------------
1 | .. _children_strand:
2 |
3 | ===============
4 | Children Strand
5 | ===============
6 |
7 | .. ATTENTION::
8 |
9 | Coming Soon!
10 |
--------------------------------------------------------------------------------
/twined/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .encoders import TwinedEncoder # noqa: F401
2 | from .load_json import load_json # noqa: F401
3 | from .strings import trim_suffix # noqa: F401
4 |
--------------------------------------------------------------------------------
/twined/migrations/__init__.py:
--------------------------------------------------------------------------------
1 | """A subpackage containing any translations from deprecated code to new code for deprecations that haven't been phased
2 | out yet. See https://github.com/octue/twined/issues/102.
3 | """
4 |
--------------------------------------------------------------------------------
/docs/source/quick_start.rst:
--------------------------------------------------------------------------------
1 | .. _quick_start:
2 |
3 | ============
4 | Quick Start
5 | ============
6 |
7 | .. toctree::
8 | :maxdepth: 2
9 |
10 | quick_start_installation
11 | quick_start_create_your_first_twine
12 |
--------------------------------------------------------------------------------
/tests/data/apps/simple_app/input/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "width": 600,
3 | "height": 600,
4 | "max_iterations": 16,
5 | "color_scale": "YlGnBu",
6 | "x_range": [-1.5, 0.6],
7 | "y_range": [-1.26, 1.26],
8 | "type": "png"
9 | }
10 |
--------------------------------------------------------------------------------
/twined/utils/strings.py:
--------------------------------------------------------------------------------
def trim_suffix(text, suffix):
    """Return `text` with `suffix` removed from its end, or `text` unchanged if it doesn't end with `suffix`.

    :param str text: the string to trim
    :param str suffix: the suffix to remove if present
    :return str: the trimmed string
    """
    if text.endswith(suffix):
        return text[: len(text) - len(suffix)]
    return text
6 |
--------------------------------------------------------------------------------
/twined/__init__.py:
--------------------------------------------------------------------------------
1 | from . import (
2 | exceptions, # noqa: F401
3 | utils, # noqa: F401
4 | )
5 | from .twine import ( # noqa: F401
6 | ALL_STRANDS,
7 | CHILDREN_STRANDS,
8 | CREDENTIAL_STRANDS,
9 | MANIFEST_STRANDS,
10 | SCHEMA_STRANDS,
11 | Twine,
12 | )
13 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/accordion.css:
--------------------------------------------------------------------------------
1 | .sphinx-accordion.accordion {
2 | margin-bottom: 1.75em;
3 | }
4 |
5 | .sphinx-accordion.title p {
6 | display: inline-block;
7 | margin-top: 8px;
8 | margin-right: 0px;
9 | margin-bottom: 8px;
10 | margin-left: 0px;
11 | }
12 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ```
4 | extensions = [
5 | ...
6 | 'sphinx_accordion.accordion'
7 | ...
8 | ]
9 | ```
10 |
11 | ```
12 | .. accordion::
13 |
14 | .. accordion-row:: The Title
15 |
16 | The Contents
17 |
18 | .. accordion-row:: The Second Title
19 |
20 | The Contents 2
21 | ```
22 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | version: 2
5 |
6 | build:
7 | os: ubuntu-22.04
8 | tools:
9 | python: "3.10"
10 |
11 | sphinx:
12 | configuration: docs/source/conf.py
13 |
14 | python:
15 | install:
16 | - requirements: docs/requirements.txt
17 |
--------------------------------------------------------------------------------
/examples/damage_classifier_service/twine.json:
--------------------------------------------------------------------------------
1 | {
2 | // Manifest strands contain lists, with one entry for each required dataset
3 | "configuration_manifest": {
4 | "datasets": [
5 | {
6 | // Once the inputs are validated, your analysis program can use this key to access the dataset
7 | "key": "trained_model",
8 | // General notes, which are helpful as a reminder to users of the service
9 | "purpose": "The trained classifier"
10 | }
11 | ]
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/examples/met_mast_scada_service/strands/output_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "output_manifest": {
3 | "datasets": [
4 | {
      // Twined will prepare a manifest with this key, which you can add to during the analysis or once it's complete
6 | "key": "met_scada_checks",
7 | // General notes, which are helpful as a reminder to users of the service
8 | "purpose": "A dataset containing figures showing correlations between mast and scada data"
9 | }
10 | ]
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/examples/wind_tunnel_datalogger_service/strands/output_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "output_manifest": {
3 | "datasets": [
4 | {
      // Twined will prepare a manifest with this key, which you can add to during the analysis or once it's complete
6 | "key": "met_scada_checks",
7 | // General notes, which are helpful as a reminder to users of the service
8 | "purpose": "A dataset containing figures showing correlations between mast and scada data"
9 | }
10 | ]
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/examples/met_mast_scada_service/strands/input_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | // Manifest strands contain lists, with one entry for each required dataset
3 | "input_manifest": {
4 | "datasets": [
5 | {
6 | // Once the inputs are validated, your analysis program can use this key to access the dataset
7 | "key": "met_mast_data",
8 | // General notes, which are helpful as a reminder to users of the service
9 | "purpose": "A dataset containing meteorological mast data"
10 | },
11 | {
12 | "key": "scada_data",
13 | "purpose": "A dataset containing scada data"
14 | }
15 | ]
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/docs/source/about.rst:
--------------------------------------------------------------------------------
1 | .. _about:
2 |
3 | ============
4 | About Twines
5 | ============
6 |
7 | **Twined** is a framework for describing a digital twin or data service.
8 |
9 | We call these descriptions "twines". To just get started building a *twine*, check out the :ref:`quick_start`. To
10 | get into the detail of what's in a *twine*, see :ref:`anatomy`.
11 |
12 | Here, we look at requirements for the framework, our motivations and background, and some of the decisions made while
13 | developing **twined**.
14 |
15 | .. toctree::
16 | :maxdepth: 1
17 |
18 | about_digital_twins
19 | about_requirements
20 | about_introducing_json_schema
21 | about_other_considerations
22 |
--------------------------------------------------------------------------------
/.github/workflows/synced-add-issues-to-octue-board.yaml:
--------------------------------------------------------------------------------
1 | # WARNING: This file is synced from the octue/.github repository.
2 | # Do not edit this file in any repo other than octue/.github, or your changes will be overwritten
3 |
4 | name: synced-add-issues-to-octue-board
5 |
6 | on:
7 | issues:
8 | types: [opened, reopened]
9 |
10 | jobs:
11 | add-issues-to-octue-board:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Add to Board
15 | uses: monry/actions-add-issue-to-project@v1
16 | with:
17 | # Personal Access Token with `repo`, `org:read` and `org:write` granted
18 | github-token: ${{ secrets.PROJECT_AUTOMATION_GITHUB_TOKEN }}
19 | project-owner: "octue"
20 | project-number: 22
21 | issue-id: ${{ github.event.issue.node_id }}
22 |
--------------------------------------------------------------------------------
/.github/workflows/update-pull-request.yml:
--------------------------------------------------------------------------------
1 | # This workflow updates the pull request description with an auto-generated section containing the categorised commit
2 | # message headers of the pull request's commits. The auto generated section is enveloped between two comments:
3 | # "" and "". Anything outside these in the
4 | # description is left untouched. Auto-generated updates can be skipped for a commit if
5 | # "" is added to the pull request description.
6 |
7 | name: update-pull-request
8 |
9 | on: [pull_request]
10 |
11 | jobs:
12 | description:
13 | uses: octue/workflows/.github/workflows/generate-pull-request-description.yml@main
14 | secrets:
15 | token: ${{ secrets.GITHUB_TOKEN }}
16 | permissions:
17 | contents: read
18 | pull-requests: write
19 |
--------------------------------------------------------------------------------
/tests/data/apps/empty_app/twine.json:
--------------------------------------------------------------------------------
1 | {
2 | "children": [
3 | ],
4 | "configuration_values_schema": {
5 | "$schema": "https://json-schema.org/draft/2020-12/schema",
6 | "title": "The example configuration form",
7 | "description": "The configuration strand of an example twine",
8 | "type": "object",
9 | "properties": {
10 | }
11 | },
12 | "credentials": [
13 | ],
14 | "input_manifest": {
15 | "datasets": {}
16 | },
17 | "input_values_schema": {
18 | "$schema": "https://json-schema.org/draft/2020-12/schema",
19 | "title": "Input Values",
20 | "description": "The input values strand of an example twine",
21 | "type": "object",
22 | "properties": {
23 | }
24 | },
25 | "output_manifest": {
26 | "datasets": {}
27 | },
28 | "output_values_schema": {
29 | "title": "Output Values",
30 | "description": "The output values strand of an example twine",
31 | "type": "object",
32 | "properties": {
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/examples/damage_classifier_service/data/configuration_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "8ead7669-8162-4f64-8cd5-4abe92509e17",
3 | "datasets": [
4 | {
5 | "id": "7ead7669-8162-4f64-8cd5-4abe92509e17",
6 | "name": "training data for system abc123",
7 | "organisation": "megacorp",
8 | "tags": {"system": "abc123"},
9 | "labels": ["classifier", "damage"],
10 | "files": [
11 | {
12 | "path": "datasets/7ead7669/blade_damage.mdl",
13 | "cluster": 0,
14 | "sequence": 0,
15 | "extension": "csv",
16 | "tags": {},
17 | "labels": [],
18 | "posix_timestamp": 0,
19 | "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86",
20 | "last_modified": "2019-02-28T22:40:30.533005Z",
21 | "name": "blade_damage.mdl",
22 | "size_bytes": 59684813,
23 | "sha-512/256": "somesha"
24 | }
25 | ]
26 | }
27 | ]
28 | }
29 |
--------------------------------------------------------------------------------
/examples/met_mast_scada_service/data/output_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "8ead7669-8162-4f64-8cd5-4abe92509e17",
3 | "datasets": [
4 | {
5 | "id": "4564deca-5654-42e8-aadf-70690b393a30",
6 | "name": "visual cross check data",
7 | "organisation": "megacorp",
8 | "tags": {"location": 108346},
9 | "labels": ["figure", "met", "mast", "scada", "check"],
10 | "files": [
11 | {
12 | "path": "datasets/7ead7669/cross_check.fig",
13 | "cluster": 0,
14 | "sequence": 0,
15 | "extension": "fig",
16 | "tags": {},
17 | "labels": [],
18 | "posix_timestamp": 1551394800,
19 | "id": "38f77fe2-c8c0-49d1-a08c-0928d53a742f",
20 | "last_modified": "2019-02-28T23:00:00.000000Z",
21 | "name": "cross_check.fig",
22 | "size_bytes": 59684813,
23 | "sha-512/256": "somesha"
24 | }
25 | ]
26 | }
27 | ]
28 | }
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2013-2024 Octue Ltd, All Rights Reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/tests/test_monitors.py:
--------------------------------------------------------------------------------
1 | from twined import Twine, exceptions
2 |
3 | from .base import BaseTestCase
4 |
5 |
class TestMonitorMessageTwine(BaseTestCase):
    """Tests of `Twine.validate_monitor_message` against a twine declaring a monitor message schema."""

    STRAND_WITH_MONITOR_MESSAGE_SCHEMA = """
        {
            "monitor_message_schema": {
                "type": "object",
                "properties": {
                    "my_property": {
                        "type": "number"
                    }
                },
                "required": ["my_property"]
            }
        }
    """

    def test_validate_monitor_message_raises_error_if_monitor_message_schema_not_met(self):
        """Ensure validation fails when a monitor message doesn't satisfy the schema."""
        monitor_twine = Twine(source=self.STRAND_WITH_MONITOR_MESSAGE_SCHEMA)

        # A list cannot satisfy the object-typed schema above.
        with self.assertRaises(exceptions.InvalidValuesContents):
            monitor_twine.validate_monitor_message([])

    def test_validate_monitor_message_with_valid_monitor_update(self):
        """Ensure a monitor message matching the schema validates without error."""
        Twine(source=self.STRAND_WITH_MONITOR_MESSAGE_SCHEMA).validate_monitor_message({"my_property": 3.7})
32 |
--------------------------------------------------------------------------------
/twined/utils/encoders.py:
--------------------------------------------------------------------------------
import importlib.util
import json

# Detect numpy availability once at import time, without actually importing it.
_numpy_spec = importlib.util.find_spec("numpy")


class TwinedEncoder(json.JSONEncoder):
    """A JSON encoder that additionally serialises numpy arrays, ndarrays and matrices (as lists).

    Intended to work "out of the box" for serialising outputs from twined applications. numpy is
    entirely optional: when it isn't installed, this behaves exactly like `json.JSONEncoder`, so the
    encoder is safe to use in uncertain environments.

    Example use:
    ```
    from twined.utils import TwinedEncoder
    some_json = {"a": np.array([0, 1])}
    json.dumps(some_json, cls=TwinedEncoder)
    ```
    """

    def default(self, obj):
        """Convert the given object to python primitives.

        :param any obj:
        :return any:
        """
        # Only attempt numpy conversion if numpy is importable in this environment.
        if _numpy_spec is not None:
            import numpy

            if isinstance(obj, (numpy.ndarray, numpy.matrix)):
                return obj.tolist()

        # Defer to the base class, which raises TypeError for unserialisable objects.
        return json.JSONEncoder.default(self, obj)
36 |
--------------------------------------------------------------------------------
/docs/source/quick_start_installation.rst:
--------------------------------------------------------------------------------
1 | .. _installation:
2 |
3 | ============
4 | Installation
5 | ============
6 |
**twined** is available on `pypi <https://pypi.org/project/twined/>`_, so installation into your python virtual environment is dead
8 | simple:
9 |
10 | .. code-block:: py
11 |
12 | pip install twined
13 |
14 | Don't have a virtual environment with pip? You probably should! ``pyenv`` is your friend. Google it.
15 |
16 |
17 | .. _compilation:
18 |
19 | Compilation
20 | ============
21 |
22 | There is presently no need to compile **twined**, as it's written entirely in python.
23 |
24 |
25 | .. _third_party_library_installation:
26 |
27 | Third party library installation
28 | ================================
29 |
**twined** requires python >= 3.6. Other dependencies can be checked in ``setup.py``, and will be
installed automatically during the installation above.
32 |
33 |
34 | .. _third_party_build_requirements:
35 |
36 | Third party build requirements
37 | ==============================
38 |
39 | .. ATTENTION::
40 | Woohoo! There are no crazy dependencies that you have to compile and build for your particular system.
41 | (you know the ones... they never *actually* compile, right?). We aim to keep it this way.
42 |
--------------------------------------------------------------------------------
/docs/source/_ext/googleanalytics.py:
--------------------------------------------------------------------------------
1 | from sphinx.errors import ExtensionError
2 |
3 |
4 | def add_ga_javascript(app, pagename, templatename, context, doctree):
5 | if app.config.googleanalytics_enabled:
6 | id = app.config.googleanalytics_id
7 | metatags = context.get("metatags", "")
8 | metatags += "\n"
9 | metatags += f'\n'
10 | metatags += "\n"
16 | context["metatags"] = metatags
17 |
18 |
def check_config(app):
    """Verify that the `googleanalytics_id` config value has been set, failing the build early if not.

    :param app: the Sphinx application instance
    :raises ExtensionError: if `googleanalytics_id` is empty or unset
    """
    googleanalytics_id = app.config.googleanalytics_id
    if not googleanalytics_id:
        raise ExtensionError("'googleanalytics_id' config value must be set for ga statistics to function properly.")
22 |
23 |
def setup(app):
    """Register the googleanalytics extension with Sphinx.

    Adds the two config values the extension reads and connects its event handlers.

    :param app: the Sphinx application instance
    :return dict: extension metadata containing the version
    """
    # "html" rebuild scope: changing either value triggers a rebuild of HTML output.
    app.add_config_value("googleanalytics_id", "", "html")
    app.add_config_value("googleanalytics_enabled", True, "html")
    # Inject the analytics snippet into each rendered page's metatags.
    app.connect("html-page-context", add_ga_javascript)
    # Validate configuration as soon as the builder starts, so misconfiguration fails fast.
    app.connect("builder-inited", check_config)
    return {"version": "0.1"}
30 |
--------------------------------------------------------------------------------
/docs/source/version_history.rst:
--------------------------------------------------------------------------------
1 | .. _version_history:
2 |
3 | ===============
4 | Version History
5 | ===============
6 |
7 | Origins
8 | =======
9 |
10 | **twined** began as an internal tool at Octue, enabling applications to be connected together in the Octue ecosystem.
11 |
12 | The twined library is presently being ported out of Octue's SDKs as it became clear that it would be most beneficial to
13 | open-source the framework we developed to connect applications and digital twins together.
14 |
15 |
16 | .. _version_0.0.x:
17 |
18 | 0.0.x
19 | =====
20 |
21 | Initial library framework - development version. Highly unstable! Let's see what happens...
22 |
23 | New Features
24 | ------------
25 | #. Documentation
26 | #. Travis- and RTD- based test and documentation build with Codecov integration
27 | #. Load and validation of twine itself against twine schema
28 | #. Main ``Twine()`` class with strands set as attributes
29 | #. Validation of input, config and output values against twine
30 | #. Validation of manifest json
31 | #. Credential parsing from the environment and validation
32 | #. Hook allowing instantiation of inputs and config to a given class e.g. ``Manifest``
33 | #. Tests to cover the majority of functionality
34 |
35 | Backward Incompatible API Changes
36 | ---------------------------------
37 | #. n/a (Initial release)
38 |
39 | Bug Fixes & Minor Changes
40 | -------------------------
41 | #. n/a (Initial Release)
42 |
--------------------------------------------------------------------------------
/tests/data/apps/example_app/input/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "8ead7669-8162-4f64-8cd5-4abe92509e17",
3 | "type": "input",
4 | "datasets": [
5 | {
6 | "id": "7ead7669-8162-4f64-8cd5-4abe92509e17",
7 | "name": "my meteorological dataset",
8 | "tags": {},
9 | "labels": ["met", "mast", "wind"],
10 | "files": [
11 | {
12 | "path": "input/datasets/7ead7669/file_1.csv",
13 | "cluster": 0,
14 | "sequence": 0,
15 | "extension": "csv",
16 | "tags": {},
17 | "labels": [],
18 | "posix_timestamp": null,
19 | "data_file": {
20 | "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86",
21 | "last_modified": "2019-02-28T22:40:30.533005Z",
22 | "name": "file_1.csv",
23 | "size_bytes": 59684813,
24 | "sha-512/256": "somesha"
25 | }
26 | },
27 | {
28 | "path": "input/datasets/7ead7669/file_2.csv",
29 | "cluster": 0,
30 | "sequence": 1,
31 | "extension": "csv",
32 | "tags": {},
33 | "labels": [],
34 | "posix_timestamp": null,
35 | "data_file": {
36 | "id": "bbff07bc-7c19-4ed5-be6d-a6546eae8e45",
37 | "last_modified": "2019-02-28T22:40:40.633001Z",
38 | "name": "file_2.csv",
39 | "size_bytes": 59684813,
40 | "sha-512/256": "someothersha"
41 | }
42 | }
43 | ]
44 | }
45 | ]
46 | }
47 |
--------------------------------------------------------------------------------
/docs/source/about_requirements.rst:
--------------------------------------------------------------------------------
1 | .. _requirements:
2 |
3 | Requirements of the framework
4 | ===================================
5 |
6 | A *twine* must describe a digital twin, and have multiple roles. It must:
7 |
8 | #. Define what data is required by a digital twin, in order to run
9 | #. Define what data will be returned by the twin following a successful run
10 | #. Define the formats of these data, in such a way that incoming data can be validated
11 | #. Define what other (1st or 3rd party) twins / services are required by this one in order for it to run.
12 |
13 | If this weren't enough, the description:
14 |
15 | #. Must be trustable (i.e. a *twine* from an untrusted, corrupt or malicious third party should be safe to at least read)
16 | #. Must be machine-readable *and machine-understandable* [1]_
17 | #. Must be human-readable *and human-understandable* [1]_
#. Must be discoverable (that is, searchable/indexable) otherwise people won't know it's there in order to use it.
19 |
20 | Fortunately for digital twin developers, several of these requirements have already been seen for data interchange
21 | formats developed for the web. **twined** uses ``JSON`` and ``JSONSchema`` to help interchange data.
22 |
23 | If you're not already familiar with ``JSONSchema`` (or wish to know why **twined** uses ``JSON`` over the seemingly more
24 | appropriate ``XML`` standard), see :ref:`introducing_json_schema`.
25 |
26 |
27 | .. Footnotes:
28 |
29 | .. [1] *Understandable* essentially means that, once read, the machine or human knows what it actually means and what to do with it.
30 |
31 |
--------------------------------------------------------------------------------
/docs/source/lifecycle.rst:
--------------------------------------------------------------------------------
1 |
2 | ..
3 |
4 | Data matching the ``configuration_values_schema`` is supplied to the digital twin / data service at
5 | startup.
6 |
7 | It's generally used to define control parameters relating to what the service should do, or how it should operate.
8 | For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid
   flow solver should be used? What is the acceptable error level on a classifier algorithm?
10 |
11 | Input Values
12 |
13 | Once configuration data supplied to a service has been validated, it can accept inputs and run analyses
14 | using them.
15 |
16 | Depending on the way it's deployed (see :ref:`deployment`), the ``input_values`` might come in from a web request,
17 | over a websocket or called directly from the command line or another library.
18 |
19 | However it comes, new ``input_values``, which are in ``JSON`` format, are checked against the
20 | ``input_values_schema`` strand of the twine. If they match, then analysis can proceed.
21 |
22 | Output Values
23 |
   Data matching the ``output_values_schema`` is produced by the service while it's running. Depending on the way
   it's deployed, the values might be returned from a web request, sent over a websocket or passed directly to
   another library.
27 |
   Input values might include, for example, current rotor speed or forecast wind direction.
29 |
30 | Values might be passed at instantiation of a twin (typical application-like process) or via a socket.
31 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/accordion.js:
--------------------------------------------------------------------------------
// Accordion behaviour for the sphinx_accordion extension.
//
// Each accordion title is rendered with a class of the form
// `sphinx-accordion-title-<row>`; this script copies the row identifier into a
// `data-row` attribute and wires up click handlers that toggle each accordion
// row open/closed.
$(function() {
    $('.sphinx-accordion.title').each(function() {
        const title = $(this);
        const prefix = 'sphinx-accordion-title-';

        // Extract the row identifier from the matching class name into data-row.
        $.each(title.attr('class').split(/\s+/), function(idx, clazz) {
            if (clazz.startsWith(prefix)) {
                title.attr('data-row', clazz.substring(prefix.length));
            }
        });

        title.on('click', function() {
            // Remember the title's offset within the viewport so the clicked
            // element stays visually stationary after the content toggles.
            const offset = title.offset().top - $(window).scrollTop();

            // Toggle the active state on the title and its content sibling.
            if (title.hasClass('active')) {
                title.removeClass('active');
                title.next().removeClass('active');
            } else {
                title.addClass('active');
                title.next().addClass('active');
            }

            // Restore the original view offset.
            $(window).scrollTop(title.offset().top - offset);
        });
    });
});
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | exclude: 'build|docs|node_modules|.git|.tox|dist|octue.egg-info|twined.egg-info'
2 | default_stages: [commit]
3 | fail_fast: true
4 | default_language_version:
5 | python: python3 # force all unspecified python hooks to run python3
6 | repos:
7 | - repo: https://github.com/pre-commit/pre-commit-hooks
8 | rev: v3.1.0
9 | hooks:
10 | - id: trailing-whitespace
11 | - id: end-of-file-fixer
12 | - id: check-yaml
13 | - id: check-added-large-files
14 | args: ['--maxkb=10240']
15 |
16 | - repo: https://github.com/astral-sh/ruff-pre-commit
17 | rev: v0.6.2
18 | hooks:
19 | - id: ruff
20 | args: [--fix, --exit-non-zero-on-fix]
21 | - id: ruff-format
22 |
23 | - repo: https://github.com/pycqa/pydocstyle
24 | rev: 6.1.1
25 | hooks:
26 | - id: pydocstyle
27 |
28 | - repo: https://github.com/thclark/pre-commit-sphinx
29 | rev: 0.0.3
30 | hooks:
31 | - id: build-docs
32 | language_version: python3
33 | additional_dependencies:
34 | - 'Sphinx'
35 | - 'sphinx-rtd-theme'
36 | - 'sphinx-tabs'
37 | - 'sphinx-charts'
38 | - 'jsonschema'
39 | - 'setuptools' # Added to avoid missing `distutils` module error in python >= 3.12
40 |
41 | - repo: https://github.com/windpioneers/pre-commit-hooks
42 | rev: 0.0.5
43 | hooks:
44 | - id: check-branch-name
45 | args:
46 | - "^main$"
47 | - "^([a-z][a-z0-9]*)(-[a-z0-9]+)*$"
48 |
49 | - repo: https://github.com/octue/conventional-commits
50 | rev: 0.9.0
51 | hooks:
52 | - id: check-commit-message-is-conventional
53 | stages: [commit-msg]
54 |
--------------------------------------------------------------------------------
/docs/source/anatomy_monitors.rst:
--------------------------------------------------------------------------------
1 | .. _monitors_strand:
2 |
3 | ======================
4 | Monitor Message Strand
5 | ======================
6 |
7 | The ``monitor_message_schema`` strand is *values-based* meaning the data that matches the strand is in JSON form. It is
8 | a *json schema* which describes a monitor message.
9 |
10 | .. tabs::
11 |
12 | .. group-tab:: Monitors Strand
13 |
14 | There are two kinds of monitoring data required from a digital twin.
15 |
16 | **Monitor data (output)**
17 |
18 | Values for health and progress monitoring of the twin, for example percentage progress, iteration number and
19 | status - perhaps even residuals graphs for a converging calculation. Broadly speaking, this should be user-facing
20 | information.
21 |
22 | *This kind of monitoring data can be in a suitable form for display on a dashboard*
23 |
24 | **Log data (output)**
25 |
26 | Logged statements, typically in iostream form, produced by the twin (e.g. via python's ``logging`` module) must be
27 | capturable as an output for debugging and monitoring purposes. Broadly speaking, this should be developer-facing
28 | information.
29 |
30 |
31 |
32 | Let's look at basic examples for twines containing each of these strands:
33 |
34 |
35 | .. tabs::
36 |
37 | .. group-tab:: Monitors Strand
38 |
39 | **Monitor data (output)**
40 |
41 | .. code-block:: javascript
42 |
43 | {
44 | "monitor_message_schema": {
45 | "type": "object",
46 | "properties": {
47 | "my_property": {
48 | "type": "number"
49 | }
50 | },
51 | "required": ["my_property"]
52 | }
53 | }
54 |
55 | **Log data (output)**
56 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "twined"
3 | version = "0.7.0"
4 | repository = "https://www.github.com/octue/twined"
5 | description = "A library to help digital twins and data services talk to one another."
6 | authors = [
7 | "Octue (github: octue) ",
8 | "Tom Clark ",
9 | "Marcus Lugg ",
10 | ]
11 | classifiers = [
12 | "Development Status :: 4 - Beta",
13 | "Intended Audience :: Developers",
14 | "Topic :: Software Development :: Libraries :: Python Modules",
15 | "License :: OSI Approved :: MIT License",
16 | "Programming Language :: Python :: 3.10",
17 | "Programming Language :: Python :: 3.11",
18 | "Programming Language :: Python :: 3.12",
19 | "Programming Language :: Python :: 3.13",
20 | "Operating System :: OS Independent",
21 | ]
22 | keywords = ["digital", "twins", "data", "services", "python", "schema"]
23 | license = "MIT"
24 | readme = "README.md"
25 |
26 | [tool.poetry.dependencies]
27 | python = "^3.10"
28 | jsonschema = "^4"
29 | python-dotenv = ">=0,<=2"
30 |
31 | [tool.poetry.group.dev.dependencies]
32 | pre-commit = ">=2.6.0"
33 | coverage = ">=5.2.1"
34 | numpy = "^2.2.1"
35 | ruff = "^0.6.9"
36 |
37 | [tool.ruff]
38 | line-length = 120
39 | # Enable pycodestyle (`E`) and Pyflakes (`F`) codes.
40 | lint.select = ["E", "F"]
41 | # Ignore E501 line-too-long - see https://docs.astral.sh/ruff/faq/#is-the-ruff-linter-compatible-with-black for why
42 | lint.ignore = ["F405", "E501", "E203", "E731", "N818"]
43 |
44 | [tool.ruff.lint.isort]
45 | known-first-party = ["twined", "app", "settings", "test", "examples"]
46 | section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"]
47 | force-sort-within-sections = true
48 |
49 | [build-system]
50 | requires = ["poetry-core"]
51 | build-backend = "poetry.core.masonry.api"
52 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | # jetbrains ides
107 | .idea/
108 |
109 | # Sphinx related files
110 | docs/source/doxyoutput/
111 | docs/source/library_api/
112 | docs/build/
113 |
--------------------------------------------------------------------------------
/tests/data/apps/simple_app/twine.json:
--------------------------------------------------------------------------------
1 | {
2 | "configuration_values_schema": {
3 | "$schema": "https://json-schema.org/draft/2020-12/schema",
4 | "title": "Configuration for a simple app",
5 | "description": "The app creates a mandelbrot plot",
6 | "type": "object",
7 | "properties": {
8 | "width": {
9 | "description": "Number of pixels the image contains in the x direction",
10 | "type": "integer",
11 | "minimum": 2,
12 | "default": 600
13 | },
14 | "height": {
15 | "description": "Number of pixels the image contains in the y direction",
16 | "type": "integer",
17 | "minimum": 2,
18 | "default": 600
19 | },
20 | "max_iterations": {
21 | "description": "Maximum number of iterations used to render each pixel",
22 | "type": "integer",
23 | "minimum": 2,
24 | "default": 64
25 | },
26 | "color_scale": {
27 | "description": "The colour scale string to use when mapping colours. See https://plot.ly/ipython-notebooks/color-scales/ for valid scales",
28 | "type": "string",
29 | "enum": ["PuBu", "YlGnBu"],
30 | "default": "YlGnBu"
31 | },
32 | "type": {
33 | "description": "Type (jpeg or png) of the image that will be produced as a results file",
34 | "type": "string",
35 | "enum": ["jpg", "png"],
36 | "default": "png"
37 | },
38 | "x_range": {
39 | "description": "The x_min to x_max range of space in which to render the fractal",
40 | "type": "array",
41 | "items": [{
42 | "type": "number"
43 | },
44 | {
45 | "type": "number"
46 | },
47 | {
48 | "type": "number"
49 | }],
50 | "additionalItems": false,
51 | "default": [-1.5, 0.6]
52 | },
53 | "y_range": {
54 | "description": "The y_min to y_max range of space in which to render the fractal",
55 | "type": "array",
56 | "items": [{
57 | "type": "number"
58 | },
59 | {
60 | "type": "number"
61 | },
62 | {
63 | "type": "number"
64 | }],
65 | "additionalItems": false,
66 | "default": [-1.26, 1.26]
67 | }
68 | }
69 | },
70 | "output_manifest": {
71 | "datasets": {}
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/tests/data/apps/example_app/twine.json:
--------------------------------------------------------------------------------
1 | {
2 | "children": [
3 | {
4 | "key": "turbines",
5 | "purpose": "wind turbines in a farm",
6 | "filters": "tags:(met* AND mast AND location) files:(extension:csv AND sequence:>=0) location:10"
7 | }
8 | ],
9 | "configuration_values_schema": {
10 | "$schema": "https://json-schema.org/draft/2020-12/schema",
11 | "title": "The example configuration form",
12 | "description": "The configuration strand of an example twine",
13 | "type": "object",
14 | "properties": {
15 | "n_iterations": {
16 | "description": "An example of an integer configuration variable, called 'n_iterations'.",
17 | "type": "integer",
18 | "minimum": 1,
19 | "maximum": 10,
20 | "default": 5
21 | }
22 | }
23 | },
24 | "credentials": [
25 | {
26 | "name": "MYAPI_SECRET_KEY",
27 | "purpose": "Token for accessing the MyApi service"
28 | },
29 | {
30 | "name": "MY_DATABASE_URI",
31 | "purpose": "A URI for accessing an external database from within a twin or analysis"
32 | }
33 | ],
34 | "input_manifest": {
35 | "datasets": {
36 | "met_mast_data": {
37 | "purpose": "A dataset containing meteorological mast data"
38 | },
39 | "scada_data": {
40 | "purpose": "A dataset containing scada data"
41 | }
42 | }
43 | },
44 | "input_values_schema": {
45 | "$schema": "https://json-schema.org/draft/2020-12/schema",
46 | "title": "Input Values",
47 | "description": "The input values strand of an example twine",
48 | "type": "object",
49 | "properties": {
50 | "width": {
51 | "description": "An example of an integer value called 'width'",
52 | "type": "integer",
53 | "minimum": 2
54 | }
55 | }
56 | },
57 | "output_manifest": {
58 | "datasets": {
59 | "production_data": {
60 | "purpose": "A dataset containing production data"
61 | }
62 | }
63 | },
64 | "output_values_schema": {
65 | "title": "Output Values",
66 | "description": "The output values strand of an example twine",
67 | "type": "object",
68 | "properties": {
69 | "width": {
70 | "description": "An example of an integer value called 'result'",
71 | "type": "integer",
72 | "minimum": 2
73 | }
74 | }
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/.github/workflows/python-ci.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions. On successful
2 | # test, the package will be published to the test PyPi server.
3 |
4 | name: python-ci
5 |
6 | on:
7 | push:
8 | branches-ignore:
9 | - main
10 |
11 | jobs:
12 | check-semantic-version:
13 | if: "!contains(github.event.head_commit.message, 'skipci')"
14 | uses: octue/workflows/.github/workflows/check-semantic-version.yml@main
15 | with:
16 | path: pyproject.toml
17 | breaking_change_indicated_by: minor
18 |
19 | run-tests:
20 | if: "!contains(github.event.head_commit.message, 'skipci')"
21 | runs-on: ubuntu-latest
22 | env:
23 | USING_COVERAGE: '3.11'
24 | strategy:
25 | matrix:
26 | python: ['3.10', '3.11', '3.12', '3.13']
27 | steps:
28 | - name: Checkout Repository
29 | uses: actions/checkout@v4
30 |
31 | - name: Setup Python
32 | uses: actions/setup-python@v5
33 | with:
34 | python-version: ${{ matrix.python }}
35 |
36 | - name: Install Poetry
37 | uses: snok/install-poetry@v1.4.1
38 |
39 | - name: Install package
40 | run: poetry install -v
41 |
42 | - name: Run tests
43 | run: |
44 | poetry run coverage run --source twined -m unittest discover
45 | poetry run coverage report --show-missing
46 | poetry run coverage xml
47 |
48 | - name: Upload coverage to Codecov
49 | uses: codecov/codecov-action@v4
50 | with:
51 | files: coverage.xml
52 | fail_ci_if_error: true
53 | token: ${{ secrets.CODECOV_TOKEN }}
54 |
55 | test-publish:
56 | runs-on: ubuntu-latest
57 | needs: [check-semantic-version, run-tests]
58 | permissions:
59 | id-token: write
60 | contents: read
61 | steps:
62 | - name: Checkout Repository
63 | uses: actions/checkout@v4
64 |
65 | - name: Install Poetry
66 | uses: snok/install-poetry@v1.4.1
67 |
68 | - name: Build a binary wheel and a source tarball
69 | run: poetry build
70 |
71 | - name: Test package is publishable with PyPI test server
72 | uses: pypa/gh-action-pypi-publish@v1.12.4
73 | with:
74 | repository-url: https://test.pypi.org/legacy/
75 | skip-existing: true
76 | verbose: true
77 |
--------------------------------------------------------------------------------
/docs/source/about_digital_twins.rst:
--------------------------------------------------------------------------------
1 | .. _digital_twins:
2 |
3 | =============
4 | Digital Twins
5 | =============
6 |
7 | A digital twin is a virtual representation of a real life being - a physical asset like a wind turbine or car - or even
8 | a human.
9 |
10 | There are three reasons why you might want to create a digital twin:
11 | - Monitoring
12 | - Prediction
13 | - Optimisation
14 |
15 | On its own, a digital twin can be quite useful. For example, a twin might embody an AI-based analysis to predict power
16 | output of a turbine.
17 |
18 | .. figure:: images/digital_twin_component_basic.svg
19 | :width: 400px
20 | :align: center
21 | :figclass: align-center
22 | :alt: A digital twin component
23 |
24 | A digital twin consists of some kind of analysis or processing task, which could be run many times per second, or
25 | daily, down to occasionally or sometimes only once (the same as a "normal" analysis).
26 |
27 | Coupling digital twins is generally even more useful. You might wish to couple your turbine twin with a representation
28 | of the local power grid, and a representation of a factory building to determine power demand... enabling you to
29 | optimise your factory plant for lowest energy cost whilst intelligently selling surplus power to the grid.
30 |
31 | .. figure:: images/digital_twin_hierarchy.svg
32 | :width: 350px
33 | :align: center
34 | :figclass: align-center
35 | :alt: Hierarchy of digital twins
36 |
37 | A hierarchy of digital twins. Each blue circle represents a twin, coupled to its neighbours. Yellow nodes are where
38 | schema are used to connect twins.
39 |
40 |
41 | .. _gemini_principles:
42 |
43 | Gemini Principles
44 | =================
45 |
46 | The Gemini Principles have been derived by the
47 | `Centre for Digital Built Britain (CDBB) <https://www.cdbb.cam.ac.uk/>`_.
48 | We strongly recommend you give them a read if embarking on a digital twins project.
49 |
50 | The aim of **twined** is to enable the following principles. In particular:
51 |
52 | #. Openness (open-source project to create schema for twins that can be run anywhere, anywhen)
53 | #. Federation (encouraging a standardised way of connecting twins together)
54 | #. Security (making sure schemas and data can be read safely)
55 | #. Public Good (see our nano-rant about climate change in :ref:`reason_for_being`)
56 |
57 |
58 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | from tempfile import TemporaryDirectory
3 | import unittest
4 | from unittest import mock
5 |
6 | import numpy as np
7 |
8 | from twined import exceptions
9 | from twined.utils import TwinedEncoder, load_json
10 |
11 | from .base import VALID_SCHEMA_TWINE, BaseTestCase
12 |
13 |
class TestUtils(BaseTestCase):
    """Tests for the twined utility functions (load_json and TwinedEncoder)."""

    def test_load_json_with_file_like(self):
        """Ensure that json can be loaded from a file-like object."""
        expected_keys = ("configuration_values_schema", "input_values_schema", "output_values_schema")

        with TemporaryDirectory() as temporary_directory:
            file_path = self._write_json_string_to_file(VALID_SCHEMA_TWINE, temporary_directory)

            with open(file_path, "r") as file_like:
                loaded = load_json(file_like)

            for key in loaded.keys():
                self.assertIn(key, expected_keys)

    def test_load_json_with_object(self):
        """Ensure that load_json passes an already-loaded object straight through."""
        already_loaded = {"a": 1, "b": 2}
        returned = load_json(already_loaded)

        for key in returned.keys():
            self.assertIn(key, ("a", "b"))

    def test_load_json_with_disallowed_kind(self):
        """Ensure the correct exception is raised when loading json from a disallowed source kind."""
        # "string" is deliberately excluded from the allowed kinds.
        with self.assertRaises(exceptions.InvalidSourceKindException):
            load_json("{}", allowed_kinds=("file-like", "filename", "object"))

    def test_encoder_without_numpy(self):
        """Ensure that the json encoder fails cleanly when numpy is not installed."""
        data = {"a": np.array([0, 1])}

        with mock.patch("twined.utils.encoders._numpy_spec", new=None):
            with self.assertRaises(TypeError) as context:
                json.dumps(data, cls=TwinedEncoder)

        # The error message gained/lost quotes around the type name between python versions,
        # so accept either form.
        message = context.exception.args[0]
        self.assertTrue(
            "Object of type ndarray is not JSON serializable" in message
            or "Object of type 'ndarray' is not JSON serializable" in message
        )

    def test_encoder_with_numpy(self):
        """Ensure that the json encoder can serialise numpy arrays when numpy is installed."""
        json.dumps({"a": np.array([0, 1])}, cls=TwinedEncoder)
54 |
55 |
56 | if __name__ == "__main__":
57 | unittest.main()
58 |
--------------------------------------------------------------------------------
/examples/met_mast_scada_service/data/input_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "8ead7669-8162-4f64-8cd5-4abe92509e17",
3 | "datasets": [
4 | {
5 | "id": "7ead7669-8162-4f64-8cd5-4abe92509e17",
6 | "name": "meteorological mast dataset",
7 | "tags": {"location": 108346},
8 | "labels": ["met", "mast", "wind"],
9 | "files": [
10 | {
11 | "path": "input/datasets/7ead7669/mast_1.csv",
12 | "cluster": 0,
13 | "sequence": 0,
14 | "extension": "csv",
15 | "tags": {},
16 | "labels": [],
17 | "posix_timestamp": 1551393630,
18 | "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86",
19 | "last_modified": "2019-02-28T22:40:30.533005Z",
20 | "name": "mast_1.csv",
21 | "size_bytes": 59684813,
22 | "sha-512/256": "somesha"
23 | },
24 | {
25 | "path": "input/datasets/7ead7669/mast_2.csv",
26 | "cluster": 0,
27 | "sequence": 1,
28 | "extension": "csv",
29 | "tags": {},
30 | "labels": [],
31 | "posix_timestamp": 1551394230,
32 | "id": "bbff07bc-7c19-4ed5-be6d-a6546eae8e45",
33 | "last_modified": "2019-02-28T22:50:40.633001Z",
34 | "name": "mast_2.csv",
35 | "size_bytes": 59684813,
36 | "sha-512/256": "someothersha"
37 | }
38 | ]
39 | },
40 | {
41 | "id": "5cf9e445-c288-4567-9072-edc31003b022",
42 | "name": "scada data exports",
43 | "tags": {"location": 108346, "system": "ab32"},
44 | "labels": ["wind", "turbine", "scada"],
45 | "files": [
46 | {
47 | "path": "input/datasets/7ead7669/export_1.csv",
48 | "cluster": 0,
49 | "sequence": 0,
50 | "extension": "csv",
51 | "tags": {},
52 | "labels": [],
53 | "posix_timestamp": 1551393600,
54 | "id": "78fa511f-3e28-4bc2-aa28-7b6a2e8e6ef9",
55 | "last_modified": "2019-02-28T22:40:00.000000Z",
56 | "name": "export_1.csv",
57 | "size_bytes": 88684813,
58 | "sha-512/256": "somesha"
59 | },
60 | {
61 | "path": "input/datasets/7ead7669/export_2.csv",
62 | "cluster": 0,
63 | "sequence": 1,
64 | "extension": "csv",
65 | "tags": {},
66 | "labels": [],
67 | "posix_timestamp": 1551394200,
68 | "id": "204d7316-7ae6-45e3-8f90-443225b21226",
69 | "last_modified": "2019-02-28T22:50:00.000000Z",
70 | "name": "export_2.csv",
71 | "size_bytes": 88684813,
72 | "sha-512/256": "someothersha"
73 | }
74 | ]
75 | }
76 | ]
77 | }
78 |
--------------------------------------------------------------------------------
/tests/base.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import unittest
4 |
5 | VALID_SCHEMA_TWINE = """
6 | {
7 | "configuration_values_schema": {
8 | "$schema": "https://json-schema.org/draft/2020-12/schema",
9 | "title": "The example configuration form",
10 | "description": "The configuration strand of an example twine",
11 | "type": "object",
12 | "properties": {
13 | "n_iterations": {
14 | "description": "An example of an integer configuration variable, called 'n_iterations'.",
15 | "type": "integer",
16 | "minimum": 1,
17 | "maximum": 10,
18 | "default": 5
19 | }
20 | }
21 | },
22 | "input_values_schema": {
23 | "$schema": "https://json-schema.org/draft/2020-12/schema",
24 | "title": "Input Values",
25 | "description": "The input values strand of an example twine, with a required height value",
26 | "type": "object",
27 | "properties": {
28 | "height": {
29 | "description": "An example of an integer value called 'height'",
30 | "type": "integer",
31 | "minimum": 2
32 | }
33 | },
34 | "required": ["height"]
35 | },
36 | "output_values_schema": {
37 | "title": "Output Values",
38 | "description": "The output values strand of an example twine",
39 | "type": "object",
40 | "properties": {
41 | "width": {
42 | "description": "An example of an integer value called 'result'",
43 | "type": "integer",
44 | "minimum": 2
45 | }
46 | }
47 | }
48 | }
49 | """
50 |
51 |
class BaseTestCase(unittest.TestCase):
    """Base test case for twined, which provides a path to the test data directory."""

    def setUp(self):
        """Attach the tests' data directory path to the test case as an attribute.

        :return None:
        """
        self.path = os.path.join(os.path.dirname(__file__), "data")
        super().setUp()

    def _write_json_string_to_file(self, json_string, directory_name):
        """Serialise a JSON string into a JSON file in the given directory and return the file path."""
        output_path = os.path.join(directory_name, "json_written_to_file.json")

        with open(output_path, "w") as output_file:
            output_file.write(json.dumps(json.loads(json_string)))

        return output_path
71 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release the package on merge into main
2 |
3 | # This workflow will only be triggered when a pull request into main branch is merged (and not closed without merging).
4 | on:
5 | pull_request:
6 | types: [closed]
7 | branches:
8 | - main
9 |
10 | jobs:
11 | run-tests:
12 | if: "github.event.pull_request.merged == true"
13 | runs-on: ubuntu-latest
14 | env:
15 | USING_COVERAGE: '3.11'
16 | strategy:
17 | matrix:
18 | python: ['3.10', '3.11', '3.12', '3.13']
19 | steps:
20 | - name: Checkout Repository
21 | uses: actions/checkout@v4
22 |
23 | - name: Setup Python
24 | uses: actions/setup-python@v5
25 | with:
26 | python-version: ${{ matrix.python }}
27 |
28 | - name: Install Poetry
29 | uses: snok/install-poetry@v1.4.1
30 |
31 | - name: Install package
32 | run: poetry install -v
33 |
34 | - name: Run tests
35 | run: |
36 | poetry run coverage run --source twined -m unittest discover
37 | poetry run coverage report --show-missing
38 | poetry run coverage xml
39 |
40 | - name: Upload coverage to Codecov
41 | uses: codecov/codecov-action@v4
42 | with:
43 | files: coverage.xml
44 | fail_ci_if_error: false
45 | token: ${{ secrets.CODECOV_TOKEN }}
46 |
47 | release:
48 | needs: run-tests
49 | runs-on: ubuntu-latest
50 | steps:
51 | - uses: actions/checkout@v4
52 |
53 | - name: Install Poetry
54 | uses: snok/install-poetry@v1.4.1
55 |
56 | - name: Get package version
57 | run: echo "PACKAGE_VERSION=$(poetry version -s)" >> $GITHUB_ENV
58 |
59 | - name: Create Release
60 | uses: actions/create-release@v1
61 | env:
62 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, no need to create your own.
63 | with:
64 | tag_name: ${{ env.PACKAGE_VERSION }}
65 | release_name: ${{ github.event.pull_request.title }}
66 | body: ${{ github.event.pull_request.body }}
67 | draft: false
68 | prerelease: false
69 |
70 | publish:
71 | needs: release
72 | runs-on: ubuntu-latest
73 | permissions:
74 | id-token: write
75 | contents: read
76 |
77 | steps:
78 | - name: Checkout Repository
79 | uses: actions/checkout@v4
80 | with:
81 | ref: ${{ inputs.tag }}
82 |
83 | - name: Install Poetry
84 | uses: snok/install-poetry@v1.4.1
85 |
86 | - name: Build a binary wheel and a source tarball
87 | run: poetry build
88 |
89 | - name: Publish package distributions to PyPI
90 | uses: pypa/gh-action-pypi-publish@v1.12.4
91 |
--------------------------------------------------------------------------------
/twined/utils/load_json.py:
--------------------------------------------------------------------------------
1 | import io
2 | import json
3 | import logging
4 |
5 | from twined.exceptions import InvalidSourceKindException
6 |
logger = logging.getLogger(__file__)


# The source kinds that load_json() accepts by default.
ALLOWED_KINDS = ("file-like", "filename", "string", "object")


def load_json(source, *args, **kwargs):
    """Load JSON from a source whose kind is detected automatically.

    The source may be a file-like object, the name of a *.json file, a string containing raw
    json data, or any other python object (assumed to be already-loaded data and returned
    directly). This makes the function suitable for use in a pipeline where it's not clear
    whether data has been loaded yet, or whether it's in a file or a raw string.

    :parameter source: The data source, which can be a string filename ending in *.json (json
        loaded from disc to python dict), a file-like object, a string containing raw json data
        (json loaded from string to python dict), or any other valid python object (passed
        through).
    :parameter args, kwargs: Arguments passed through to json.load or json.loads, enabling use
        of custom encoders etc. The special keyword argument ``allowed_kinds`` restricts which
        source kinds are accepted.
    :raise InvalidSourceKindException: if the detected source kind is not in the allowed kinds
    """
    allowed_kinds = kwargs.pop("allowed_kinds", ALLOWED_KINDS)

    def ensure_allowed(kind):
        # Reject source kinds excluded via the `allowed_kinds` keyword argument.
        if kind not in allowed_kinds:
            raise InvalidSourceKindException(f"Attempted to load json from a {kind} data source")

    if isinstance(source, io.IOBase):
        logger.debug("Detected source is a file-like object, loading contents...")
        ensure_allowed("file-like")
        return json.load(source, object_pairs_hook=raise_error_if_duplicate_keys, *args, **kwargs)

    if not isinstance(source, str):
        logger.debug("Source is not a string, bypassing (returning raw data)")
        ensure_allowed("object")
        return source

    if source.endswith(".json"):
        logger.debug("Detected source is name of a *.json file, loading from %s", source)
        ensure_allowed("filename")
        with open(source) as f:
            return json.load(f, object_pairs_hook=raise_error_if_duplicate_keys, *args, **kwargs)

    logger.debug("Detected source is string containing json data, parsing...")
    ensure_allowed("string")
    return json.loads(source, object_pairs_hook=raise_error_if_duplicate_keys, *args, **kwargs)


def raise_error_if_duplicate_keys(pairs):
    """Build a dict from key-value pairs, raising an error if any key appears more than once.

    :param list(tuple) pairs: a JSON object converted to a list of key-value pairs
    :raise KeyError: if any of the pairs have the same key
    :return dict:
    """
    collected = {}

    for key, value in pairs:
        if key in collected:
            raise KeyError(f"Duplicate key detected: {key!r}.")

        collected[key] = value

    return collected
70 |
--------------------------------------------------------------------------------
/docs/source/anatomy_credentials.rst:
--------------------------------------------------------------------------------
1 | .. _credentials_strand:
2 |
3 | ==================
4 | Credentials Strand
5 | ==================
6 |
7 | In order to:
8 |
9 | - GET/POST data from/to an API,
10 | - query a database, or
11 | - connect to a socket (for receiving Values or emitting Values, Monitors or Logs),
12 |
13 | A digital twin must have *access* to it. API keys, database URIs, etc must be supplied to the digital twin but
14 | treated with best practice with respect to security considerations. The purpose of the ``credentials`` strand is to
15 | dictate what credentials the twin requires in order to function.
16 |
17 | .. _defining_the_credentials_strand:
18 |
19 | Defining the Credentials Strand
20 | ===============================
21 |
22 | This is the simplest of the strands, containing a list of credentials (whose ``NAMES_SHOULD_BE_SHOUTY_SNAKE_CASE``) with
23 | a reminder of the purpose.
24 |
25 | .. code-block:: javascript
26 |
27 | {
28 | "credentials": [
29 | {
30 | "name": "SECRET_THE_FIRST",
31 | "purpose": "Token for accessing a 3rd party API service"
32 | },
33 | {
34 | "name": "SECRET_THE_SECOND",
35 | "purpose": "Token for accessing a 3rd party API service"
36 | },
37 | {
38 | "name": "SECRET_THE_THIRD",
39 | "purpose": "Another secret, like a password for a sandbox or local database"
40 | }
41 | ]
42 | }
43 |
44 | .. _supplying_credentials:
45 |
46 | Supplying Credentials
47 | =====================
48 |
49 | .. ATTENTION::
50 |
51 | *Credentials should never be hard-coded into application code*
52 |
53 | Do you trust the twin code? If you insert credentials to your own database into a digital twin
54 | provided by a third party, you better be very sure that twin isn't going to scrape all that data out then send
55 | it elsewhere!
56 |
57 | Alternatively, if you're building a twin requiring such credentials, it's your responsibility to give the end
58 | users confidence that you're not abusing their access.
59 |
60 | There'll be a lot more discussion on these issues, but it's outside the scope of **twined** - all we do here is
61 | make sure a twin has the credentials it requires.
62 |
63 | Credentials should be securely managed by whatever system is managing the twin, then made accessible to the twin
64 | in the form of environment variables:
65 |
66 | .. code-block:: javascript
67 |
68 | SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor
69 |
70 | Credentials may also reside in a ``.env`` file in the current directory, either in the format above
71 | (with a new line for each variable) or, for convenience, as bash exports like:
72 |
73 | .. code-block:: javascript
74 |
75 | export SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor
76 |
77 | The ``validate_credentials()`` method of the ``Twine`` class checks for their presence and, where contained in a
78 | ``.env`` file, ensures they are loaded into the environment.
79 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "main" ]

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Use only 'java' to analyze code written in Java, Kotlin or both
        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    # NOTE: codeql-action v2 was deprecated by GitHub; v3 is the drop-in supported release.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v3
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.

        # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality


    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v3

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

    # If the Autobuild fails above, remove it and uncomment the following three lines.
    # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

    # - run: |
    #     echo "Run, Build Application using script"
    #     ./location_of_script_within_repo/buildscript.sh

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3
      with:
        category: "/language:${{matrix.language}}"
--------------------------------------------------------------------------------
/docs/source/deployment.rst:
--------------------------------------------------------------------------------
1 | .. _deployment:
2 |
3 | ==========
4 | Deployment
5 | ==========
6 |
7 |
8 | .. _deploying_with_octue:
9 |
10 | Deploying with Octue
11 | ====================
12 |
13 | `Octue `_ provides automated deployment to a cloud provider (like GCP or Azure), along with
14 | permissions and user management, monitoring, logging and data storage management out of the box.
15 |
16 | There are also a whole bunch of collaborative helper tools, like the graphical
17 | `twine builder `_ and manifesting tools, designed to speed up the process of building
18 | and using twines.
19 |
20 | The full set of services is in early beta, `get in touch `_ and we can help you
21 | architect systems - from small data services to large networks of :ref:`digital_twins`.
22 |
23 |
24 | .. _deploying_with_doctue:
25 |
26 | Coming Soon - Deploying with doctue
27 | ===================================
28 |
29 | Once we've bedded down our services internally at Octue, we'll be open-sourcing more parts of our build/deploy process,
30 | including docker containers with pre-configured servers to run and monitor twine-based services and digital twins.
31 |
32 | This will allow services to be easily spun up on GCP, Azure, Digital Ocean, etc., and be a nice halfway house between
33 | fully managed system on Octue and running your own webserver. Of course,
34 | without all the collaborative and data management features that Octue provides ;)
35 |
36 | We're looking for commercial sponsors for this part of the process - if that could be you, please
37 | `get in touch `_
38 |
39 |
40 | .. _deploying_as_a_cli:
41 |
42 | Deploying as a command-line application
43 | =======================================
44 |
45 | Use the open-source `octue app template `_ as a guide. Write your new
46 | python code (or call your existing tools/libraries) within it. It's set up to wrap and check configuration, inputs and
47 | outputs using twined. Follow the instructions there to set up your inputs, and your files, and run an analysis.
48 |
49 |
50 | .. _deployment_with_a_web_server:
51 |
52 | Deploying with your own web server
53 | ==================================
54 |
55 | You can use any python based web server (need another language? see :ref:`language_choice`):
56 |
57 | - Add ``configuration_values_data`` to your webserver config
58 | - Set up an endpoint to allow clients to retrieve the twine, so they can see what data the service requires.
59 | - Set up an endpoint to handle incoming requests / socket messages - these will be ``input_values_data``.
60 | - Treat these requests / messages as events which trigger a task.
61 | - In your task framework (e.g. your celery task), either:
62 | - Use **twined** directly to validate the ``input_values_data``/``output_values_data`` (and, on startup, the
63 | ``configuration_values_data``) and handle running any required analysis yourself, or
64 | - import your analysis app (as built in :ref:`deploying_as_a_cli`) and call it with the configuration and input
65 | data in your task framework.
66 | - Return the result to the client.
67 |
--------------------------------------------------------------------------------
/docs/source/license.rst:
--------------------------------------------------------------------------------
1 | .. _license:
2 |
3 | =======
4 | License
5 | =======
6 |
7 | Octue maintains **twined** as an open source project, under the MIT license.
8 |
9 | The boring bit
10 | ==============
11 |
12 | Copyright (c) 2013-2024 Octue Ltd, All Rights Reserved.
13 |
14 | Permission is hereby granted, free of charge, to any person obtaining a copy
15 | of this software and associated documentation files (the "Software"), to deal
16 | in the Software without restriction, including without limitation the rights
17 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
18 | copies of the Software, and to permit persons to whom the Software is
19 | furnished to do so, subject to the following conditions:
20 |
21 | The above copyright notice and this permission notice shall be included in all
22 | copies or substantial portions of the Software.
23 |
24 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
25 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
26 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
27 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
28 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
29 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
30 | SOFTWARE.
31 |
32 |
33 | Third Party Libraries
34 | =====================
35 |
36 | **twined** includes or is linked against the following third party libraries:
37 |
38 |
39 | Plotly.js
40 | ---------
41 | The MIT License (MIT)
42 |
43 | Copyright (c) 2020 Plotly, Inc
44 |
45 | Permission is hereby granted, free of charge, to any person obtaining a copy
46 | of this software and associated documentation files (the "Software"), to deal
47 | in the Software without restriction, including without limitation the rights
48 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
49 | copies of the Software, and to permit persons to whom the Software is
50 | furnished to do so, subject to the following conditions:
51 |
52 | The above copyright notice and this permission notice shall be included in
53 | all copies or substantial portions of the Software.
54 |
55 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
56 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
57 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
58 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
59 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
60 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
61 | THE SOFTWARE.
62 |
63 |
64 | jsonschema
65 | ----------
66 |
67 | Copyright (c) 2013 Julian Berman
68 |
69 | Permission is hereby granted, free of charge, to any person obtaining a copy
70 | of this software and associated documentation files (the "Software"), to deal
71 | in the Software without restriction, including without limitation the rights
72 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
73 | copies of the Software, and to permit persons to whom the Software is
74 | furnished to do so, subject to the following conditions:
75 |
76 | The above copyright notice and this permission notice shall be included in
77 | all copies or substantial portions of the Software.
78 |
79 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
80 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
81 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
82 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
83 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
84 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
85 | THE SOFTWARE.
86 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | > [!NOTE]
2 | > The `twined` library has been deprecated. It now exists in the [Octue SDK](https://github.com/octue/octue-sdk-python)
3 | > in the [`octue.twined`](https://github.com/octue/octue-sdk-python/tree/main/octue/twined) subpackage.
4 |
5 | # twined
6 |
7 | A library to help digital twins and data services talk to one another. Read more at [twined.readthedocs.io](https://twined.readthedocs.io)
8 |
9 | [](https://badge.fury.io/py/twined)
10 | [](https://github.com/octue/twined)
11 | [](https://codecov.io/gh/octue/twined)
12 | [](https://twined.readthedocs.io/en/latest/?badge=latest)
13 | [](https://github.com/pre-commit/pre-commit)
14 | [](https://github.com/ambv/black)
15 |
16 | ## Developer notes
17 |
18 | **Documentation for use of the library is [here](https://twined.readthedocs.io). You don't need to pay attention to the following unless you plan to develop Twined itself.**
19 |
20 | ### Contributing
21 |
22 | - Please raise an issue on the board (or add your $0.02 to an existing issue) so the maintainers know
23 | what's happening and can advise / steer you.
24 |
25 | - Create a fork of twined, undertake your changes on a new branch, named like *issue-84* or similar. To run tests and make commits,
26 | you'll need to do something like:
27 | ```
28 | git clone # fetches the repo to your local machine
29 | cd twined # move into the repo directory
30 | pyenv virtualenv 3.6.9 twinedenv # Makes a virtual environment for you to install the dev tools into. Use any python >= 3.6
31 | pyenv activate twinedenv            # Activates the virtual environment so you don't screw up other installations
32 | pip install -r requirements-dev.txt # Installs the testing and code formatting utilities
33 | pre-commit install # Installs the pre-commit code formatting hooks in the git repo
34 | tox # Runs the tests with coverage. NB you can also just set up pycharm or vscode to run these.
35 | ```
36 |
37 | - Adopt a Test Driven Development approach to implementing new features or fixing bugs.
38 |
39 | - Ask the `twined` maintainers *where* to make your pull request. We'll create a version branch, according to the
40 | roadmap, into which you can make your PR. We'll help review the changes and improve the PR.
41 |
42 | - Once checks have passed, test coverage of the new code is >=95%, documentation is updated and the Review is passed, we'll merge into the version branch.
43 |
44 | - Once all the roadmapped features for that version are done, we'll release.
45 |
46 |
47 | ### Release process
48 |
49 | The process for creating a new release is as follows:
50 |
51 | 1. Check out a branch for your release, called eg `release/X.Y.Z`
52 | 2. Create a Pull Request into the `main` branch.
53 | 3. Undertake your changes in other branches according to the git flow and create pull requests into `release/X.Y.Z`
54 | 4. Ensure that documentation is updated to match changes, and increment the changelog. **Pull requests which do not update documentation will be refused.**
55 | 5. Ensure that test coverage is sufficient. **Pull requests that decrease test coverage will be refused.**
56 | 6. Ensure code meets style guidelines (pre-commit scripts and flake8 tests will fail otherwise)
57 | 7. Address Review Comments on the PR
58 | 8. Ensure the version in `setup.py` is correct and matches the branch version.
59 | 9. Merge to main. Successful test, doc build, flake8 and a new version number will automatically create the release on pypi.
60 | 10. Confirm the successful release on pypi
61 | 11. Go to code > releases and create a new release on GitHub at the same SHA.
62 |
--------------------------------------------------------------------------------
/docs/source/quick_start_create_your_first_twine.rst:
--------------------------------------------------------------------------------
1 | .. _create_your_first_twine:
2 |
3 | Create your first twine
4 | =======================
5 |
6 | Let's say we want a digital twin that accepts two values, uses them to make a calculation, then gives the result. Anyone connecting to the twin will need to know what values it requires, and what it responds with.
7 |
8 | First, create a blank text file, call it ``twine.json``. We'll give the twin a title and description.
9 | Paste in the following:
10 |
11 | .. code-block:: javascript
12 |
13 | {
14 | "title": "My first digital twin... of an atomising discombobulator",
15 | "description": "A simple example... estimates the `foz` value of an atomising discombobulator."
16 | }
17 |
18 | Now, let's define an input values strand, to specify what values are required by the twin. For this we use a json schema
19 | (you can read more about them in :ref:`introducing_json_schema`). Add the ``input_values`` field, so your twine looks like this:
20 |
21 | .. code-block:: javascript
22 |
23 | {
24 | "title": "My first digital twin",
25 | "description": "A simple example to build on...",
26 | "input_values_schema": {
27 | "$schema": "https://json-schema.org/draft/2020-12/schema",
28 | "title": "Input Values schema for my first digital twin",
29 | "description": "These values are supplied to the twin by another program (often over a websocket, depending on your integration provider). So as these values change, the twin can reply with an update.",
30 | "type": "object",
31 | "properties": {
32 | "foo": {
33 | "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s",
34 | "type": "number",
35 | "minimum": 10,
36 | "maximum": 500
37 | },
38 | "baz": {
39 | "description": "The baz value... period of the discombobulator's recombulation unit, in s",
40 | "type": "number",
41 | "minimum": 0,
42 | "maximum": 1000
43 | }
44 | }
45 | }
46 | }
47 |
48 | Finally, let's define an output values strand, to define what kind of data is returned by the twin:
49 |
50 | .. code-block:: javascript
51 |
52 | "output_values_schema": {
53 | "$schema": "https://json-schema.org/draft/2020-12/schema",
54 | "title": "Output Values schema for my first digital twin",
55 | "description": "The twin will output data that matches this schema",
56 | "type": "object",
57 | "properties": {
58 | "foz": {
59 | "description": "Estimate of the foz value... efficiency of the discombobulator in %",
60 | "type": "number",
61 | "minimum": 10,
62 | "maximum": 500
63 | }
64 | }
65 | }
66 |
67 |
68 | .. _load_the_twine:
69 |
70 | Load the twine
71 | ==============
72 |
73 | **twined** provides a `Twine()` class to load a twine (from a file or a json string).
74 | The loading process checks the twine itself is valid. It's as simple as:
75 |
76 | .. code-block:: py
77 |
78 | from twined import Twine
79 |
80 | my_twine = Twine(source='twine.json')
81 |
82 |
83 | .. _validate_some_inputs:
84 |
85 | Validate some inputs
86 | ====================
87 |
88 | Say we have some json that we want to parse and validate, to make sure it matches what's required for input values.
89 |
90 | .. code-block:: py
91 |
92 | my_input_values = my_twine.validate_input_values(json='{"foo": 30, "baz": 500}')
93 |
94 | You can read the values from a file too. Paste the following into a file named ``input_values.json``:
95 |
96 | .. code-block:: javascript
97 |
98 | {
99 | "foo": 30,
100 | "baz": 500
101 | }
102 |
103 | Then parse and validate directly from the file:
104 |
105 | .. code-block:: py
106 |
107 | my_input_values = my_twine.validate_input_values(source="input_values.json")
108 |
109 |
110 | .. ATTENTION::
111 | LIBRARY IS UNDER CONSTRUCTION! WATCH THIS SPACE FOR MORE!
112 |
--------------------------------------------------------------------------------
/docs/source/anatomy.rst:
--------------------------------------------------------------------------------
1 | .. _anatomy:
2 |
3 | =========================
4 | Anatomy Of The Twine File
5 | =========================
6 |
7 | The main point of **twined** is to enable engineers and scientists to easily (and rigorously) define a digital twin
8 | or data service.
9 |
10 | This is done by adding a ``twine.json`` file to the repository containing your code. Adding a *twine* means you can:
11 |
12 | - communicate (to you or a colleague) what data is required by this service
13 | - communicate (to another service / machine) what data is required
14 | - deploy services automatically with a provider like `Octue `_.
15 |
16 | To just get started building a *twine*, check out the :ref:`quick_start`. To learn more about twines in general,
17 | see :ref:`about`. Here, we describe the parts of a *twine* ("strands") and what they mean.
18 |
19 | .. _strands:
20 |
21 | Strands
22 | =======
23 |
24 | A *twine* has several sections, called *strands*. Each defines a different kind of data required (or produced) by the
25 | twin.
26 |
27 | .. list-table::
28 | :widths: 30 70
29 | :header-rows: 1
30 |
31 | * - Strand
32 | - Describes the twin's requirements for...
33 | * - :ref:`Configuration Values `
34 | - Data, in JSON form, used for configuration of the twin/service.
35 | * - :ref:`Configuration Manifest `
36 | - Files/datasets required by the twin at configuration/startup
37 | * - :ref:`Input Values `
38 | - Data, in JSON form, passed to the twin in order to trigger an analysis
39 | * - :ref:`Input Manifest `
40 | - Files/datasets passed with Input Values to trigger an analysis
41 | * - :ref:`Output Values `
42 | - Data, in JSON form, that will be produced by the twin (in response to inputs)
43 | * - :ref:`Output Manifest `
44 | - Files/datasets that will be produced by the twin (in response to inputs)
45 | * - :ref:`Credentials `
46 | - Credentials that are required by the twin in order to access third party services
47 | * - :ref:`Children `
48 | - Other twins, access to which are required for this twin to function
49 | * - :ref:`Monitors `
50 | - Visual and progress outputs from an analysis
51 |
52 |
53 | .. toctree::
54 | :maxdepth: 1
55 | :hidden:
56 |
57 | anatomy_values
58 | anatomy_manifest
59 | anatomy_credentials
60 | anatomy_monitors
61 | anatomy_children
62 |
63 |
64 | .. _twine_file_schema:
65 |
66 | Twine File Schema
67 | =================
68 |
69 | Because the ``twine.json`` file itself is in ``JSON`` format with a strict structure, **twined** uses a schema to make
70 | sure that twine files are correctly written (a "schema-schema", if you will, since a twine already contains schema). Try not
71 | to think about it. But if you must, the *twine* schema is
72 | `here `_.
73 |
74 | The first thing **twined** always does is check that the ``twine.json`` file itself is valid, and give you a
75 | descriptive error if it isn't.
76 |
77 |
78 | .. _other_external_io:
79 |
80 | Other External I/O
81 | ==================
82 |
83 | A twin might:
84 |
85 | - GET/POST data from/to an external API,
86 | - query/update a database,
87 | - upload files to an object store,
88 | - trigger events in another network, or
89 | - perform pretty much any interaction you can think of with other applications over the web.
90 |
91 | However, such data exchange may not be controllable by **twined** (which is intended to operate at the boundaries of the
92 | twin) unless the resulting data is returned from the twin (and must therefore be compliant with the schema).
93 |
94 | So, there's nothing for **twined** to do here, and no need for a strand in the *twine* file. However, interacting with
95 | third party APIs or databases might require some credentials. See :ref:`credentials_strand` for help with that.
96 |
97 | .. NOTE::
98 | This is actually a very common scenario. For example, the purpose of the twin might be to fetch data (like a weather
99 | forecast) from some external API then return it in the ``output_values`` for use in a network of digital twins.
100 | But it's the twin developer's job to do the fetchin' and make sure the resulting data is compliant with the
101 | ``output_values_schema`` (see :ref:`values_based_strands`).
--------------------------------------------------------------------------------
/docs/source/about_other_considerations.rst:
--------------------------------------------------------------------------------
1 | .. _other_considerations:
2 |
3 | ====================
4 | Other Considerations
5 | ====================
6 |
7 | A variety of thoughts that arose whilst architecting **twined**.
8 |
9 | .. _bash_style_stdio:
10 |
11 | Bash-style stdio
12 | ================
13 |
14 | Some thought was given to using a very old-school-unix approach to piping data between twins, via stdout.
15 |
16 | Whilst attractive (as being a wildly fast way of piping data between twins on the same machine) it was felt this
17 | was insufficiently general, eg:
18 |
19 | - where twins don't exist on the same machine or container, making it cumbersome to engineer common iostreams
20 | - where slight differences between different shells might lead to incompatibilities or changes in behaviour
21 |
22 | And also unfriendly, eg:
23 |
24 | - engineers or scientists unfamiliar with subtleties of bash shell scripting encounter difficulty piping data around
25 | - difficult to build friendly web based tools to introspect the data and configuration
26 | - bound to be headaches on windows platforms, even though windows now supports bash
27 | - easy to corrupt using third party libraries (e.g. which print to stdout)
28 |
29 |
30 | .. _Units:
31 |
32 | Units
33 | =====
34 |
35 | Being used (mostly) for engineering and scientific analysis, it was tempting to add in a specified sub-schema for units.
36 | For example, mandating that where values can be given in units, they be specified in a certain way, like:
37 |
38 | .. code-block:: javascript
39 |
40 | {
41 | "wind_speed": {
42 | "value": 10.2,
43 | "units": "mph"
44 | }
45 | }
46 |
47 | or (more succinct):
48 |
49 | .. code-block:: javascript
50 |
51 | {
52 | "wind_speed": 10.2,
53 | "wind_speed_units": "mph"
54 | }
55 |
56 | It's still extremely tempting to provide this facility; or at least provide some way of specifying in the schema
57 | what units a value should be provided in. Thinking about it but don't have time right now.
58 | If anybody wants to start crafting a PR with an extension or update to **twined** that facilitates this; please raise an
59 | issue to start progressing it.
60 |
61 |
62 | .. _variable_style:
63 |
64 | Variable Style
65 | ==============
66 |
67 | A preemptive stamp on the whinging...
68 |
69 | Note that in the ``JSON`` descriptions above, all variables are named in ``snake_case`` rather than ``camelCase``. This
70 | decision, more likely than even Brexit to divide opinions, is based on:
71 |
72 | - The languages we anticipate being most popular for building twins seem to trend toward snake case (eg
73 | `python `_, `c++ `_)
74 | although to be fair we might've woefully misjudged which languages start emerging.
75 |
76 | - The reservation of snake case for the schema spec has the subtle advantage that in future, we might be able to use
77 | camelCase within the spec to denote class types in some useful way, just like in python. Not sure yet; just mulling.
78 |
79 | - The :ref:`requirements` mention human-readability as a must;
80 | `this paper `_
81 | suggests a 20% slower comprehension of camel case than snake, although to be fair that's probably arguable.
82 |
83 | - We're starting in Python so are taking a lead from PEP8, which is bar none the most successful style guide on the
84 | planet, because it got everybody on the same page really early on.
85 |
86 | If existing code that you're dropping in uses camelCase, please don't file that as an issue... converting property
87 | names automatically after schema validation generation is trivial, there are tons of libraries (like
88 | `humps `_) to do it.
89 |
90 | We'd also consider a pull request for a built-in utility converting `to `_ and
91 | `from `_ that does this following validation and prior to returning results.
92 | Suggest your proposed approach on the `issues board `_.
93 |
94 |
95 | .. _language_choice:
96 |
97 | Language Choice
98 | ===============
99 |
100 | **twined** is presently released in python only. It won't be too hard to replicate functionality in other languages, and
101 | we're considering other languages at present, so might be easily persuadable ;)
102 |
103 | If you require implementation of **twined** in a different language,
104 | and are willing to consider sponsorship of development and maintenance of that library, please
105 | `file an issue `_.
106 |
107 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. ATTENTION::
2 | This library is in very early stages. Like the idea of it? Please
3 | `star us on GitHub `_ and contribute via the
4 | `issues board `_ and
5 | `roadmap `_.
6 |
7 | ======
8 | Twined
9 | ======
10 |
11 | **twined** is a library to help create and connect :ref:`digital_twins` and data services.
12 |
13 | .. epigraph::
14 | *"Twined" [t-why-nd] ~ encircled, twisted together, interwoven*
15 |
16 | A digital twin is a virtual representation of a real life being - a physical asset like a wind turbine or car - or even
17 | a human. Like real things, digital twins need to interact, so can be connected together, but need a common communication
18 | framework to do so.
19 |
20 | **twined** helps you to define a single file, a "twine", that defines a digital twin / data service, specifying
21 | its data interfaces, connections to other twins, and other requirements.
22 |
23 | Any person, or any computer, can read a twine and understand *what-goes-in* and *what-comes-out*. That makes it easy to
24 | collaborate with other teams, since everybody is crystal clear about what's needed.
25 |
26 | .. figure:: images/digital_twin_hierarchy.svg
27 | :width: 350px
28 | :align: center
29 | :figclass: align-center
30 | :alt: Hierarchy of digital twins
31 |
32 | Digital twins / data services connected in a hierarchy. Each blue circle represents a twin, coupled to its neighbours.
33 | Yellow nodes are where schema are used to connect twins.
34 |
35 |
36 | .. _aims:
37 |
38 | Aims
39 | ====
40 |
41 | **twined** provides a toolkit to help create and validate "twines" - descriptions of a digital twin, what data it
42 | requires, what it does and how it works.
43 |
44 | The goals of this **twined** library are as follows:
45 | - Provide a clear framework for what a *twine* can and/or must contain
46 | - Provide functions to validate incoming data against a known *twine*
47 | - Provide functions to check that a *twine* itself is valid
48 | - Provide (or direct you to) tools to create *twines* describing what you require
49 |
50 | In :ref:`anatomy`, we describe the different parts of a twine (examining how digital twins connect and interact...
51 | building them together in hierarchies and networks). But you may prefer to dive straight in with the :ref:`quick_start`
52 | guide.
53 |
54 | The scope of **twined** is not large. Many other libraries will deal with hosting and deploying digital twins, still
55 | more will deal with the actual analyses done within them. **twined** purely deals with parsing and checking the
56 | information exchanged.
57 |
58 |
59 | .. _reason_for_being:
60 |
61 | Raison d'etre
62 | =============
63 |
64 | Octue believes that a lynchpin of solving climate change is the ability for all engineering, manufacturing, supply
65 | chain and infrastructure plant to be connected together, enabling strong optimisation and efficient use of these
66 | systems.
67 |
68 | To enable engineers and scientists to build, connect and run digital twins in large networks (or even in small teams!)
69 | it is necessary for everyone to be on the same page - the :ref:`gemini_principles` are a great way to start with that,
70 | which is why we've released this part of our technology stack as open source, to support those principles and help
71 | develop a wider ecosystem.
72 |
73 | The main goal is to **help engineers and scientists focus on doing engineering and science** - instead of apis, data
74 | cleaning/management, and all this cloud-pipeline-devops-test-ci-ml BS that takes up 90% of a scientist's time, when they
75 | should be spending their valuable time researching migratory patterns of birds, or cell structures, or wind turbine
76 | performance, or whatever excites them.
77 |
78 | .. _uses:
79 |
80 | Uses
81 | =====
82 |
83 | At `Octue `_, **twined** is used as a core part of our application creation process:
84 |
85 | * As a format to communicate requirements to our partners in research projects
86 | * As a tool to validate incoming data to digital twins
87 | * As a framework to help establish schema when designing digital twins
88 | * As a source of information on digital twins in our network, to help map and connect twins together
89 |
90 | We'd like to hear about your use case. Please get in touch!
91 |
92 | We use the `GitHub Issue Tracker `_ to manage bug reports and feature requests.
93 | Please note, this is not a "general help" forum; we recommend Stack Overflow for such questions. For really gnarly
94 | issues or for help designing digital twin schema, Octue is able to provide application support services for those
95 | building digital twins using **twined**.
96 |
97 | .. toctree::
98 | :maxdepth: 2
99 |
100 | self
101 | quick_start
102 | anatomy
103 | about
104 | deployment
105 | license
106 | version_history
107 |
--------------------------------------------------------------------------------
/tests/test_credentials.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | from unittest import mock
4 |
5 | from twined import Twine, exceptions
6 |
7 | from .base import VALID_SCHEMA_TWINE, BaseTestCase
8 |
9 |
class TestCredentialsTwine(BaseTestCase):
    """Check that valid and invalid `credentials` entries in a twine file behave as expected."""

    def test_fails_on_no_name(self):
        """Check that an InvalidTwine exception is raised for a twine whose credential omits the `name` field."""
        twine_with_nameless_credential = """
            {
                "credentials": [
                    {
                        "purpose": "credentials without a name should be invalid"
                    }
                ]
            }
        """

        with self.assertRaises(exceptions.InvalidTwine):
            Twine(source=twine_with_nameless_credential)

    def test_fails_on_lowercase_name(self):
        """Check that an InvalidTwine exception is raised for a credential whose `name` contains lowercase letters."""
        twine_with_lowercase_credential_name = """
            {
                "credentials": [
                    {
                        "name": "my_secrets_should_be_uppercase",
                        "purpose": "Token for accessing a 3rd party API service"
                    }
                ]
            }
        """

        with self.assertRaises(exceptions.InvalidTwine):
            Twine(source=twine_with_lowercase_credential_name)

    def test_fails_on_dict(self):
        """Check that an InvalidTwine exception is raised when `credentials` is given as a dict instead of an array."""
        twine_with_credentials_as_dict = """
            {
                "credentials": {
                    "name": "MY_API_SECRET_KEY",
                    "purpose": "Token for accessing a 3rd party API service"
                }
            }
        """

        with self.assertRaises(exceptions.InvalidTwine):
            Twine(source=twine_with_credentials_as_dict)

    def test_fails_on_name_whitespace(self):
        """Check that an InvalidTwine exception is raised for a credential whose `name` contains whitespace."""
        twine_with_whitespace_in_credential_name = """
            {
                "credentials": [
                    {
                        "name": "MY NAME SHOULD NOT HAVE WHITESPACE",
                        "purpose": "Token for accessing a 3rd party API service"
                    }
                ]
            }
        """

        with self.assertRaises(exceptions.InvalidTwine):
            Twine(source=twine_with_whitespace_in_credential_name)
81 |
82 |
class TestCredentialsValidation(BaseTestCase):
    """Check that credentials validation behaves correctly given a valid twine."""

    VALID_CREDENTIALS_TWINE = """
        {
            "credentials": [
                {
                    "name": "SECRET_THE_FIRST",
                    "purpose": "Token for accessing a 3rd party API service"
                },
                {
                    "name": "SECRET_THE_SECOND",
                    "purpose": "Token for accessing a 3rd party API service"
                },
                {
                    "name": "SECRET_THE_THIRD"
                }
            ]
        }
    """

    def test_no_credentials(self):
        """Check that a twine without a credentials strand validates straightforwardly."""
        Twine(source=VALID_SCHEMA_TWINE).validate_credentials()

    def test_missing_credentials(self):
        """Check that validation fails when credentials declared in the twine are absent from the environment."""
        credentials_twine = Twine(source=self.VALID_CREDENTIALS_TWINE)
        with self.assertRaises(exceptions.CredentialNotFound):
            credentials_twine.validate_credentials()

    def test_credentials(self):
        """Check that credentials present in the environment satisfy validation and keep their values."""
        fake_environment = {
            "SECRET_THE_FIRST": "a value",
            "SECRET_THE_SECOND": "another value",
            "SECRET_THE_THIRD": "value",
        }

        credentials_twine = Twine(source=self.VALID_CREDENTIALS_TWINE)

        with mock.patch.dict(os.environ, fake_environment):
            credentials_twine.validate_credentials()
            self.assertEqual(os.environ["SECRET_THE_THIRD"], "value")
124 |
125 |
# Allow running this test module directly (rather than via a test runner).
if __name__ == "__main__":
    unittest.main()
128 |
--------------------------------------------------------------------------------
/examples/wind_tunnel_datalogger_service/data/output_manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "e13f3499-7162-4218-bc56-6a59b1ad3310",
3 | "datasets": [
4 | {
5 | "id": "1eba4346-daff-421b-921c-8f1c05d6997d",
6 | "name": "Test results from naca0012 section",
7 | "organisation": "megacorp",
8 | "tags": {"section": "naca0012"},
9 | "labels": [],
10 | "files": [
11 | {
12 | "path": "datasets/7ead7669/sys_temp.json",
13 | "cluster": 0,
14 | "sequence": 0,
15 | "extension": "json",
16 | "tags": {},
17 | "labels": ["system", "temperature"],
18 | "posix_timestamp": 1551394800,
19 | "id": "afcdef45-da6b-4805-95d6-7a889d81f5b9",
20 | "last_modified": "2020-02-28T13:12:42.000000Z",
21 | "name": "sys_temp.json",
22 | "size_bytes": 20486,
23 | "sha-512/256": "somesha"
24 | },
25 | {
26 | "path": "datasets/7ead7669/tunnel_profile.json",
27 | "cluster": 1,
28 | "sequence": 0,
29 | "extension": "json",
30 | "tags": {},
31 | "labels": ["wind", "tunnel", "velocity", "profile", "background", "turbulence"],
32 | "posix_timestamp": 1551394800,
33 | "id": "3667aa6d-ee64-4cd4-a2fd-e72bcdc65791",
34 | "last_modified": "2020-02-28T13:24:43.000000Z",
35 | "name": "tunnel_profile.json",
36 | "size_bytes": 678486,
37 | "sha-512/256": "somesha"
38 | },
39 | {
40 | "path": "datasets/7ead7669/cp_reference.dat",
41 | "cluster": 2,
42 | "sequence": 0,
43 | "extension": "dat",
44 | "tags": {},
45 | "labels": ["pressure", "coefficient", "cp", "profile", "reference"],
46 | "posix_timestamp": 1551394800,
47 | "id": "310bc665-fe8c-4948-b821-0ad43fcd480d",
48 | "last_modified": "2020-02-28T13:47:23.000000Z",
49 | "name": "cp_reference.dat",
50 | "size_bytes": 59684813,
51 | "sha-512/256": "somesha"
52 | },
53 | {
54 | "path": "datasets/7ead7669/cp_00001.dat",
55 | "cluster": 3,
56 | "sequence": 0,
57 | "extension": "dat",
58 | "tags": {"alpha": 0},
59 | "labels": ["pressure", "coefficient", "cp", "profile", "reference"],
60 | "posix_timestamp": 1551394800,
61 | "id": "c3a6c14d-19d8-44da-9aa5-119798f53d15",
62 | "last_modified": "2020-02-28T13:54:24.000000Z",
63 | "name": "cp_00001.dat",
64 | "size_bytes": 59684813,
65 | "sha-512/256": "somesha"
66 | },
67 | {
68 | "path": "datasets/7ead7669/cp_00002.dat",
69 | "cluster": 3,
70 | "sequence": 1,
71 | "extension": "dat",
72 | "tags": {"alpha": 1},
73 | "labels": ["pressure", "coefficient", "cp", "profile", "reference"],
74 | "posix_timestamp": 1551394800,
75 | "id": "fac62036-722c-481a-9daf-87897c4872ec",
76 | "last_modified": "2020-02-28T13:56:21.000000Z",
77 | "name": "cp_00002.dat",
78 | "size_bytes": 59684813,
79 | "sha-512/256": "somesha"
80 | },
81 | {
82 | "path": "datasets/7ead7669/cp_00003.dat",
83 | "cluster": 3,
84 | "sequence": 2,
85 | "extension": "dat",
86 | "tags": {"alpha": 2},
87 | "labels": ["pressure", "coefficient", "cp", "profile", "reference"],
88 | "posix_timestamp": 1551394800,
89 | "id": "70cda7f6-c97d-4b99-9156-2ff6f5947b7e",
90 | "last_modified": "2020-02-28T13:57:03.000000Z",
91 | "name": "cp_00003.dat",
92 | "size_bytes": 59684813,
93 | "sha-512/256": "somesha"
94 | },
95 | {
96 | "path": "datasets/7ead7669/cp_00004.dat",
97 | "cluster": 3,
98 | "sequence": 3,
99 | "extension": "dat",
100 | "tags": {"alpha": 3},
101 | "labels": ["pressure", "coefficient", "cp", "profile", "reference"],
102 | "posix_timestamp": 1551394800,
103 | "id": "5ab4015a-608a-4ecd-9e30-95aee82d86d9",
104 | "last_modified": "2020-02-28T13:58:46.000000Z",
105 | "name": "cp_00004.dat",
106 | "size_bytes": 59684813,
107 | "sha-512/256": "somesha"
108 | },
109 | {
110 | "path": "datasets/7ead7669/cp_00005.dat",
111 | "cluster": 3,
112 | "sequence": 4,
113 | "extension": "dat",
114 | "tags": {"alpha": 4},
115 | "labels": ["pressure", "coefficient", "cp", "profile", "reference"],
116 | "posix_timestamp": 1551394800,
117 | "id": "3ba97d4b-002d-4ca3-a6b0-54573a5eefde",
118 | "last_modified": "2020-02-28T13:59:32.000000Z",
119 | "name": "cp_00005.dat",
120 | "size_bytes": 59684813,
121 | "sha-512/256": "somesha"
122 | }
123 | ]
124 | }
125 | // { ... additional datasets for different foils tested ... }
126 | ]
127 | }
128 |
--------------------------------------------------------------------------------
/twined/schema/twine_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$defs": {
3 | "file_tags_template": {
4 | "oneOf": [
5 | {
6 | "type": "object",
7 | "properties": {
8 | "$schema": {
9 | "type": "string"
10 | },
11 | "type": {
12 | "const": "object"
13 | },
14 | "properties": {
15 | "type": "object"
16 | },
17 | "required": {
18 | "type": "array",
19 | "items": {
20 | "type": "string"
21 | }
22 | }
23 | },
24 | "required": ["type", "properties"]
25 | },
26 | {
27 | "type": "object",
28 | "properties": {
29 | "$ref": {
30 | "type": "string"
31 | }
32 | },
33 | "required": ["$ref"]
34 | }
35 | ]
36 | },
37 | "manifest": {
38 | "type": "object",
39 | "properties": {
40 | "optional": {
41 | "type": "boolean",
42 | "description": "This should be `true` if the manifest is optional."
43 | },
44 | "datasets": {
45 | "type": "object",
46 | "description": "A list of entries, each describing a dataset that should be attached to / made available to the digital twin",
47 | "patternProperties": {
48 | ".+": {
49 | "description": "A dataset representation whose property name/key uniquely identifies the dataset to the service",
50 | "type": "object",
51 | "properties": {
52 | "purpose": {
53 | "description": "What data this dataset contains, eg 'the set of data files from the energy production calculation process'",
54 | "type": "string",
55 | "default": ""
56 | },
57 | "file_tags_template": {
58 | "$ref": "#/$defs/file_tags_template"
59 | },
60 | "optional": {
61 | "type": "boolean"
62 | }
63 | }
64 | }
65 | }
66 | }
67 | },
68 | "required": ["datasets"]
69 | }
70 | },
71 | "type": "object",
72 | "$schema": "https://json-schema.org/draft/2020-12/schema",
73 | "properties": {
74 | "children": {
75 | "type": "array",
76 | "items": {
77 | "type": "object",
78 | "properties": {
79 | "key": {
80 | "description": "A textual key identifying a group of child twins",
81 | "type": "string"
82 | },
83 | "purpose": {
84 | "description": "What this group of child twins are used for",
85 | "type": "string",
86 | "default": ""
87 | },
88 | "filters": {
89 | "description": "A search term, using the Lucene Query Language, which can be used to automatically refine the list of available child twins down to ones suitable for use here.",
90 | "type": "string",
91 | "default": ""
92 | }
93 | },
94 | "required": [
95 | "key"
96 | ]
97 | }
98 | },
99 | "configuration_manifest": {
100 | "$ref": "#/$defs/manifest"
101 | },
102 | "configuration_values_schema": {
103 | "type": "object",
104 | "properties": {
105 | "properties": {
106 | "type": "object"
107 | },
108 | "optional": {
109 | "type": "boolean",
110 | "description": "This should be `true` if the configuration values are optional."
111 | }
112 | }
113 | },
114 | "credentials": {
115 | "type": "array",
116 | "items": {
117 | "type": "object",
118 | "properties": {
119 | "name": {
120 | "description": "The credential name, in upper snake case, eg 'MYAPI_SECRET_KEY'",
121 | "type": "string",
122 | "pattern": "^[A-Z]+(?:_[A-Z]+)*$"
123 | },
124 | "purpose": {
125 | "description": "What this credential is used for, eg 'Token for accessing the MyApi service'",
126 | "type": "string"
127 | }
128 | },
129 | "required": [
130 | "name"
131 | ],
132 | "additionalProperties": false
133 | }
134 | },
135 | "input_manifest": {
136 | "$ref": "#/$defs/manifest"
137 | },
138 | "input_values_schema": {
139 | "type": "object",
140 | "properties": {
141 | "properties": {
142 | "type": "object"
143 | },
144 | "optional": {
145 | "type": "boolean",
146 | "description": "This should be `true` if the input values are optional."
147 | }
148 | }
149 | },
150 | "output_manifest": {
151 | "$ref": "#/$defs/manifest"
152 | },
153 | "output_values_schema": {
154 | "type": "object",
155 | "properties": {
156 | "properties": {
157 | "type": "object"
158 | },
159 | "optional": {
160 | "type": "boolean",
161 | "description": "This should be `true` if the output values are optional."
162 | }
163 | }
164 | }
165 | }
166 | }
167 |
--------------------------------------------------------------------------------
/docs/source/anatomy_values.rst:
--------------------------------------------------------------------------------
1 | .. _values_based_strands:
2 |
3 | ====================
4 | Values-based Strands
5 | ====================
6 |
7 | The ``configuration_values_schema``, ``input_values_schema`` and ``output_values_schema`` strands are *values-based*,
8 | meaning the data that matches these strands is in JSON form.
9 |
10 | Each of these strands is a *json schema* which describes that data.
11 |
12 | .. tabs::
13 |
14 | .. group-tab:: Configuration Values Strand
15 |
16 | This strand is a ``configuration_values_schema``, that is used to check validity of any
17 | ``configuration_values`` data supplied to the twin at startup.
18 |
19 | The Configuration Values Strand is generally used to define control parameters relating to what the twin should
20 | do, or how it should operate.
21 |
22 | For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid
        flow solver should be used? What is the acceptable error level on a classifier algorithm?
24 |
25 | .. group-tab:: Input Values Strand
26 |
27 | This strand is an ``input_values_schema``, that is used to check validity of ``input_values`` data supplied to the
28 | twin at the beginning of an analysis task.
29 |
30 | The Input Values Strand is generally used to define actual data which will be processed by the twin. Sometimes, it
31 | may be used to define control parameters specific to an analysis.
32 |
33 | For example, if a twin cleans and detects anomalies in a 10-minute timeseries of 1Hz data, the ``input_values``
34 | might contain an array of data and a list of corresponding timestamps. It may also contain a control parameter
35 | specifying which algorithm is used to do the detection.
36 |
37 | .. NOTE::
38 | Depending on the way the twin is deployed (see :ref:`deployment`), the ``input_values`` might come in from a
39 | web request, over a websocket or called directly from the command line or another library.
40 |
41 | However they come, if the new ``input_values`` validate against the ``input_values_schema`` strand,
42 | then analysis can proceed.
43 |
44 | .. group-tab:: Output Values Strand
45 |
46 | This strand is an ``output_values_schema``, that is used to check results (``output_values``) computed during an
47 | analysis. This ensures that the application wrapped up within the *twine* is operating correctly, and
48 | enables other twins/services or the end users to see what outputs they will get.
49 |
        For example, if a twin cleans and detects anomalies in a 10-minute timeseries of 1Hz data, the ``output_values``
51 | might contain an array of data interpolated onto regular timestamps, with missing values filled in and a list of
52 | warnings where anomalies were found.
53 |
54 |
55 | Let's look at basic examples for twines containing each of these strands:
56 |
57 |
58 | .. tabs::
59 |
60 | .. group-tab:: Configuration Values Strand
61 |
62 | This *twine* contains an example ``configuration_values_schema`` with one control parameter.
63 |
        `Many more detailed and specialised examples are available in the GitHub repository <https://github.com/octue/twined>`_
65 |
66 | .. code-block:: javascript
67 |
68 | {
69 | "configuration_values_schema": {
70 | "title": "The example configuration form",
71 | "description": "The Configuration Values Strand of an example twine",
72 | "type": "object",
73 | "properties": {
74 | "n_iterations": {
75 | "description": "An example of an integer configuration variable, called 'n_iterations'.",
76 | "type": "integer",
77 | "minimum": 1,
78 | "maximum": 10,
79 | "default": 5
80 | }
81 | }
82 | }
83 | }
84 |
85 | Matching ``configuration_values`` data could look like this:
86 |
87 | .. code-block:: javascript
88 |
89 | {
90 | "n_iterations": 8,
91 | }
92 |
93 |
94 | .. group-tab:: Input Values Strand
95 |
        This *twine* contains an example ``input_values_schema`` with one input value, which is marked as required.
97 |
98 | Many more detailed and specialised examples are available in :ref:`examples`.
99 |
100 | .. code-block:: javascript
101 |
102 | {
103 | "input_values_schema": {
104 | "title": "Input Values",
105 | "description": "The input values strand of an example twine, with a required height value",
106 | "type": "object",
107 | "properties": {
108 | "height": {
109 | "description": "An example of an integer value called 'height'",
110 | "type": "integer",
111 | "minimum": 2
112 | }
113 | },
114 | "required": ["height"]
115 | },
116 |
117 | Matching ``input_values`` data could look like this:
118 |
119 | .. code-block:: javascript
120 |
121 | {
122 | "height": 13,
123 | }
124 |
125 |
126 | .. group-tab:: Output Values Strand
127 |
128 | Stuff
--------------------------------------------------------------------------------
/docs/source/about_introducing_json_schema.rst:
--------------------------------------------------------------------------------
1 | .. _introducing_json_schema:
2 |
3 | =======================
4 | Introducing JSON Schema
5 | =======================
6 |
``JSON`` is a data interchange format that has rapidly taken over as the de facto web-based data communication standard
8 | in recent years.
9 |
10 | ``JSONSchema`` is a way of specifying what a ``JSON`` document should contain. The Schema are, themselves, written in
11 | ``JSON``!
12 |
13 | Whilst schema can become extremely complicated in some scenarios, they are best designed to be quite succinct. See below
14 | for the schema (and matching ``JSON``) for an integer and a string variable.
15 |
16 | **JSON:**
17 |
18 | .. code-block:: json
19 |
20 | {
21 | "id": 1,
22 | "name": "Tom"
23 | }
24 |
25 |
26 | **Schema:**
27 |
28 | .. code-block:: json
29 |
30 | {
31 | "type": "object",
32 | "title": "An id number and a name",
33 | "properties": {
34 | "id": {
35 | "type": "integer",
36 | "title": "An integer number",
37 | "default": 0
38 | },
39 | "name": {
40 | "type": "string",
41 | "title": "A string name",
42 | "default": ""
43 | }
44 | }
45 | }
46 |
47 |
48 | .. _useful_resources:
49 |
50 | Useful resources
51 | ================
52 | .. list-table::
53 | :widths: auto
54 | :header-rows: 1
55 |
56 | * - Link
57 | - Resource
58 | * - https://jsonschema.net/
59 | - Useful web tool for inferring schema from existing json
60 | * - https://jsoneditoronline.org
61 | - A powerful online editor for json, allowing manipulation of large documents better than most text editors
62 | * - https://www.json.org/
63 | - The JSON standard spec
64 | * - https://json-schema.org/
65 | - The (draft standard) JSONSchema spec
66 | * - https://rjsf-team.github.io/react-jsonschema-form/
67 | - A front end library for generating webforms directly from a schema
68 |
69 |
70 | .. _human_readbility:
71 |
72 | Human readability
73 | =================
74 |
75 | Back in our :ref:`requirements` section, we noted it was important for humans to read and understand schema.
76 |
77 | The actual documents themselves are pretty easy to read by technical users. But, for non technical users, readability can be
78 | enhanced even further by the ability to turn ``JSONSchema`` into web forms automatically. For our example above, we can
79 | autogenerate a web form straight from the schema:
80 |
81 | .. figure:: images/schema_form_example.png
82 | :width: 500px
83 | :align: center
84 | :figclass: align-center
85 | :alt: Web form from an example schema
86 |
87 | Web form generated from the example schema above.
88 |
89 | Thus, we can take a schema (or a part of a schema) and use it to generate a control form for a digital twin in a web
90 | interface without writing a separate form component - great for ease and maintainability.
91 |
92 |
93 | .. _why_not_xml:
94 |
95 | Why not XML?
96 | ============
97 |
In a truly excellent three-part blog, writer Seva Savris takes us
99 | through the ups and downs of ``JSON`` versus ``XML``; well worth a read if wishing to understand the respective technologies
100 | better.
101 |
102 | In short, both ``JSON`` and ``XML`` are generalised data interchange specifications and can both can do what we want here.
103 | We choose ``JSON`` because:
104 |
105 | #. Textual representation is much more concise and easy to understand (very important where non-developers like
106 | engineers and scientists must be expected to interpret schema)
107 |
108 | #. `Attack vectors `_. Because entities in ``XML``
109 | are not necessarily primitives (unlike in ``JSON``), an ``XML`` document parser in its default state may leave a system
110 | open to XXE injection attacks and DTD validation attacks, and therefore requires hardening. ``JSON`` documents are
   similarly afflicted (just like any kind of serialized data) but default parsers, operating on the premise of only
   deserializing to primitive types, are safe by default - it is only when non-default parsing or deserialization
113 | techniques (such as ``JSONP``) are used that the application becomes vulnerable. By utilising a default ``JSON`` parser
114 | we can therefore significantly shrink the attack surface of the system. See
115 | `this blog post `_ for further discussion.
116 |
117 | #. ``XML`` is powerful... perhaps too powerful. The standard can be adapted greatly, resulting in high encapsulation
118 | and a high resilience to future unknowns. Both beneficial. However, this requires developers of twins to maintain
119 | interfaces of very high complexity, adaptable to a much wider variety of input. To enable developers to progress, we
120 | suggest handling changes and future unknowns through well-considered versioning, whilst keeping their API simple.
121 |
122 | #. ``XML`` allows baked-in validation of data and attributes. Whilst advantageous in some situations, this is not a
   benefit here. We wish validation to be one-sided: validation of data accepted/generated by a digital twin should
   occur at the boundaries of that twin.
125 |
126 | #. Required validation capabilities, built into ``XML`` are achievable with ``JSONSchema`` (otherwise missing from the
127 | pure ``JSON`` standard)
128 |
129 | #. ``JSON`` is a more compact expression than XML, significantly reducing memory and bandwidth requirements. Whilst
   not a major issue for most modern PCs, sensors on the edge may have limited memory, and both memory and bandwidth at
131 | scale are extremely expensive. Thus for extremely large networks of interconnected systems there could be significant
132 | speed and cost savings.
133 |
134 |
--------------------------------------------------------------------------------
/tests/test_schema_strands.py:
--------------------------------------------------------------------------------
1 | import os
2 | from tempfile import TemporaryDirectory
3 | import unittest
4 |
5 | from twined import Twine, exceptions
6 |
7 | from .base import VALID_SCHEMA_TWINE, BaseTestCase
8 |
9 |
class TestSchemaStrands(BaseTestCase):
    """Check that the Twine class validates data correctly against its schema-bearing strands."""

    VALID_CONFIGURATION_VALUE = """{"n_iterations": 1}"""

    def test_invalid_strand(self):
        """Check that an unknown strand name results in an UnknownStrand exception.
        Note: This exercises an internal method. The current API doesn't allow this error to emerge, but the check
        allows us to extend to a generic method.
        """
        twine = Twine(source=VALID_SCHEMA_TWINE)
        loaded_values = twine._load_json("configuration", source=self.VALID_CONFIGURATION_VALUE)

        with self.assertRaises(exceptions.UnknownStrand):
            twine._validate_against_schema("not_a_strand_name", loaded_values)

    def test_missing_values_files(self):
        """Check that the appropriate exceptions are raised when reading values from files that don't exist."""
        twine = Twine(source=VALID_SCHEMA_TWINE)
        nonexistent_file = os.path.join(self.path, "not_a_file.json")

        with self.assertRaises(exceptions.ConfigurationValuesFileNotFound):
            twine.validate_configuration_values(source=nonexistent_file)

        with self.assertRaises(exceptions.InputValuesFileNotFound):
            twine.validate_input_values(source=nonexistent_file)

        with self.assertRaises(exceptions.OutputValuesFileNotFound):
            twine.validate_output_values(source=nonexistent_file)

    def test_no_values(self):
        """Check that supplying no data source raises an InvalidValuesJson error."""
        twine = Twine(source=VALID_SCHEMA_TWINE)

        with self.assertRaises(exceptions.InvalidValuesJson):
            twine.validate_configuration_values(source=None)

    def test_empty_values(self):
        """Check that supplying an empty string as the data source raises an InvalidValuesJson error."""
        twine = Twine(source=VALID_SCHEMA_TWINE)

        with self.assertRaises(exceptions.InvalidValuesJson):
            twine.validate_configuration_values(source="")

    def test_strand_not_found(self):
        """Check that validating against a strand absent from the twine raises a StrandNotFound error."""
        twine_without_output_schema = """
            {
                "configuration_values_schema": {
                    "$schema": "https://json-schema.org/draft/2020-12/schema",
                    "title": "The example configuration form",
                    "description": "The configuration strand of an example twine",
                    "type": "object",
                    "properties": {
                        "n_iterations": {
                            "description": "An example of an integer configuration variable, called 'n_iterations'.",
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 10,
                            "default": 5
                        }
                    }
                }
            }
        """

        with self.assertRaises(exceptions.StrandNotFound):
            Twine(source=twine_without_output_schema).validate_output_values(source="{}")

    def test_incorrect_values(self):
        """Check that values of the wrong type fail validation with InvalidValuesContents."""
        configuration_with_wrong_type = """{"n_iterations": "should not be a string, this field requires an integer"}"""

        with self.assertRaises(exceptions.InvalidValuesContents):
            Twine(source=VALID_SCHEMA_TWINE).validate_configuration_values(source=configuration_with_wrong_type)

    def test_missing_not_required_values(self):
        """Check that values omitted from a strand with no required fields still validate."""
        Twine(source=VALID_SCHEMA_TWINE).validate_output_values(source="{}")

    def test_missing_required_values(self):
        """Check that missing required values fail validation with InvalidValuesContents."""
        with self.assertRaises(exceptions.InvalidValuesContents):
            Twine(source=VALID_SCHEMA_TWINE).validate_input_values(source="{}")

    def test_valid_values_files(self):
        """Check that values can be read and validated correctly from files on disk."""
        twine = Twine(source=VALID_SCHEMA_TWINE)

        with TemporaryDirectory() as temporary_directory:
            configuration_file = self._write_json_string_to_file(self.VALID_CONFIGURATION_VALUE, temporary_directory)
            twine.validate_configuration_values(source=configuration_file)
            twine.validate_input_values(source="""{"height": 40}""")
            twine.validate_output_values(source="""{"width": 36}""")

    def test_valid_values_json(self):
        """Check that values can be read and validated correctly from a JSON string."""
        Twine(source=VALID_SCHEMA_TWINE).validate_configuration_values(source=self.VALID_CONFIGURATION_VALUE)

    def test_valid_with_extra_values(self):
        """Check that unexpected extra values are quietly ignored during validation."""
        configuration_with_extra_field = """
            {
                "n_iterations": 1,
                "another_field": "may or may not be quietly ignored"
            }
        """

        Twine(source=VALID_SCHEMA_TWINE).validate_configuration_values(source=configuration_with_extra_field)

    def test_missing_optional_values_do_not_raise_error(self):
        """Check that not providing values for an optional strand doesn't result in a validation error."""
        optional_twine = Twine(source={"configuration_values_schema": {"optional": True}})
        optional_twine.validate(configuration_values=None)
118 |
119 |
# Allow running this test module directly (rather than via a test runner).
if __name__ == "__main__":
    unittest.main()
122 |
--------------------------------------------------------------------------------
/tests/test_children.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from twined import Twine, exceptions
4 |
5 | from .base import BaseTestCase
6 |
7 |
class TestChildrenTwine(BaseTestCase):
    """Check that valid and invalid `children` entries in a twine file behave as expected."""

    def test_invalid_children_dict_not_array(self):
        """Check that an `InvalidTwine` exception is raised when the `children` entry is incorrectly given as a dict
        rather than an array.
        """
        with self.assertRaises(exceptions.InvalidTwine):
            Twine(source="""{"children": {}}""")

    def test_invalid_children_no_key(self):
        """Check that an `InvalidTwine` exception is raised when a child is missing the required `key` field."""
        twine_with_keyless_child = """
        {
            "children": [{"purpose": "The purpose.", "notes": "Here are some notes."}]
        }
        """

        with self.assertRaises(exceptions.InvalidTwine):
            Twine(source=twine_with_keyless_child)

    def test_valid_children(self):
        """Check that a twine with a single valid child instantiates correctly."""
        twine_with_one_child = """
        {
            "children": [{"key": "gis", "purpose": "The purpose.", "notes": "Some notes."}]
        }
        """

        self.assertEqual(len(Twine(source=twine_with_one_child).children), 1)

    def test_empty_children(self):
        """Check that a twine whose `children` strand is an empty list validates."""
        twine_with_no_children = Twine(source="""{"children": []}""")
        self.assertEqual(len(twine_with_no_children.children), 0)
44 |
45 |
class TestChildrenValidation(BaseTestCase):
    """Tests related to whether validation of children occurs successfully (given a valid twine)."""

    # A twine declaring a single child with key "gis".
    VALID_TWINE_WITH_CHILDREN = """
        {
            "children": [{"key": "gis", "purpose": "The purpose", "notes": "Some notes."}]
        }
    """

    # Children values matching the child declared in VALID_TWINE_WITH_CHILDREN.
    VALID_CHILD_VALUE = """
        [
            {
                "key": "gis",
                "id": "some-id",
                "backend": {
                    "name": "GCPPubSubBackend",
                    "project_id": "my-project"
                }
            }
        ]
    """

    def test_no_children(self):
        """Test that a twine with no children validates an empty children input."""
        Twine().validate_children(source=[])

    def test_missing_children(self):
        """Test that a twine declaring children rejects an empty children input."""
        twine = Twine(source=self.VALID_TWINE_WITH_CHILDREN)

        with self.assertRaises(exceptions.InvalidValuesContents):
            twine.validate_children(source=[])

    def test_extra_children(self):
        """Test that a twine declaring no children rejects a non-empty children input."""
        with self.assertRaises(exceptions.InvalidValuesContents):
            Twine().validate_children(source=self.VALID_CHILD_VALUE)

    def test_backend_cannot_be_empty(self):
        """Test that a child whose backend field is an empty object fails validation."""
        single_child_missing_backend = """[{"key": "gis", "id": "some-id", "backend": {}}]"""

        with self.assertRaises(exceptions.InvalidValuesContents):
            Twine().validate_children(source=single_child_missing_backend)

    def test_extra_key_validation_on_empty_twine(self):
        """Test that children not declared by an (empty) twine fail validation, whether or not they carry
        extra data.
        """
        # BUGFIX(doc): the previous docstring claimed no error would be raised, contradicting the
        # assertion below — an empty twine declares no children, so any children input is invalid.
        children_values_with_extra_data = """
            [
                {"key": "gis", "id": "id", "uri_env_name": "VAR_NAME", "an_extra_key": "not a problem if present"},
                {"key": "some_weird_other_child", "id": "some-other-id", "uri_env_name": "SOME_ENV_VAR_NAME"}
            ]
        """

        with self.assertRaises(exceptions.InvalidValuesContents):
            Twine().validate_children(source=children_values_with_extra_data)

    def test_extra_key_validation_on_valid_twine(self):
        """Test that children with extra data will not raise a validation error on a non-empty valid twine.
        # TODO review this behaviour - possibly should raise an error but allow for a user specified extra_data property
        """
        single_child_with_extra_data = """
            [
                {
                    "key": "gis",
                    "id": "some-id",
                    "backend": {
                        "name": "GCPPubSubBackend",
                        "project_id": "my-project"
                    },
                    "some_extra_property": "should not be a problem if present"
                }
            ]
        """

        twine = Twine(source=self.VALID_TWINE_WITH_CHILDREN)
        twine.validate_children(source=single_child_with_extra_data)

    def test_invalid_env_name(self):
        """Test that a child URI environment variable name not in ALL_CAPS_SNAKE_CASE fails validation."""
        child_with_invalid_environment_variable_name = """
            [
                {
                    "key": "gis",
                    "id": "some-id",
                    "uri_env_name": "an environment variable not in CAPS_CASE is invalid per the credentials spec"
                }
            ]
        """

        with self.assertRaises(exceptions.InvalidValuesContents):
            Twine().validate_children(source=child_with_invalid_environment_variable_name)

    def test_invalid_json(self):
        """Test that a children entry containing broken JSON raises InvalidValuesJson."""
        twine = Twine(source=self.VALID_TWINE_WITH_CHILDREN)

        with self.assertRaises(exceptions.InvalidValuesJson):
            twine.validate_children(source="[")

    def test_valid(self):
        """Test that a valid twine will validate valid children.
        Valiantly and Validly validating validity since 1983.
        To those reading this, know that YOU'RE valid.
        """
        twine = Twine(source=self.VALID_TWINE_WITH_CHILDREN)
        twine.validate_children(source=self.VALID_CHILD_VALUE)
149 |
150 |
# Allow the test module to be executed directly (e.g. `python test_children.py`).
if __name__ == "__main__":
    unittest.main()
153 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css:
--------------------------------------------------------------------------------
1 | /*!
2 | * # Semantic UI 2.4.1 - Accordion
3 | * http://github.com/semantic-org/semantic-ui/
4 | *
5 | *
6 | * Released under the MIT license
7 | * http://opensource.org/licenses/MIT
8 | *
9 | */.ui.accordion,.ui.accordion .accordion{max-width:100%}.ui.accordion .accordion{margin:1em 0 0;padding:0}.ui.accordion .accordion .title,.ui.accordion .title{cursor:pointer}.ui.accordion .title:not(.ui){padding:.5em 0;font-family:Lato,'Helvetica Neue',Arial,Helvetica,sans-serif;font-size:1em;color:rgba(0,0,0,.87)}.ui.accordion .accordion .title~.content,.ui.accordion .title~.content{display:none}.ui.accordion:not(.styled) .accordion .title~.content:not(.ui),.ui.accordion:not(.styled) .title~.content:not(.ui){margin:'';padding:.5em 0 1em}.ui.accordion:not(.styled) .title~.content:not(.ui):last-child{padding-bottom:0}.ui.accordion .accordion .title .dropdown.icon,.ui.accordion .title .dropdown.icon{display:inline-block;float:none;opacity:1;width:1.25em;height:1em;margin:0 .25rem 0 0;padding:0;font-size:1em;-webkit-transition:opacity .1s ease,-webkit-transform .1s ease;transition:opacity .1s ease,-webkit-transform .1s ease;transition:transform .1s ease,opacity .1s ease;transition:transform .1s ease,opacity .1s ease,-webkit-transform .1s ease;vertical-align:baseline;-webkit-transform:none;transform:none}.ui.accordion.menu .item .title{display:block;padding:0}.ui.accordion.menu .item .title>.dropdown.icon{float:right;margin:.21425em 0 0 1em;-webkit-transform:rotate(180deg);transform:rotate(180deg)}.ui.accordion .ui.header .dropdown.icon{font-size:1em;margin:0 .25rem 0 0}.ui.accordion .accordion .active.title .dropdown.icon,.ui.accordion .active.title .dropdown.icon{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.ui.accordion.menu .item .active.title>.dropdown.icon{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.ui.styled.accordion{width:600px}.ui.styled.accordion,.ui.styled.accordion .accordion{border-radius:.28571429rem;background:#fff;-webkit-box-shadow:0 1px 2px 0 rgba(34,36,38,.15),0 0 0 1px rgba(34,36,38,.15);box-shadow:0 1px 2px 0 rgba(34,36,38,.15),0 0 0 1px rgba(34,36,38,.15)}.ui.styled.accordion .accordion .title,.ui.styled.accordion 
.title{margin:0;padding:.75em 1em;color:rgba(0,0,0,.4);font-weight:700;border-top:1px solid rgba(34,36,38,.15);-webkit-transition:background .1s ease,color .1s ease;transition:background .1s ease,color .1s ease}.ui.styled.accordion .accordion .title:first-child,.ui.styled.accordion>.title:first-child{border-top:none}.ui.styled.accordion .accordion .content,.ui.styled.accordion .content{margin:0;padding:.5em 1em 1.5em}.ui.styled.accordion .accordion .content{padding:0;padding:.5em 1em 1.5em}.ui.styled.accordion .accordion .active.title,.ui.styled.accordion .accordion .title:hover,.ui.styled.accordion .active.title,.ui.styled.accordion .title:hover{background:0 0;color:rgba(0,0,0,.87)}.ui.styled.accordion .accordion .active.title,.ui.styled.accordion .accordion .title:hover{background:0 0;color:rgba(0,0,0,.87)}.ui.styled.accordion .active.title{background:0 0;color:rgba(0,0,0,.95)}.ui.styled.accordion .accordion .active.title{background:0 0;color:rgba(0,0,0,.95)}.ui.accordion .accordion .active.content,.ui.accordion .active.content{display:block}.ui.fluid.accordion,.ui.fluid.accordion .accordion{width:100%}.ui.inverted.accordion 
.title:not(.ui){color:rgba(255,255,255,.9)}@font-face{font-family:Accordion;src:url(data:application/x-font-ttf;charset=utf-8;base64,AAEAAAALAIAAAwAwT1MvMggjB5AAAAC8AAAAYGNtYXAPfOIKAAABHAAAAExnYXNwAAAAEAAAAWgAAAAIZ2x5Zryj6HgAAAFwAAAAyGhlYWT/0IhHAAACOAAAADZoaGVhApkB5wAAAnAAAAAkaG10eAJuABIAAAKUAAAAGGxvY2EAjABWAAACrAAAAA5tYXhwAAgAFgAAArwAAAAgbmFtZfC1n04AAALcAAABPHBvc3QAAwAAAAAEGAAAACAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAAAAAAAAAIAADc5AQAAAAABAAAAAAAAAAAAAgAANzkBAAAAAAEAAAAAAAAAAAACAAA3OQEAAAAAAQASAEkAtwFuABMAADc0PwE2FzYXFh0BFAcGJwYvASY1EgaABQgHBQYGBQcIBYAG2wcGfwcBAQcECf8IBAcBAQd/BgYAAAAAAQAAAEkApQFuABMAADcRNDc2MzIfARYVFA8BBiMiJyY1AAUGBwgFgAYGgAUIBwYFWwEACAUGBoAFCAcFgAYGBQcAAAABAAAAAQAAqWYls18PPPUACwIAAAAAAM/9o+4AAAAAz/2j7gAAAAAAtwFuAAAACAACAAAAAAAAAAEAAAHg/+AAAAIAAAAAAAC3AAEAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAQAAAAC3ABIAtwAAAAAAAAAKABQAHgBCAGQAAAABAAAABgAUAAEAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) 
format('truetype'),url(data:application/font-woff;charset=utf-8;base64,d09GRk9UVE8AAASwAAoAAAAABGgAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABDRkYgAAAA9AAAAS0AAAEtFpovuE9TLzIAAAIkAAAAYAAAAGAIIweQY21hcAAAAoQAAABMAAAATA984gpnYXNwAAAC0AAAAAgAAAAIAAAAEGhlYWQAAALYAAAANgAAADb/0IhHaGhlYQAAAxAAAAAkAAAAJAKZAedobXR4AAADNAAAABgAAAAYAm4AEm1heHAAAANMAAAABgAAAAYABlAAbmFtZQAAA1QAAAE8AAABPPC1n05wb3N0AAAEkAAAACAAAAAgAAMAAAEABAQAAQEBB3JhdGluZwABAgABADr4HAL4GwP4GAQeCgAZU/+Lix4KABlT/4uLDAeLa/iU+HQFHQAAAHkPHQAAAH4RHQAAAAkdAAABJBIABwEBBw0PERQZHnJhdGluZ3JhdGluZ3UwdTF1MjB1RjBEOXVGMERBAAACAYkABAAGAQEEBwoNVp38lA78lA78lA77lA773Z33bxWLkI2Qj44I9xT3FAWOj5CNkIuQi4+JjoePiI2Gi4YIi/uUBYuGiYeHiIiHh4mGi4aLho2Ijwj7FPcUBYeOiY+LkAgO+92L5hWL95QFi5CNkI6Oj4+PjZCLkIuQiY6HCPcU+xQFj4iNhouGi4aJh4eICPsU+xQFiIeGiYaLhouHjYePiI6Jj4uQCA74lBT4lBWLDAoAAAAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAEAADfYOJZfDzz1AAsCAAAAAADP/aPuAAAAAM/9o+4AAAAAALcBbgAAAAgAAgAAAAAAAAABAAAB4P/gAAACAAAAAAAAtwABAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAEAAAAAtwASALcAAAAAUAAABgAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('woff');font-weight:400;font-style:normal}.ui.accordion .accordion .title .dropdown.icon,.ui.accordion .title .dropdown.icon{font-family:Accordion;line-height:1;-webkit-backface-visibility:hidden;backface-visibility:hidden;font-weight:400;font-style:normal;text-align:center}.ui.accordion .accordion .title 
.dropdown.icon:before,.ui.accordion .title .dropdown.icon:before{content:'\f0da'}
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js:
--------------------------------------------------------------------------------
1 | !function(F,A,e,q){"use strict";A=void 0!==A&&A.Math==Math?A:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")(),F.fn.accordion=function(a){var v,s=F(this),b=(new Date).getTime(),y=[],C=a,O="string"==typeof C,x=[].slice.call(arguments,1);A.requestAnimationFrame||A.mozRequestAnimationFrame||A.webkitRequestAnimationFrame||A.msRequestAnimationFrame;return s.each(function(){var e,c,u=F.isPlainObject(a)?F.extend(!0,{},F.fn.accordion.settings,a):F.extend({},F.fn.accordion.settings),d=u.className,n=u.namespace,g=u.selector,l=u.error,t="."+n,i="module-"+n,o=s.selector||"",f=F(this),m=f.find(g.title),p=f.find(g.content),r=this,h=f.data(i);c={initialize:function(){c.debug("Initializing",f),c.bind.events(),u.observeChanges&&c.observeChanges(),c.instantiate()},instantiate:function(){h=c,f.data(i,c)},destroy:function(){c.debug("Destroying previous instance",f),f.off(t).removeData(i)},refresh:function(){m=f.find(g.title),p=f.find(g.content)},observeChanges:function(){"MutationObserver"in A&&((e=new MutationObserver(function(e){c.debug("DOM tree modified, updating selector cache"),c.refresh()})).observe(r,{childList:!0,subtree:!0}),c.debug("Setting up mutation observer",e))},bind:{events:function(){c.debug("Binding delegated events"),f.on(u.on+t,g.trigger,c.event.click)}},event:{click:function(){c.toggle.call(this)}},toggle:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating),o=t.hasClass(d.active),a=o&&!i,s=!o&&i;c.debug("Toggling visibility of content",n),a||s?u.collapsible?c.close.call(n):c.debug("Cannot close accordion content collapsing is disabled"):c.open.call(n)},open:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating);t.hasClass(d.active)||i?c.debug("Accordion already open, skipping",t):(c.debug("Opening accordion 
content",n),u.onOpening.call(t),u.onChanging.call(t),u.exclusive&&c.closeOthers.call(n),n.addClass(d.active),t.stop(!0,!0).addClass(d.animating),u.animateChildren&&(F.fn.transition!==q&&f.transition("is supported")?t.children().transition({animation:"fade in",queue:!1,useFailSafe:!0,debug:u.debug,verbose:u.verbose,duration:u.duration}):t.children().stop(!0,!0).animate({opacity:1},u.duration,c.resetOpacity)),t.slideDown(u.duration,u.easing,function(){t.removeClass(d.animating).addClass(d.active),c.reset.display.call(this),u.onOpen.call(this),u.onChange.call(this)}))},close:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating),o=t.hasClass(d.active);!o&&!(!o&&i)||o&&i||(c.debug("Closing accordion content",t),u.onClosing.call(t),u.onChanging.call(t),n.removeClass(d.active),t.stop(!0,!0).addClass(d.animating),u.animateChildren&&(F.fn.transition!==q&&f.transition("is supported")?t.children().transition({animation:"fade out",queue:!1,useFailSafe:!0,debug:u.debug,verbose:u.verbose,duration:u.duration}):t.children().stop(!0,!0).animate({opacity:0},u.duration,c.resetOpacity)),t.slideUp(u.duration,u.easing,function(){t.removeClass(d.animating).removeClass(d.active),c.reset.display.call(this),u.onClose.call(this),u.onChange.call(this)}))},closeOthers:function(e){var n,t,i,o=e!==q?m.eq(e):F(this).closest(g.title),a=o.parents(g.content).prev(g.title),s=o.closest(g.accordion),l=g.title+"."+d.active+":visible",r=g.content+"."+d.active+":visible";i=u.closeNested?(n=s.find(l).not(a)).next(p):(n=s.find(l).not(a),t=s.find(r).find(l).not(a),(n=n.not(t)).next(p)),0/input) cannot be found"""
55 |
56 |
class CredentialNotFound(TwineException):
    """Raised when a credential specified in the twine file is present in neither the environment nor a .env file."""
59 |
60 |
class TwineFileNotFound(TwineException, FileNotFoundError):
    """Raised when the specified twine file is not present. Also a FileNotFoundError, so it can be caught generically."""
63 |
64 |
class ConfigurationValuesFileNotFound(TwineException, FileNotFoundError):
    """Raised when attempting to read configuration values from a file that is missing."""
67 |
68 |
class ConfigurationManifestFileNotFound(TwineException, FileNotFoundError):
    """Raised when a configuration manifest file is required by a twine, but is not present in the input directory."""
71 |
72 |
class InputManifestFileNotFound(TwineException, FileNotFoundError):
    """Raised when an input manifest file is required by a twine, but is not present in the input directory."""
75 |
76 |
class InputValuesFileNotFound(TwineException, FileNotFoundError):
    """Raised when attempting to read input values from a file that is missing."""
79 |
80 |
class OutputManifestFileNotFound(TwineException, FileNotFoundError):
    """Raised when twined checks that an output manifest file has been produced, but it is not present in the output
    directory.
    """
83 |
84 |
class OutputValuesFileNotFound(TwineException, FileNotFoundError):
    """Raised when attempting to read output values from a file that is missing."""
87 |
88 |
89 | # --------------------- Exceptions relating to validation of JSON data (input, output, config values) ------------------
90 |
91 |
class InvalidSourceKindException(TwineException):
    """Raised when attempting to use the JSON loader for a disallowed kind."""
94 |
95 |
class InvalidValues(TwineException):
    """Raised when JSON data (like Config data, Input Values or Output Values) is invalid. Base for the more specific
    InvalidValuesJson and InvalidValuesContents.
    """
98 |
99 |
class InvalidValuesJson(InvalidValues):
    """Raised when the JSON in the file or string is broken so cannot be parsed."""
102 |
103 |
class InvalidValuesContents(InvalidValues, ValidationError):
    """Raised when the JSON in the file parses, but is not valid according to its matching schema."""
106 |
107 |
108 | # --------------------- Exceptions relating to validation of manifests ------------------------
109 |
110 |
class InvalidManifest(TwineException):
    """Raised when a manifest loaded from JSON does not pass validation. Base for the more specific manifest errors."""
113 |
114 |
class InvalidManifestJson(InvalidManifest):
    """Raised when the JSON in the manifest file is broken so cannot be parsed."""
117 |
118 |
class InvalidManifestType(InvalidManifest):
    """Raised when a user attempts to create a manifest of an invalid type."""
121 |
122 |
class InvalidManifestContents(InvalidManifest, ValidationError):
    """Raised when the manifest files are missing or do not match tags, sequences, clusters, extensions etc as
    required.
    """
125 |
126 |
127 | # --------------------- Exceptions relating to access of data using the Twine instance ------------------------
128 |
129 |
130 | # TODO This is related to filtering files from a manifest. Determine whether this belongs here,
131 | # or whether we should port the filtering code across from the SDK.
class UnexpectedNumberOfResults(TwineException):
    """Raised when searching for a single data file (or a particular number of data files) and the number of results
    exceeds that expected.
    """
134 |
135 |
136 | # --------------------- Maps allowing customised exceptions per-strand (simplifies code elsewhere) ------------------
137 |
138 |
# Maps each strand name to the exception raised when that strand's file cannot be found on disk.
file_not_found_map = {
    "twine": TwineFileNotFound,
    "configuration_values": ConfigurationValuesFileNotFound,
    "input_values": InputValuesFileNotFound,
    "output_values": OutputValuesFileNotFound,
    "configuration_manifest": ConfigurationManifestFileNotFound,
    "input_manifest": InputManifestFileNotFound,
    "output_manifest": OutputManifestFileNotFound,
}
148 |
149 | # TODO Specialised per-strand exceptions to help drill to the root of the issues
# Maps each strand name to the exception raised when that strand's JSON cannot be parsed.
invalid_json_map = {
    "twine": InvalidTwineJson,
    "children": InvalidValuesJson,
    "configuration_values": InvalidValuesJson,
    "input_values": InvalidValuesJson,
    "output_values": InvalidValuesJson,
    "monitor_message": InvalidValuesJson,
    "configuration_manifest": InvalidManifestJson,
    "input_manifest": InvalidManifestJson,
    "output_manifest": InvalidManifestJson,
}
161 |
162 | # TODO Specialised per-strand exceptions to help drill to the root of the issues
# Maps each strand name to the exception raised when the strand's parsed data fails validation against its schema.
invalid_contents_map = {
    "twine": InvalidTwineContents,
    "children": InvalidValuesContents,
    "configuration_values": InvalidValuesContents,
    "input_values": InvalidValuesContents,
    "output_values": InvalidValuesContents,
    "monitor_message": InvalidValuesContents,
    "configuration_manifest": InvalidManifestContents,
    "input_manifest": InvalidManifestContents,
    "output_manifest": InvalidManifestContents,
}
174 |
--------------------------------------------------------------------------------
/tests/test_twine.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from twined import Twine, exceptions
4 |
5 | from .base import BaseTestCase
6 |
7 |
class TestTwine(BaseTestCase):
    """Testing operation of the Twine class."""

    def test_init_twine_with_filename(self):
        """Ensure the Twine class can be instantiated from a file path."""
        twine_path = os.path.join(self.path, "apps", "simple_app", "twine.json")
        Twine(source=twine_path)

    def test_init_twine_with_json(self):
        """Ensure a twine can be instantiated from a JSON string."""
        twine_path = os.path.join(self.path, "apps", "simple_app", "twine.json")

        with open(twine_path, "r", encoding="utf-8") as f:
            twine_json = f.read()

        Twine(source=twine_json)

    def test_no_twine(self):
        """Test that the canonical-but-useless case of no twine provided validates empty."""
        Twine()

    def test_incorrect_version_twine(self):
        """Ensure an exception is thrown on a mismatch between the installed and specified versions of twined."""
        incorrect_version_twine = """{"twined_version": "0.0.0"}"""

        with self.assertRaises(exceptions.TwineVersionConflict):
            Twine(source=incorrect_version_twine)

    def test_empty_twine(self):
        """Ensure an empty twine file can be loaded, emitting the expected log records."""
        with self.assertLogs(level="DEBUG") as log:
            Twine(source="{}")

        self.assertEqual(len(log.output), 3)
        self.assertEqual(len(log.records), 3)
        self.assertIn("Detected source", log.output[0])
        self.assertIn("Validated", log.output[1])

    def test_example_twine(self):
        """Ensure the example (full) twine can be loaded and validated."""
        Twine(source=os.path.join(self.path, "apps", "example_app", "twine.json"))

    def test_simple_twine(self):
        """Ensure the simple app schema can be loaded and used to parse some basic config and values data."""
        Twine(source=os.path.join(self.path, "apps", "simple_app", "twine.json"))

    def test_broken_json_twine(self):
        """Ensure a twine whose JSON is invalid raises an InvalidTwineJson exception."""
        # Deliberately malformed: an object key appears directly inside a JSON array.
        invalid_json_twine = """
            {
                "children": [
                    "configuration_values_schema": {
                        "$schema": "https://json-schema.org/draft/2020-12/schema",
                        "title": "The example configuration form",
                        "description": "The configuration strand of an example twine",
                        "type": "object",
                        "properties": {
                        }
                    },
            }
        """

        with self.assertRaises(exceptions.InvalidTwineJson):
            Twine(source=invalid_json_twine)

    def test_available_strands_properties(self):
        """Test that the `available_strands` and `available_manifest_strands` properties work correctly."""
        # Named `twine_json` (not `twine`) so the string isn't shadowed by the Twine instance below.
        twine_json = """
            {
                "configuration_values_schema": {
                    "$schema": "https://json-schema.org/draft/2020-12/schema",
                    "title": "The example configuration form",
                    "description": "The configuration strand of an example twine",
                    "type": "object",
                    "properties": {
                        "n_iterations": {
                            "description": "An example of an integer configuration variable, called 'n_iterations'.",
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 10,
                            "default": 5
                        }
                    }
                },
                "input_values_schema": {
                    "$schema": "https://json-schema.org/draft/2020-12/schema",
                    "title": "Input Values",
                    "description": "The input values strand of an example twine, with a required height value",
                    "type": "object",
                    "properties": {
                        "height": {
                            "description": "An example of an integer value called 'height'",
                            "type": "integer",
                            "minimum": 2
                        }
                    },
                    "required": ["height"]
                },
                "output_values_schema": {
                    "title": "Output Values",
                    "description": "The output values strand of an example twine",
                    "type": "object",
                    "properties": {
                        "width": {
                            "description": "An example of an integer value called 'result'",
                            "type": "integer",
                            "minimum": 2
                        }
                    }
                },
                "output_manifest": {
                    "datasets": {
                        "my-dataset": {}
                    }
                }
            }
        """

        twine = Twine(source=twine_json)

        expected_strands = {"configuration_values", "input_values", "output_values", "output_manifest"}
        self.assertEqual(twine.available_strands, expected_strands)
        self.assertEqual(twine.available_manifest_strands, {"output_manifest"})

    def test_required_strands_property(self):
        """Test that the required strands property is correct."""
        twines = [
            {
                "configuration_values_schema": {},
                "input_values_schema": {},
                "output_values_schema": {},
                "output_manifest": {"datasets": {}},
            },
            {
                "configuration_values_schema": {"optional": True},
                "input_values_schema": {},
                "output_values_schema": {},
                "output_manifest": {"datasets": {}, "optional": True},
            },
            {
                "configuration_values_schema": {"optional": False},
                "input_values_schema": {},
                "output_values_schema": {},
                "output_manifest": {"datasets": {}, "optional": False},
            },
        ]

        expected_required_strands = [
            {"configuration_values", "input_values", "output_values", "output_manifest"},
            {"input_values", "output_values"},
            {"configuration_values", "input_values", "output_values", "output_manifest"},
        ]

        for twine_data, expected in zip(twines, expected_required_strands):
            with self.subTest(twine=twine_data):
                self.assertEqual(Twine(source=twine_data).required_strands, expected)
161 |
--------------------------------------------------------------------------------
/docs/source/examples.rst:
--------------------------------------------------------------------------------
1 | .. _examples:
2 |
3 | ========
4 | Examples
5 | ========
6 |
7 | Here, we look at example use cases for the library, and show how to use it in python.
8 |
It's also well worth looking at the unit test cases — the examples here are
copied straight from the unit test cases, so you can always check there to see how everything hooks up.
11 |
12 |
13 | .. _example_equipment_installation_cost:
14 |
15 | [Simple] Equipment installation cost
16 | ====================================
17 |
18 | .. tabs::
19 |
20 | .. group-tab:: Scenario
21 |
22 | You need to provide your team with an estimate for installation cost of an equipment foundation.
23 |
24 | It's a straightforward calculation for you, but the Logistics Team keeps changing the installation position, to
25 | try and optimise the overall project logistics.
26 |
27 | Each time the locations change, the GIS team gives you an updated embedment depth, which is what you use
28 | (along with steel cost and foundation type), to calculate cost and report it back.
29 |
This twine allows you to create a wrapper around your scripts that communicates to the GIS team what you
need as an input, and to the logistics team what they can expect as an output.
32 |
33 | When deployed as a digital twin, the calculation gets automatically updated, leaving you free to get on with
34 | all the other work!
35 |
36 | .. group-tab:: Twine
37 |
38 | We specify the ``steel_cost`` and ``foundation_type`` as ``configuration`` values, which you can set on startup of the twin.
39 |
40 | Once the twin is running, it requires the ``embedment_depth`` as an ``input_value`` from the GIS team. A member
41 | of the GIS team can use your twin to get ``foundation_cost`` directly.
42 |
43 | .. code-block:: javascript
44 |
45 | {
46 | "title": "Foundation Cost Model",
47 | "description": "This twine helps compute the cost of an installed foundation.",
48 | "children": [
49 | ],
50 | "configuration_values_schema": {
51 | "$schema": "https://json-schema.org/draft/2020-12/schema",
52 | "title": "Foundation cost twin configuration",
53 | "description": "Set config parameters and constants at startup of the twin.",
54 | "type": "object",
55 | "properties": {
56 | "steel_cost": {
57 | "description": "The cost of steel in GBP/m^3. To get a better predictive model, you could add an economic twin that forecasts the cost of steel using the project timetable.",
58 | "type": "number",
59 | "minimum": 0,
60 | "default": 3000
61 | },
62 | "foundation_type": {
63 | "description": "The type of foundation being used.",
64 | "type": "string",
65 | "pattern": "^(monopile|twisted-jacket)$",
66 | "default": "monopile"
67 | }
68 | }
69 | },
70 | "input_values_schema": {
71 | "$schema": "https://json-schema.org/draft/2020-12/schema",
72 | "title": "Input Values schema for the foundation cost twin",
73 | "description": "These values are supplied to the twin asynchronously over a web socket. So as these values change, the twin can reply with an update.",
74 | "type": "object",
75 | "properties": {
76 | "embedment_depth": {
77 | "description": "Embedment depth in metres",
78 | "type": "number",
79 | "minimum": 10,
80 | "maximum": 500
81 | }
82 | }
83 | },
84 | "output_manifest": {
85 | "datasets": []
86 | },
87 | "output_values_schema": {
88 | "title": "Output Values schema for the foundation cost twin",
89 | "description": "The response supplied to a change in input values will always conform to this schema.",
90 | "type": "object",
91 | "properties": {
92 | "foundation_cost": {
93 | "description": "The foundation cost.",
94 | "type": "integer",
95 | "minimum": 2
96 | }
97 | }
98 | }
99 | }
100 |
101 |
102 | .. _example_site_weather_conditions:
103 |
104 | [Simple] Site weather conditions
105 | ================================
106 |
107 | .. tabs::
108 |
109 | .. group-tab:: Scenario
110 |
111 | You need to be able to get characteristic weather conditions at a specific location, for a range of reasons
112 | including assessing extreme design loads. The values you need are computed in a script, which calls a Weather
113 | API (provided by a third party), but also needs a dataset of "Wind Resource" files.
114 |
115 | .. group-tab:: Twine
116 |
117 | .. code-block:: javascript
118 |
119 | {
120 | "title": "Weather Service Digital Twin",
121 | "description": "Provides a model for design extreme weather conditions given a location",
122 | "notes": "Easily extendable with children to add forecast and historical data of different types.",
123 | "credentials": [
124 | {
125 | "name": "WEATHER_API_SECRET_KEY",
126 | "purpose": "Token for accessing a 3rd party weather API service"
127 | }
128 | ],
129 | "input_manifest": {
130 | "datasets": [
131 | {
132 | "key": "wind_resource_data",
133 | "purpose": "A dataset containing Wind Resource Grid files"
134 | }
135 | ]
136 | },
137 | "input_values_schema": {
138 | "$schema": "https://json-schema.org/draft/2020-12/schema",
139 | "title": "Input Values for the weather service twin",
140 | "description": "This is a simple example for getting metocean conditions at a single location",
141 | "type": "object",
142 | "properties": {
143 | "location": {
144 | "description": "Location",
145 | "type": "object",
146 | "properties": {
147 | "latitude": {
148 | "type": "number",
149 | "minimum": -90,
150 | "maximum": 90
151 | },
152 | "longitude": {
153 | "type": "number",
154 | "minimum": -180,
155 | "maximum": 180
156 | },
157 | "srid": {
158 | "description": "The Spatial Reference System ID for the coordinate. Default is 4326 (WGS84)",
159 | "type": "integer",
160 | "default": 4326
161 | }
162 | }
163 | }
164 | }
165 | },
166 | "output_manifest": {
167 | "datasets": [
168 | {
169 | "key": "production_data",
170 | "purpose": "A dataset containing production data",
171 | "tags": {"cleaned": true},
172 | "labels": ["production", "wind"]
173 | }
174 | ]
175 | },
176 | "output_values_schema": {
177 | "$schema": "https://json-schema.org/draft/2020-12/schema",
178 | "title": "Output Values for the metocean service twin",
179 | "description": "The output values strand of an example twine",
180 | "type": "object",
181 | "properties": {
182 | "water_depth": {
183 | "description": "Design water depth for use in concept calculations",
184 | "type": "number"
185 | },
186 | "extreme_wind_speed": {
187 | "description": "Extreme wind speed value for use in concept calculations",
188 | "type": "number"
189 | }
190 | }
191 | }
192 | }
193 |
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css:
--------------------------------------------------------------------------------
1 | /*!
2 | * # Semantic UI 2.4.1 - Accordion
3 | * http://github.com/semantic-org/semantic-ui/
4 | *
5 | *
6 | * Released under the MIT license
7 | * http://opensource.org/licenses/MIT
8 | *
9 | */
10 |
11 |
12 | /*******************************
13 | Accordion
14 | *******************************/
15 |
16 | .ui.accordion,
17 | .ui.accordion .accordion {
18 | max-width: 100%;
19 | }
20 | .ui.accordion .accordion {
21 | margin: 1em 0em 0em;
22 | padding: 0em;
23 | }
24 |
25 | /* Title */
26 | .ui.accordion .title,
27 | .ui.accordion .accordion .title {
28 | cursor: pointer;
29 | }
30 |
31 | /* Default Styling */
32 | .ui.accordion .title:not(.ui) {
33 | padding: 0.5em 0em;
34 | font-family: 'Lato', 'Helvetica Neue', Arial, Helvetica, sans-serif;
35 | font-size: 1em;
36 | color: rgba(0, 0, 0, 0.87);
37 | }
38 |
39 | /* Content */
40 | .ui.accordion .title ~ .content,
41 | .ui.accordion .accordion .title ~ .content {
42 | display: none;
43 | }
44 |
45 | /* Default Styling */
46 | .ui.accordion:not(.styled) .title ~ .content:not(.ui),
47 | .ui.accordion:not(.styled) .accordion .title ~ .content:not(.ui) {
48 | margin: '';
49 | padding: 0.5em 0em 1em;
50 | }
51 | .ui.accordion:not(.styled) .title ~ .content:not(.ui):last-child {
52 | padding-bottom: 0em;
53 | }
54 |
55 | /* Arrow */
56 | .ui.accordion .title .dropdown.icon,
57 | .ui.accordion .accordion .title .dropdown.icon {
58 | display: inline-block;
59 | float: none;
60 | opacity: 1;
61 | width: 1.25em;
62 | height: 1em;
63 | margin: 0em 0.25rem 0em 0rem;
64 | padding: 0em;
65 | font-size: 1em;
66 | -webkit-transition: opacity 0.1s ease, -webkit-transform 0.1s ease;
67 | transition: opacity 0.1s ease, -webkit-transform 0.1s ease;
68 | transition: transform 0.1s ease, opacity 0.1s ease;
69 | transition: transform 0.1s ease, opacity 0.1s ease, -webkit-transform 0.1s ease;
70 | vertical-align: baseline;
71 | -webkit-transform: none;
72 | transform: none;
73 | }
74 |
75 | /*--------------
76 | Coupling
77 | ---------------*/
78 |
79 |
80 | /* Menu */
81 | .ui.accordion.menu .item .title {
82 | display: block;
83 | padding: 0em;
84 | }
85 | .ui.accordion.menu .item .title > .dropdown.icon {
86 | float: right;
87 | margin: 0.21425em 0em 0em 1em;
88 | -webkit-transform: rotate(180deg);
89 | transform: rotate(180deg);
90 | }
91 |
92 | /* Header */
93 | .ui.accordion .ui.header .dropdown.icon {
94 | font-size: 1em;
95 | margin: 0em 0.25rem 0em 0rem;
96 | }
97 |
98 |
99 | /*******************************
100 | States
101 | *******************************/
102 |
103 | .ui.accordion .active.title .dropdown.icon,
104 | .ui.accordion .accordion .active.title .dropdown.icon {
105 | -webkit-transform: rotate(90deg);
106 | transform: rotate(90deg);
107 | }
108 | .ui.accordion.menu .item .active.title > .dropdown.icon {
109 | -webkit-transform: rotate(90deg);
110 | transform: rotate(90deg);
111 | }
112 |
113 |
114 | /*******************************
115 | Types
116 | *******************************/
117 |
118 |
119 | /*--------------
120 | Styled
121 | ---------------*/
122 |
123 | .ui.styled.accordion {
124 | width: 600px;
125 | }
126 | .ui.styled.accordion,
127 | .ui.styled.accordion .accordion {
128 | border-radius: 0.28571429rem;
129 | background: #FFFFFF;
130 | -webkit-box-shadow: 0px 1px 2px 0 rgba(34, 36, 38, 0.15), 0px 0px 0px 1px rgba(34, 36, 38, 0.15);
131 | box-shadow: 0px 1px 2px 0 rgba(34, 36, 38, 0.15), 0px 0px 0px 1px rgba(34, 36, 38, 0.15);
132 | }
133 | .ui.styled.accordion .title,
134 | .ui.styled.accordion .accordion .title {
135 | margin: 0em;
136 | padding: 0.75em 1em;
137 | color: rgba(0, 0, 0, 0.4);
138 | font-weight: bold;
139 | border-top: 1px solid rgba(34, 36, 38, 0.15);
140 | -webkit-transition: background 0.1s ease, color 0.1s ease;
141 | transition: background 0.1s ease, color 0.1s ease;
142 | }
143 | .ui.styled.accordion > .title:first-child,
144 | .ui.styled.accordion .accordion .title:first-child {
145 | border-top: none;
146 | }
147 |
148 | /* Content */
149 | .ui.styled.accordion .content,
150 | .ui.styled.accordion .accordion .content {
151 | margin: 0em;
152 | padding: 0.5em 1em 1.5em;
153 | }
154 | .ui.styled.accordion .accordion .content {
155 | padding: 0em;
156 | padding: 0.5em 1em 1.5em;
157 | }
158 |
159 | /* Hover */
160 | .ui.styled.accordion .title:hover,
161 | .ui.styled.accordion .active.title,
162 | .ui.styled.accordion .accordion .title:hover,
163 | .ui.styled.accordion .accordion .active.title {
164 | background: transparent;
165 | color: rgba(0, 0, 0, 0.87);
166 | }
167 | .ui.styled.accordion .accordion .title:hover,
168 | .ui.styled.accordion .accordion .active.title {
169 | background: transparent;
170 | color: rgba(0, 0, 0, 0.87);
171 | }
172 |
173 | /* Active */
174 | .ui.styled.accordion .active.title {
175 | background: transparent;
176 | color: rgba(0, 0, 0, 0.95);
177 | }
178 | .ui.styled.accordion .accordion .active.title {
179 | background: transparent;
180 | color: rgba(0, 0, 0, 0.95);
181 | }
182 |
183 |
184 | /*******************************
185 | States
186 | *******************************/
187 |
188 |
189 | /*--------------
190 | Active
191 | ---------------*/
192 |
193 | .ui.accordion .active.content,
194 | .ui.accordion .accordion .active.content {
195 | display: block;
196 | }
197 |
198 |
199 | /*******************************
200 | Variations
201 | *******************************/
202 |
203 |
204 | /*--------------
205 | Fluid
206 | ---------------*/
207 |
208 | .ui.fluid.accordion,
209 | .ui.fluid.accordion .accordion {
210 | width: 100%;
211 | }
212 |
213 | /*--------------
214 | Inverted
215 | ---------------*/
216 |
217 | .ui.inverted.accordion .title:not(.ui) {
218 | color: rgba(255, 255, 255, 0.9);
219 | }
220 |
221 |
222 | /*******************************
223 | Theme Overrides
224 | *******************************/
225 |
226 | @font-face {
227 | font-family: 'Accordion';
228 | src: url(data:application/x-font-ttf;charset=utf-8;base64,AAEAAAALAIAAAwAwT1MvMggjB5AAAAC8AAAAYGNtYXAPfOIKAAABHAAAAExnYXNwAAAAEAAAAWgAAAAIZ2x5Zryj6HgAAAFwAAAAyGhlYWT/0IhHAAACOAAAADZoaGVhApkB5wAAAnAAAAAkaG10eAJuABIAAAKUAAAAGGxvY2EAjABWAAACrAAAAA5tYXhwAAgAFgAAArwAAAAgbmFtZfC1n04AAALcAAABPHBvc3QAAwAAAAAEGAAAACAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAAAAAAAAAIAADc5AQAAAAABAAAAAAAAAAAAAgAANzkBAAAAAAEAAAAAAAAAAAACAAA3OQEAAAAAAQASAEkAtwFuABMAADc0PwE2FzYXFh0BFAcGJwYvASY1EgaABQgHBQYGBQcIBYAG2wcGfwcBAQcECf8IBAcBAQd/BgYAAAAAAQAAAEkApQFuABMAADcRNDc2MzIfARYVFA8BBiMiJyY1AAUGBwgFgAYGgAUIBwYFWwEACAUGBoAFCAcFgAYGBQcAAAABAAAAAQAAqWYls18PPPUACwIAAAAAAM/9o+4AAAAAz/2j7gAAAAAAtwFuAAAACAACAAAAAAAAAAEAAAHg/+AAAAIAAAAAAAC3AAEAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAQAAAAC3ABIAtwAAAAAAAAAKABQAHgBCAGQAAAABAAAABgAUAAEAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('truetype'), 
url(data:application/font-woff;charset=utf-8;base64,d09GRk9UVE8AAASwAAoAAAAABGgAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABDRkYgAAAA9AAAAS0AAAEtFpovuE9TLzIAAAIkAAAAYAAAAGAIIweQY21hcAAAAoQAAABMAAAATA984gpnYXNwAAAC0AAAAAgAAAAIAAAAEGhlYWQAAALYAAAANgAAADb/0IhHaGhlYQAAAxAAAAAkAAAAJAKZAedobXR4AAADNAAAABgAAAAYAm4AEm1heHAAAANMAAAABgAAAAYABlAAbmFtZQAAA1QAAAE8AAABPPC1n05wb3N0AAAEkAAAACAAAAAgAAMAAAEABAQAAQEBB3JhdGluZwABAgABADr4HAL4GwP4GAQeCgAZU/+Lix4KABlT/4uLDAeLa/iU+HQFHQAAAHkPHQAAAH4RHQAAAAkdAAABJBIABwEBBw0PERQZHnJhdGluZ3JhdGluZ3UwdTF1MjB1RjBEOXVGMERBAAACAYkABAAGAQEEBwoNVp38lA78lA78lA77lA773Z33bxWLkI2Qj44I9xT3FAWOj5CNkIuQi4+JjoePiI2Gi4YIi/uUBYuGiYeHiIiHh4mGi4aLho2Ijwj7FPcUBYeOiY+LkAgO+92L5hWL95QFi5CNkI6Oj4+PjZCLkIuQiY6HCPcU+xQFj4iNhouGi4aJh4eICPsU+xQFiIeGiYaLhouHjYePiI6Jj4uQCA74lBT4lBWLDAoAAAAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAEAADfYOJZfDzz1AAsCAAAAAADP/aPuAAAAAM/9o+4AAAAAALcBbgAAAAgAAgAAAAAAAAABAAAB4P/gAAACAAAAAAAAtwABAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAEAAAAAtwASALcAAAAAUAAABgAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('woff');
229 | font-weight: normal;
230 | font-style: normal;
231 | }
232 |
233 | /* Dropdown Icon */
234 | .ui.accordion .title .dropdown.icon,
235 | .ui.accordion .accordion .title .dropdown.icon {
236 | font-family: Accordion;
237 | line-height: 1;
238 | -webkit-backface-visibility: hidden;
239 | backface-visibility: hidden;
240 | font-weight: normal;
241 | font-style: normal;
242 | text-align: center;
243 | }
244 | .ui.accordion .title .dropdown.icon:before,
245 | .ui.accordion .accordion .title .dropdown.icon:before {
246 | content: '\f0da' /*rtl:'\f0d9'*/;
247 | }
248 |
249 |
250 | /*******************************
251 | User Overrides
252 | *******************************/
253 |
254 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Documentation build configuration file
4 | #
5 | # This file is execfile()d with the current directory set to its containing dir.
6 | #
7 | # Note that not all possible configuration values are present in this
8 | # autogenerated file.
9 | #
10 | # All configuration values have a default; values that are commented out
11 | # serve to show the default.
12 |
13 | from distutils.util import strtobool
14 | import os
15 | import sys
16 |
17 | import sphinx_rtd_theme
18 |
19 |
def str2bool(value):
    """Parse a boolean-like environment variable string ('True', 'false', '1', 'off', ...) to a bool.

    Reimplemented inline so the build no longer relies on
    ``distutils.util.strtobool`` (distutils is deprecated and removed in
    Python 3.12). Accepts the same truthy/falsy tokens as ``strtobool``,
    case-insensitively, and additionally tolerates surrounding whitespace.

    :param value: the string to interpret
    :return bool: True for y/yes/t/true/on/1, False for n/no/f/false/off/0
    :raise ValueError: if the value is not a recognised truth value
    """
    normalised = str(value).strip().lower()
    if normalised in ("y", "yes", "t", "true", "on", "1"):
        return True
    if normalised in ("n", "no", "f", "false", "off", "0"):
        return False
    raise ValueError(f"Invalid truth value {value!r}")
23 |
24 |
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath("./_ext"))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# NOTE(review): an earlier comment here said Breathe and Exhale were included
# (per https://exhale.readthedocs.io/en/latest/usage.html#usage-quickstart-guide),
# but neither appears in this list — the comment was stale.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
    "sphinx_tabs.tabs",
    "sphinx.ext.mathjax",
    "sphinx.ext.ifconfig",
    "sphinx_charts.charts",
    "googleanalytics",
    "sphinx_accordion.accordion",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "twined"
copyright = "2013-2024 Octue Ltd"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.

# The full version, including alpha/beta/rc tags.
# Injected by CI via the RELEASE_TAG environment variable; falls back to a
# placeholder for local builds.
release = os.getenv("RELEASE_TAG", "x.y.unknown")

# The short X.Y version.
version = ".".join(release.split(".")[0:2])

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# -- Google Analytics Configuration --------------------------------------------

# Only add google analytics when building on ReadTheDocs,
# to avoid clicks from development pages adding to analytics
# NOTE(review): this is a Universal Analytics ("UA-") property id; UA was
# sunset by Google in 2023 — confirm whether a GA4 id should replace it.
googleanalytics_id = "UA-43965341-6"
googleanalytics_enabled = True

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ["_themes",]
# NOTE(review): recent sphinx_rtd_theme versions register themselves as a
# Sphinx extension, making this path setting unnecessary — verify before removing.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
html_title = "twined: for Digital Twins"

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "favicon.ico"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
html_domain_indices = True

# If false, no index is generated.
html_use_index = True

# If true, the index is split into individual pages for each letter.
html_split_index = False

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = "twinedDoc"

# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ("index", "twined.tex", "Twined", "Octue Ltd", "manual"),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "twined", "Twined", ["Octue Ltd"], 1)]

# If true, show URL addresses after external links.
# man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (
        "index",
        "twined",
        "Twined",
        "Octue Ltd",
        "Twined",
        "Twined is a library to help digital twins talk to one another.",
        "Miscellaneous",
    ),
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
--------------------------------------------------------------------------------
/docs/source/_ext/sphinx_accordion/accordion.py:
--------------------------------------------------------------------------------
1 | """Accordion dropdown for Sphinx, with HTML builder"""
2 |
3 | import json
4 | import os
5 | import posixpath
6 |
7 | from docutils import nodes
8 | from docutils.parsers.rst import Directive
9 | from pkg_resources import resource_filename
10 | from pygments.lexers import get_all_lexers
11 | from sphinx.util import logging
12 | from sphinx.util.osutil import copyfile
13 |
# Static assets shipped with the extension; copied into the build's
# _static/sphinx_accordion directory and registered with every page.
FILES = [
    "semantic-ui-2.4.2/accordion.css",
    "semantic-ui-2.4.2/accordion.js",
    "accordion.css",
    "accordion.js",
]


# Map every Pygments short name (alias) to its canonical lexer name.
LEXER_MAP = {alias: name for name, aliases, *_rest in get_all_lexers() for alias in aliases}
26 |
27 |
def get_compatible_builders(app):
    """Return the names of builders this extension can emit accordion assets for.

    The standard HTML-producing builders are always included; users can extend
    the list through the ``sphinx_accordion_valid_builders`` config value
    (registered in :func:`setup`).

    :param app: the Sphinx application object
    :return list: builder names compatible with the accordion assets
    """
    builders = [
        "html",
        "singlehtml",
        "dirhtml",
        "readthedocs",
        "readthedocsdirhtml",
        "readthedocssinglehtml",
        "readthedocssinglehtmllocalmedia",
        "spelling",
    ]
    # BUGFIX: previously read "sphinx_tabs_valid_builders" (copied from the
    # sphinx-tabs extension). That key is not registered by this extension's
    # setup(), so the lookup raised KeyError unless sphinx-tabs happened to
    # also register its own value. Use the key registered in setup() instead.
    builders.extend(app.config["sphinx_accordion_valid_builders"])
    return builders
41 |
42 |
class AccordionDirective(Directive):
    """Top-level accordion directive.

    Parses its content (expected to be a sequence of ``accordion-row``
    directives), then interleaves a generated clickable title node before each
    row's content node so the Semantic UI accordion CSS/JS can toggle them.
    Communicates with the nested row directives through ``env.temp_data``
    under a per-accordion key.
    """

    has_content = True

    def run(self):
        """Parse an accordion directive"""
        self.assert_has_content()
        env = self.state.document.settings.env

        node = nodes.container()
        node["classes"] = ["sphinx-accordion", "ui", "styled", "fluid", "accordion"]

        # Lazily initialise the document-wide bookkeeping. The stack supports
        # nested accordions: rows always register with the innermost one.
        if "next_accordion_id" not in env.temp_data:
            env.temp_data["next_accordion_id"] = 0
        if "accordion_stack" not in env.temp_data:
            env.temp_data["accordion_stack"] = []

        accordion_id = env.temp_data["next_accordion_id"]
        accordion_key = "accordion_%d" % accordion_id
        env.temp_data["next_accordion_id"] += 1
        env.temp_data["accordion_stack"].append(accordion_id)

        # Per-accordion state, filled in by the nested AccordionRowDirectives
        # during the nested_parse below.
        env.temp_data[accordion_key] = {}
        env.temp_data[accordion_key]["row_ids"] = []
        env.temp_data[accordion_key]["row_titles"] = []
        env.temp_data[accordion_key]["is_first_row"] = True

        self.state.nested_parse(self.content, self.content_offset, node)

        if env.app.builder.name in get_compatible_builders(env.app):
            # Build one title node per row, then interleave titles with the
            # row content nodes: [title0, row0, title1, row1, ...].
            title_nodes = []
            row_ids = env.temp_data[accordion_key]["row_ids"]
            row_titles = env.temp_data[accordion_key]["row_titles"]
            for idx, [data_row, row_name] in enumerate(row_titles):
                title_node = nodes.container()
                title_node.tagname = "div"
                title_node["classes"] = ["sphinx-accordion", "title"]
                title_node["classes"].append(f"sphinx-accordion-title-{accordion_id}-{row_ids[idx]}")
                title_node += row_name.children
                # Prepend the Semantic UI dropdown arrow icon to the title.
                icon_node = nodes.inline()
                icon_node.tagname = "i"
                icon_node["classes"] = ["dropdown", "icon"]
                # Access the first child, we don't want the container that somehow gets generated
                title_node.children.insert(0, icon_node)
                title_nodes.append(title_node)

            node.children = [child for pair in zip(title_nodes, node.children) for child in pair]

        env.temp_data["accordion_stack"].pop()
        return [node]
94 |
95 |
class AccordionRowDirective(Directive):
    """AccordionRow directive, for adding a row to an accordion.

    Must appear inside an ``accordion`` directive: it reads the innermost
    accordion id from ``env.temp_data["accordion_stack"]`` and registers its
    title and row id there so the parent can build the toggle headers.
    The first content line is the row title; an optional leading JSON object
    line may supply options (e.g. an explicit ``row_id`` or extra ``classes``).
    """

    has_content = True

    def run(self):
        """Parse a row directive"""
        self.assert_has_content()
        env = self.state.document.settings.env

        # Innermost enclosing accordion; KeyError/IndexError here means the
        # row was used outside an accordion directive.
        accordion_id = env.temp_data["accordion_stack"][-1]
        accordion_key = "accordion_%d" % accordion_id

        # A first line starting with "{" is treated as a JSON options object
        # and consumed from the content; unparsable JSON falls back to no options.
        args = self.content[0].strip()
        if args.startswith("{"):
            try:
                args = json.loads(args)
                self.content.trim_start(1)
            except ValueError:
                args = {}
        else:
            args = {}

        # The (remaining) first content line is parsed as the row title.
        row_name = nodes.container()
        self.state.nested_parse(self.content[:1], self.content_offset, row_name)
        args["row_name"] = row_name

        # Auto-generated row ids get the accordion id prefixed later; either
        # way, ids are de-duplicated with a numeric suffix.
        include_accordion_id_in_data_row = False
        if "row_id" not in args:
            args["row_id"] = env.new_serialno(accordion_key)
            include_accordion_id_in_data_row = True
        i = 1
        while args["row_id"] in env.temp_data[accordion_key]["row_ids"]:
            args["row_id"] = "%s-%d" % (args["row_id"], i)
            i += 1
        env.temp_data[accordion_key]["row_ids"].append(args["row_id"])

        data_row = str(args["row_id"])
        if include_accordion_id_in_data_row:
            data_row = "%d-%s" % (accordion_id, data_row)
        data_row = "sphinx-accordion-content-{}".format(data_row)

        # Register this row's title with the parent accordion so it can build
        # the clickable header nodes.
        env.temp_data[accordion_key]["row_titles"].append((data_row, args["row_name"]))

        text = "\n".join(self.content)
        node = nodes.container(text)
        classes = "sphinx-accordion content"
        node["classes"] = classes.split(" ")
        node["classes"].extend(args.get("classes", []))
        node["classes"].append(data_row)

        # Skip the title line and the expected blank separator line.
        self.state.nested_parse(self.content[2:], self.content_offset, node)

        # Builders without the accordion JS/CSS get a plain linear fallback
        # layout: title anchor followed by the content.
        if env.app.builder.name not in get_compatible_builders(env.app):
            outer_node = nodes.container()
            row = nodes.container()
            row.tagname = "a"
            row["classes"] = ["item"]
            row += row_name
            outer_node.append(row)
            outer_node.append(node)
            return [outer_node]

        return [node]
160 |
161 |
class _FindAccordionDirectiveVisitor(nodes.NodeVisitor):
    """Docutils visitor that detects whether a document uses the accordion directive.

    Walk a doctree with this visitor, then check
    :attr:`found_accordion_directive` to see whether any container carried the
    ``sphinx-accordion`` class.
    """

    def __init__(self, document):
        nodes.NodeVisitor.__init__(self, document)
        self._found = False

    def unknown_visit(self, node):
        # Once a match is found there is nothing left to look for.
        if self._found:
            return
        looks_like_directive_container = (
            isinstance(node, nodes.container)
            and "classes" in node
            and isinstance(node["classes"], list)
        )
        if looks_like_directive_container:
            self._found = "sphinx-accordion" in node["classes"]

    @property
    def found_accordion_directive(self):
        """Return whether a sphinx accordion directive was found"""
        return self._found
182 |
183 |
def update_context(app, pagename, templatename, context, doctree):
    """Strip the accordion CSS/JS assets from pages that contain no accordion directive."""
    if doctree is None:
        return

    finder = _FindAccordionDirectiveVisitor(doctree)
    doctree.walk(finder)
    if finder.found_accordion_directive:
        return

    asset_paths = [posixpath.join("_static", "sphinx_accordion/" + name) for name in FILES]
    # Copy each asset list before mutating it, as the context may be shared.
    for context_key, suffix in (("css_files", ".css"), ("script_files", ".js")):
        if context_key not in context:
            continue
        context[context_key] = context[context_key][:]
        for asset in asset_paths:
            if asset.endswith(suffix) and asset in context[context_key]:
                context[context_key].remove(asset)
202 |
203 |
def copy_assets(app, exception):
    """Copy the accordion static asset files into the build output's _static directory."""
    # Older Sphinx versions exposed logging helpers on the app object instead
    # of sphinx.util.logging; support both.
    if "getLogger" in dir(logging):
        _logger = logging.getLogger(__name__)
        log, warn = _logger.info, _logger.warning
    else:
        log, warn = app.info, app.warning

    builders = get_compatible_builders(app)
    if exception:
        # The build failed; nothing to copy.
        return
    if app.builder.name not in builders:
        if not app.config["sphinx_accordion_nowarn"]:
            warn("Not copying accordion assets! Not compatible with %s builder" % app.builder.name)
        return

    log("Copying accordion assets")

    install_root = os.path.join(app.builder.outdir, "_static", "sphinx_accordion")

    for relative_path in FILES:
        source = resource_filename("sphinx_accordion", relative_path)
        destination = os.path.join(install_root, relative_path)
        destination_dir = os.path.dirname(destination)
        if not os.path.exists(destination_dir):
            os.makedirs(destination_dir)

        copyfile(source, destination)
232 |
233 |
def setup(app):
    """Set up the plugin: config values, directives, static assets, and event hooks.

    :param app: the Sphinx application object
    :return dict: extension metadata marking parallel read/write as safe
    """
    app.add_config_value("sphinx_accordion_nowarn", False, "")
    app.add_config_value("sphinx_accordion_valid_builders", [], "")
    app.add_directive("accordion", AccordionDirective)
    app.add_directive("accordion-row", AccordionRowDirective)

    for path in ["sphinx_accordion/" + f for f in FILES]:
        if path.endswith(".css"):
            # Sphinx >= 1.8 renamed add_stylesheet to add_css_file.
            if "add_css_file" in dir(app):
                app.add_css_file(path)
            else:
                app.add_stylesheet(path)
        if path.endswith(".js"):
            # BUGFIX: this branch previously probed for a nonexistent
            # "add_script_file" API, so the check was always False and the
            # fallback called add_js_file — which does not exist on
            # Sphinx < 1.8. Mirror the CSS branch: prefer the modern
            # add_js_file, fall back to the legacy add_javascript.
            if "add_js_file" in dir(app):
                app.add_js_file(path)
            else:
                app.add_javascript(path)

    app.connect("html-page-context", update_context)
    app.connect("build-finished", copy_assets)

    return {
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
260 |
--------------------------------------------------------------------------------
/tests/test_manifest_strands.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from twined import Twine, exceptions
4 |
5 | from .base import BaseTestCase
6 |
7 |
8 | class TestManifestStrands(BaseTestCase):
9 | """Testing operation of the Twine class for validation of data using strands which require manifests"""
10 |
    # Twine definition declaring one or more datasets in each of the three
    # manifest strands (configuration, input, output); shared by the tests below.
    VALID_MANIFEST_STRAND = """
        {
            "configuration_manifest": {
                "datasets": {
                    "configuration_files_data": {
                        "purpose": "A dataset containing files used in configuration"
                    }
                }
            },
            "input_manifest": {
                "datasets": {
                    "met_mast_data": {
                        "purpose": "A dataset containing meteorological mast data"
                    },
                    "scada_data": {
                        "purpose": "A dataset containing scada data"
                    }
                }
            },
            "output_manifest": {
                "datasets": {
                    "output_files_data": {
                        "purpose": "A dataset containing output results"
                    }
                }
            }
        }
    """
39 |
40 | def test_missing_manifest_files(self):
41 | """Ensures that if you try to read values from missing files, the right exceptions get raised"""
42 | twine = Twine(source=self.VALID_MANIFEST_STRAND)
43 | file = os.path.join(self.path, "not_a_file.json")
44 |
45 | with self.assertRaises(exceptions.ConfigurationManifestFileNotFound):
46 | twine.validate_configuration_manifest(source=file)
47 |
48 | with self.assertRaises(exceptions.InputManifestFileNotFound):
49 | twine.validate_input_manifest(source=file)
50 |
51 | with self.assertRaises(exceptions.OutputManifestFileNotFound):
52 | twine.validate_output_manifest(source=file)
53 |
54 | def test_error_raised_if_datasets_are_missing_from_manifest(self):
55 | """Test that an error is raised if a dataset is missing from a manifest."""
56 | twine = """
57 | {
58 | "input_manifest": {
59 | "datasets": {
60 | "cat": {
61 | "purpose": "blah"
62 | },
63 | "dog": {
64 | "purpose": "blah"
65 | }
66 | }
67 | }
68 | }
69 | """
70 |
71 | input_manifest = {
72 | "id": "30d2c75c-a7b9-4f16-8627-9c8d5cc04bf4",
73 | "datasets": {"my-dataset": "gs://my-bucket/my_dataset", "dog": "gs://dog-house/dog"},
74 | }
75 |
76 | twine = Twine(source=twine)
77 |
78 | with self.assertRaises(exceptions.InvalidManifestContents) as context:
79 | twine.validate_input_manifest(source=input_manifest)
80 |
81 | self.assertEqual(
82 | context.exception.message,
83 | "A dataset named 'cat' is expected in the input_manifest but is missing.",
84 | )
85 |
86 | def test_missing_optional_datasets_do_not_raise_error(self):
87 | """Test that optional datasets specified in the twine missing from the manifest don't raise an error."""
88 | twine = """
89 | {
90 | "input_manifest": {
91 | "datasets": {
92 | "cat": {
93 | "purpose": "blah",
94 | "optional": true
95 | },
96 | "dog": {
97 | "purpose": "blah"
98 | }
99 | }
100 | }
101 | }
102 | """
103 |
104 | input_manifest = {
105 | "id": "30d2c75c-a7b9-4f16-8627-9c8d5cc04bf4",
106 | "datasets": {"dog": "gs://dog-house/dog"},
107 | }
108 |
109 | Twine(source=twine).validate_input_manifest(source=input_manifest)
110 |
    def test_valid_manifest_files(self):
        """Ensures that a manifest file will validate."""
        # Configuration manifest: one dataset with two fully-described files.
        valid_configuration_manifest = """
            {
                "id": "3ead7669-8162-4f64-8cd5-4abe92509e17",
                "datasets": {
                    "configuration_files_data": {
                        "id": "34ad7669-8162-4f64-8cd5-4abe92509e17",
                        "name": "configuration_files_data",
                        "tags": {},
                        "labels": ["the", "config", "labels"],
                        "files": [
                            {
                                "path": "configuration/datasets/7ead7669/file_1.csv",
                                "tags": {},
                                "labels": [],
                                "timestamp": 0,
                                "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86"
                            },
                            {
                                "path": "configuration/datasets/7ead7669/file_2.csv",
                                "tags": {},
                                "labels": [],
                                "timestamp": 0,
                                "id": "bbff07bc-7c19-4ed5-be6d-a6546eae8e45"
                            }
                        ]
                    }
                }
            }
        """

        # Input manifest: one fully-described dataset plus one given only as a
        # cloud-path string ("scada_data"), which must also be accepted.
        valid_input_manifest = """
            {
                "id": "8ead7669-8162-4f64-8cd5-4abe92509e17",
                "datasets": {
                    "met_mast_data": {
                        "id": "7ead7669-8162-4f64-8cd5-4abe92509e17",
                        "name": "met_mast_data",
                        "tags": {},
                        "labels": ["met", "mast", "wind"],
                        "files": [
                            {
                                "path": "input/datasets/7ead7669/file_1.csv",
                                "tags": {},
                                "labels": [],
                                "timestamp": 0,
                                "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86"
                            },
                            {
                                "path": "input/datasets/7ead7669/file_2.csv",
                                "tags": {},
                                "labels": [],
                                "timestamp": 0,
                                "id": "bbff07bc-7c19-4ed5-be6d-a6546eae8e45"
                            }
                        ]
                    },
                    "scada_data": "gs://my-bucket/scada-data"
                }
            }
        """

        # Output manifest: one dataset matching the twine's output strand.
        valid_output_manifest = """
            {
                "id": "2ead7669-8162-4f64-8cd5-4abe92509e17",
                "datasets": {
                    "output_files_data": {
                        "id": "1ead7669-8162-4f64-8cd5-4abe92509e17",
                        "name": "output_files_data",
                        "tags": {},
                        "labels": ["the", "output", "labels"],
                        "files": [
                            {
                                "path": "input/datasets/7ead7669/file_1.csv",
                                "tags": {},
                                "labels": [],
                                "timestamp": 0,
                                "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86"
                            },
                            {
                                "path": "input/datasets/7ead7669/file_2.csv",
                                "tags": {},
                                "labels": [],
                                "timestamp": 0,
                                "id": "bbff07bc-7c19-4ed5-be6d-a6546eae8e45"
                            }
                        ]
                    }
                }
            }
        """

        # All three manifests should validate against the shared twine without raising.
        twine = Twine(source=self.VALID_MANIFEST_STRAND)
        twine.validate_configuration_manifest(source=valid_configuration_manifest)
        twine.validate_input_manifest(source=valid_input_manifest)
        twine.validate_output_manifest(source=valid_output_manifest)
208 |
209 | # def test_empty_values(self):
210 | # """ Ensures that appropriate errors are generated for invalid values
211 | # """
212 | # twine_file = VALID_SCHEMA_TWINE
213 | # twine = Twine(file=twine_file)
214 | # values_file = os.path.join(self.path, "configurations", "empty.json")
215 | # with self.assertRaises(exceptions.InvalidValuesJson):
216 | # twine.validate_configuration(file=values_file)
217 | #
218 | # def test_incorrect_values(self):
219 | # """ Ensures that appropriate errors are generated for invalid values
220 | # """
221 | # twine_file = VALID_SCHEMA_TWINE
222 | # twine = Twine(file=twine_file)
223 | # values_file = os.path.join(self.path, "configurations", "incorrect.json")
224 | # with self.assertRaises(exceptions.InvalidValuesContents):
225 | # twine.validate_configuration(file=values_file)
226 | #
227 | # def test_missing_not_required_values(self):
228 | # """ Ensures that appropriate errors are generated for missing values
229 | # """
230 | # twine_file = VALID_SCHEMA_TWINE
231 | # twine = Twine(file=twine_file)
232 | # values_file = os.path.join(self.path, "outputs", "missing_not_required.json")
233 | # twine.validate_output_values(file=values_file)
234 | #
235 | # def test_missing_required_values(self):
236 | # """ Ensures that appropriate errors are generated for missing values
237 | # """
238 | # twine_file = VALID_SCHEMA_TWINE
239 | # twine = Twine(file=twine_file)
240 | # values_file = os.path.join(self.path, "inputs", "missing_required.json")
241 | # with self.assertRaises(exceptions.InvalidValuesContents):
242 | # twine.validate_input_values(file=values_file)
243 | #
244 | # def test_valid_values_files(self):
245 | # """ Ensures that values can be read and validated correctly from files on disk
246 | # """
247 | # twine_file = VALID_SCHEMA_TWINE
248 | # twine = Twine(file=twine_file)
249 | # twine.validate_configuration(file=os.path.join(self.path, "configurations", "valid.json"))
250 | # twine.validate_input_values(file=os.path.join(self.path, "inputs", "valid.json"))
251 | # twine.validate_output_values(file=os.path.join(self.path, "outputs", "valid.json"))
252 | #
253 | # def test_valid_values_json(self):
254 | # """ Ensures that values can be read and validated correctly from a json string
255 | # """
256 | # twine_file = VALID_SCHEMA_TWINE
257 | # twine = Twine(file=twine_file)
258 | # values_file = os.path.join(self.path, "configurations", "valid.json")
259 | # with open(values_file, "r", encoding="utf-8") as f:
260 | # json_string = f.read()
261 | # twine.validate_configuration(json=json_string)
262 | #
263 | # def test_valid_with_extra_values(self):
264 | # """ Ensures that extra values get ignored
265 | # """
266 | # twine_file = VALID_SCHEMA_TWINE
267 | # twine = Twine(file=twine_file)
268 | # values_file = os.path.join(self.path, "configurations", "valid_with_extra.json")
269 | # twine.validate_configuration(file=values_file)
270 |
271 | def test_error_raised_if_multiple_datasets_have_same_name(self):
272 | """Test that an error is raised if the input manifest has more than one dataset with the same name."""
273 | twine = """
274 | {
275 | "input_manifest": {
276 | "datasets": {
277 | "met_mast_data": {
278 | "purpose": "A dataset containing meteorological mast data"
279 | }
280 | }
281 | }
282 | }
283 | """
284 |
285 | input_manifest = """
286 | {
287 | "id": "8ead7669-8162-4f64-8cd5-4abe92509e17",
288 | "datasets": {
289 | "met_mast_data": {
290 | "id": "7ead7669-8162-4f64-8cd5-4abe92509e19",
291 | "name": "met_mast_data",
292 | "tags": {},
293 | "labels": [],
294 | "files": []
295 | },
296 | "met_mast_data": {
297 | "id": "7ead7669-8162-4f64-8cd5-4abe92509e18",
298 | "name": "met_mast_data",
299 | "tags": {},
300 | "labels": [],
301 | "files": []
302 | }
303 | }
304 | }
305 | """
306 |
307 | twine = Twine(source=twine)
308 |
309 | with self.assertRaises(KeyError):
310 | twine.validate_input_manifest(source=input_manifest)
311 |
312 | def test_missing_optional_manifest_does_not_raise_error(self):
313 | """Test that not providing an optional strand doesn't result in a validation error."""
314 | twine = Twine(source={"output_manifest": {"datasets": {}, "optional": True}})
315 | twine.validate(output_manifest=None)
316 |
--------------------------------------------------------------------------------
/docs/source/images/digital_twin_hierarchy.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------