├── tests
│   ├── __init__.py
│   ├── import_opsa_test.py
│   ├── test_package.py
│   └── mapping_bowtie_test.py
├── bowtie
│   ├── io
│   │   ├── __init__.py
│   │   ├── utilities.py
│   │   └── import_opsa.py
│   ├── mapping
│   │   ├── __init__.py
│   │   └── mapping_bowtie.py
│   ├── dataresources
│   │   ├── __init__.py
│   │   ├── type_definitions.py
│   │   └── opsa.py
│   └── __init__.py
├── requirements.txt
├── docs
│   ├── source
│   │   ├── bowtie.rst
│   │   ├── bowtie.mapping.rst
│   │   ├── bowtie.io.rst
│   │   └── bowtie.dataresources.rst
│   ├── index.rst
│   ├── Makefile
│   ├── make.bat
│   └── conf.py
├── setup.py
├── README.md
├── data
│   ├── test_et_v03_fails.xml
│   ├── test_et_v03_works.xml
│   ├── test_et_v03_true.xml
│   ├── test_et_v03.xml
│   ├── test_et_v01.xml
│   ├── bearfield_et_al_eliminating_consequences_arcs_v02.xml
│   ├── bearfield_et_al_eliminating_consequences_arcs_v03.xml
│   ├── bearfield_et_al_eliminating_causal_arcs_v02.xml
│   ├── bearfield_et_al_eliminating_causal_arcs_v03.xml
│   ├── test_et_v02.xml
│   ├── khakzad_dynamic_v03.xml
│   ├── Zarei_regulator_system_v01.xml
│   └── Li_submarine_pipeline_v04.xml
├── .gitignore
└── LICENSE
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bowtie/io/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bowtie/mapping/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bowtie/dataresources/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bowtie/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "v0.1.0.dev"
2 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | networkx
2 | numpy
3 | scipy
4 | pandas
5 | pyparsing
6 | torch
7 | statsmodels
8 | tqdm
9 | joblib
10 | pgmpy
11 | treelib
12 | ipykernel
13 | matplotlib
14 |
--------------------------------------------------------------------------------
/docs/source/bowtie.rst:
--------------------------------------------------------------------------------
1 | bowtie package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | bowtie.dataresources
10 | bowtie.io
11 | bowtie.mapping
12 |
13 | Module contents
14 | ---------------
15 |
16 | .. automodule:: bowtie
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
--------------------------------------------------------------------------------
/docs/source/bowtie.mapping.rst:
--------------------------------------------------------------------------------
1 | bowtie.mapping package
2 | ======================
3 |
4 | Submodules
5 | ----------
6 |
7 | bowtie.mapping.mapping\_bowtie module
8 | -------------------------------------
9 |
10 | .. automodule:: bowtie.mapping.mapping_bowtie
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 |
16 | Module contents
17 | ---------------
18 |
19 | .. automodule:: bowtie.mapping
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. bowtie documentation master file, created by
2 | sphinx-quickstart on Sun Dec 15 22:09:04 2019.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to bowtie's documentation!
7 | ==================================
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 | :caption: Contents:
12 |
13 | source/bowtie
14 |
15 |
16 |
17 | Indices and tables
18 | ==================
19 |
20 | * :ref:`genindex`
21 | * :ref:`modindex`
22 | * :ref:`search`
23 |
--------------------------------------------------------------------------------
/docs/source/bowtie.io.rst:
--------------------------------------------------------------------------------
1 | bowtie.io package
2 | =================
3 |
4 | Submodules
5 | ----------
6 |
7 | bowtie.io.import\_opsa module
8 | -----------------------------
9 |
10 | .. automodule:: bowtie.io.import_opsa
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | bowtie.io.utilities module
16 | --------------------------
17 |
18 | .. automodule:: bowtie.io.utilities
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 |
24 | Module contents
25 | ---------------
26 |
27 | .. automodule:: bowtie.io
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Learn more: https://github.com/kennethreitz/setup.py
4 |
5 | from setuptools import setup, find_packages
6 |
7 | import bowtie
8 |
9 | with open('README.md') as f:
10 | readme = f.read()
11 |
12 | with open('LICENSE') as f:
13 | license = f.read()
14 |
15 | setup(
16 | name='bowtie',
17 | version=bowtie.__version__,
18 |     description='A library to map the bow-tie method to Bayesian networks',
19 | long_description=readme,
20 | author='Frank T. Zurheide',
21 | author_email='frank.zurheide@gmail.com',
22 | license=license,
23 | packages=find_packages(exclude=('tests', 'docs'))
24 | )
25 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/source/bowtie.dataresources.rst:
--------------------------------------------------------------------------------
1 | bowtie.dataresources package
2 | ============================
3 |
4 | Submodules
5 | ----------
6 |
7 | bowtie.dataresources.opsa module
8 | --------------------------------
9 |
10 | .. automodule:: bowtie.dataresources.opsa
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | bowtie.dataresources.type\_definitions module
16 | ---------------------------------------------
17 |
18 | .. automodule:: bowtie.dataresources.type_definitions
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 |
24 | Module contents
25 | ---------------
26 |
27 | .. automodule:: bowtie.dataresources
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
--------------------------------------------------------------------------------
/tests/import_opsa_test.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import xml.etree.ElementTree as ET
4 | from treelib import Tree
5 | from bowtie.io.import_opsa import OPSA_Importer
6 |
7 |
8 | class Test_Import(unittest.TestCase):
9 |
10 | def test_import(self):
11 | # filename = "../data/test_et_v03_fails.xml"
12 | filename = "data/test_et_v03_fails.xml"
13 |
14 | # read file
15 | xml_root = ET.parse(filename).getroot()
16 | tree = Tree()
17 | tree_root = tree.create_node('root')
18 | importer = OPSA_Importer()
19 | importer.parse(xml_root, tree, tree_root)
20 |
21 | self.assertTrue(isinstance(tree, Tree))
22 |
23 |
24 | if __name__ == '__main__':
25 | unittest.main()
26 |
--------------------------------------------------------------------------------
/bowtie/dataresources/type_definitions.py:
--------------------------------------------------------------------------------
1 | from enum import Enum, auto
2 |
3 |
4 | class GateType(Enum):
5 | """
6 | Enumeration of available gate types.
7 |
8 |     **Note:** not all of these types are implemented yet.
9 | """
10 | OR = auto()
11 | AND = auto()
12 | ATLEAST = auto()
13 | NOT = auto()
14 | LINK = auto()
15 | UNKNOWN = auto()
16 |
17 |
18 | class EventType(Enum):
19 | """
20 | Definition of OPSA data types.
21 | """
22 | FAULT_TREE = auto()
23 | EVENT_TREE = auto()
24 | GATE = auto()
25 | BASIC_EVENT = auto()
26 | INITIATING_EVENT = auto()
27 | FUNCTIONAL_EVENT = auto()
28 | SEQUENCE = auto()
29 | FORK = auto()
30 | PATH = auto()
31 | UNKNOWN = auto()
32 |
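33 |
34 | if __name__ == "__main__":
35 |     # Illustration only, not part of the importer: look up a gate type
36 |     # from an XML tag name, falling back to UNKNOWN for unknown tags.
37 |     for tag in ("or", "and", "nor"):
38 |         gate_type = GateType.__members__.get(tag.upper(), GateType.UNKNOWN)
39 |         print(tag, "->", gate_type)
40 |     print(EventType.BASIC_EVENT.name)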
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pybnbowtie
2 |
3 | pybnbowtie is a library for mapping bow-tie analysis to Bayesian networks.
4 |
5 |
6 | ## Dependencies
7 |
8 | pybnbowtie has the following dependencies:
9 |
10 | - pgmpy (with its own dependencies)
11 | - treelib
12 | - matplotlib
13 |
14 | To enable usage of Jupyter (see also the notes below):
15 | - ipykernel
16 |
17 | ### Dependencies installed together with pgmpy
18 |
19 | - networkx
20 | - numpy
21 | - scipy
22 | - pandas
23 | - pyparsing
24 | - torch
25 | - statsmodels
26 | - tqdm
27 | - joblib
28 | - pgmpy
29 | - treelib
30 |
31 | And, for Jupyter, possibly also:
32 | - ipykernel
33 | - matplotlib
34 |
35 |
36 | ## Installation
37 |
38 | To install pybnbowtie from source code:
39 |
40 | ```
41 | git clone https://github.com/zurheide/pybnbowtie.git
42 | cd pybnbowtie
43 | pip install -r requirements.txt
44 | python setup.py install
45 | ```
46 |
47 |
48 | ## jupyter
49 |
50 | If pipenv is used, the environment has to be registered as a kernel in Jupyter.
51 | A how-to can be found here: https://stackoverflow.com/questions/47295871/is-there-a-way-to-use-pipenv-with-jupyter-notebook
52 |
53 | Run ``python -m ipykernel install --user --name=my-virtualenv-name`` before using Jupyter:
54 | ```
55 | $ pipenv shell
56 | $ python -m ipykernel install --user --name=my-virtualenv-name
57 | $ jupyter notebook
58 | ```
59 |
60 | Afterwards, select the **my-virtualenv-name** kernel in Jupyter.
61 |
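62 | ## Usage
63 |
64 | A minimal usage sketch, adapted from `tests/test_package.py` (the input file is one of the Open-PSA examples shipped in `data/`):
65 |
66 | ```
67 | import xml.etree.ElementTree as ET
68 | from treelib import Tree
69 | from pgmpy.inference import VariableElimination
70 |
71 | from bowtie.io.import_opsa import OPSA_Importer
72 | from bowtie.mapping.mapping_bowtie import MappingBowTie
73 |
74 | # import an Open-PSA bow-tie description into a tree
75 | xml_root = ET.parse('data/khakzad_dynamic_v03.xml').getroot()
76 | tree = Tree()
77 | tree_root = tree.create_node('root')
78 | OPSA_Importer().parse(xml_root, tree, tree_root)
79 |
80 | # map the bow-tie to a Bayesian network and run inference on the top event
81 | mapper = MappingBowTie(tree)
82 | model = mapper.map()
83 | top_event = mapper.get_top_event(tree)
84 | print(VariableElimination(model).query(variables=[top_event.tag]))
85 | ```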
--------------------------------------------------------------------------------
/data/test_et_v03_fails.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/test_et_v03_works.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/test_et_v03_true.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/tests/test_package.py:
--------------------------------------------------------------------------------
1 | import xml.etree.ElementTree as ET
2 | from treelib import Tree
3 |
4 | from pgmpy.inference import VariableElimination
5 |
6 | from bowtie.io.import_opsa import OPSA_Importer
7 | from bowtie.mapping.mapping_bowtie import MappingBowTie
8 |
9 |
10 | # Import a bow tie
11 |
12 | # define opsa file
13 | # filename = '../data/test_et_v03_works.xml'
14 | # filename = '../data/test_et_v03_true.xml'
15 | filename = '../data/khakzad_dynamic_v03.xml'
16 | # filename = '../data/Zarei_regulator_system_v01.xml'
17 |
18 | # read file
19 | xml_root = ET.parse(filename).getroot()
20 | tree = Tree()
21 | tree_root = tree.create_node('root')
22 | importer = OPSA_Importer()
23 | importer.parse(xml_root, tree, tree_root)
24 |
25 | # show imported data
26 | tree.show()
27 |
28 | # map data
29 | mapper = MappingBowTie(tree)
30 | model = mapper.map()
31 |
32 | # show model nodes
33 | print('nodes')
34 | print(model.nodes())
35 |
36 | print('check bayesian model = {}'.format(model.check_model()))
37 |
38 | # print CPD tables of bayesian network
39 | for cpd in model.cpds:
40 | print(cpd)
41 |
42 | # Inference of bayesian network
43 | node_te = mapper.get_top_event(tree)
44 | print('top event = {}'.format(node_te.tag))
45 |
46 | infer = VariableElimination(model)
47 | te_dist = infer.query(variables=[node_te.tag])
48 | print(te_dist)
49 |
50 | # Consequences
51 | consequence = mapper.consequence_name
52 | # print(consequence)
53 | c_dist = infer.query(variables=[consequence])
54 | print(c_dist)
55 | print(c_dist.values)
56 |
--------------------------------------------------------------------------------
/data/test_et_v03.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/bowtie/io/utilities.py:
--------------------------------------------------------------------------------
1 | """
2 | Helper functions for functionality that is needed across the
3 | code but finds no better place.
4 |
5 | Includes mainly a function for a deep copy of a tree.
6 | """
7 |
8 |
9 | from treelib import Tree
10 |
11 |
12 | # Helper utilities
13 |
14 | def root_traverse(tree, root, new_tree, new_root):
15 | """
16 | Traverse the tree starting at the root and create for each node
17 | a new node that links to the same data.
18 |
19 | Recursive function. Hopefully our trees are small.
20 | """
21 | # traverse children
22 | children = tree.children(root.identifier)
23 | for child in children:
24 | # copy node
25 | n = new_tree.create_node(tag=child.tag,
26 | parent=new_root,
27 | data=child.data)
28 | # traverse child node
29 | root_traverse(tree, child, new_tree, n)
30 |
31 |
32 | def tree_copy(tree):
33 | """
34 | Function to copy a tree. Takes a tree from the ``treeLib`` and traverses
35 | it. For every existing node is a new node created. Only this ensures
36 | that the copy of the tree contains new nodes with new labels.
37 |
38 | Returns a tree with the same data but new nodes (with new labels).
39 | """
40 |
41 | root = tree.get_node(tree.root)
42 |
43 | # create new tree
44 | new_tree = Tree()
45 | # copy root node
46 | new_tree.create_node(tag=root.tag, data=root.data)
47 | new_root = new_tree.get_node(new_tree.root)
48 |
49 | # traverse the data, starting from the roots
50 | root_traverse(tree, root, new_tree, new_root)
51 |
52 | return new_tree
53 |
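54 |
55 | if __name__ == "__main__":
56 |     # Minimal sketch of tree_copy in action; the tags and data below are
57 |     # made up purely for illustration.
58 |     original = Tree()
59 |     root = original.create_node("top event")
60 |     original.create_node("basic event", parent=root, data={"probability": 0.1})
61 |     duplicate = tree_copy(original)
62 |     # the copy carries the same tags and data, but freshly created nodes
63 |     duplicate.show()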
--------------------------------------------------------------------------------
/data/test_et_v01.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Editors
2 | .vscode/
3 | .idea/
4 |
5 | # Vagrant
6 | .vagrant/
7 |
8 | # Mac/OSX
9 | .DS_Store
10 |
11 | # Windows
12 | Thumbs.db
13 |
14 | # Source for the following rules: https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore
15 | # Byte-compiled / optimized / DLL files
16 | __pycache__/
17 | *.py[cod]
18 | *$py.class
19 |
20 | # C extensions
21 | *.so
22 |
23 | # Distribution / packaging
24 | .Python
25 | build/
26 | develop-eggs/
27 | dist/
28 | downloads/
29 | eggs/
30 | .eggs/
31 | lib/
32 | lib64/
33 | parts/
34 | sdist/
35 | var/
36 | wheels/
37 | *.egg-info/
38 | .installed.cfg
39 | *.egg
40 | MANIFEST
41 |
42 | # PyInstaller
43 | # Usually these files are written by a python script from a template
44 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
45 | *.manifest
46 | *.spec
47 |
48 | # Installer logs
49 | pip-log.txt
50 | pip-delete-this-directory.txt
51 |
52 | # Unit test / coverage reports
53 | htmlcov/
54 | .tox/
55 | .nox/
56 | .coverage
57 | .coverage.*
58 | .cache
59 | nosetests.xml
60 | coverage.xml
61 | *.cover
62 | .hypothesis/
63 | .pytest_cache/
64 |
65 | # Translations
66 | *.mo
67 | *.pot
68 |
69 | # Django stuff:
70 | *.log
71 | local_settings.py
72 | db.sqlite3
73 |
74 | # Flask stuff:
75 | instance/
76 | .webassets-cache
77 |
78 | # Scrapy stuff:
79 | .scrapy
80 |
81 | # Sphinx documentation
82 | docs/_build/
83 |
84 | # PyBuilder
85 | target/
86 |
87 | # Jupyter Notebook
88 | .ipynb_checkpoints
89 |
90 | # IPython
91 | profile_default/
92 | ipython_config.py
93 |
94 | # pyenv
95 | .python-version
96 |
97 | # celery beat schedule file
98 | celerybeat-schedule
99 |
100 | # SageMath parsed files
101 | *.sage.py
102 |
103 | # Environments
104 | .env
105 | .venv
106 | env/
107 | venv/
108 | ENV/
109 | env.bak/
110 | venv.bak/
111 |
112 | # Spyder project settings
113 | .spyderproject
114 | .spyproject
115 |
116 | # Rope project settings
117 | .ropeproject
118 |
119 | # mkdocs documentation
120 | /site
121 |
122 | # mypy
123 | .mypy_cache/
124 | .dmypy.json
125 | dmypy.json
126 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | sys.path.insert(0, os.path.abspath('..'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = 'bow-tie-mapping'
21 | copyright = '2020, Frank Zurheide'
22 | author = 'Frank Zurheide'
23 |
24 |
25 | # -- General configuration ---------------------------------------------------
26 |
27 | # Add any Sphinx extension module names here, as strings. They can be
28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
29 | # ones.
30 | extensions = ['sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.autodoc']
31 |
32 | # Add any paths that contain templates here, relative to this directory.
33 | templates_path = ['_templates']
34 |
35 | # List of patterns, relative to source directory, that match files and
36 | # directories to ignore when looking for source files.
37 | # This pattern also affects html_static_path and html_extra_path.
38 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
39 |
40 |
41 | # -- Options for HTML output -------------------------------------------------
42 |
43 | # The theme to use for HTML and HTML Help pages. See the documentation for
44 | # a list of builtin themes.
45 | #
46 | # html_theme = 'alabaster'
47 | # html_theme = 'pyramid'
48 | # html_theme = 'classic'
49 | html_theme = 'sphinxdoc'
50 |
51 | # Add any paths that contain custom static files (such as style sheets) here,
52 | # relative to this directory. They are copied after the builtin static files,
53 | # so a file named "default.css" will overwrite the builtin "default.css".
54 | html_static_path = ['_static']
55 |
--------------------------------------------------------------------------------
/data/bearfield_et_al_eliminating_consequences_arcs_v02.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/bearfield_et_al_eliminating_consequences_arcs_v03.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/bearfield_et_al_eliminating_causal_arcs_v02.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/bearfield_et_al_eliminating_causal_arcs_v03.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/test_et_v02.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/bowtie/dataresources/opsa.py:
--------------------------------------------------------------------------------
1 | '''
2 | OPSA_Container holds high-level structures like event trees or fault trees.
3 |
4 | Definition of the Open-PSA data structures.
5 | An OPSA XML file is imported and a tree structure with
6 | these objects is created.
7 |
8 | The definition of the OPSA structure can be found at
9 | https://open-psa.github.io
10 |
11 | '''
12 |
13 |
14 | class OPSA_Container():
15 | '''
16 |     OPSA_Container holds high-level structures like event trees or fault trees.
17 | '''
18 |
19 | def __init__(self, name, container_type):
20 | """
21 | Set the name and the container_type.
22 | """
23 | self.name = name
24 | self.container_type = container_type
25 | self.label = None
26 |
27 | def set_label(self, label):
28 | """
29 | Sets a label (description) to the container.
30 | """
31 | self.label = label
32 |
33 |
34 | class OPSA_Gate():
35 | """
36 |     Definition of a gate, for example OR, AND, AT_LEAST, ...
37 |     Available (but not necessarily implemented) types of gates are defined
38 | in the class ``GateType``.
39 | """
40 | def __init__(self, name, gate_type, gate_label=None):
41 | self.name = name
42 | self.gate_type = gate_type
43 | self.gate_label = gate_label
44 | self.atleast_min = -1
45 |
46 |     def set_atleast_min(self, minimum):
47 |         """
48 |         Set the minimum number of active inputs for an AT_LEAST gate.
49 |         """
50 |         self.atleast_min = minimum
51 |
52 |
53 | class OPSA_Basic_Event():
54 | """
55 | Basic event in a fault tree. A basic event is connected to a gate.
56 |     The basic event has to have a name. If no probability is given, the
57 |     caller must take care of setting one (e.g. 1.0).
58 | """
59 | def __init__(self, name, probability):
60 | self.name = name
61 | self.probability = probability
62 | self.label = None
63 | self.attribute = None
64 | self.parameter = None
65 |
66 | def set_label(self, label):
67 | """
68 | Sets a label (description) to the container.
69 | """
70 | self.label = label
71 |
72 | def set_probability(self, probability):
73 | """
74 | Set the probability of the ``OPSA_Basic_Event``. Probability is
75 | a float in the range of 0.0 to 1.0
76 | """
77 | self.probability = probability
78 |
79 | def add_attribute(self, name, value):
80 | """
81 |         Add an attribute to the ``OPSA_Basic_Event``. The storage is a ``dict``,
82 |         so it is possible to overwrite existing values. This is not checked.
83 | """
84 | if self.attribute is None:
85 | self.attribute = {}
86 | self.attribute[name] = value
87 |
88 | def get_attribute(self):
89 | """
90 |         Returns the ``dict`` of attributes.
91 | """
92 | return self.attribute
93 |
94 | def set_parameter(self, parameter):
95 | """
96 |         Sets a parameter. Not implemented. Should be used for time-dependent
97 |         data.
98 | """
99 | self.parameter = parameter
100 |
101 |
102 | class OPSA_Initiating_Event():
103 | """
104 | ``OPSA_Initiating_Event`` is a link from a top event to an
105 | event tree.
106 | """
107 | def __init__(self, name, container_type, event_tree):
108 | self.name = name
109 | self.container_type = container_type
110 | self.event_tree = event_tree
111 |
112 |
113 | class OPSA_Functional_Event():
114 | """
115 |     The functional event splits the event tree into two new different paths.
116 |     This element forks the event tree, and two ``OPSA_Path`` elements follow
117 |     this functional event.
118 |
119 |     Most often this is also called a *safety gate* or *safety element*,
120 |     for example an ignition prevention.
121 | """
122 | def __init__(self, name, attributes=None, label=None):
123 | self.name = name
124 | self.label = label
125 | self.attributes = attributes
126 |
127 | def set_label(self, label):
128 | """
129 | Sets a label (description) to the container.
130 | """
131 | self.label = label
132 |
133 |
134 | class OPSA_Sequence():
135 | """
136 | The sequence is the end point of an event tree. In other places it is
137 | also called *consequence*.
138 | """
139 | def __init__(self, name, probability=None, label=None):
140 | self.name = name
141 | self.label = label
142 | self.probability = probability
143 | self.event_tree_name = None
144 |
145 | def set_label(self, label):
146 | """
147 | Sets a label (description) to the container.
148 | """
149 | self.label = label
150 |
151 | def set_event_tree(self, name):
152 | """
153 | Sets the name of the event tree to the sequence.
154 | """
155 | self.event_tree_name = name
156 |
157 |
158 | class OPSA_Path():
159 | """
160 |     A path follows an ``OPSA_Functional_Event``. There are always two possible
161 | paths after a functional event: YES/NO, true/false, works/fails.
162 | """
163 | def __init__(self, name, state, label=None, probability=None):
164 | self.name = name
165 | self.state = state
166 | self.label = label
167 | self.probability = probability
168 | self.parameter = None
169 |
170 | def set_expression(self, expression):
171 | """
172 | ZZZ TODO: Check this
173 |
174 |         The expression defines the probability of this path. The probabilities
175 |         of both paths must sum to 1.0. This is not checked and must be
176 |         ensured by the program logic.
177 | """
178 | self.expression = expression
179 |
180 | def set_parameter(self, parameter):
181 | self.parameter = parameter
182 |
183 | def set_probability(self, probability):
184 | """
185 |         Set the probability of this path. The probabilities of both paths
186 |         must sum to 1.0. This is not checked and must be
187 |         ensured by the program logic.
188 | """
189 | self.probability = probability
190 |
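191 |
192 | if __name__ == "__main__":
193 |     # Illustration only: building a few of the containers by hand. In
194 |     # normal use these objects are created by the OPSA importer while it
195 |     # parses an Open-PSA XML file.
196 |     from bowtie.dataresources.type_definitions import EventType, GateType
197 |
198 |     fault_tree = OPSA_Container("pipeline failure", EventType.FAULT_TREE)
199 |     fault_tree.set_label("example fault tree")
200 |
201 |     gate = OPSA_Gate("G1", GateType.OR)
202 |     leak = OPSA_Basic_Event("leakage", 0.01)
203 |     leak.add_attribute("description", "hole in the pipeline")
204 |
205 |     print(fault_tree.name, fault_tree.container_type)
206 |     print(gate.name, gate.gate_type)
207 |     print(leak.name, leak.probability, leak.get_attribute())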
--------------------------------------------------------------------------------
/data/khakzad_dynamic_v03.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/tests/mapping_bowtie_test.py:
--------------------------------------------------------------------------------
1 | """
2 | Run this test with unittest:
3 |
4 | $ python -m unittest -v mapping_bowtie_test
5 | """
6 |
7 |
8 | import unittest
9 |
10 | # import xml.etree.ElementTree as ET
11 | from treelib import Tree
12 | # from bowtie.io.import_opsa import OPSA_Importer
13 | from bowtie.mapping.mapping_bowtie import MappingBowTie
14 |
15 |
16 | class Test_Import(unittest.TestCase):
17 |
18 | def test_check_path(self):
19 | tree = Tree()
20 | mapper = MappingBowTie(tree)
21 |
22 | res = mapper.check_path(['S1'], ['S1'])
23 | self.assertTrue(res)
24 |
25 | def test_check_path_2(self):
26 | tree = Tree()
27 | mapper = MappingBowTie(tree)
28 | try:
29 | res = mapper.check_path(['S1'], ['S2'])
30 | except RuntimeError:
31 | self.assertTrue(True)
32 |
33 | def test_check_path_3(self):
34 | tree = Tree()
35 | mapper = MappingBowTie(tree)
36 | try:
37 | res = mapper.check_path(['S1', 'S3', 'S5'],
38 | ['S1', 'S2', 'S3', 'S4', 'S5', 'S6'])
39 | except RuntimeError:
40 | self.assertTrue(True)
41 |
42 | def test_check_path_4(self):
43 | tree = Tree()
44 | mapper = MappingBowTie(tree)
45 | try:
46 | res = mapper.check_path(['S1', 'S2', 'S3', 'S4', 'S5', 'S6'],
47 | ['S1', 'S6', 'S4', 'S5'])
48 | except RuntimeError:
49 | self.assertTrue(True)
50 |
51 | def test_check_path_5(self):
52 | tree = Tree()
53 | mapper = MappingBowTie(tree)
54 | res = mapper.check_path(['S1', 'S2', 'S3', 'S4', 'S5', 'S6'],
55 | ['S1', 'S3', 'S4', 'S6'])
56 | self.assertTrue(res)
57 |
58 | def test_split_probability_table(self):
59 | tree = Tree()
60 | mapper = MappingBowTie(tree)
61 | p = [(('Alarm', 'true'), ('Sprinkler', 'true'), '0.2250'),
62 | (('Alarm', 'true'), ('Ignition', 'false'), '0.2250'),
63 | (('Alarm', 'true'), ('Ignition', 'false'), '0.225'),
64 | (('Alarm', 'false'), ('Sprinkler', 'true'), '0.9987'),
65 | (('Alarm', 'false'), ('Sprinkler', 'false'), '0.9987'),
66 | (('Alarm', 'true'), ('Ignition', 'true'), '0.0013'),
67 | (('Alarm', 'true'), ('Sprinkler', 'true'), '0.0013'),
68 | (('Alarm', 'false'), ('Sprinkler', 'true'), '0.775'),
69 | (('Alarm', 'false'), ('Ignition', 'true'), '0.9987'),
70 | (('Alarm', 'false'), ('Sprinkler', 'false'), '0.775'),
71 | (('Alarm', 'false'), ('Ignition', 'false'), '0.775'),
72 | (('Alarm', 'true'), ('Sprinkler', 'false'), '0.0013'),
73 | (('Alarm', 'true'), ('Sprinkler', 'false'), '0.225')]
74 |
75 | name_list, sans_name_list = mapper.split_probability_table('Alarm', p)
76 |
77 | self.assertEqual(len(name_list), 13)
78 | self.assertEqual(len(name_list[0]), 1)
79 | self.assertEqual(len(sans_name_list), 13)
80 | self.assertEqual(len(sans_name_list[0]), 1)
81 |
82 | def test_split_probability_table_2(self):
83 | tree = Tree()
84 | mapper = MappingBowTie(tree)
85 | p = [(('Alarm', 'true'), ('Sprinkler', 'true'), '0.2250'),
86 | (('Alarm', 'true'), ('Ignition', 'false'), '0.2250'),
87 | (('Alarm', 'true'), ('Ignition', 'false'), '0.225'),
88 | (('Alarm', 'false'), ('Sprinkler', 'true'), '0.9987'),
89 | (('Alarm', 'false'), ('Sprinkler', 'false'), '0.9987'),
90 | (('Alarm', 'true'), ('Ignition', 'true'), '0.0013'),
91 | (('Alarm', 'true'), ('Sprinkler', 'true'), '0.0013'),
92 | (('Alarm', 'false'), ('Sprinkler', 'true'), '0.775'),
93 | (('Alarm', 'false'), ('Ignition', 'true'), '0.9987'),
94 | (('Alarm', 'false'), ('Sprinkler', 'false'), '0.775'),
95 | (('Alarm', 'false'), ('Ignition', 'false'), '0.775'),
96 | (('Alarm', 'true'), ('Sprinkler', 'false'), '0.0013'),
97 | (('Alarm', 'true'), ('Sprinkler', 'false'), '0.225')]
98 |
99 | name_list, sans_name_list = mapper.split_probability_table('Lala', p)
100 |
101 | self.assertEqual(len(name_list), 13)
102 | self.assertEqual(len(name_list[0]), 0)
103 | self.assertEqual(len(sans_name_list), 13)
104 | self.assertEqual(len(sans_name_list[0]), 2)
105 |
106 | def test_split_probability_table_3(self):
107 | tree = Tree()
108 | mapper = MappingBowTie(tree)
109 | p = [(('e2', 'o22'), ('e1', 'o12'), '0.9'),
110 | (('e2', 'o21'), ('e1', 'o12'), '0.1'),
111 | (('e2', 'o22'), ('e1', 'o11'), '0.99'),
112 | (('e2', 'o21'), ('e1', 'o11'), '0.01')]
113 |
114 | name_list, sans_name_list = mapper.split_probability_table('e2', p)
115 |
116 | self.assertEqual(len(name_list), 4)
117 | self.assertEqual(len(name_list[0]), 1)
118 | self.assertEqual(len(sans_name_list), 4)
119 | self.assertEqual(len(sans_name_list[0]), 1)
120 |
121 | def test_states_in_probability(self):
122 | tree = Tree()
123 | mapper = MappingBowTie(tree)
124 | p = [(('e2', 'o22'), ('e1', 'o12'), '0.9'),
125 | (('e2', 'o21'), ('e1', 'o12'), '0.1'),
126 | (('e2', 'o22'), ('e1', 'o11'), '0.99'),
127 | (('e2', 'o21'), ('e1', 'o11'), '0.01')]
128 |
129 | res = mapper.states_in_probability(p)
130 |
131 | # print(res)
132 | self.assertEqual(len(res), 2)
133 | self.assertEqual(len(res['e1']), 2)
134 | self.assertEqual(len(res['e2']), 2)
135 | # check if o11 and o12 are in e1 array
136 | self.assertIn('o11', res['e1'])
137 | self.assertIn('o12', res['e1'])
138 |         # check if o21 and o22 are in e2 array
139 | self.assertIn('o21', res['e2'])
140 | self.assertIn('o22', res['e2'])
141 |
142 | def test_find_state_for_probabilities(self):
143 | tree = Tree()
144 | mapper = MappingBowTie(tree)
145 | p = [[('e1', 'o11'), ('e2', 'o21'),
146 | ('failure leakage', 'fails'), 'c1'],
147 | [('e1', 'o11'), ('e2', 'o21'),
148 | ('failure leakage', 'works'), 'Safe'],
149 | [('e1', 'o11'), ('e2', 'o22'),
150 | ('failure leakage', 'fails'), 'c2'],
151 | [('e1', 'o11'), ('e2', 'o22'),
152 | ('failure leakage', 'works'), 'Safe'],
153 | [('e1', 'o12'), ('e2', 'o21'),
154 | ('failure leakage', 'fails'), 'c1'],
155 | [('e1', 'o12'), ('e2', 'o21'),
156 | ('failure leakage', 'works'), 'Safe'],
157 | [('e1', 'o12'), ('e2', 'o22'),
158 | ('failure leakage', 'fails'), 'c3'],
159 | [('e1', 'o12'), ('e2', 'o22'),
160 | ('failure leakage', 'works'), 'Safe']]
161 |
162 | v = [('e1', 'o12'), ('e2', 'o22'), ('failure leakage', 'works')]
163 | res = mapper.find_state_for_probabilities(p, v)
164 |
165 | self.assertEqual(res, 'Safe')
166 |
167 | def test_find_state_for_probabilities_2(self):
168 | tree = Tree()
169 | mapper = MappingBowTie(tree)
170 | p = [[('e1', 'o11'), ('e2', 'o21'),
171 | ('failure leakage', 'fails'), 'c1'],
172 | [('e1', 'o11'), ('e2', 'o21'),
173 | ('failure leakage', 'works'), 'Safe'],
174 | [('e1', 'o11'), ('e2', 'o22'),
175 | ('failure leakage', 'fails'), 'c2'],
176 | [('e1', 'o11'), ('e2', 'o22'),
177 | ('failure leakage', 'works'), 'Safe'],
178 | [('e1', 'o12'), ('e2', 'o21'),
179 | ('failure leakage', 'fails'), 'c1'],
180 | [('e1', 'o12'), ('e2', 'o21'),
181 | ('failure leakage', 'works'), 'Safe'],
182 | [('e1', 'o12'), ('e2', 'o22'),
183 | ('failure leakage', 'fails'), 'c3'],
184 | [('e1', 'o12'), ('e2', 'o22'),
185 | ('failure leakage', 'works'), 'Safe']]
186 |
187 | v = [('e1', 'o12'), ('e2', 'o22'), ('failure leakage', 'fails')]
188 | res = mapper.find_state_for_probabilities(p, v)
189 |
190 | self.assertEqual(res, 'c3')
191 |
192 | def test_find_state_for_probabilities_3(self):
193 | tree = Tree()
194 | mapper = MappingBowTie(tree)
195 | p = [[('e1', 'o11'), ('e2', 'o21'),
196 | ('failure leakage', 'fails'), 'c1'],
197 | [('e1', 'o11'), ('e2', 'o21'),
198 | ('failure leakage', 'works'), 'Safe'],
199 | [('e1', 'o11'), ('e2', 'o22'),
200 | ('failure leakage', 'fails'), 'c2'],
201 | [('e1', 'o11'), ('e2', 'o22'),
202 | ('failure leakage', 'works'), 'Safe'],
203 | [('e1', 'o12'), ('e2', 'o21'),
204 | ('failure leakage', 'fails'), 'c1'],
205 | [('e1', 'o12'), ('e2', 'o21'),
206 | ('failure leakage', 'works'), 'Safe'],
207 | [('e1', 'o12'), ('e2', 'o22'),
208 | ('failure leakage', 'fails'), 'c3'],
209 | [('e1', 'o12'), ('e2', 'o22'),
210 | ('failure leakage', 'works'), 'Safe']]
211 |
212 | v = [('x1', 'x12'), ('x2', 'ox2'), ('nada', 'rien')]
213 | res = mapper.find_state_for_probabilities(p, v)
214 |
215 | self.assertEqual(res, None)
216 |
217 | def test_derived_values_from_probabilities(self):
218 | tree = Tree()
219 | mapper = MappingBowTie(tree)
220 | p = [[('e1', 'o11'), ('e2', 'o21'),
221 | ('failure leakage', 'fails'), 'c1'],
222 | [('e1', 'o11'), ('e2', 'o21'),
223 | ('failure leakage', 'works'), 'Safe'],
224 | [('e1', 'o11'), ('e2', 'o22'),
225 | ('failure leakage', 'fails'), 'c2'],
226 | [('e1', 'o11'), ('e2', 'o22'),
227 | ('failure leakage', 'works'), 'Safe'],
228 | [('e1', 'o12'), ('e2', 'o21'),
229 | ('failure leakage', 'fails'), 'c1'],
230 | [('e1', 'o12'), ('e2', 'o21'),
231 | ('failure leakage', 'works'), 'Safe'],
232 | [('e1', 'o12'), ('e2', 'o22'),
233 | ('failure leakage', 'fails'), 'c3'],
234 | [('e1', 'o12'), ('e2', 'o22'),
235 | ('failure leakage', 'works'), 'Safe']]
236 |
237 | s = {'c3', 'c2', 'c1', 'Safe'}
238 | consequence = "Consequenzzzze"
239 | value_list, evidence, evidence_card, state_names = mapper.derived_values_from_probabilities(p, s, consequence)
240 |
241 | self.assertEqual(len(value_list), 4)
242 | self.assertEqual(len(evidence), 3)
243 | self.assertIn("failure leakage", evidence)
244 | self.assertIn("e1", evidence)
245 | self.assertIn("e2", evidence)
246 |
247 | self.assertEqual(len(evidence_card), 3)
248 | self.assertEqual(evidence_card[0], 2)
249 | self.assertEqual(evidence_card[1], 2)
250 | self.assertEqual(evidence_card[2], 2)
251 |
252 | self.assertEqual(len(state_names), 4)
253 | self.assertEqual(len(state_names[consequence]), 4)
254 | self.assertIn("c1", state_names[consequence])
255 | self.assertIn("c2", state_names[consequence])
256 | self.assertIn("c3", state_names[consequence])
257 | self.assertIn("Safe", state_names[consequence])
258 | self.assertIn("works", state_names["failure leakage"])
259 | self.assertIn("fails", state_names["failure leakage"])
260 | self.assertIn(None, state_names["e1"])
261 | self.assertIn(None, state_names["e2"])
262 |
263 | def test_derived_values_from_probabilities_2(self):
264 | tree = Tree()
265 | mapper = MappingBowTie(tree)
266 | p = [[('e1', 'yes'), ('e2', 'true'),
267 | ('failure leakage', 'fails'), 'c1'],
268 | [('e1', 'yes'), ('e2', 'true'),
269 | ('failure leakage', 'works'), 'Safe'],
270 | [('e1', 'yes'), ('e2', 'false'),
271 | ('failure leakage', 'fails'), 'c2'],
272 | [('e1', 'yes'), ('e2', 'false'),
273 | ('failure leakage', 'works'), 'Safe'],
274 | [('e1', 'no'), ('e2', 'true'),
275 | ('failure leakage', 'fails'), 'c1'],
276 | [('e1', 'no'), ('e2', 'true'),
277 | ('failure leakage', 'works'), 'Safe'],
278 | [('e1', 'no'), ('e2', 'false'),
279 | ('failure leakage', 'fails'), 'c3'],
280 | [('e1', 'no'), ('e2', 'false'),
281 | ('failure leakage', 'works'), 'Safe']]
282 |
283 | s = {'c3', 'c2', 'c1', 'Safe'}
284 | consequence = "Consequenzzzze"
285 | value_list, evidence, evidence_card, state_names = mapper.derived_values_from_probabilities(p, s, consequence)
286 |
287 | self.assertEqual(len(value_list), 4)
288 | self.assertEqual(len(evidence), 3)
289 | self.assertIn("failure leakage", evidence)
290 | self.assertIn("e1", evidence)
291 | self.assertIn("e2", evidence)
292 |
293 | self.assertEqual(len(evidence_card), 3)
294 | self.assertEqual(evidence_card[0], 2)
295 | self.assertEqual(evidence_card[1], 2)
296 | self.assertEqual(evidence_card[2], 2)
297 |
298 | self.assertEqual(len(state_names), 4)
299 | self.assertEqual(len(state_names[consequence]), 4)
300 | self.assertIn("c1", state_names[consequence])
301 | self.assertIn("c2", state_names[consequence])
302 | self.assertIn("c3", state_names[consequence])
303 | self.assertIn("Safe", state_names[consequence])
304 | self.assertIn("works", state_names["failure leakage"])
305 | self.assertIn("fails", state_names["failure leakage"])
306 | self.assertIn("yes", state_names["e1"])
307 | self.assertIn("no", state_names["e1"])
308 | self.assertIn("true", state_names["e2"])
309 | self.assertIn("false", state_names["e2"])
310 |
311 |
312 | if __name__ == '__main__':
313 | unittest.main()
314 |
--------------------------------------------------------------------------------
/data/Zarei_regulator_system_v01.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/data/Li_submarine_pipeline_v04.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/bowtie/io/import_opsa.py:
--------------------------------------------------------------------------------
1 | """
2 | This module holds the ``OPSA_Importer`` class. It takes an XML
3 | document and parses it to a treelib tree.
4 |
5 | Example of usage::
6 |
7 | import xml.etree.ElementTree as ET
8 | from treelib import Tree
9 | from bowtie.io.import_opsa import OPSA_Importer
10 |
11 | # read XML file, here opsa_input.xml
12 | xml_root = ET.parse("opsa_input.xml").getroot()
13 | # create and prepare tree for results
14 | tree = Tree()
15 | tree_root = tree.create_node('root')
16 | # create importer and parse
17 | importer = OPSA_Importer()
18 | importer.parse(xml_root, tree, tree_root)
19 | # show results
20 | tree.show()
21 |
22 | """
23 |
24 |
25 | from treelib import Tree
26 |
27 | from bowtie.dataresources.type_definitions import GateType, EventType
28 | from bowtie.dataresources.opsa import OPSA_Container, OPSA_Gate, \
29 | OPSA_Basic_Event, OPSA_Initiating_Event, OPSA_Functional_Event, \
30 | OPSA_Sequence, OPSA_Path
31 | from .utilities import tree_copy
32 |
33 |
34 | class OPSA_Importer:
35 | """
36 | Class that parses an XML in OPSA definition and returns a tree
37 | with the data for further processing.
38 |
39 | Implemented gates: ``OR``, ``AND``.
40 | """
41 |
42 | def check_root(self, root):
43 | """
44 | Check that the XML is of opsa-mef format. Returns True or raises
45 | an Exception.
46 | """
47 | expected_tag = 'opsa-mef'
48 | if root.tag != expected_tag:
49 | raise Exception('root.tag unknown: <{}>. Should be <{}>'
50 | .format(root.tag, expected_tag))
51 | return True
52 |
53 | def tree_find_node(self, tree, node_name, data_class):
54 | '''
55 | Returns the first node in the tree with node_name and of type
56 | data_class. Returns the node or None.
57 | '''
58 | # convert the tree to a list for easier iterating
59 | aList = tree.all_nodes()
60 |
61 | for el in aList:
62 | if el.tag == node_name:
63 | if isinstance(el.data, data_class):
64 | return el
65 |
66 | return None
67 |
68 | def tree_find_nodes(self, tree, node_name, data_class):
69 | """
70 | Returns a list with all found nodes with node_name
71 | and of class data_class.
72 | """
73 |
74 | aList = tree.all_nodes()
75 |
76 | result = []
77 | for el in aList:
78 | if node_name is None or el.tag == node_name:
79 | if isinstance(el.data, data_class):
80 | result.append(el)
81 |
82 | return result
83 |
84 | def bd_define_basic_event(self, xml_node, tree):
85 | """
86 | Parses basic event definition (``define-basic-event``) in
87 | the ``model-data`` OPSA data structure.
88 | Reads data (probability and label) and stores it in the previously
89 | defined nodes.
90 | """
91 |
92 | # search for existing event (should already be created before)
93 | existing_nodes = self.tree_find_nodes(tree,
94 | xml_node.get('name'),
95 | OPSA_Basic_Event)
96 |
97 | if len(existing_nodes) < 1:
98 | raise RuntimeError('Node does not exist for tag:',
99 | xml_node.get('name'))
100 |
101 | for a_node in existing_nodes:
102 | d = a_node.data
103 |
104 | for e in xml_node:
105 |
106 | if e.tag == 'float':
107 | # while reading the value, convert it directly to a float
108 | d.set_probability(float(e.get('value')))
109 | elif e.tag == 'label':
110 | d.set_label(e.text.strip())
111 | elif e.tag == 'attributes':
112 | for attrib in e:
113 | d.add_attribute(attrib.get('name'),
114 | attrib.get('value'))
115 | elif e.tag == 'parameter':
116 | d.set_parameter(e.get('name'))
117 | else:
118 | raise NotImplementedError('bd_define_basic_event not ' +
119 | 'implemented property:', e.tag)
120 |
121 | def define_parameter(self, xml_node, tree):
122 | """
123 | Parses parameters in the ``model-data`` OPSA data structure.
124 | Reads the parameter and stores it in the nodes that have this
125 | parameter defined.
126 | """
127 |
128 | name = xml_node.get('name')
129 |
130 | # read parameter values
131 | for e in xml_node:
132 | if e.tag == 'float':
133 | probability = float(e.get('value'))
134 | # elif e.tag == 'parameter':
135 | # parameter = e.get('name')
136 | # elif e.tag == 'sub':
137 | # pass
138 | # elif e.tag == 'exponential':
139 | # print('*************************************')
140 | # elif e.tag == 'lognormal-deviate':
141 | # print('*************************************')
142 | else:
143 | raise NotImplementedError('define_parameter not ' +
144 | 'implemented property:', e.tag)
145 |
146 | existing_paths = self.tree_find_nodes(tree, None, OPSA_Path)
147 | existing_basic_events = self.tree_find_nodes(tree, None,
148 | OPSA_Basic_Event)
149 |
150 | found_parameters = False
151 |
152 | if existing_paths:
153 | for node in existing_paths:
154 | path = node.data
155 | if name == path.parameter:
156 | if probability:
157 | path.set_probability(probability)
158 | found_parameters = True
159 | else:
160 | raise RuntimeError(' no objects with parameters found, perhaps ' +
161 | 'other classes will fit')
162 |
163 | if existing_basic_events:
164 | for node in existing_basic_events:
165 | basic_event = node.data
166 | if name == basic_event.parameter:
167 | if probability:
168 | basic_event.set_probability(probability)
169 | found_parameters = True
170 |
171 | if found_parameters is False:
172 | print('WARNING: parameter not used. Parameter name = <{}>'
173 | .format(xml_node.get('name')))
174 |
175 | def parse_model_data_node(self, xml_node, tree):
176 | """
177 | Parses a single entry of the ``model-data`` OPSA data structure.
178 | Data can be of type ``define-basic-event`` or ``define-parameter``.
179 | """
180 |
181 | if xml_node.tag == 'define-basic-event':
182 | self.bd_define_basic_event(xml_node, tree)
183 | elif xml_node.tag == 'define-parameter':
184 | self.define_parameter(xml_node, tree)
185 | else:
186 | raise NotImplementedError('parse_model_data_node unhandled {}'
187 | .format(xml_node.tag))
188 |
189 | def parse_et_collect_formula(self, xml_node, tree, tree_parent):
190 | """
191 | Parses in the event tree (et) the collect-formula.
192 | """
193 |
194 | parent = tree_parent
195 |
196 | xml = xml_node
197 | if xml.tag == 'not':
198 | opsa_gate = OPSA_Gate(xml_node.tag, GateType.NOT)
199 | gate = tree.create_node(xml_node.tag, parent=parent,
200 | data=opsa_gate)
201 | parent = gate
202 | xml = xml[0]
203 |
204 | # now parse all data in the formula
205 | for elem in xml.iter():
206 | tag = elem.tag
207 | if tag == 'gate':
208 | # create a link
209 | opsa_gate = OPSA_Gate(elem.get('name'), GateType.LINK)
210 | gate = tree.create_node(elem.get('name'), parent=parent,
211 | data=opsa_gate)
212 |
213 | def parse_model_data(self, xml_node, tree):
214 | """
215 | Parses the ``model-data`` in the OPSA data structure.
216 | Calls for each entry the function ``parse_model_data_node``.
217 | """
218 |
219 | # traverse all nodes of the fault tree
220 | for subnode in xml_node:
221 | self.parse_model_data_node(subnode, tree)
222 |
223 | def et_define_functional_event(self, xml_node, tree, parent, a_dict):
224 | """
225 | Parses in the event tree (et) the functional event.
226 | The element contains mainly a description in the ``label``.
227 | """
228 |
229 | # sanity check if functional events are previously stored in the tree
230 | existing_nodes = self.tree_find_nodes(tree, xml_node.get('name'),
231 | OPSA_Functional_Event)
232 | if len(existing_nodes) > 0:
233 | print('aaaargh, should not be, first we define the'
234 | + ' functional-event, then we store them in the tree')
235 | raise RuntimeError('existing functional-events should not happen'
236 | + ' at this stage of computation')
237 |
238 | fe = OPSA_Functional_Event(xml_node.get('name'))
239 |
240 | for e in xml_node:
241 | if e.tag == 'label':
242 | fe.set_label(e.text.strip())
243 | else:
244 | raise NotImplementedError('et_define_functional_event not'
245 | + ' implemented property:', e.tag)
246 |
247 | a_dict[xml_node.get('name')] = fe
248 |
249 | def parse_et_collect_expression(self, xml_node):
250 | """
251 | Parses in the event tree (et) the collect-expression.
252 | The probability of the path is stored in the ``float`` element.
253 | """
254 |
255 | parameter = None
256 | probability = None
257 |
258 | for elem in xml_node.iter():
259 | tag = elem.tag
260 |
261 | if tag == 'parameter':
262 | parameter = elem.get('name')
263 | elif tag == 'sub':
264 | # just continue reading, not handled
265 | pass
266 | elif tag == 'float':
267 | probability = float(elem.get('value'))
268 | else:
269 | raise NotImplementedError('this tag is not yet handled', tag)
270 |
271 | return parameter, probability
272 |
273 | def parse_et_path(self, xml_node, tree, tree_parent, a_dict):
274 | """
275 | Parses in the event tree (et) a path.
276 |
277 | This function is recursive. If the path contains another fork it
278 | calls the caller function ``parse_et_fork``.
279 | """
280 |
281 | parent = tree_parent
282 | state = xml_node.get('state')
283 |
284 | # create artificial name for path
285 | path_name = 'Path' + '.' + tree_parent.data.name + '.' + state
286 | path = OPSA_Path(name=path_name, state=state)
287 |
288 | # now add the path object to the tree, we will change it later
289 | n = tree.create_node(path_name, parent=parent, data=path)
290 | parent = n # now the path is the new parent
291 |
292 | for elem in xml_node:
293 | if elem.tag == 'collect-expression':
294 | param, prob = self.parse_et_collect_expression(elem[0])
295 | if param:
296 | n.data.set_parameter(param)
297 | if prob:
298 | n.data.set_probability(prob)
299 | elif elem.tag == 'collect-formula':
300 | self.parse_et_collect_formula(elem[0], tree, parent)
301 | elif elem.tag == 'sequence':
302 | seq = a_dict[elem.get('name')]
303 |
304 | if seq is None:
305 | raise RuntimeError('Sequence <{}> is not defined'
306 | .format(elem.get('name')))
307 | tree.create_node(elem.get('name'), parent=parent, data=seq)
308 | elif elem.tag == 'fork':
309 | self.parse_et_fork(elem, tree, parent, a_dict)
310 | elif elem.tag == 'branch':
311 | # find branch in a_dict
312 | subtree = a_dict[elem.get('name')]
313 | if subtree is None:
314 | raise RuntimeError('branch {} was not defined'
315 | .format(elem.get('name')))
316 |
317 | t = tree_copy(subtree)
318 |
319 | tree.paste(parent.identifier, t)
320 | else:
321 | raise NotImplementedError('parse_et_path:', elem.tag)
322 |
323 | def parse_et_fork(self, xml_node, tree, tree_parent, a_dict):
324 | """
325 | Parses in the event tree (et) the fork.
326 | The fork for the functional event holds a path. This path is then
327 | parsed by calling ``parse_et_path``.
328 | """
329 |
330 | parent = tree_parent
331 | func_elem_name = xml_node.get('functional-event')
332 | func_elem = a_dict.get(func_elem_name)
333 |
334 | if func_elem is None:
335 | raise RuntimeError('No functional-event to fork found with name'
336 | + ' <{}>'.format(func_elem_name))
337 |
338 | # add functional-event (triggered from fork) to tree
339 | n = tree.create_node(func_elem.name, parent=parent, data=func_elem)
340 | if n:
341 | parent = n
342 |
343 | for elem in xml_node:
344 | if elem.tag == 'path':
345 | self.parse_et_path(elem, tree, parent, a_dict)
346 | else:
347 | raise RuntimeError('fork should only contain path elements'
348 | + ' and not <{}>:'.format(elem.tag))
349 |
350 | def parse_et_initial_state(self, xml_node, tree, tree_parent, a_dict):
351 | """
352 | Parses in the event tree (et) the initial state. Each path is
353 | parsed by calling ``parse_et_fork``.
354 | """
355 |
356 | for sub in xml_node:
357 | if sub.tag == 'fork':
358 | self.parse_et_fork(sub, tree, tree_parent, a_dict)
359 | else:
360 | raise NotImplementedError('unknown tag:', sub.tag)
361 |
362 | def et_define_sequence(self, xml_node, tree, tree_parent, a_dict):
363 | """
364 | Parses the event tree (et) sequence. Result of parsing is stored
365 | in ``a_dict`` for later usage.
366 | """
367 |
368 | # sanity check if sequences are previously stored in the tree
369 | existing_nodes = self.tree_find_nodes(tree, xml_node.get('name'),
370 | OPSA_Sequence)
371 | if len(existing_nodes) > 0:
372 | print('aaaargh, should not be, first we define the sequence, then'
373 | + ' we store them in the tree')
374 | raise RuntimeError('existing sequences should not happen'
375 | + ' at this stage of computation')
376 |
377 | seq = OPSA_Sequence(xml_node.get('name'))
378 |
379 | for e in xml_node:
380 | if e.tag == 'label':
381 | seq.set_label(e.text.strip())
382 | elif e.tag == 'event-tree':
383 | seq.set_event_tree(e.get('name'))
384 | else:
385 | raise NotImplementedError('et_define_sequence not implemented'
386 | + ' property:', e.tag)
387 |
388 | a_dict[xml_node.get('name')] = seq
389 |
390 | def et_define_branch(self, xml_node, tree, tree_parent, a_dict):
391 | """
392 | Parses a branching data structure by calling ``parse_et_fork``.
393 | """
394 |
395 | subtree = Tree()
396 | parent = subtree.root
397 |
398 | for e in xml_node:
399 | self.parse_et_fork(e, subtree, parent, a_dict)
400 |
401 | if subtree.size() == 0:
402 | raise RuntimeError('Event tree branch contains no data')
403 |
404 | a_dict[xml_node.get('name')] = subtree
405 |
406 | def parse_event_tree_node(self, xml_node, tree, tree_parent, a_dict):
407 | """
408 | Parses the event tree by calling functions to parse the data for
409 | functional event, sequence, initial state and branch.
410 |
411 | Communication and preprocessed data is stored and shared in the
412 | ``a_dict`` dictionary.
413 | """
414 |
415 | if xml_node.tag == 'define-functional-event':
416 | self.et_define_functional_event(xml_node, tree, tree_parent,
417 | a_dict)
418 | elif xml_node.tag == 'define-sequence':
419 | self.et_define_sequence(xml_node, tree, tree_parent, a_dict)
420 | elif xml_node.tag == 'initial-state':
421 | self.parse_et_initial_state(xml_node, tree, tree_parent, a_dict)
422 | elif xml_node.tag == 'define-branch':
423 | self.et_define_branch(xml_node, tree, tree_parent, a_dict)
424 | else:
425 | raise NotImplementedError('parse_event_tree_node(...) does not'
426 | + ' handle {}'.format(xml_node.tag))
427 |
428 | def parse_event_tree(self, xml_node, tree, tree_parent):
429 | """
430 | Parses the event tree. Before parsing the initiating event is created
431 | (if it does not exist). Parses all subtree nodes of the event tree.
432 | """
433 |
434 | parent = tree_parent
435 |
436 | # check if there is an initiating-event that points to the
437 | # current event-tree
438 | existing_nodes = self.tree_find_nodes(tree, None,
439 | OPSA_Initiating_Event)
440 | if len(existing_nodes) > 0:
441 | # check if 'Initiating_Event.event-tree == xml_node.name'
442 | for node in existing_nodes:
443 | if node.data.event_tree == xml_node.get('name'):
444 | parent = node
445 |
446 | # create new event-tree node in tree
447 | n = tree.create_node(xml_node.get('name'), parent=parent,
448 | data=OPSA_Container(xml_node.get('name'),
449 | EventType.EVENT_TREE))
450 | # traverse all nodes of the event tree
451 | a_dict = {}
452 | for subnode in xml_node:
453 | self.parse_event_tree_node(subnode, tree, n, a_dict)
454 |
455 | def parse_gate_type(self, node):
456 | """
457 | Check if the gate type is or, and, or atleast.
458 | **Only add new gate types if they are properly handled.**
459 | Returns class ``GateType``.
460 | """
461 | tag = node.tag
462 | if tag == 'or':
463 | return GateType.OR
464 | if tag == 'and':
465 | return GateType.AND
466 | if tag == 'atleast':
467 | return GateType.ATLEAST
468 |
469 | raise NotImplementedError('gate type unknown {}'.format(node.tag))
470 |
471 | def ft_define_gate(self, xml_gate, tree, parent):
472 | """
473 | Parses in a fault tree (ft) a gate. Handles *AND*, *OR* and *AT_LEAST*
474 | (**Note:** *AT_LEAST* is not yet converted to a Bayesian network).
475 |
476 | For large OPSA definitions recurring parts are left out by just
477 | stopping with the gate name that is defined somewhere else in the
478 | data structure. Therefore this function has to take into account
479 | whether the gate already exists.
480 | """
481 |
482 | gate_label = None
483 | for subnode in xml_gate:
484 | if (subnode.tag == 'and'
485 | or subnode.tag == 'or'
486 | or subnode.tag == 'atleast'):
487 | # get gate type and then create connected events, gates, ...
488 | # to this gate
489 | gate_type = self.parse_gate_type(subnode)
490 | existing_node = self.tree_find_node(tree, xml_gate.get('name'),
491 | OPSA_Gate)
492 | if existing_node is None:
493 | opsa_gate = OPSA_Gate(xml_gate.get('name'), gate_type,
494 | gate_label)
495 | if gate_type == GateType.ATLEAST:
496 | # refine the opsa_gate with atleast value
497 | opsa_gate.set_atleast_min(subnode.get('min'))
498 |
499 | gate = tree.create_node(xml_gate.get('name'),
500 | parent=parent,
501 | data=opsa_gate)
502 | else:
503 | # existing_node
504 | gate = existing_node
505 | gate.data.gate_type = gate_type
506 |
507 | if gate_type == GateType.ATLEAST:
508 | gate.data.set_atleast_min(subnode.get('min'))
509 |
510 | if len(subnode) <= 0:
511 | raise RuntimeError("No connections to gate: {}"
512 | .format(xml_gate.get('name')))
513 |
514 | for e in subnode:
515 | if e.tag == 'gate':
516 | opsa_gate = OPSA_Gate(e.get('name'), None)
517 | tree.create_node(e.get('name'), parent=gate,
518 | data=opsa_gate)
519 | elif e.tag == 'basic-event' or e.tag == 'event':
520 | opsa_basic_event = OPSA_Basic_Event(e.get('name'),
521 | probability=None)
522 | tree.create_node(e.get('name'), parent=gate,
523 | data=opsa_basic_event)
524 | else:
525 | raise NotImplementedError('ft_define_gate does not'
526 | + ' handle {}'.format(e.tag))
527 |
528 | elif subnode.tag == 'label':
529 | gate_label = subnode.text.strip()
530 | continue
531 |
532 | else:
533 | raise NotImplementedError(' Unknown tag <{}>'
534 | .format(subnode.tag))
535 |
536 | def ft_define_basic_event(self, xml_node, tree, parent):
537 | """
538 | Parses in a fault tree (ft) the basic event. Stores, if available,
539 | the probability and the label (description) of the basic event.
540 | Exponential probability is not yet implemented.
541 | """
542 |
543 | # search for existing event (should already be created before)
544 | existing_node = self.tree_find_node(tree, xml_node.get('name'),
545 | OPSA_Basic_Event)
546 | d = existing_node.data
547 |
548 | for e in xml_node:
549 | if e.tag == 'float':
550 | d.set_probability(float(e.get('value')))
551 | elif e.tag == 'label':
552 | d.set_label(e.text.strip())
553 | elif e.tag == 'exponential':
554 | print('*************************************')
555 | raise NotImplementedError('Probability <{}> unknown'
556 | .format(e.tag))
557 | else:
558 | raise NotImplementedError('ft_define_basic_event not '
559 | 'implemented property: <{}> in'
560 | ' <{}>'
561 | .format(e.tag, xml_node.get('name')))
562 |
563 | def parse_fault_tree_node(self, xml_node, tree, tree_parent):
564 | """
565 | Parses the fault tree by calling functions to parse the data for
566 | gate, basic_event and label of the fault tree.
567 | """
568 | if xml_node.tag == 'define-gate':
569 | self.ft_define_gate(xml_node, tree, tree_parent)
570 | elif xml_node.tag == 'define-basic-event':
571 | self.ft_define_basic_event(xml_node, tree, tree_parent)
572 | elif xml_node.tag == 'label':
573 | tree_parent.data.set_label(xml_node.text.strip())
574 | elif xml_node.tag == 'define-parameter':
575 | raise NotImplementedError('parse_fault_tree_node(...) does not'
576 | ' implement {}'.format(xml_node.tag))
577 | else:
578 | raise NotImplementedError('parse_fault_tree_node(...) does not'
579 | ' handle {}'.format(xml_node.tag))
580 |
581 | def parse_fault_tree(self, xml_node, tree, tree_parent):
582 | """
583 | Creates a new node in the tree for the fault tree. Then
584 | it parses the fault tree by calling ``parse_fault_tree_node``.
585 | """
586 |
587 | # create new fault-tree node in tree
588 | n = tree.create_node(xml_node.get('name'), parent=tree_parent,
589 | data=OPSA_Container(xml_node.get('name'),
590 | EventType.FAULT_TREE))
591 |
592 | # traverse all nodes of the fault tree
593 | for subnode in xml_node:
594 | self.parse_fault_tree_node(subnode, tree, n)
595 |
596 | def parse_initiating_event(self, xml_node, tree, tree_parent):
597 | """
598 | Creates a new node in the tree with an initiating event
599 | ``OPSA_Initiating_Event``.
600 | """
601 |
602 | # create the initiating event
603 | # create new initiating-event node in tree
604 | tree.create_node(xml_node.get('name'), parent=tree_parent,
605 | data=OPSA_Initiating_Event(xml_node.get('name'),
606 | EventType.INITIATING_EVENT,
607 | xml_node.get('event-tree')
608 | )
609 | )
610 |
611 | def parse_containers(self, xml_node, tree, tree_parent):
612 | """
613 | Parses the XML and switches depending on the container type
614 | to the appropriate parsing function.
615 | """
616 | if xml_node.tag == 'define-event-tree':
617 | self.parse_event_tree(xml_node, tree, tree_parent)
618 | elif xml_node.tag == 'define-fault-tree':
619 | self.parse_fault_tree(xml_node, tree, tree_parent)
620 | elif xml_node.tag == 'model-data':
621 | self.parse_model_data(xml_node, tree)
622 | elif xml_node.tag == 'define-initiating-event':
623 | self.parse_initiating_event(xml_node, tree, tree_parent)
624 | else:
625 | raise NotImplementedError('Container type unknown: {}'
626 | .format(xml_node.tag))
627 |
628 | def parse(self, xml_root, tree, tree_root):
629 | """
630 | Parses the xml_root and stores the data in the tree.
631 | It iterates all the containers in the XML data.
632 | """
633 |
634 | self.check_root(xml_root)
635 |
636 | for container in xml_root:
637 | self.parse_containers(container, tree, tree_root)
638 | self.postprocess(tree)
639 |
640 | def postprocess(self, tree):
641 | """
642 | Correct flaws from loading. They are caused mainly by
643 | shortcuts in the XML data.
644 |
645 | If a gate is not an internal node, there must be a definition
646 | of that node somewhere else. So find it and copy it to that gate.
647 | Continue until there are no more leaf gates.
648 | """
649 |
650 | max_iter = 1000
651 | i = 0
652 | found_leaf_gates = True
653 |
654 | while i < max_iter and found_leaf_gates:
655 | found_leaf_gates = self.find_leaf_gates(tree)
656 | i += 1
657 |
658 | if found_leaf_gates:
659 | raise RuntimeError('Still too many gates without children found,'
660 | ' perhaps increase max_iter = {}'
661 | .format(max_iter))
662 |
663 | def find_leaf_gates(self, tree):
664 | """
665 | Find all the gates that are leaves. All gates should be internal,
666 | so each gate has to have basic events as leaves.
667 | """
668 |
669 | for node in tree.all_nodes_itr():
670 | if isinstance(node.data, OPSA_Gate):
671 | if node.is_leaf():
672 | print(' Node is leaf {}\tid = {}'
673 | .format(node.tag, node.identifier))
674 | # find all nodes with the same tag as node
675 | found_nodes = self.tree_find_nodes(tree, node.tag,
676 | OPSA_Gate)
677 |
678 | if len(found_nodes) == 0:
679 | raise RuntimeError('No internal gate found with {}'
680 | .format(node.tag))
681 |
682 | for f in found_nodes:
683 | if not f.is_leaf():
684 | self.paste_gate(tree, node, f)
685 | return True
686 | return False
687 |
688 | def paste_gate(self, tree, dest_node, source_node):
689 | """
690 | Paste the children of the source_node under the dest_node.
691 | """
692 |
693 | for child in tree.children(source_node.identifier):
694 | subtree = tree.subtree(child.identifier)
695 | t = tree_copy(subtree)
696 | tree.paste(dest_node.identifier, t)
697 |
--------------------------------------------------------------------------------
/bowtie/mapping/mapping_bowtie.py:
--------------------------------------------------------------------------------
1 | """
2 | The ``mapping_bowtie`` module holds the class ``MappingBowTie``. This
3 | class maps the Bow-Tie that is stored in a tree structure to a
4 | Bayesian network.
5 |
6 | For the Bayesian network the pgmpy package is used.
7 |
8 | Example::
9 |
10 | import xml.etree.ElementTree as ET
11 | from treelib import Tree
12 | from bowtie.io.import_opsa import OPSA_Importer
13 | from bowtie.mapping.mapping_bowtie import MappingBowTie
14 |
15 | # Note: importing is the same as the example in bowtie.io.import_opsa
16 | # read XML file, here opsa_input.xml
17 | xml_root = ET.parse("opsa_input.xml").getroot()
18 | # create and prepare tree for results
19 | tree = Tree()
20 | tree_root = tree.create_node('root')
21 | # create importer and parse
22 | importer = OPSA_Importer()
23 | importer.parse(xml_root, tree, tree_root)
24 |
25 | # now map the Bow-Tie to a Bayesian network
26 | mapper = MappingBowTie(tree)
27 | model = mapper.map()
28 |
29 | # show model nodes
30 | print('nodes')
31 | print(model.nodes())
32 |
33 | print('check bayesian model = {}'.format(model.check_model()))
34 |
35 | """
36 |
37 | from pgmpy.models import BayesianModel
38 | from pgmpy.factors.discrete import TabularCPD
39 | from treelib import Tree
40 |
41 | from bowtie.dataresources.type_definitions import GateType, EventType
42 | from bowtie.dataresources.opsa import OPSA_Container, OPSA_Basic_Event, \
43 | OPSA_Gate, OPSA_Initiating_Event, OPSA_Functional_Event, OPSA_Path, \
44 | OPSA_Sequence
45 | from bowtie.io.import_opsa import OPSA_Importer
46 |
47 |
48 | class MappingBowTie:
49 |
50 | '''
51 | Convert a Bow-Tie into a Bayesian network.
52 |
53 | The conversion is based on the paper:
54 |
55 | Nima Khakzad, Faisal Khan, Paul Amyotte: *Dynamic safety analysis
56 | of process systems by mapping bow-tie into Bayesian network.*
57 | Process Safety and Environmental Protection, 2013.
58 |
59 | The function ``map()`` maps the Bow-Tie tree into a Bayesian network.
60 | The returned Bayesian network is of the package pgmpy.
61 |
62 | Implemented mapping of gates: ``OR``, ``AND``.
63 | '''
64 |
65 | def __init__(self, tree):
66 | self.tree = tree
67 | self.importer = OPSA_Importer()
68 | self.model = None
69 | # define some constants
70 | self.consequence_name = 'Consequence'
71 | self.safe_sequence = 'Safe_State'
72 |
73 | def find_event_tree(self, tree, top_event):
74 | """
75 | Find and return the event tree in the ``tree``.
76 | """
77 |
78 | # find the event tree
79 | events = self.importer.tree_find_nodes(tree, top_event.tag,
80 | OPSA_Initiating_Event)
81 | if len(events) != 1:
82 | raise RuntimeError('Initiating event for event tree not defined.'
83 | + ' No connection from fault tree to event tree')
84 | event = events[0]
85 |
86 | event_trees = self.importer.tree_find_nodes(tree,
87 | event.data.event_tree,
88 | OPSA_Container)
89 | if len(event_trees) != 1:
90 | raise RuntimeError('More than one event tree for the Top Event')
91 | event_tree = event_trees[0]
92 |
93 | return event_tree
94 |
95 | def get_top_event(self, tree):
96 | """
97 | Return the top event of the fault tree in the ``tree``.
98 | """
99 |
100 | fault_tree = None  # look for the first fault tree in the tree structure
101 | root_node = tree.get_node(tree.root)
102 | children = tree.children(root_node.identifier)
103 | for child in children:
104 | d = child.data
105 | if isinstance(d, OPSA_Container):
106 | if d.container_type == EventType.FAULT_TREE:
107 | fault_tree = child
108 | break
109 | if not fault_tree:
110 | raise RuntimeError('No fault tree given, so also no top event')
111 |
112 | # get children of fault tree. First element is the Top Event
113 | children = tree.children(fault_tree.identifier)
114 | if len(children) != 1:
115 | raise RuntimeError('Fault tree must have exactly ONE Top Event.'
116 | + ' But has {}'.format(len(children)))
117 |
118 | return children[0]
119 |
120 | def probability_contains_values(self, prob_row, prob_values):
121 | """
122 | If all the elements of ``prob_values`` are in ``prob_row``
123 | return ``True`` otherwise ``False``.
124 | """
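        # Illustration with hypothetical rows: for
        #   prob_row    = [('e1', 'yes'), ('e2', 'true'), 0.1]
        #   prob_values = [('e1', 'yes')]
        # every element of prob_values is found in prob_row, so the result
        # is True; with prob_values = [('e1', 'no')] it would be False.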
125 |
126 | if len(prob_row) - 1 < len(prob_values):
127 | list_a = prob_row[:-1]
128 | list_b = prob_values
129 | else:
130 | list_a = prob_values
131 | list_b = prob_row
132 | for element in list_a:
133 | inside = element in list_b
134 | if not inside:
135 | return False
136 |
137 | return True
138 |
139 | def find_state_for_probabilities(self, prob_list, prob_values):
140 | """
141 | Find the row in the ``prob_list`` that matches ``prob_values`` and
142 | return the last element of that row.
143 | """
144 |
145 | # have at least all the values of the prob_values in the prob_list
146 | for row in prob_list:
147 | if self.probability_contains_values(row, prob_values):
148 | return row[len(row) - 1]
149 |
150 | return None
151 |
152 | def find_tag_in_tree(self, tree, tag):
153 | """
154 | Find the nodes in the ``tree`` that have the tag ``tag``. Returns
155 | a list with the nodes.
156 | """
157 | res = []
158 | for node in tree.all_nodes_itr():
159 | if node.tag == tag:
160 | res.append(node)
161 | return res
162 |
163 | def states_in_probability(self, prob_list):
164 | """
165 | Return a dictionary with the states in the ``prob_list``.
166 | """
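        # Sketch with hypothetical rows: for
        #   prob_list = [[('e1', 'yes'), ('e2', 'true'), 0.1],
        #                [('e1', 'no'),  ('e2', 'true'), 0.2]]
        # the result is {'e1': ['yes', 'no'], 'e2': ['true']} (list order may
        # differ, because duplicates are removed via set()).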
167 | tmp_dict = {}
168 |
169 | for row in prob_list:
170 | for element in row:
171 | if isinstance(element, tuple):
172 | # is element[0] in res_dict, add element[1] in list
173 | # not in res_dict, add list
174 | if element[0] in tmp_dict:
175 | tmp_dict[element[0]].append(element[1])
176 | else:
177 | tmp_dict[element[0]] = [element[1]]
178 |
179 | res = {}
180 | for k, v in tmp_dict.items():
181 | res[k] = list(set(v))
182 |
183 | return res
184 |
185 | def split_probability_table(self, name, prob_list):
186 | """
187 | Split the ``prob_list`` in two lists ``name_list`` and
188 | ``sans_name_list``. ``name_list`` contains prob_list elements that
189 | have the name equal to ``name``, ``sans_name_list`` contains all
190 | the other elements. Returns ``name_list``, ``sans_name_list``.
191 | """
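        # Illustration with a hypothetical row: for name = 'e1' and
        #   prob_list = [[('e1', 'yes'), ('e2', 'true'), 0.1]]
        # name_list becomes [[('e1', 'yes')]] and sans_name_list becomes
        # [[('e2', 'true')]]; non-tuple entries such as the trailing
        # probability are dropped from both lists.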
192 |
193 | name_list = []
194 | sans_name_list = []
195 |
196 | for row in prob_list:
197 | new_row = []
198 | new_row_name = []
199 | for element in row:
200 | if isinstance(element, tuple):
201 | if element[0] == name:
202 | new_row_name.append(element)
203 | else:
204 | new_row.append(element)
205 | sans_name_list.append(new_row)
206 | name_list.append(new_row_name)
207 |
208 | return name_list, sans_name_list
209 |
210 | def get_state_for_number(self, states, bit):
211 | """
212 | Return the state stored in ``states`` for the bit.
213 | Only bit values 0 and 1 are allowed, i.e. two states.
214 | Values of ``states`` are compared if they belong to the given
215 | set of allowed states.
216 |
217 | Allowed states::
218 |
219 | first = ['works', 'true', 'yes']
220 | second = ['fails', 'false', 'no']
221 |
222 | """
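        # Illustration:
        #   get_state_for_number(['fails', 'works'], 0) -> 'works'
        #   get_state_for_number(['fails', 'works'], 1) -> 'fails'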
223 |
224 | # some quick checks for the size of the bit
225 | if not (bit >= 0 and bit < 2):
226 | raise RuntimeError('number of states wrong {}'.format(bit))
227 |
228 | first = ['works', 'true', 'yes']
229 | second = ['fails', 'false', 'no']
230 |
231 | if bit == 0:
232 | for f in first:
233 | if f in states:
234 | return f
235 | if bit == 1:
236 | for s in second:
237 | if s in states:
238 | return s
239 | return None
240 |
241 | def derived_values_from_probabilities(self, prob_list, seq, variable_name):
242 | '''
243 | Returns the value_list, evidence, evidence_card and state_names.
244 | '''
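        # Rough shape of the result (sketch): with three evidence variables
        # of two states each, value_list has len(seq) rows of 2**3 = 8
        # entries (1.0 where the row's states lead to that sequence, else
        # 0.0), evidence holds the three variable names, evidence_card is
        # [2, 2, 2] and state_names maps variable_name and each evidence
        # variable to its list of states.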
245 |
246 | # get values for the states
247 | all_states = self.states_in_probability(prob_list)
248 | all_states_list = list(all_states)
249 |
250 | len_pos = (len(all_states))
251 | num_pos = 2**len_pos
252 |
253 | values = {}
254 | for s in seq:
255 | values[s] = [None] * num_pos
256 |
257 | for i in range(num_pos):
258 | bin_str = list(format(i, '08b'))
259 | bin_str.reverse()
260 |
261 | prob_values = []
262 | for n in range(len_pos):
263 | bit = int(bin_str[n])
264 | event = all_states_list[n]
265 | state = self.get_state_for_number(all_states[event], bit)
266 |
267 | # state = all_states[event][bit]
268 | prob_values.append((event, state))
269 |
270 | state = self.find_state_for_probabilities(prob_list, prob_values)
271 |
272 | for s in seq:
273 | if s == state:
274 | x = 1.0
275 | # x = 0.0
276 | else:
277 | x = 0.0
278 | # x = 1.0
279 | values[s][i] = x
280 | evidence = all_states_list.copy()
281 | evidence.reverse()
282 |
283 | # evidence card
284 | evidence_card = []
285 | for e in evidence:
286 | evidence_card.append(len(all_states[e]))
287 |
288 | state_names = {}
289 | state_names[variable_name] = list(seq)
290 | for ev in evidence:
291 | tmp = []
292 | for i in range(len(all_states[ev])):
293 | s = self.get_state_for_number(all_states[ev], i)
294 | tmp.append(s)
295 |
296 | # state_names[ev] = all_states[ev].copy()
297 | state_names[ev] = tmp
298 |
299 | value_list = []
300 | for s in seq:
301 | value_list.append(values[s])
302 |
303 | return value_list, evidence, evidence_card, state_names
304 |
305 | def consequence_probability_add_state(self, prob_list, event, state):
306 | """
307 | Insert ``event`` with 'fails'/'works' states into each row of ``prob_list``; the 'works' rows get the given ``state``.
308 | """
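        # Illustration with hypothetical names: for prob_list = [['c1']],
        # event = 'TE' and state = 'Safe', the returned list is
        #   [[('TE', 'fails'), 'c1'],
        #    [('TE', 'works'), 'Safe']]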
309 |
310 | # insert the event with state
311 | prob_copy = []
312 | for row in prob_list:
313 | cons = row[len(row) - 1]
314 | if not isinstance(cons, str):
315 | raise RuntimeError("Last element of row ({}) should be"
316 | " the sequence name (type str)"
317 | .format(row))
318 |
319 | new_row = row.copy()
320 | t1 = (event, 'fails')
321 | new_row.insert(len(row) - 1, t1)
322 | prob_copy.append(new_row)
323 |
324 | new_row = row.copy()
325 | t2 = (event, 'works')
326 | new_row.insert(len(row) - 1, t2)
327 | new_row[len(new_row) - 1] = state
328 | prob_copy.append(new_row)
329 |
330 | return prob_copy
331 |
332 | def create_cdp_consequence(self, name, probs, top_event):
333 | '''
334 | Create a ``TabularCPD()`` for the consequences.
335 | '''
336 |
337 | new_probs = self.consequence_probability_add_state(probs,
338 | top_event.tag,
339 | 'Safe')
340 |
341 | # get number of consequences
342 | s = set()
343 | for row in new_probs:
344 | seq_name = row[len(row) - 1]
345 | if not isinstance(seq_name, str):
346 | raise RuntimeError('Last element of row ({}) should be'
347 | ' sequence name'.format(row))
348 | s.add(seq_name)
349 |
350 | variable = name
351 | variable_card = len(s)
352 | values, evidence, evidence_card, state_names = \
353 | self.derived_values_from_probabilities(new_probs, s, name)
354 |
355 | cpd = self.fill_cpd_table(variable, variable_card, values,
356 | evidence, evidence_card, state_names)
357 | return cpd
358 |
359 | def sorted_probability_dict(self, prob_dict):
360 | """
361 | Return the ``values``, ``states`` from the prob_dict.
362 | """
363 |
364 | if len(prob_dict) != 2:
365 | raise RuntimeError('Only two states allowed, given: {}'
366 | .format(list(prob_dict))
367 | )
368 |
369 | first = ['works', 'true', 'yes']
370 | second = ['fails', 'false', 'no']
371 |
372 | values = []
373 | states = []
374 | prob_dict_list = list(prob_dict)
375 |
376 | found_first = False
377 | # find first value
378 | for f in first:
379 | if not found_first:
380 | if f in prob_dict_list:
381 | values.append([prob_dict[f]])
382 | states.append(f)
383 |
384 | found_first = True
385 |
386 | found_second = False
387 | # find second value
388 | for f in second:
389 | if not found_second:
390 | if f in prob_dict_list:
391 | values.append([prob_dict[f]])
392 | states.append(f)
393 |
394 | found_second = True
395 |
396 | if not (found_first and found_second):
397 | raise RuntimeError('Could not find first or second value')
398 |
399 | return values, states
400 |
401 | def create_cpd_functional_event_empty_probs(self, tree, name):
402 | """
403 | Return CPD table for node with name ``name``.
404 | The probability list is empty, because all the probabilities of
405 | node ``name`` have the same value.
406 | """
407 |
408 | # get the probabilities of the node "name"
409 | node_list = self.find_tag_in_tree(tree, name)
410 | # this probability list is empty, because all the probabilities of
411 | # node "name" have the same value. So we can just take the first one
412 | node = node_list[0]
413 |
414 | prob_dict = self.fe_prob_to_dict(tree, node)
415 |
416 | variable = name
417 | variable_card = len(prob_dict.keys())
418 |
419 | values, states = self.sorted_probability_dict(prob_dict)
420 | state_names = {name: states}
421 |
422 | # CPDs create
423 | cpd = TabularCPD(variable=variable,
424 | variable_card=variable_card,
425 | values=values,
426 | state_names=state_names
427 | )
428 | return cpd
429 |
430 | def create_cpd_functional_event_list(self, name, prob_list):
431 | """
432 | Returns CPD table for the prob_list.
433 | """
434 |
435 | # get values for the states
436 | all_states = self.states_in_probability(prob_list)
437 | all_states_list = list(all_states)
438 |
439 | if not (name in all_states_list):
440 | raise RuntimeError('Event ({}) cannot be found in the'
441 | ' probability list ({})'
442 | .format(name, prob_list))
443 |
444 | all_states_wo_name = all_states.copy()
445 | all_states_wo_name.pop(name, None)
446 |
447 | all_states_list_wo_name = all_states_list.copy()
448 | all_states_list_wo_name.remove(name)
449 |
450 | _ = self.split_probability_table(name, prob_list)
451 |
452 | # here we have 2^len_pos possibilities (works/fails)
453 | len_pos = (len(all_states))
454 | num_pos = 2**len_pos
455 |
456 | if len_pos == len(prob_list):
457 | print(len_pos, len(prob_list))
458 | raise RuntimeError("Length mismatch of probability list and"
459 | " computed possibilities")
460 |
461 | value_list = []
462 | # now cycle over all possibilities and numbers
463 | for i in range(num_pos):
464 | bin_str = list(format(i, '08b'))
465 | bin_str.reverse()
466 |
467 | prob_values = []
468 | for n in range(len_pos):
469 | bit = int(bin_str[n])
470 | if n == len_pos - 1:
471 | event = name
472 | state = all_states[event][bit]
473 | else:
474 | event = all_states_list_wo_name[n - 1]
475 | state = all_states[event][bit]
476 |
477 | prob_values.append((event, state))
478 |
479 | prob = self.find_state_for_probabilities(prob_list, prob_values)
480 | value_list.append(prob)
481 |
482 | len_name_states = len(all_states[name])
483 | num_events = int(num_pos / len_name_states)
484 |
485 | values = []
486 | n = 0
487 | for j in range(num_events):
488 | # fill for each event the list with possibilities
489 | tmp = [None] * len_name_states
490 | for i in range(len_name_states):
491 | tmp[i] = value_list[n]
492 | n += 1
493 |
494 | values.append(tmp)
495 |
496 | # cpd_s_sn = TabularCPD(variable='S', variable_card=2,
497 | # values=[[0.95, 0.2],
498 | # [0.05, 0.8]],
499 | # evidence=['I'],
500 | # evidence_card=[2],
501 | # state_names={'S': ['Bad', 'Good'],
502 | # 'I': ['Dumb', 'Intelligent']})
503 | #
504 | # +---------+---------+----------------+
505 | # | I | I(Dumb) | I(Intelligent) |
506 | # +---------+---------+----------------+
507 | # | S(Bad) | 0.95 | 0.2 |
508 | # +---------+---------+----------------+
509 | # | S(Good) | 0.05 | 0.8 |
510 | # +---------+---------+----------------+
511 |
512 | variable = name
513 | variable_card = len(all_states[name])
514 |
515 | evidence = all_states_list_wo_name
516 | # evidence card
517 | evidence_card = []
518 | for e in evidence:
519 | evidence_card.append(len(all_states[e]))
520 | state_names = all_states
521 |
522 | cpd = self.fill_cpd_table(variable, variable_card, values, evidence,
523 | evidence_card, state_names)
524 |
525 | return cpd
526 |
527 | def create_cdp_functional_event(self, tree, name, probs):
528 | """
529 | Create CPD for ``name`` with probability ``probs``.
530 | """
531 |
532 | cpd = None
533 | if len(probs) == 0:
534 | cpd = self.create_cpd_functional_event_empty_probs(tree, name)
535 | else:
536 | cpd = self.create_cpd_functional_event_list(name, probs)
537 |
538 | return cpd
539 |
540 | def get_functional_event_tuple(self, tree, path, sequence_name):
541 | """
542 | Create a functional event tuple for the nodes in the path that
543 | are of type OPSA_Functional_Event.
544 | """
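        # The result looks roughly like (hypothetical names):
        #   [('fe1', 'works'), ('fe2', 'fails'), 'Seq1']
        # i.e. one (functional event, path state) pair per functional event
        # on the path, followed by the sequence name.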
545 |
546 | aList = []
547 | for ind, n in enumerate(path):
548 | node = tree.get_node(n)
549 | if isinstance(node.data, OPSA_Functional_Event):
550 | # get next element
551 | if ind + 1 >= len(path):
552 | for p in path:
553 | print('node = {}'.format(tree.get_node(p).tag))
554 | raise RuntimeError('Cannot access the next element of ({})'
555 | .format(node.tag))
556 |
557 | next_node = tree.get_node(path[ind + 1])
558 |
559 | if not isinstance(next_node.data, OPSA_Path):
560 | raise RuntimeError('Functional event ({}) should be'
561 | ' followed by a path'.format(node.tag))
562 |
563 | aList.append((node.tag, next_node.data.state))
564 |
565 | aList.append(sequence_name)
566 | res = ([x for x in aList])
567 |
568 | return res
569 |
570 | def check_causal_arc_elimination(self, elimination_dict, b_node):
571 | """
572 | Returns the value of the elimination_dict for b_node.
573 | """
574 | return elimination_dict[b_node]
575 |
576 | def get_next_element_of_path(self, tree, path, start_element,
577 | node_type=OPSA_Gate):
578 | """
579 | Returns the next element if the path beginning at start_element
580 | that is of type node_type.
581 | """
582 |
583 | start_index = self.get_list_index_of_element(path, start_element)
584 |
585 | if len(path) <= start_index + 1:
586 | return None
587 |
588 | node = tree.get_node(path[start_index + 1])
589 |
590 | if isinstance(node.data, node_type):
591 | return node
592 | else:
593 | return None
594 |
595 | def fe_prob_to_dict(self, tree, node):
596 | """
597 | Returns the probabilities of the node's children as a dict keyed by state.
598 | """
599 | children = tree.children(node.identifier)
600 | res = {}
601 | for child in children:
602 | res[child.data.state] = child.data.probability
603 |
604 | return res
605 |
606 | def perform_event_tree_checks(self, tree):
607 | """
608 | Perform some checks in the tree containing the event tree.
609 | Checks that consequence_name and safe_sequence are not already in use.
610 | """
611 |
612 | fe = self.importer.tree_find_nodes(tree, self.consequence_name,
613 | OPSA_Functional_Event)
614 | if len(fe) > 0:
615 | raise RuntimeError('There has to be no functional event'
616 | + ' named <{}>!'.format(self.consequence_name))
617 |
618 | seq = self.importer.tree_find_nodes(tree, self.safe_sequence,
619 | OPSA_Sequence)
620 | if len(seq) > 0:
621 | raise RuntimeError('There has to be no sequence named <{}>!'
622 | .format(self.safe_sequence))
623 |
624 | def get_event_tree_subtree(self, tree, event_tree):
625 | """
626 | Returns a subtree and the root of that subtree that includes only
627 | the event_tree. The subtree is
628 | taken from the tree and starts at the event_tree.
629 | """
630 |
631 | subtree = tree.subtree(event_tree.identifier)
632 | subtree_root = subtree.get_node(subtree.root)
633 |
634 | return subtree, subtree_root
635 |
636 | def get_list_index_of_element(self, aList, element):
637 | """
638 | Checks if element is in aList. Returns None if it is not included,
639 | otherwise the index of element in aList, so that
640 | element == aList[index].
641 | """
642 |
643 | cnt = aList.count(element)
644 | if cnt == 0:
645 | # no element in aList
646 | return None
647 | return aList.index(element)
648 |
649 | def check_path(self, complete_path, test_path):
650 | """
651 | Checks if all elements of ``test_path`` are stored in
652 | ``complete_path``. Returns True, otherwise raises an exception.
653 | """
654 |
655 | last_index = 0
656 | for element in test_path:
657 | index_complete = self.get_list_index_of_element(complete_path,
658 | element)
659 | if index_complete is None:
660 | raise RuntimeError('Path incomplete: The element ({}) is'
661 | ' not in the complete path. There should'
662 | ' be a path from top event to consequence'
663 | ' that contains all functional events.'
664 | ' Please check this'
665 | .format(element))
666 | if last_index > index_complete:
667 | raise RuntimeError('Ordering wrong: The element ({})'
668 | ' appears before ({}). Differs from other'
669 | ' path definition.'
670 | .format(complete_path[last_index], element))
671 | last_index = index_complete
672 |
673 | return True
674 |
675 | def get_all_events_in_row(self, paths_list):
676 | """
677 | Make a guess and just take the path with the most elements for
678 | the complete path. Check if this is true (should be).
679 |
680 | Not sure if this works properly for all cases - perhaps users
681 | will abuse the event trees.
682 | """
683 |
684 | # start with the longest path
685 | complete_path = []
686 | for path in paths_list:
687 | if len(path) > len(complete_path):
688 | complete_path = path
689 | for path in paths_list:
690 | if not self.check_path(complete_path, path):
691 | raise RuntimeError('Path is not correct: {}'.format(path))
692 |
693 | return complete_path
694 |
695 | def get_functional_event_ordering(self, tree):
696 | """
697 | Returns the functional events in the order they occur along the paths.
698 | """
699 |
700 | # get all the paths for the functional_events
701 | all_paths = []
702 | for p in tree.paths_to_leaves():
703 | a_list = []
704 | for n in p:
705 | node = tree.get_node(n)
706 | if isinstance(node.data, OPSA_Functional_Event):
707 | a_list.append(node.tag)
708 | all_paths.append(a_list)
709 |
710 | return self.get_all_events_in_row(all_paths)
711 |
712 | def get_probs_equal(self, tree, node_list):
713 | """
714 | Check if all the probabilities of the nodes in node_list are the same.
715 | If the probabilities of the nodes differ, the return value is False,
716 | otherwise True.
717 | """
718 |
719 | if len(node_list) == 0:
720 | raise RuntimeError("node_list is empty")
721 |
722 | first_prob = self.fe_prob_to_dict(tree, node_list[0])
723 | for node in node_list:
724 | sub_probs = self.fe_prob_to_dict(tree, node)
725 | if first_prob != sub_probs:
726 | return False
727 |
728 | return True
729 |
730 | def get_causal_arc_elemination(self, tree, fe_ordered):
731 | """
732 | Return dictionary with arc elimination.
733 | """
734 |
735 | # False: no arc elimination needed, probabilities are different
736 | # True: arc elimination needed (probabilities not different)
737 | result_dict = {}
738 |
739 | for element in fe_ordered:
740 | result_dict[element] = True
741 | events = self.importer.tree_find_nodes(tree, element,
742 | OPSA_Functional_Event)
743 | prob_equal = self.get_probs_equal(tree, events)
744 | result_dict[element] = prob_equal
745 |
746 | return result_dict
747 |
748 | def create_cpd_tables_event_tree(self, model, subtree, result_dict,
749 | top_event):
750 | """
751 | Create and add CPD tables from result_dict.
752 | """
753 |
754 | for key, value in result_dict.items():
755 | if key == self.consequence_name:
756 | cpd = self.create_cdp_consequence(key, value,
757 | top_event)
758 | else:
759 | cpd = self.create_cdp_functional_event(subtree, key, value)
760 | model.add_cpds(cpd)
761 |
762 | def map_path_previous(self, tree, elimination_dict):
763 |
764 | '''
765 | Before we can map the event tree to the dictionary,
766 | the number of states has to be checked, because otherwise
767 | downstream in the code we will have some problems with the CPD tables
768 | creation.
769 | So dummy-run the map_path without writing the data to the result_dict
770 | and do not store data if there are fewer than two states in map_path
771 | '''
772 |
773 | tmp_dict = {}
774 | for path in tree.paths_to_leaves():
775 | # collect the nodes of the path
776 | node_list = []
777 | for n in path:
778 | node = tree.get_node(n)
779 | node_list.append(node)
780 |
781 | for ind, node in enumerate(node_list):
782 | if not isinstance(node.data, OPSA_Functional_Event):
783 | continue
784 |
785 | causal = self.check_causal_arc_elimination(elimination_dict,
786 | node.tag)
787 | if causal:
788 | continue
789 |
790 | path_element = self.get_next_element_of_path(tree, path,
791 | node.identifier,
792 | OPSA_Path)
793 |
794 | tuple_list = [(node.tag, path_element.data.state)]
795 |
796 | for prev_node_index in reversed(range(0, ind)):
797 | prev_node = node_list[prev_node_index]
798 |
799 | if not isinstance(prev_node.data, OPSA_Functional_Event):
800 | continue
801 | prev_path_element = \
802 | self.get_next_element_of_path(tree,
803 | path,
804 | prev_node.identifier,
805 | OPSA_Path)
806 | tuple_list.append((prev_node.tag,
807 | prev_path_element.data.state))
808 |
809 | tuple_list.append((path_element.data.probability))
810 | t = tuple([(x) for x in tuple_list])
811 |
812 | # does key exist in tmp_dict? otherwise create a list
813 | if node.tag not in tmp_dict:
814 | tmp_dict[node.tag] = []
815 | tmp_dict[node.tag].append(t)
816 |
817 | # get number of states and only use the ones with exactly two states
818 | possible_states = {}
819 | for key, value in tmp_dict.items():
820 | # get values for the states
821 | all_states = self.states_in_probability(value)
822 | all_states_list = list(all_states)
823 |
824 | possible_list = []
825 |
826 | for name in all_states_list:
827 | num_states = len(all_states[name])
828 | if num_states < 2:
829 | continue
830 | if num_states > 2:
831 | raise RuntimeError('No more than two (2) states allowed.'
832 | + ' Sorry. Functional Event ({})'
833 | .format(name))
834 |
835 | possible_list.append(name)
836 |
837 | if not (key in possible_states):
838 | possible_states[key] = []
839 | possible_states[key] = possible_list
840 |
841 | return possible_states
842 |
843 | def map_path(self, model, tree, result_dict, elimination_dict, top_event,
844 | possible_states):
845 | '''
846 | - loop over all paths from top event to all sequences
847 | - create node_list with all the nodes of the path
848 | - loop over all elements of the path (node)
849 | - if the node is not a functional event, continue
850 | - loop over all previous elements of the path (prev_node)
851 | - if prev_node is not a functional event, continue
852 | - add edge prev_node -> node
853 | - get next element in path and add to tuple_list
854 | - connect top event with consequences
855 | '''
856 |
857 | # loop over all functional elements in path
858 | for path in tree.paths_to_leaves():
859 | # collect the nodes of the path
860 | node_list = []
861 | for n in path:
862 | node = tree.get_node(n)
863 | node_list.append(node)
864 |
865 | for ind, node in enumerate(node_list):
866 | if not isinstance(node.data, OPSA_Functional_Event):
867 | continue
868 |
869 | # connect the node with the consequence node
870 | model.add_edge(node.tag, self.consequence_name)
871 |
872 | # if causal elimination, we do not need a connection from any
873 | # prev_node to actual node
874 | causal = self.check_causal_arc_elimination(elimination_dict,
875 | node.tag)
876 | if causal:
877 | continue
878 |
879 | path_element = self.get_next_element_of_path(tree, path,
880 | node.identifier,
881 | OPSA_Path)
882 | tuple_list = [(node.tag, path_element.data.state)]
883 |
884 | for prev_node_index in reversed(range(0, ind)):
885 | prev_node = node_list[prev_node_index]
886 |
887 | if not isinstance(prev_node.data, OPSA_Functional_Event):
888 | continue
889 |
890 | # is previous node a possibility? we checked this before!
891 | if not (prev_node.tag in possible_states[node.tag]):
892 | continue
893 |
894 | model.add_edge(prev_node.tag, node.tag)
895 |
896 | prev_path_element = \
897 | self.get_next_element_of_path(tree, path,
898 | prev_node.identifier,
899 | OPSA_Path)
900 | tuple_list.append((prev_node.tag,
901 | prev_path_element.data.state))
902 |
903 | tuple_list.append((path_element.data.probability))
904 | t = tuple([(x) for x in tuple_list])
905 | result_dict[node.tag].append(t)
906 |
907 | # finally, store the functional events and the states that lead
908 | # to this sequence
909 | seq_node = tree.get_node(path[-1])
910 | t = self.get_functional_event_tuple(tree, path, seq_node.tag)
911 | result_dict[self.consequence_name].append(t)
912 |
913 | # connect top event with the consequences
914 | model.add_edge(top_event.tag, self.consequence_name)
915 |
916 | def map_functional_event_data(self, tree):
917 | """
918 | Returns a dictionary with the functional events and a dictionary
919 | with the causal arc elimination.
920 | """
921 |
922 | # result_dict stores the data for later processing to get the CPD/CPT
923 | result_dict = {}
924 |
925 | functional_events_ordered = self.get_functional_event_ordering(tree)
926 | elimination_dict = \
927 | self.get_causal_arc_elemination(tree, functional_events_ordered)
928 |
929 | for fe in functional_events_ordered:
930 | result_dict[fe] = []
931 | result_dict[self.consequence_name] = []
932 | return result_dict, elimination_dict
933 |
934 | def map_functional_events(self, model, tree, te, event_tree):
935 | """
936 | Maps the functional events of the event tree.
937 | """
938 |
939 | self.perform_event_tree_checks(tree)
940 | subtree, _ = self.get_event_tree_subtree(tree, event_tree)
941 |
942 | result_dict, elimination_dict = self.map_functional_event_data(subtree)
943 | possible_states = self.map_path_previous(subtree, elimination_dict)
944 | self.map_path(model, subtree, result_dict, elimination_dict, te,
945 | possible_states)
946 |
947 | self.create_cpd_tables_event_tree(model, subtree, result_dict, te)
948 |
949 | def get_probability_values(self, gate_type, num):
950 | """
951 | Set the probability values for the gate types.
952 |
953 | Handled gate types are ``OR`` and ``AND``.
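    |
    | For example, an ``OR`` gate with two inputs (``num == 2``) yields
    | the deterministic table::
    |
    |     values = [[1.0, 0.0, 0.0, 0.0],   # row for state 'works'
    |               [0.0, 1.0, 1.0, 1.0]]   # row for state 'fails'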
954 | """
955 |
956 | if not (num >= 1 and num < 8):
957 | raise RuntimeError("number of gate inputs out of supported "
958 | "range [1, 7]: {}".format(num))
959 |
960 | num_values = 2**num
961 |
962 | values_1 = []
963 | values_2 = []
964 | if gate_type == GateType.OR:
965 |
966 | for i in range(num_values):
967 | if i == 0:
968 | values_1.append(1.0)
969 | values_2.append(0.0)
970 | else:
971 | values_1.append(0.0)
972 | values_2.append(1.0)
973 | values = [values_1, values_2]
974 | return values
975 | elif gate_type == GateType.AND:
976 | for i in range(num_values):
977 | if i != num_values - 1:
978 | values_1.append(1.0)
979 | values_2.append(0.0)
980 | else:
981 | values_1.append(0.0)
982 | values_2.append(1.0)
983 | values = [values_1, values_2]
984 | return values
985 | else:
986 | raise NotImplementedError('gate_type not handled: {}'
987 | .format(gate_type))
988 |
989 | return None
990 |
991 | def get_evidence_card(self, model, children):
992 | """
993 | Returns the list of evidence cardinalities (one ``variable_card``
994 | per child). The caveat is explained in the source code below.
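    |
    | For three children that are (or are assumed to be) binary the
    | returned list is simply::
    |
    |     [2, 2, 2]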
995 | """
996 |
997 | evidence = []
998 | for child in children:
999 | cpd = model.get_cpds(child)
1000 | if cpd is None:
1001 | # Explanation: while the PGM is being filled, not all child
1002 | # CPDs exist yet, so their variable_card is unknown at this
1003 | # point. Ideally the algorithm would traverse the tree from
1004 | # the leaves to the root so that every child CPD is created
1005 | # before its parent is processed. For now we make the
1006 | # educated guess that every missing child is binary, i.e.
1007 | # evidence_card == 2
1008 | # TODO: fix this by building the CPDs leaves-first
1009 | variable_card = 2
1010 | else:
1011 | variable_card = cpd.variable_card
1012 |
1013 | evidence.append(variable_card)
1014 | return evidence
1015 |
1016 | def get_state_names(self, variable, children, states=['works', 'fails']):
1017 | '''
1018 | Creates a state-name mapping like the following::
1019 |
1020 | state_names={'AND': ['works', 'fails'],
1021 | 'A': ['works', 'fails'],
1022 | 'B': ['works', 'fails'],
1023 | 'C': ['works', 'fails'],
1024 | 'D': ['works', 'fails']}
1025 | '''
1026 |
1027 | state_names = {}
1028 | state_names[variable] = states
1029 | for child in children:
1030 | state_names[child] = states
1031 |
1032 | return state_names
1033 |
1034 | def fill_cpd_table(self, variable, variable_card, values, evidence,
1035 | evidence_card, state_names=None):
1036 | """
1037 | Thin wrapper that creates a ``TabularCPD()`` from the given
1038 | values; mainly used to keep the calling code short.
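    |
    | An illustrative call for an ``AND`` gate with two inputs (all
    | variable names are made up)::
    |
    |     cpd = self.fill_cpd_table('AND', 2,
    |                               [[1.0, 1.0, 1.0, 0.0],
    |                                [0.0, 0.0, 0.0, 1.0]],
    |                               ['A', 'B'], [2, 2],
    |                               self.get_state_names('AND', ['A', 'B']))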
1039 | """
1040 |
1041 | cpd = TabularCPD(variable=variable, variable_card=variable_card,
1042 | values=values,
1043 | evidence=evidence, evidence_card=evidence_card,
1044 | state_names=state_names)
1045 | return cpd
1046 |
1047 | def create_cpd_gate(self, model, gate_node, child_nodes):
1048 | """
1049 | Creates the CPD table (``TabularCPD()``) for the given gate node.
1050 | """
1051 |
1052 | num_children = len(child_nodes)
1053 | children = [c.tag for c in child_nodes]
1054 | variable = gate_node.tag
1055 | # TODO: if the variables should ever be changed from 'fails' and
1056 | # 'works' this has to be adapted here
1057 | variable_card = 2
1058 |
1059 | values = self.get_probability_values(gate_node.data.gate_type,
1060 | num_children)
1061 | evidence = children
1062 | evidence_card = self.get_evidence_card(model, children)
1063 | state_names = self.get_state_names(variable, children)
1064 |
1065 | cpd = self.fill_cpd_table(variable, variable_card, values, evidence,
1066 | evidence_card, state_names)
1067 | model.add_cpds(cpd)
1068 |
1069 | def map_connections(self, model, tree):
1070 | '''
1071 | Maps the connections between each gate and its children
1072 | (edges child -> gate) and creates the CPD table of each gate.
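    |
    | For a gate ``G`` with children ``A`` and ``B`` (illustrative names)
    | this results in::
    |
    |     model.add_edge('A', 'G')
    |     model.add_edge('B', 'G')
    |     # plus one TabularCPD attached to the model for 'G'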
1073 | '''
1074 | nodes = self.importer.tree_find_nodes(tree, None, OPSA_Gate)
1075 |
1076 | for node in nodes:
1077 | children = tree.children(node.identifier)
1078 | for child in children:
1079 | model.add_edge(child.tag, node.tag)
1080 | # create CPD for all children
1081 | self.create_cpd_gate(model, node, children)
1082 |
1083 | def map_gates(self, model, tree):
1084 | '''
1085 | Intermediate events (gates), including the top event, are mapped
1086 | from the tree structure to the Bayesian network (BN).
1087 | '''
1088 | nodes = self.importer.tree_find_nodes(tree, None, OPSA_Gate)
1089 | for node in nodes:
1090 | model.add_node(node.tag)
1091 |
1092 | self.map_connections(model, tree)
1093 |
1094 | def cpd_exists(self, model, name):
1095 | """
1096 | Returns True if a CPD with the given variable name exists,
1097 | otherwise False.
1098 | """
1099 | # find cpd for consequence
1100 | cpds = model.cpds
1101 | for cpd in cpds:
1102 | if cpd.variable == name:
1103 | return True
1104 | return False
1105 |
1106 | def create_cpd_basic_event(self, model, node):
1107 | """
1108 | Creates the cpd ``TabularCPD()`` for the basic event.
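    |
    | For a basic event ``BE1`` with failure probability 0.1 (name and
    | value illustrative) the resulting CPD is::
    |
    |     TabularCPD('BE1', 2, [[0.9], [0.1]],
    |                state_names={'BE1': ['works', 'fails']})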
1109 | """
1110 | # is there already a CPD for this node?
1111 | if self.cpd_exists(model, node.tag):
1112 | print('Warning: CPD already exists: {}'.format(node.tag))
1113 | return
1114 |
1115 | # get probability
1116 | probability = node.data.probability
1117 | if probability is None:
1118 | probability = 0.0
1119 | else:
1120 | probability = float(probability)
1121 |
1122 | if probability < 0.0 or probability > 1.0:
1123 | print('WARNING: probability outside [0, 1], falling back to 0.0')
1124 | probability = 0.0
1125 |
1126 | # working: C = 0; failing: C = 1
1127 | cpd = TabularCPD(variable=node.tag, variable_card=2,
1128 | values=[[1.0 - probability], [probability]],
1129 | state_names={node.tag: ['works', 'fails']}
1130 | )
1131 |
1132 | model.add_cpds(cpd)
1133 |
1134 | def map_basic_events(self, model, tree):
1135 | """
1136 | Maps the basic events to the Bayesian model: creates the nodes
1137 | and their CPD tables.
1138 | """
1139 | # importer = OPSA_Importer()
1140 | nodes = self.importer.tree_find_nodes(tree, None, OPSA_Basic_Event)
1141 |
1142 | for node in nodes:
1143 | model.add_node(node.tag)
1144 | self.create_cpd_basic_event(model, node)
1145 |
1146 | def map(self, tree=None):
1147 | """
1148 | Maps the Bow-Tie tree to a Bayesian network.
1149 | If ``tree`` is not given, it must have been set when the class was created.
1150 |
1151 | The resulting ``BayesianModel()`` is returned.
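    |
    | Illustrative usage, where ``mapper`` denotes an instance of this
    | class and ``tree`` an already imported Bow-Tie ``Tree``::
    |
    |     model = mapper.map(tree)   # Bow-Tie -> Bayesian network
    |     model.check_model()        # optional pgmpy consistency check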
1152 | """
1153 |
1154 | print('mapping from Bow-Tie to Bayesian network')
1155 |
1156 | if tree is not None:
1157 | self.tree = tree
1158 |
1159 | model = BayesianModel()
1160 |
1161 | model = self.map_FT(self.tree, model)
1162 | model = self.map_ET(self.tree, model)
1163 |
1164 | return model
1165 |
1166 | def map_FT(self, tree=None, model=None):
1167 | """
1168 | Maps only the *fault* tree to the Bayesian network.
1169 | If ``tree`` is not given, the tree stored on the instance is used.
1170 | If ``model`` is not given, a new ``BayesianModel()``
1171 | is created.
1172 | Returns the model.
1173 | """
1174 | if tree is not None:
1175 | self.tree = tree
1176 |
1177 | if not isinstance(self.tree, Tree):
1178 | raise RuntimeError("tree is not a Tree: {}".format(self.tree))
1179 |
1180 | if model is None:
1181 | model = BayesianModel()
1182 |
1183 | self.map_basic_events(model, self.tree)
1184 | self.map_gates(model, self.tree)
1185 |
1186 | return model
1187 |
1188 | def map_ET(self, tree=None, model=None):
1189 | """
1190 | Maps only the *event* tree to the Bayesian network.
1191 | If ``tree`` is not given, the tree stored on the instance is used.
1192 | If ``model`` is not given, a new ``BayesianModel()``
1193 | is created.
1194 | Returns the model.
1195 | """
1196 | if tree is not None:
1197 | self.tree = tree
1198 |
1199 | if not isinstance(self.tree, Tree):
1200 | raise RuntimeError("tree is not a Tree: {}".format(self.tree))
1201 |
1202 | if model is None:
1203 | model = BayesianModel()
1204 |
1205 | te = self.get_top_event(self.tree)
1206 |
1207 | event_tree = self.find_event_tree(self.tree, te)
1208 | self.map_functional_events(model, self.tree, te, event_tree)
1209 |
1210 | return model
1211 |
--------------------------------------------------------------------------------