├── .coveragerc
├── .flake8
├── .gitattributes
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       ├── publish.yml
│       ├── pylint.yml
│       └── pytest.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yml
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── contributing.md
├── docs
│   ├── Makefile
│   ├── conf.py
│   ├── examples
│   │   ├── dynamic_plug_names.rst
│   │   ├── examples.rst
│   │   ├── house_and_birthday.rst
│   │   ├── nested_graphs.rst
│   │   ├── vfx_render_farm_conversion.rst
│   │   ├── vfx_rendering.rst
│   │   ├── workflow_design_pattern.rst
│   │   └── world_clock.rst
│   ├── flowpipe-for-vfx-pipelines.md
│   ├── index.rst
│   ├── make.bat
│   ├── requirements.txt
│   └── source
│       ├── flowpipe.errors.rst
│       ├── flowpipe.event.rst
│       ├── flowpipe.graph.rst
│       ├── flowpipe.node.rst
│       ├── flowpipe.plug.rst
│       ├── flowpipe.rst
│       └── flowpipe.utilities.rst
├── examples
│   ├── dynamic_plug_names.py
│   ├── house_and_birthday.py
│   ├── nested_graphs.py
│   ├── vfx_render_farm_conversion.py
│   ├── vfx_rendering.py
│   ├── workflow_design_pattern.py
│   └── world_clock.py
├── flowpipe-for-vfx-pipelines.md
├── flowpipe
│   ├── __init__.py
│   ├── errors.py
│   ├── evaluator.py
│   ├── event.py
│   ├── graph.py
│   ├── node.py
│   ├── plug.py
│   └── utilities.py
├── logo.png
├── poetry.lock
├── pyproject.toml
├── readthedocs.yml
├── setup.py
└── tests
    ├── conftest.py
    ├── test_convert_function_to_node.py
    ├── test_event.py
    ├── test_examples.py
    ├── test_graph.py
    ├── test_inputpluggroup.py
    ├── test_multiprocessing.py
    ├── test_node.py
    ├── test_plugs.py
    ├── test_subgraphs.py
    └── test_utilities.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | omit =
3 | *gui*
4 | *____*
5 | exclude_lines =
6 | except ImportError:
7 | from ordereddict import OrderedDict
8 | pragma: no cover
9 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore =
3 | # Clashing with black:
4 | E203,
5 | W503
6 | exclude =
7 | .git,
8 | .github,
9 | docs,
10 |
11 | per-file-ignores =
12 | # tests and examples may contain lines longer than 79 chars
13 | tests/*: E501
14 | examples/*: E501
15 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
4 | # Custom for Visual Studio
5 | *.cs diff=csharp
6 |
7 | # Standard to msysgit
8 | *.doc diff=astextplain
9 | *.DOC diff=astextplain
10 | *.docx diff=astextplain
11 | *.DOCX diff=astextplain
12 | *.dot diff=astextplain
13 | *.DOT diff=astextplain
14 | *.pdf diff=astextplain
15 | *.PDF diff=astextplain
16 | *.rtf diff=astextplain
17 | *.RTF diff=astextplain
18 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | If possible, provide an easy-to-execute code snippet that demonstrates the bug; otherwise, please describe how the bug can be reproduced.
15 |
16 | **Expected behavior**
17 | A clear and concise description of what you expected to happen.
18 |
19 | **Suggestions for Solution**
20 | If you have ideas/suggestions on how to fix the bug, please provide them here.
21 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 |
3 | on:
4 | release:
5 | types:
6 | - created
7 |
8 | jobs:
9 | publish:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v2
13 | - name: Set up Python 3.10
14 | uses: actions/setup-python@v2
15 | with:
16 | python-version: "3.10"
15 | - name: Install Poetry
16 | run: |
17 | python -m pip install --upgrade poetry wheel
18 | - name: Install dependencies
19 | run: |
20 | poetry install
21 | - name: Publish
22 | env:
23 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.POETRY_PYPI_TOKEN_PYPI }}
24 | run: |
25 | poetry config pypi-token.pypi $POETRY_PYPI_TOKEN_PYPI
26 | poetry build
27 | poetry publish
28 |
--------------------------------------------------------------------------------
/.github/workflows/pylint.yml:
--------------------------------------------------------------------------------
1 | name: Pylint
2 |
3 | on: [push]
4 |
5 | jobs:
6 | pylint:
7 | name: Pylint
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v2
11 | - name: Set up Python 3.10
12 | uses: actions/setup-python@v2
13 | with:
14 | python-version: "3.10"
13 | - name: Install Poetry
14 | run: |
15 | python -m pip install --upgrade poetry wheel
16 | - name: Install dependencies
17 | run: |
18 | poetry install
19 | - name: Analysing the code with pylint
20 | run: |
21 | poetry run pylint ./flowpipe
22 |
--------------------------------------------------------------------------------
/.github/workflows/pytest.yml:
--------------------------------------------------------------------------------
1 | # .github/workflows/app.yaml
2 | name: Pytest
3 |
4 | on: [push]
5 |
6 | jobs:
7 | tests:
8 | name: Tests
9 | runs-on: ubuntu-latest
10 | strategy:
11 | matrix:
12 | python-version: ["3.8", "3.10", "3.x"]
13 | steps:
14 | - name: Check out repository code
15 | uses: actions/checkout@v2
16 | - name: Set up Python ${{ matrix.python-version }}
17 | uses: actions/setup-python@v2
18 | with:
19 | python-version: ${{ matrix.python-version }}
20 | - name: Install Poetry
21 | run: |
22 | python -m pip install --upgrade poetry wheel
23 | - name: Install dependencies
24 | run: |
25 | poetry install --no-cache
26 | - name: Run tests without coverage
27 | if: ${{ matrix.python-version != '3.10' }}
28 | run: |
29 | poetry run pytest tests
30 | - name: Run tests with coverage
31 | if: ${{ matrix.python-version == '3.10' }}
32 | run: |
33 | poetry run pytest tests --cov-report=term-missing:skip-covered --cov-report=xml --cov=flowpipe | tee pytest-coverage.txt
34 | - name: Pytest coverage comment
35 | uses: MishaKav/pytest-coverage-comment@main
36 | if: ${{ matrix.python-version == '3.10' }}
37 | id: coverageComment
38 | with:
39 | hide-comment: ${{ github.ref == 'refs/heads/master' }}
40 | pytest-coverage-path: ./pytest-coverage.txt
41 | - name: Update Readme with Coverage Html
42 | if: ${{ github.ref == 'refs/heads/master' && matrix.python-version == '3.10' }}
43 | run: |
44 | sed -i '/<!-- Pytest Coverage Comment:Begin -->/,/<!-- Pytest Coverage Comment:End -->/c\<!-- Pytest Coverage Comment:Begin -->\n\${{ steps.coverageComment.outputs.coverageHtml }}\n${{ steps.coverageComment.outputs.summaryReport }}\n<!-- Pytest Coverage Comment:End -->' ./README.md
45 | - name: Commit & Push changes to Readme
46 | if: ${{ github.ref == 'refs/heads/master' && matrix.python-version == '3.10' }}
47 | uses: actions-js/push@master
48 | with:
49 | message: Update coverage on Readme
50 | branch: master
51 | github_token: ${{ secrets.GITHUB_TOKEN }}
52 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | venv/
2 | docs/build
3 | *____*
4 | gui
5 | celery
6 | cover
7 | .coverage
8 | .pytest_cache*
9 | doc/source/
10 | logos/
11 | venv*
12 | .vscode
13 | *.code-workspace
14 | pytest*
15 |
16 | # Sublime
17 | *.sublime-project
18 | *.sublime-workspace
19 |
20 | # Byte-compiled / optimized / DLL files
21 | __pycache__/
22 | *.py[cod]
23 | *$py.class
24 |
25 | # C extensions
26 | *.so
27 |
28 | # Distribution / packaging
29 | .Python
30 | env/
31 | build/
32 | develop-eggs/
33 | dist/
34 | downloads/
35 | eggs/
36 | .eggs/
37 | lib/
38 | lib64/
39 | parts/
40 | sdist/
41 | var/
42 | *.egg-info/
43 | .installed.cfg
44 | *.egg
45 |
46 | # PyInstaller
47 | # Usually these files are written by a python script from a template
48 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
49 | *.manifest
50 | *.spec
51 |
52 | # Installer logs
53 | pip-log.txt
54 | pip-delete-this-directory.txt
55 |
56 | # Unit test / coverage reports
57 | htmlcov/
58 | .tox/
59 | .coverage
60 | .coverage.*
61 | .cache
62 | nosetests.xml
63 | coverage.xml
64 | *,cover
65 | .hypothesis/
66 |
67 | # Translations
68 | *.mo
69 | *.pot
70 |
71 | # Django stuff:
72 | *.log
73 | local_settings.py
74 |
75 | # Flask stuff:
76 | instance/
77 | .webassets-cache
78 |
79 | # Scrapy stuff:
80 | .scrapy
81 |
82 | # Sphinx documentation
83 | docs/_build/
84 |
85 | # PyBuilder
86 | target/
87 |
88 | # IPython Notebook
89 | .ipynb_checkpoints
90 |
91 | # pyenv
92 | .python-version
93 |
94 | # celery beat schedule file
95 | celerybeat-schedule
96 |
97 | # dotenv
98 | .env
99 |
100 | # virtualenv
101 | venv/
102 | ENV/
103 |
104 | # Spyder project settings
105 | .spyderproject
106 |
107 | # Rope project settings
108 | .ropeproject
109 | *.sublime-project
110 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/ambv/black
3 | rev: 23.11.0
4 | hooks:
5 | - id: black
6 | - repo: https://github.com/pycqa/isort
7 | rev: 5.12.0
8 | hooks:
9 | - id: isort
10 | - repo: https://github.com/python-poetry/poetry
11 | rev: 1.7.0
12 | hooks:
13 | - id: poetry-check
14 | - id: poetry-lock
15 | - repo: local
16 | hooks:
17 | - id: unittests
18 | name: unittests
19 | language: system
20 | entry: poetry run pytest ./tests
21 | pass_filenames: false
22 | - id: pylint
23 | name: pylint
24 | language: system
25 | entry: poetry run pylint ./flowpipe
26 | pass_filenames: false
27 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Build documentation in the docs/ directory with Sphinx
9 | sphinx:
10 | builder: html
11 | configuration: docs/conf.py
12 |
13 | # Build documentation with MkDocs
14 | #mkdocs:
15 | # configuration: mkdocs.yml
17 |
18 | # Optionally set the version of Python and requirements required to build your docs
19 | python:
20 | version: 3.7
21 | install:
22 | - requirements: docs/requirements.txt
23 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at paulschweizer@gmx.net. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Paul Schweizer
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://pypi.org/project/flowpipe/)
2 |
3 |
4 |
5 | Coverage Report:
| File  | Stmts | Miss | Cover |
| ----- | ----- | ---- | ----- |
| TOTAL | 940   | 0    | 100%  |
6 |
7 |
8 |
9 | [](LICENSE)  [](https://flowpipe.readthedocs.io/en/latest) [](https://github.com/psf/black)
10 |
11 | 
12 |
13 | # Flow-based Programming
14 |
15 | A lightweight framework for flow-based programming in python.
16 |
17 | ```c
18 | +-------------------+ +---------------------+
19 | | Invite People | | Birthday Party |
20 | |-------------------| |---------------------|
21 | o amount<4> | +----->o attendees<> |
22 | | people o---+ +--->o cake<> |
23 | +-------------------+ | +---------------------+
24 | |
25 | +-------------------+ |
26 | | Bake a cake | |
27 | +-------------------+ |
28 | o type<"Chocolate"> | |
29 | | cake o-----+
30 | +-------------------+
31 | ```
32 |
33 | Benefits:
34 |
35 | - Visualize code
36 | - Re-usability
37 | - Streamlined code design
38 | - Built-in concurrency
39 | - Represent workflows one to one in the code
40 |
41 | # Quick Example
42 |
43 | Consider this simple example of how to represent the construction of a house with Flowpipe:
44 |
45 | ```python
46 | from flowpipe import Graph, INode, Node, InputPlug, OutputPlug
47 |
48 |
49 | class HireWorkers(INode):
50 | """A node can be derived from the INode interface.
51 |
52 | The plugs are defined in the init method.
53 | The compute method receives the inputs from any connected upstream nodes.
54 | """
55 |
56 | def __init__(self, amount=None, **kwargs):
57 | super(HireWorkers, self).__init__(**kwargs)
58 | InputPlug('amount', self, amount)
59 | OutputPlug('workers', self)
60 |
61 | def compute(self, amount):
62 | workers = ['John', 'Jane', 'Mike', 'Michelle']
63 | print('{0} workers are hired to build the house.'.format(amount))
64 | return {'workers.{0}'.format(i): workers[i] for i in range(amount)}
65 |
66 |
67 | @Node(outputs=['workers'])
68 | def Build(workers, section):
69 | """A node can also be created by the Node decorator.outputs
70 |
71 | The inputs to the function are turned into InputsPlugs, otuputs are defined
72 | in the decorator itself. The wrapped function is used as the compute method.
73 | """
74 | print('{0} are building the {1}'.format(', '.join(workers.values()), section))
75 | return {'workers.{0}'.format(i): worker for i, worker in workers.items()}
76 |
77 |
78 | @Node()
79 | def Party(attendees):
80 | print('{0} and {1} are having a great party!'.format(
81 | ', '.join(list(attendees.values())[:-1]), list(attendees.values())[-1]))
82 |
83 |
84 | # Create a graph with the necessary nodes
85 | graph = Graph(name='How to build a house')
86 | workers = HireWorkers(graph=graph, amount=4)
87 | build_walls = Build(graph=graph, name='Build Walls', section='walls')
88 | build_roof = Build(graph=graph, name='Build Roof', section='roof')
89 | party = Party(graph=graph, name='Housewarming Party')
90 |
91 | # Wire up the connections between the nodes
92 | workers.outputs['workers']['0'].connect(build_walls.inputs['workers']['0'])
93 | workers.outputs['workers']['1'].connect(build_walls.inputs['workers']['1'])
94 | workers.outputs['workers']['2'].connect(build_roof.inputs['workers']['0'])
95 | workers.outputs['workers']['3'].connect(build_roof.inputs['workers']['1'])
96 | build_walls.outputs['workers']['0'] >> party.inputs['attendees']['0']
97 | build_walls.outputs['workers']['1'] >> party.inputs['attendees']['2']
98 | build_roof.outputs['workers']['0'] >> party.inputs['attendees']['1']
99 | build_roof.outputs['workers']['1'] >> party.inputs['attendees']['3']
100 | party.inputs['attendees']['4'].value = 'Homeowner'
101 | ```
102 |
103 | Visualize the code as a graph or as a listing:
104 |
105 | ```python
106 | print(graph.name)
107 | print(graph)
108 | print(graph.list_repr())
109 | ```
110 |
111 | Output:
112 |
113 | ```c
114 | How to build a house
115 | +------------------------+ +------------------------+ +---------------------------+
116 | | HireWorkers | | Build Roof | | Housewarming Party |
117 | |------------------------| |------------------------| |---------------------------|
118 | o amount<4> | o section<"roof"> | % attendees |
119 | | workers % % workers | +--->o attendees.0<> |
120 | | workers.0 o-----+--->o workers.0<> | |--->o attendees.1<> |
121 | | workers.1 o-----|--->o workers.1<> | |--->o attendees.2<> |
122 | | workers.2 o-----| | workers % |--->o attendees.3<> |
123 | | workers.3 o-----| | workers.0 o-----| o attendees.4<"Homeowner> |
124 | +------------------------+ | | workers.1 o-----| +---------------------------+
125 | | +------------------------+ |
126 | | +------------------------+ |
127 | | | Build Walls | |
128 | | |------------------------| |
129 | | o section<"walls"> | |
130 | | % workers | |
131 | +--->o workers.0<> | |
132 | +--->o workers.1<> | |
133 | | workers % |
134 | | workers.0 o-----+
135 | | workers.1 o-----+
136 | +------------------------+
137 |
138 | How to build a house
139 | HireWorkers
140 | [i] amount: 4
141 | [o] workers
142 | [o] workers.0 >> Build Walls.workers.0
143 | [o] workers.1 >> Build Walls.workers.1
144 | [o] workers.2 >> Build Roof.workers.0
145 | [o] workers.3 >> Build Roof.workers.1
146 | Build Roof
147 | [i] section: "roof"
148 | [i] workers
149 | [i] workers.0 << HireWorkers.workers.2
150 | [i] workers.1 << HireWorkers.workers.3
151 | [o] workers
152 | [o] workers.0 >> Housewarming Party.attendees.1
153 | [o] workers.1 >> Housewarming Party.attendees.3
154 | Build Walls
155 | [i] section: "walls"
156 | [i] workers
157 | [i] workers.0 << HireWorkers.workers.0
158 | [i] workers.1 << HireWorkers.workers.1
159 | [o] workers
160 | [o] workers.0 >> Housewarming Party.attendees.0
161 | [o] workers.1 >> Housewarming Party.attendees.2
162 | Housewarming Party
163 | [i] attendees
164 | [i] attendees.0 << Build Walls.workers.0
165 | [i] attendees.1 << Build Roof.workers.0
166 | [i] attendees.2 << Build Walls.workers.1
167 | [i] attendees.3 << Build Roof.workers.1
168 | [i] attendees.4: "Homeowner"
169 | ```
170 |
171 | Now build the house:
172 |
173 | ```python
174 | graph.evaluate(mode='threading') # Options are linear, threading and multiprocessing
175 | ```
176 |
177 | Output:
178 |
179 | ```c
180 | 4 workers are hired to build the house.
181 | Michelle, Mike are building the roof
182 | Jane, John are building the walls
183 | Mike, John, Michelle, Jane and Homeowner are having a great party!
184 | ```
185 |
186 | (Note: for more elaborate evaluation schemes, see [Evaluators](#evaluators))
187 |
188 | We now know how to throw a party, so let's invite some people and re-use these skills for a birthday:
189 |
190 | ```python
191 | graph = Graph(name='How to throw a birthday party')
192 |
193 | @Node(outputs=['people'])
194 | def InvitePeople(amount):
195 | people = ['John', 'Jane', 'Mike', 'Michelle']
196 | d = {'people.{0}'.format(i): people[i] for i in range(amount)}
197 | d['people'] = {people[i]: people[i] for i in range(amount)}
198 | return d
199 |
200 | invite = InvitePeople(graph=graph, amount=4)
201 | birthday_party = Party(graph=graph, name='Birthday Party')
202 | invite.outputs['people'] >> birthday_party.inputs['attendees']
203 |
204 | print(graph.name)
205 | print(graph)
206 | graph.evaluate()
207 | ```
208 |
209 | Output:
210 |
211 | ```c
212 | How to throw a birthday party
213 | +-------------------+ +---------------------+
214 | | InvitePeople | | Birthday Party |
215 | |-------------------| |---------------------|
216 | o amount<4> | +--->o attendees<> |
217 | | people o-----+ +---------------------+
218 | +-------------------+
219 |
220 | Jane, Michelle, Mike and John are having a great party!
221 | ```
222 |
223 | ## More Examples
224 |
225 | There are more examples for common use cases of flowpipe:
226 |
227 | The code for the examples above:
228 | [house_and_birthday.py](examples/house_and_birthday.py)!
229 |
230 | Another simple example:
231 | [world_clock.py](examples/world_clock.py)!
232 |
233 | How to make use of nested subgraphs:
234 | [nested_graphs.py](examples/nested_graphs.py)!
235 |
236 | Using the command pattern with flowpipe successfully:
237 | [workflow_design_pattern.py](examples/workflow_design_pattern.py)!
238 |
239 | Use flowpipe on a remote cluster of machines, commonly referred to as a "render farm" in the VFX/Animation industry:
240 | [vfx_render_farm_conversion.py](examples/vfx_render_farm_conversion.py)!
241 |
242 | An example graph showcasing a common workflow encountered in the VFX/Animation industry:
243 | [vfx_rendering.py](examples/vfx_rendering.py)!
244 |
245 | ## VFX Pipeline
246 |
247 | If you are working in the VFX/Animation industry, please check out this extensive guide on how to use [flowpipe in a vfx pipeline](flowpipe-for-vfx-pipelines.md)!
248 |
249 | # Evaluators
250 |
251 | If your nodes just need sequential, threaded or multiprocessing evaluation, the `Graph.evaluate()` method will serve you just fine. If you want to take more control over the way your graph is being evaluated, `Evaluators` are for you. They can also be used to add e.g. logging or tracing to node evaluation.
252 |
253 | Evaluators allow you to take control of node evaluation order and scheduling.
254 | See `flowpipe/evaluator.py` for the evaluation schemes used by the `Graph.evaluate()` method.
255 |
256 | To write a custom evaluator, subclass `flowpipe.evaluator.Evaluator` and provide at least an `_evaluate_nodes(self, nodes)` method.
257 | This method should take a list of nodes and call their respective `node.evaluate()` methods (along with any other task you want to perform for each node being evaluated).
258 | To use a custom evaluator, create it and call its `Evaluator.evaluate()` method with the graph to evaluate as an argument:
259 |
260 | ```py
261 | from flowpipe.evaluator import LinearEvaluator
262 |
263 | # assuming you created a graph to evaluate above, called `graph`
264 | lin_eval = LinearEvaluator()
265 | lin_eval.evaluate(graph)
266 | ```
267 |
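268 | For example, here is a minimal sketch of a custom evaluator that logs each node before evaluating it (`LoggingEvaluator` is a made-up name; only the `Evaluator` interface described above is assumed):
269 |
270 | ```py
271 | import logging
272 |
273 | from flowpipe.evaluator import Evaluator
274 |
275 |
276 | class LoggingEvaluator(Evaluator):
277 |     """Evaluate nodes one by one, logging each evaluation."""
278 |
279 |     def _evaluate_nodes(self, nodes):
280 |         # Call each node's evaluate(), adding the per-node extra (logging)
281 |         for node in nodes:
282 |             logging.info("Evaluating node: %s", node.name)
283 |             node.evaluate()
284 |
285 |
286 | # Assuming a graph built as in the examples above:
287 | # LoggingEvaluator().evaluate(graph)
288 | ```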
--------------------------------------------------------------------------------
/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing to Flowpipe
2 |
3 | :+1::tada: Thank you very much for taking the time to contribute! :tada::+1:
4 |
5 | ## Start coding
6 |
7 | This is how you can setup your development environment for flowpipe:
8 |
9 | ```bash
10 | git clone https://github.com/PaulSchweizer/flowpipe.git
11 | cd ./flowpipe
12 | python -m venv venv
13 | source venv/bin/activate
14 | pip install -r requirements-dev.txt
14 | pre-commit install
15 |
16 | # Run the tests to confirm that your setup is complete
17 | python setup.py develop
18 | pytest ./tests
19 | ```
20 |
21 | ## Questions
22 |
23 | Please don't hesitate to submit any questions as tickets on github!
24 | Please also note that we have a set of [examples](examples) and a [readme](README.md) so you might be able to find answers in there as well.
25 |
26 | ## Feature Requests, Bug Reports, Ideas
27 |
28 | Same as for questions, please submit your feature requests, ideas and bug reports as tickets on github. Any such contribution is very much appreciated as it helps to improve this project further.
29 |
30 | ## Pull Requests
31 |
32 | Please fork the repo, create a branch for your changes and submit a pull request.
33 | Pull requests should follow the conventions below.
34 | Also note that we always squash when merging PRs, so don't worry about the number of commits on your PR.
35 |
36 | ## Compatibility
37 |
38 | We want to keep this library backwards compatible with python 2.7 for now. The reason is that we know of two users who still run this package in a python 2.7 environment and we would like to support them (until they have switched to py3).
39 | With that being said, we also aim for compatibility with python 3.6+.
40 |
41 | ## Unittests
42 |
43 | We have an extensive, reliable test suite and we want to keep it that way, so please write sufficient tests for your contributions.
44 | We also want to keep the coverage at 100%. If there are good reasons for not covering parts of the code, please explicitly exclude them either via `# pragma: no cover` directly in the code (see the sketch below) or by specifying it in the [.coveragerc](.coveragerc) file.
45 | The tests have to pass in the CI (see [pytest.yml](.github/workflows/pytest.yml)).
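46 |
47 | A hypothetical sketch of the inline exclusion (`interactive_debug_shell` is made up purely for illustration):
48 |
49 | ```python
50 | def interactive_debug_shell():  # pragma: no cover
51 |     """Developer-only helper, deliberately excluded from coverage."""
52 |     import pdb
53 |     pdb.set_trace()
54 | ```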
46 |
47 | ## Coding styleguide
48 |
49 | - We use [black](https://github.com/ambv/black)
50 | - For docstrings please use the [google style](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings)
51 |
52 | ## Release to PyPi (Collaborators only)
53 |
54 | Currently there is no specific release policy enacted, so please state in your Pull Request whether you'd need a new PyPi release after the merge and we will release it.
55 |
56 | To perform a release as a collaborator, follow this recipe:
57 |
58 | 1. On the master branch, update the version number in `pyproject.toml` and `docs/conf.py`.
59 | 2. Commit and push that change.
60 | 3. Tag the commit and push the tag.
61 | 4. On the github page, navigate to "Releases" and "Draft a new release".
62 | 5. Enter the required info for a new release. _Make sure the version number you give here is the same as in `pyproject.toml`!_
63 | 6. Click "Publish release", and the CI pipeline will automatically build a new release and push it to PyPi via github actions (see [publish.yml](.github/workflows/publish.yml)).
64 |
65 | ## Next Steps: Become a Collaborator on github
66 |
67 | If you have made some contributions already and want to become more involved in the project please don't hesitate to ask about becoming a collaborator.
68 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | sys.path.insert(0, os.path.abspath("./.."))
17 | sys.setrecursionlimit(1500)
18 |
19 |
20 | # -- Project information -----------------------------------------------------
21 |
22 | project = "Flowpipe"
23 | copyright = "2020, Paul Schweizer"
24 | author = "Paul Schweizer"
25 |
26 | # The full version, including alpha/beta/rc tags
27 | release = "1.0.4"
28 |
29 |
30 | # -- General configuration ---------------------------------------------------
31 |
32 | # Add any Sphinx extension module names here, as strings. They can be
33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
34 | # ones.
35 | extensions = [
36 | "sphinx.ext.autodoc",
37 | "sphinx.ext.intersphinx",
38 | "sphinx.ext.napoleon",
39 | "m2r2",
40 | ]
41 |
42 | # Add any paths that contain templates here, relative to this directory.
43 | templates_path = ["_templates"]
44 |
45 | # List of patterns, relative to source directory, that match files and
46 | # directories to ignore when looking for source files.
47 | # This pattern also affects html_static_path and html_extra_path.
48 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "setup.py"]
49 |
50 |
51 | # -- Options for HTML output -------------------------------------------------
52 |
53 | # The theme to use for HTML and HTML Help pages. See the documentation for
54 | # a list of builtin themes.
55 | #
56 | html_theme = "alabaster"
57 |
58 | # Add any paths that contain custom static files (such as style sheets) here,
59 | # relative to this directory. They are copied after the builtin static files,
60 | # so a file named "default.css" will overwrite the builtin "default.css".
61 | html_static_path = ["_static"]
62 |
63 |
64 | # -- Other Options -----------------------------------------------------------
65 |
66 | html_logo = "../logo.png"
67 |
68 | m2r_parse_relative_links = True
69 |
70 | master_doc = "index"
71 |
--------------------------------------------------------------------------------
/docs/examples/dynamic_plug_names.rst:
--------------------------------------------------------------------------------
1 | Dynamic Plug Names
2 | ------------------
3 |
4 | .. literalinclude:: ../../examples/dynamic_plug_names.py
5 |
--------------------------------------------------------------------------------
/docs/examples/examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ========
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | ./../flowpipe-for-vfx-pipelines
8 | dynamic_plug_names
9 | house_and_birthday
10 | nested_graphs
11 | vfx_render_farm_conversion
12 | vfx_rendering
13 | workflow_design_pattern
14 | world_clock
15 |
--------------------------------------------------------------------------------
/docs/examples/house_and_birthday.rst:
--------------------------------------------------------------------------------
1 | House and Birthday
2 | ------------------
3 |
4 | .. literalinclude:: ../../examples/house_and_birthday.py
5 |
--------------------------------------------------------------------------------
/docs/examples/nested_graphs.rst:
--------------------------------------------------------------------------------
1 | Nested Graphs
2 | -------------
3 |
4 | .. literalinclude:: ../../examples/nested_graphs.py
5 |
--------------------------------------------------------------------------------
/docs/examples/vfx_render_farm_conversion.rst:
--------------------------------------------------------------------------------
1 | VFX Renderfarm Conversion
2 | -------------------------
3 |
4 | .. literalinclude:: ../../examples/vfx_render_farm_conversion.py
5 |
--------------------------------------------------------------------------------
/docs/examples/vfx_rendering.rst:
--------------------------------------------------------------------------------
1 | VFX Rendering
2 | -------------
3 |
4 | .. literalinclude:: ../../examples/vfx_rendering.py
5 |
--------------------------------------------------------------------------------
/docs/examples/workflow_design_pattern.rst:
--------------------------------------------------------------------------------
1 | Workflow Design Pattern
2 | -----------------------
3 |
4 | .. literalinclude:: ../../examples/workflow_design_pattern.py
5 |
--------------------------------------------------------------------------------
/docs/examples/world_clock.rst:
--------------------------------------------------------------------------------
1 | World Clock
2 | -----------
3 |
4 | .. literalinclude:: ../../examples/world_clock.py
5 |
--------------------------------------------------------------------------------
/docs/flowpipe-for-vfx-pipelines.md:
--------------------------------------------------------------------------------
1 | .. mdinclude:: ../flowpipe-for-vfx-pipelines.md
2 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. Flowpipe documentation master file, created by
2 | sphinx-quickstart on Mon Dec 28 15:51:26 2020.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | .. mdinclude:: ../README.md
7 |
8 | Code Documentation
9 | ==================
10 |
11 | .. toctree::
12 | :maxdepth: 4
13 |
14 | source/flowpipe
15 |
16 | examples/examples.rst
17 |
18 | Indices and tables
19 | ==================
20 |
21 | * :ref:`genindex`
22 | * :ref:`modindex`
23 | * :ref:`search`
24 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | m2r2
2 | ascii-canvas>=1.3.5
3 | Sphinx>=3.4.1
4 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.errors.rst:
--------------------------------------------------------------------------------
1 | flowpipe.errors module
2 | ======================
3 |
4 | .. automodule:: flowpipe.errors
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.event.rst:
--------------------------------------------------------------------------------
1 | flowpipe.event module
2 | =====================
3 |
4 | .. automodule:: flowpipe.event
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.graph.rst:
--------------------------------------------------------------------------------
1 | flowpipe.graph module
2 | =====================
3 |
4 | .. automodule:: flowpipe.graph
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.node.rst:
--------------------------------------------------------------------------------
1 | flowpipe.node module
2 | ====================
3 |
4 | .. automodule:: flowpipe.node
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.plug.rst:
--------------------------------------------------------------------------------
1 | flowpipe.plug module
2 | ====================
3 |
4 | .. automodule:: flowpipe.plug
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.rst:
--------------------------------------------------------------------------------
1 | API Reference
2 | =============
3 |
4 | Submodules
5 | ----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | flowpipe.errors
11 | flowpipe.event
12 | flowpipe.graph
13 | flowpipe.node
14 | flowpipe.plug
15 | flowpipe.utilities
16 |
17 | Module contents
18 | ---------------
19 |
20 | .. automodule:: flowpipe
21 | :members:
22 | :undoc-members:
23 | :show-inheritance:
24 |
--------------------------------------------------------------------------------
/docs/source/flowpipe.utilities.rst:
--------------------------------------------------------------------------------
1 | flowpipe.utilities module
2 | =========================
3 |
4 | .. automodule:: flowpipe.utilities
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/examples/dynamic_plug_names.py:
--------------------------------------------------------------------------------
1 | """Showing a programming pattern that defines plug names at runtime.
2 |
3 | In some applications it is useful to re-use the same node definition for
4 | different inputs/outputs - our working example is to compute a face match.
5 | To do so, we use an `EmbeddingNode` to compute features from both an input
6 | and a reference image, and then a `MatchNode` to decide from these embeddings
7 | whether the faces are the same.
8 |
9 | If the graph is to remain both clean and explicit, it is advantageous to name
10 | the plugs differently for the different `EmbeddingNode` instances.
11 |
12 | To do so, accept the plug names as parameters to the node's `__init__()` method.
13 | You can then define the InputPlug / OutputPlug with the given names. To access
14 | the dynamically named plugs, your INode instance needs to store the plug names
15 | as attributes, and the `compute()` method needs to accept generic keyword
16 | arguments.
17 | """
18 |
19 | from flowpipe import Graph, INode, InputPlug, OutputPlug
20 |
21 |
22 | def compute_embeddings(image):
23 | """A mock function for a call to a deep learning model or a web service."""
24 | del image # this is just a mock and doesn't do anything with the input
25 | return 42
26 |
27 |
28 | def compare_embeddings(image_emb, reference_emb, threshold=2):
29 | """A mock function for the appropriate comparison of embeddings."""
30 | return abs(image_emb - reference_emb) < threshold
31 |
32 |
33 | class EmbeddingNode(INode):
34 | """The embedding node computes facial features from an image."""
35 |
36 | def __init__(self, input_name, output_name, **kwargs):
37 | """Set up a new EmbeddingNode with given names for plugs."""
38 | super().__init__(**kwargs)
39 |
40 | self.input_name = input_name # Needed to access the value in compute
41 | InputPlug(input_name, self)
42 |
43 | self.output_name = output_name # Needed to access the value in compute
44 | OutputPlug(output_name, self)
45 |
46 | # Accept generic keyword arguments, since the names of the inputs are
47 | # not defined until runtime
48 | def compute(self, **kwargs):
49 | image = kwargs.pop(self.input_name)
50 |
51 | embedding = compute_embeddings(image)
52 |
53 | return {self.output_name: embedding}
54 |
55 |
56 | class MatchNode(INode):
57 | """The match node compares two embeddings."""
58 |
59 | def __init__(self, threshold=2, **kwargs):
60 | super().__init__(**kwargs)
61 | self.threshold = threshold
62 |
63 | InputPlug("image_emb", self)
64 | InputPlug("reference_emb", self)
65 |
66 | OutputPlug("facematch", self)
67 |
68 | def compute(self, image_emb, reference_emb):
69 | """Compare the embeddings."""
70 | match = compare_embeddings(image_emb, reference_emb, self.threshold)
71 | return {"facematch": match}
72 |
73 |
74 | def get_facematch_graph(threshold):
75 | """Set up facematching e.g. with paramters taken from a config."""
76 | facematch_graph = Graph()
77 |
78 | # It is useful to define distinct, descriptive plug names for each node
79 | image_node = EmbeddingNode(
80 | input_name="image",
81 | output_name="image_emb",
82 | graph=facematch_graph,
83 | name="ImageEmbeddings",
84 | )
85 |
86 | reference_node = EmbeddingNode(
87 | input_name="reference",
88 | output_name="reference_emb",
89 | graph=facematch_graph,
90 | name="ReferenceEmbeddings",
91 | )
92 |
93 | match_node = MatchNode(threshold=threshold, graph=facematch_graph)
94 |
95 | image_node.outputs["image_emb"] >> match_node.inputs["image_emb"]
96 | (
97 | reference_node.outputs["reference_emb"]
98 | >> match_node.inputs["reference_emb"]
99 | )
100 |
101 | match_node.outputs["facematch"].promote_to_graph("result")
102 |
103 | return facematch_graph
104 |
105 |
106 | if __name__ == "__main__":
107 | facematch = get_facematch_graph(1)
108 |
109 | image = "foo"  # load image from disk
110 | reference = "bar"  # load image from database
111 | facematch["ImageEmbeddings"].inputs["image"].value = image
112 | facematch["ReferenceEmbeddings"].inputs["reference"].value = reference
113 | facematch.evaluate(mode="threading")
112 |
113 | print(facematch)
114 | print("\n", facematch.outputs["result"].value)
115 |
--------------------------------------------------------------------------------
/examples/house_and_birthday.py:
--------------------------------------------------------------------------------
1 | """Two simple examples from the README file.
2 |
3 | Build a house:
4 |
5 | +------------------------+ +------------------------+ +---------------------------+
6 | | HireWorkers | | Build Roof | | Housewarming Party |
7 | |------------------------| |------------------------| |---------------------------|
8 | o amount<4> | o section<"roof"> | % attendees |
9 | | workers % % workers | +--->o attendees.0<> |
10 | | workers.0 o-----+--->o workers.0<> | |--->o attendees.1<> |
11 | | workers.1 o-----|--->o workers.1<> | |--->o attendees.2<> |
12 | | workers.2 o-----| | workers % |--->o attendees.3<> |
13 | | workers.3 o-----| | workers.0 o-----| o attendees.4<"Homeowner> |
14 | +------------------------+ | | workers.1 o-----| +---------------------------+
15 | | +------------------------+ |
16 | | +------------------------+ |
17 | | | Build Walls | |
18 | | |------------------------| |
19 | | o section<"walls"> | |
20 | | % workers | |
21 | +--->o workers.0<> | |
22 | +--->o workers.1<> | |
23 | | workers % |
24 | | workers.0 o-----+
25 | | workers.1 o-----+
26 | +------------------------+
27 |
28 | Throw a birthday party:
29 |
30 | +-------------------+ +---------------------+
31 | | InvitePeople | | Birthday Party |
32 | |-------------------| |---------------------|
33 | o amount<4> | +--->o attendees<> |
34 | | people o-----+ +---------------------+
35 | +-------------------+
36 |
37 | """
38 | from flowpipe import Graph, INode, InputPlug, Node, OutputPlug
39 |
40 |
41 | class HireWorkers(INode):
42 | """A node can be derived from the INode interface.
43 |
44 | The plugs are defined in the init method.
45 | The compute method receives the inputs from any connected upstream nodes.
46 | """
47 |
48 | def __init__(self, amount=None, **kwargs):
49 | super(HireWorkers, self).__init__(**kwargs)
50 | InputPlug("amount", self, amount)
51 | OutputPlug("workers", self)
52 |
53 | def compute(self, amount):
54 | workers = ["John", "Jane", "Mike", "Michelle"]
55 | print("{0} workers are hired to build the house.".format(amount))
56 | return {"workers.{0}".format(i): workers[i] for i in range(amount)}
57 |
58 |
59 | @Node(outputs=["workers"])
60 | def Build(workers, section):
61 | """A node can also be created by the Node decorator.outputs
62 |
63 | The inputs to the function are turned into InputsPlugs, outputs are defined
64 | in the decorator itself.
65 | The wrapped function is used as the compute method.
66 | """
67 | print(
68 | "{0} are building the {1}".format(", ".join(workers.values()), section)
69 | )
70 | return {"workers.{0}".format(i): worker for i, worker in workers.items()}
71 |
72 |
73 | @Node()
74 | def Party(attendees):
75 | """Nodes do not necessarily need to have output or input plugs."""
76 | print(
77 | "{0} and {1} are having a great party!".format(
78 | ", ".join(list(attendees.values())[:-1]),
79 | list(attendees.values())[-1],
80 | )
81 | )
82 |
83 |
84 | graph = Graph(name="Build a House")
85 | workers = HireWorkers(graph=graph, amount=4)
86 | build_walls = Build(graph=graph, name="Build Walls", section="walls")
87 | build_roof = Build(graph=graph, name="Build Roof", section="roof")
88 | party = Party(graph=graph, name="Housewarming Party")
89 |
90 | # Nodes are connected via their input/output plugs.
91 | workers.outputs["workers"]["0"].connect(build_walls.inputs["workers"]["0"])
92 | workers.outputs["workers"]["1"].connect(build_walls.inputs["workers"]["1"])
93 | workers.outputs["workers"]["2"].connect(build_roof.inputs["workers"]["0"])
94 | workers.outputs["workers"]["3"].connect(build_roof.inputs["workers"]["1"])
95 |
96 | # Connecting nodes can be done via the bit shift operator as well
97 | build_walls.outputs["workers"]["0"] >> party.inputs["attendees"]["0"]
98 | build_walls.outputs["workers"]["1"] >> party.inputs["attendees"]["2"]
99 | build_roof.outputs["workers"]["0"] >> party.inputs["attendees"]["1"]
100 | build_roof.outputs["workers"]["1"] >> party.inputs["attendees"]["3"]
101 |
102 | # Initial values can be set onto the input plugs for initialization
103 | party.inputs["attendees"]["4"].value = "Homeowner"
104 |
105 |
106 | print("---------------------------------------")
107 | print(graph.name)
108 | print(graph)
109 | print(graph.list_repr())
110 | print("---------------------------------------")
111 | graph.evaluate()
112 | print("---------------------------------------")
113 |
114 |
115 | graph = Graph(name="Celebrate a Birthday Party")
116 |
117 |
118 | @Node(outputs=["people"])
119 | def InvitePeople(amount):
120 | people = ["John", "Jane", "Mike", "Michelle"]
121 | d = {"people.{0}".format(i): people[i] for i in range(amount)}
122 | d["people"] = {people[i]: people[i] for i in range(amount)}
123 | return d
124 |
125 |
126 | invite = InvitePeople(graph=graph, amount=4)
127 | birthday_party = Party(graph=graph, name="Birthday Party")
128 | invite.outputs["people"] >> birthday_party.inputs["attendees"]
129 |
130 |
131 | print("---------------------------------------")
132 | print(graph.name)
133 | print(graph)
134 | print("---------------------------------------")
135 | graph.evaluate()
136 | print("---------------------------------------")
137 |
--------------------------------------------------------------------------------
/examples/nested_graphs.py:
--------------------------------------------------------------------------------
1 | """Nested graphs are supported in flowpipe."""
2 | from flowpipe import Graph, Node
3 |
4 |
5 | @Node(outputs=["file"])
6 | def MyNode(file):
7 | # Something is done in here ...
8 | return {"file": file}
9 |
10 |
11 | # A graph that fixes an incoming file, cleaning up messy names etc.
12 | #
13 | # +-----------------------+ +-------------------------+
14 | # | Cleanup Filename | | Change Lineendings |
15 | # |-----------------------| |-------------------------|
16 | # o file<> | +--->o file<> |
17 | # | file o-----+ | file o
18 | # +-----------------------+ +-------------------------+
19 | fix_file = Graph(name="fix_file")
20 | cleanup_filename = MyNode(name="Cleanup Filename", graph=fix_file)
21 | change_lineendings = MyNode(name="Change Lineendings", graph=fix_file)
22 | cleanup_filename.outputs["file"].connect(change_lineendings.inputs["file"])
23 |
24 |
25 | # A second graph finds files and extracts their contents into a database
26 | # +----------------+ +----------------------------+ +----------------+
27 | # | Find File | | Read Values from File | | Update DB |
28 | # |----------------| |----------------------------| |----------------|
29 | # o file<> | +--->o file<> | +--->o file<> |
30 | # | file o-----+ | file o-----+ | file o
31 | # +----------------+ +----------------------------+ +----------------+
32 | update_db_from_file = Graph(name="update_db_from_file")
33 | find_file = MyNode(name="Find File", graph=update_db_from_file)
34 | values_from_file = MyNode(
35 | name="Read Values from File", graph=update_db_from_file
36 | )
37 | update_db = MyNode(name="Update DB", graph=update_db_from_file)
38 | find_file.outputs["file"].connect(values_from_file.inputs["file"])
39 | values_from_file.outputs["file"].connect(update_db.inputs["file"])
40 |
41 |
42 | # The second graph, however, relies on clean input files, so the first graph
43 | # can be used within the second "update db" graph.
44 | # For this purpose, graphs can promote input and output plugs from their nodes
45 | # to the graph level, making other graphs aware of them:
46 | fix_file["Cleanup Filename"].inputs["file"].promote_to_graph(
47 | name="file_to_clean"
48 | )
49 | fix_file["Change Lineendings"].outputs["file"].promote_to_graph(
50 | name="clean_file"
51 | )
52 |
53 | # Now the update_db_from_file graph can connect nodes to the fix_file graph
54 | find_file.outputs["file"].connect(fix_file.inputs["file_to_clean"])
55 | fix_file.outputs["clean_file"].connect(
56 | update_db_from_file["Read Values from File"].inputs["file"]
57 | )
58 |
59 |
60 | # The result now looks like this:
61 | #
62 | # +---update_db_from_file----+ +-------fix_file--------+ +--------fix_file---------+ +----update_db_from_file-----+ +---update_db_from_file----+
63 | # | Find File | | Cleanup Filename | | Change Lineendings | | Read Values from File | | Update DB |
64 | # |--------------------------| |-----------------------| |-------------------------| |----------------------------| |--------------------------|
65 | # o file<> | +--->o file<> | +--->o file<> | +--->o file<> | +--->o file<> |
66 | # | file o-----+ | file o-----+ | file o-----+ | file o-----+ | file o
67 | # +--------------------------+ +-----------------------+ +-------------------------+ +----------------------------+ +--------------------------+
68 | print(fix_file)
69 |
70 |
71 | # Subgraphs can be accessed by their name from any participating graph
72 | assert update_db_from_file.subgraphs["fix_file"] is fix_file
73 | assert fix_file.subgraphs["update_db_from_file"] is update_db_from_file
74 |
--------------------------------------------------------------------------------
/examples/vfx_render_farm_conversion.py:
--------------------------------------------------------------------------------
1 | """Demonstrating how to convert a flowpipe graph to a render farm job.
2 |
3 | This guide expects that your render farm can handle dependencies between tasks.
4 | """
5 | import json
6 | import logging
7 | import os
8 | from tempfile import gettempdir
9 |
10 | from flowpipe import Graph, INode, Node
11 |
12 | # -----------------------------------------------------------------------------
13 | #
14 | # Necessary utilities
15 | #
16 | # -----------------------------------------------------------------------------
17 |
18 |
19 | class JsonDatabase:
20 | """The Database stores the JSON-serialized nodes.
21 |
22 | The storage could also be handled via a database; files are just the
23 | easiest way for demonstration purposes. In production, file based storage
24 | also has advantages for debugging and allows for easy hacking by just
25 | altering the JSON files directly.
26 | """
27 |
28 | PATH = os.path.join(gettempdir(), "json-database", "{identifier}.json")
29 |
30 | @staticmethod
31 | def set(node):
32 | """Store the node under it's identifier."""
33 | serialized_json = JsonDatabase.PATH.format(identifier=node.identifier)
34 | if not os.path.exists(os.path.dirname(serialized_json)):
35 | os.makedirs(os.path.dirname(serialized_json))
36 | with open(serialized_json, "w") as f:
37 | json.dump(node.serialize(), f, indent=2)
38 | return serialized_json
39 |
40 | @staticmethod
41 | def get(identifier):
42 | """Retrieve the node behind the given identifier."""
43 | serialized_json = JsonDatabase.PATH.format(identifier=identifier)
44 | with open(serialized_json, "r") as f:
45 | data = json.load(f)
46 | return INode.deserialize(data)
47 |
48 |
49 | # Command templates to execute a flowpipe node in the terminal.
50 | # Different python interpreters and commands are used based on the host application.
51 | # The template just needs the path to the serialized json file and optionally
52 | # a range of frames passed to the node for the implicit batch conversion.
53 | COMMANDS = {
54 | "python": (
55 | "python -c '"
56 | "from my_farm import conversion;"
57 | 'conversion.evaluate_on_farm("{serialized_json}", {frames})\''
58 | ),
59 | "maya": (
60 | "mayapy -c '"
61 | "import maya.standalone;"
62 | 'maya.standalone.initialize(name="python");'
63 | "from my_farm import conversion;"
64 | 'conversion.evaluate_on_farm("{serialized_json}", {frames})\''
65 | ),
66 | }
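# For illustration: a non-batched node using the default "python" interpreter
# and serialized to e.g. /tmp/json-database/MyNode-<uuid>.json (the exact path
# depends on the platform tempdir) would produce a task command roughly like:
#
#   python -c 'from my_farm import conversion;conversion.evaluate_on_farm("/tmp/json-database/MyNode-<uuid>.json", None)'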
67 |
68 |
69 | def convert_graph_to_job(graph):
70 | """Convert the graph to a dict representing a typical render farm job."""
71 | job = {"name": graph.name, "tasks": []}
72 |
73 | # Turn every node into a farm task
74 | tasks = {}
75 | for node in graph.nodes:
76 | serialized_json = JsonDatabase.set(node)
77 |
78 | tasks[node.name] = []
79 |
80 | # IMPLICIT BATCHING:
81 | # Create individual tasks for each batch if the batch size is defined
82 | # Feed the calculated frame range to each batch
83 | if node.metadata.get("batch_size") is not None:
84 | batch_size = node.metadata["batch_size"]
85 | frames = node.inputs["frames"].value
86 |             i = 0
87 |             while i < len(frames):
88 |                 # Clamp each batch to the end of the frame range
89 |                 end = min(i + batch_size, len(frames))
90 |                 f = frames[i:end]
92 |
93 |                 task = {"name": "{0}-{1}".format(node.name, i // batch_size)}
94 | command = COMMANDS.get(
95 | node.metadata.get("interpreter", "python"), None
96 | )
97 | task["command"] = command.format(
98 | serialized_json=serialized_json, frames=f
99 | )
100 | job["tasks"].append(task)
101 |
102 | tasks[node.name].append(task)
103 |
104 | i += batch_size
105 | else:
106 | task = {"name": node.name}
107 | command = COMMANDS.get(
108 | node.metadata.get("interpreter", "python"), None
109 | )
110 | task["command"] = command.format(
111 | serialized_json=serialized_json, frames=None
112 | )
113 | job["tasks"].append(task)
114 |
115 | tasks[node.name].append(task)
116 |
117 |     # Derive the dependencies between the tasks from the connections of the Nodes
118 | for node_name in tasks:
119 | for task in tasks[node_name]:
120 | node = graph[node_name]
121 | task["dependencies"] = []
122 | for upstream in [n.name for n in node.upstream_nodes]:
123 | task["dependencies"] += [t["name"] for t in tasks[upstream]]
124 |
125 | return job
126 |
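# For the example graphs below, the returned job dict takes roughly this
# shape (illustrative only, commands abbreviated):
#
#   {
#       "name": "Rendering",
#       "tasks": [
#           {"name": "MayaRender-0", "command": "mayapy -c '...'", "dependencies": []},
#           ...
#           {"name": "UpdateDatabase", "command": "python -c '...'", "dependencies": ["MayaRender-0", ...]},
#       ],
#   }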
127 |
128 | def evaluate_on_farm(serialized_json, frames=None):
129 | """Evaluate the node behind the given json file.
130 |
131 | 1. Deserialize the node
132 | 2. Collect any input values from any upstream dependencies
133 | For implicit batching, the given frames are assigned to the node,
134 |         overriding whatever might be stored in the json file, because all
135 | batches share the same json file.
136 | 3. Evaluate the node
137 | 4. Serialize the node back into its original file
138 | For implicit farm conversion, the serialization only happens once,
139 | for the 'last' batch, knowing that the last batch in numbers might
140 | not be the 'last' batch actually executed.
141 | """
142 | # Debug logs might be useful on the farm
143 |     logging.basicConfig(level=logging.DEBUG)
144 |
145 | # Deserialize the node from the serialized json
146 | with open(serialized_json, "r") as f:
147 | data = json.load(f)
148 | node = INode.deserialize(data)
149 |
150 | # Retrieve the upstream output data
151 | for name, input_plug in data["inputs"].items():
152 | for identifier, output_plug in input_plug["connections"].items():
153 | upstream_node = JsonDatabase.get(identifier)
154 | node.inputs[name].value = upstream_node.outputs[output_plug].value
155 |
156 | # Specifically assign the batch frames here if applicable
157 | if frames is not None:
158 |         all_frames = node.inputs["frames"].value
159 |         node.inputs["frames"].value = frames
160 |
161 | # Actually evalute the node
162 | node.evaluate()
163 |
164 | # Store the result back into the same file ONLY once
165 | # ALL batch processes access the same json file so the result is only stored
166 | # for the last batch, knowing that the last batch in numbers might not be
167 | # the last batch actually executed
168 | if frames is not None and frames[-1] != all_frames[-1]:
169 | return
170 |
171 | with open(serialized_json, "w") as f:
172 | json.dump(node.serialize(), f, indent=2)
173 |
174 |
175 | # -----------------------------------------------------------------------------
176 | #
177 | # Examples
178 | #
179 | # -----------------------------------------------------------------------------
180 |
181 |
182 | @Node(outputs=["renderings"], metadata={"interpreter": "maya"})
183 | def MayaRender(frames, scene_file):
184 |     """Render the given frames from the given scene."""
185 | return {"renderings": "/renderings/file.%04d.exr"}
186 |
187 |
188 | @Node(outputs=["status"])
189 | def UpdateDatabase(id_, images):
190 | """Update the database entries of the given asset with the given data."""
191 | return {"status": True}
192 |
193 |
194 | def implicit_batching(frames, batch_size):
195 | """Batches are created during the farm conversion."""
196 | graph = Graph(name="Rendering")
197 | render = MayaRender(
198 | graph=graph,
199 | frames=list(range(frames)),
200 | scene_file="/scene/for/rendering.ma",
201 | metadata={"batch_size": batch_size},
202 | )
203 | update = UpdateDatabase(graph=graph, id_=123456)
204 | render.outputs["renderings"].connect(update.inputs["images"])
205 |
206 | print(graph)
207 | print(json.dumps(convert_graph_to_job(graph), indent=2))
208 |
209 |
210 | def explicit_batching(frames, batch_size):
211 | """Batches are already part of the graph."""
212 | graph = Graph(name="Rendering")
213 | update_database = UpdateDatabase(graph=graph, id_=123456)
214 | for i in range(0, frames, batch_size):
215 | maya_render = MayaRender(
216 | name="MayaRender{0}-{1}".format(i, i + batch_size),
217 | graph=graph,
218 | frames=list(range(i, i + batch_size)),
219 | scene_file="/scene/for/rendering.ma",
220 | )
221 | maya_render.outputs["renderings"].connect(
222 | update_database.inputs["images"][str(i)]
223 | )
224 |
225 | print(graph)
226 | print(json.dumps(convert_graph_to_job(graph), indent=2))
227 |
228 |
229 | if __name__ == "__main__":
230 | implicit_batching(30, 10)
231 | explicit_batching(30, 10)
232 |
--------------------------------------------------------------------------------
/examples/vfx_rendering.py:
--------------------------------------------------------------------------------
1 | """Demo a complex workflow of a rendering with a series of subsequent actions:
2 |
3 | - Render a CG render out of Maya
4 | - Check the resulting images for defects caused by potential server glitches
5 | - Register the CG render in the database
6 | - Create and render a slap comp
7 | - Convert the rendered slapcomp to a quicktime
8 |
9 | +---------------------------+ +----------------------+ +-------------------------+ +-----------------------+ +-----------------------+
10 | | MayaRender0-10 | | CheckImages0-10 | | CreateSlapComp | | NukeRender0-10 | | Quicktime |
11 | |---------------------------| |----------------------| |-------------------------| |-----------------------| |-----------------------|
12 | o frames<[0, 1, 2, > | +--->o images<> | % images | o frames<[0, 1, 2, > | % images |
13 | o scene_file<"/scene/fo> | | | images o---------->o images.0<> | +--->o scene_file<> | +--->o images.0<> |
14 | | renderings o-----+ +----------------------+ |--->o images.10<> | --+ | renderings o-----+--->o images.10<> |
15 | +---------------------------+ +-----------------------+ |--->o images.20<> | | +-----------------------+ |--->o images.20<> |
16 | +---------------------------+ | CheckImages10-20 | | o template<"nuke_temp> | | +-----------------------+ | | quicktime o
17 | | MayaRender10-20 | |-----------------------| | | slapcomp o--- | | NukeRender10-20 | | +-----------------------+
18 | |---------------------------| +--->o images<> | | +-------------------------+ | |-----------------------| |
19 | o frames<[10, 11, 1> | | | images o-----| +-----------------------+ | o frames<[10, 11, 1> | |
20 | o scene_file<"/scene/fo> | | +-----------------------+ | | UpdateDatabase | +--->o scene_file<> | |
21 | | renderings o-----+ +-----------------------+ | |-----------------------| | | renderings o-----+
22 | +---------------------------+ | CheckImages20-30 | | o id_<123456> | | +-----------------------+ |
23 | +---------------------------+ |-----------------------| | % images | | +-----------------------+ |
24 | | MayaRender20-30 | +--->o images<> | +--->o images.0<> | | | NukeRender20-30 | |
25 | |---------------------------| | | images o-----+--->o images.10<> | | |-----------------------| |
26 | o frames<[20, 21, 2> | | +-----------------------+ +--->o images.20<> | | o frames<[20, 21, 2> | |
27 | o scene_file<"/scene/fo> | | | status o +--->o scene_file<> | |
28 | | renderings o-----+ +-----------------------+ | renderings o-----+
29 | +---------------------------+ +-----------------------+
30 | """
31 |
32 | from flowpipe import Graph, Node
33 |
34 |
35 | @Node(outputs=["renderings"], metadata={"interpreter": "maya"})
36 | def MayaRender(frames, scene_file):
37 | return {"renderings": "/renderings/file.%04d.exr"}
38 |
39 |
40 | @Node(outputs=["images"])
41 | def CheckImages(images):
42 | return {"images": images}
43 |
44 |
45 | @Node(outputs=["slapcomp"])
46 | def CreateSlapComp(images, template):
47 | return {"slapcomp": "slapcomp.nk"}
48 |
49 |
50 | @Node(outputs=["renderings"], metadata={"interpreter": "nuke"})
51 | def NukeRender(frames, scene_file):
52 | return {"renderings": "/renderings/file.%04d.exr"}
53 |
54 |
55 | @Node(outputs=["quicktime"])
56 | def Quicktime(images):
57 | return {"quicktime": "resulting.mov"}
58 |
59 |
60 | @Node(outputs=["status"])
61 | def UpdateDatabase(id_, images):
62 | """Update the database entries of the given asset with the given data."""
63 | return {"status": True}
64 |
65 |
66 | def complex_cg_render(frames, batch_size):
67 | graph = Graph(name="Rendering")
68 |
69 | slapcomp = CreateSlapComp(graph=graph, template="nuke_template.nk")
70 | update_database = UpdateDatabase(graph=graph, id_=123456)
71 |
72 | for i in range(0, frames, batch_size):
73 | maya_render = MayaRender(
74 | name="MayaRender{0}-{1}".format(i, i + batch_size),
75 | graph=graph,
76 | frames=range(i, i + batch_size),
77 | scene_file="/scene/for/rendering.ma",
78 | )
79 | check_images = CheckImages(
80 | name="CheckImages{0}-{1}".format(i, i + batch_size), graph=graph
81 | )
82 | maya_render.outputs["renderings"].connect(
83 | check_images.inputs["images"]
84 | )
85 | check_images.outputs["images"].connect(
86 | slapcomp.inputs["images"][str(i)]
87 | )
88 | check_images.outputs["images"].connect(
89 | update_database.inputs["images"][str(i)]
90 | )
91 |
92 |     quicktime = Quicktime(graph=graph)
93 |
94 | for i in range(0, frames, batch_size):
95 | nuke_render = NukeRender(
96 | name="NukeRender{0}-{1}".format(i, i + batch_size),
97 | graph=graph,
98 | frames=range(i, i + batch_size),
99 | )
100 | slapcomp.outputs["slapcomp"].connect(nuke_render.inputs["scene_file"])
101 | nuke_render.outputs["renderings"].connect(
102 | quicktime.inputs["images"][str(i)]
103 | )
104 |
105 | print(graph)
106 |
107 |
108 | if __name__ == "__main__":
109 | complex_cg_render(30, 10)
110 |
--------------------------------------------------------------------------------
/examples/workflow_design_pattern.py:
--------------------------------------------------------------------------------
1 | """Demonstration of the Workflow Design Pattern.
2 |
3 | As the name suggests, this pattern wants to represent workflows.
4 | It is basically an extension of the 'Command Pattern' meant for more complex,
5 | long-running commands consisting of multiple sub-commands. Workflows also
6 | provide multiple ways of evaluation, usually local and remote.
7 |
8 | A workflow would be a common, pre-defined set of tasks frequently used in a
9 | pipeline, for example:
10 | - prepare a delivery to the client
11 | - publish geometry with a subsequent turntable rendering
12 | - ingest data from vendors, including data cleanup and transformation
13 |
14 | The Workflow builds a Graph and initializes it with user-provided settings as
15 | well as data taken from other sources (database, filesystem).
16 | """
17 | import getpass
18 |
19 | from flowpipe import Graph, Node
20 |
21 |
22 | class Workflow(object):
23 | """Abstract base class defining a workflow, based on a flowpipe graph.
24 |
25 | The Workflow holds a graph and provides two ways to evaluate the graph,
26 | locally and remotely.
27 | """
28 |
29 | def __init__(self):
30 | self.graph = Graph()
31 |
32 | def evaluate_locally(self):
33 | """Evaluate the graph locally."""
34 | self.graph.evaluate()
35 |
36 | def evaluate_remotely(self):
37 | """See examples/vfx_render_farm_conversion.py on how to implement a
38 | conversion from flowpipe graphs to your render farm.
39 | """
40 | pass
41 |
42 |
43 | class PublishWorkflow(Workflow):
44 | """Publish a model and add a turntable render of it to the database."""
45 |
46 | def __init__(self, source_file):
47 | super(PublishWorkflow, self).__init__()
48 | publish = Publish(graph=self.graph)
49 | message = SendMessage(graph=self.graph)
50 | turntable = CreateTurntable(graph=self.graph)
51 | update_database = UpdateDatabase(graph=self.graph)
52 | publish.outputs["published_file"].connect(
53 | turntable.inputs["alembic_cache"]
54 | )
55 | publish.outputs["published_file"].connect(
56 | message.inputs["values"]["path"]
57 | )
58 | turntable.outputs["turntable"].connect(
59 | update_database.inputs["images"]
60 | )
61 |
62 | # Initialize the graph from user input
63 | publish.inputs["source_file"].value = source_file
64 |
65 | # Initialize the graph through pipeline logic
66 | # These things can also be done in the nodes themselves of course,
67 | # it's a design choice and depends on the case
68 | message.inputs["template"].value = (
69 | "Hello,\n\n"
70 | "The following file has been published: {path}\n\n"
71 | "Thank you,\n\n"
72 | "{sender}"
73 | )
74 | message.inputs["values"]["sender"].value = getpass.getuser()
75 | message.inputs["values"]["recipients"].value = [
76 | "john@mail.com",
77 | "jane@mail.com",
78 | ]
79 | turntable.inputs["render_template"].value = "template.ma"
80 | update_database.inputs["asset"].value = source_file.split(".")[0]
81 | update_database.inputs["status"].value = "published"
82 |
83 |
84 | # -----------------------------------------------------------------------------
85 | #
86 | # The Nodes used in the Graph
87 | #
88 | # -----------------------------------------------------------------------------
89 |
90 |
91 | @Node(outputs=["published_file"])
92 | def Publish(source_file):
93 | """Publish the given source file."""
94 | return {"published_file": "/published/file.abc"}
95 |
96 |
97 | @Node(outputs=["return_status"])
98 | def SendMessage(template, values, recipients):
99 | """Send message to given recipients."""
100 | print("--------------------------------------")
101 | print(template.format(**values))
102 | print("--------------------------------------")
103 | return {"return_status": 0}
104 |
105 |
106 | @Node(outputs=["turntable"])
107 | def CreateTurntable(alembic_cache, render_template):
108 | """Load the given cache into the given template file and render."""
109 | return {"turntable": "/turntable/turntable.%04d.jpg"}
110 |
111 |
112 | @Node(outputs=["asset"])
113 | def UpdateDatabase(asset, images, status):
114 | """Update the database entries of the given asset with the given data."""
115 | return {"asset": asset}
116 |
117 |
118 | if __name__ == "__main__":
119 | workflow = PublishWorkflow("model.ma")
120 | print(workflow.graph)
121 | workflow.evaluate_locally()
122 |
--------------------------------------------------------------------------------
/examples/world_clock.py:
--------------------------------------------------------------------------------
1 | """Demonstrating the basic capabilities of flowpipe.
2 |
3 | A graph implementation of a world clock for demonstrational purposes:
4 |
5 | +------------------+ +---------------------+ +----------------------+
6 | | CurrentTime | | London | | WorldClock |
7 | |------------------| |---------------------| |----------------------|
8 | | time o-----+--->o time<> | % times |
9 | +------------------+ | o timezone<0> | +--->o times.London<> |
10 | | | converted_time o-----+--->o times.Munich<> |
11 | | +---------------------+ +--->o times.Vancouver<> |
12 | | +---------------------+ | +----------------------+
13 | | | Munich | |
14 | | |---------------------| |
15 | |--->o time<> | |
16 | | o timezone<1> | |
17 | | | converted_time o-----|
18 | | +---------------------+ |
19 | | +---------------------+ |
20 | | | Vancouver | |
21 | | |---------------------| |
22 | +--->o time<> | |
23 | o timezone<-8> | |
24 | | converted_time o-----+
25 | +---------------------+
26 | """
27 | from datetime import datetime
28 | from time import time
29 |
30 | from flowpipe import Graph, INode, InputPlug, Node, OutputPlug
31 |
32 |
33 | @Node(outputs=["time"])
34 | def CurrentTime():
35 | """The @Node decorator turns the wrapped function into a Node object.
36 |
37 | Any arguments to the function are used as input plugs to the Node.
38 |     The outputs are defined in the decorator explicitly.
39 | """
40 | return {"time": time()}
41 |
42 |
43 | class ConvertTime(INode):
44 | """A node can be derived from the INode interface.
45 |
46 | The plugs are defined in the init method.
47 |     The compute method receives the inputs from any connected upstream nodes.
48 | """
49 |
50 | def __init__(self, time=None, timezone=0, **kwargs):
51 | super(ConvertTime, self).__init__(**kwargs)
52 | InputPlug("time", self)
53 | InputPlug("timezone", self, timezone)
54 | OutputPlug("converted_time", self)
55 |
56 | def compute(self, time, timezone):
57 | return {"converted_time": time + timezone * 60 * 60}
58 |
59 |
60 | @Node()
61 | def ShowTimes(times):
62 |     """Nodes do not necessarily have to define output plugs."""
63 | print("-- World Clock -------------------")
64 | for location, t in times.items():
65 | print(
66 | "It is now: {time:%H:%M} in {location}".format(
67 | time=datetime.fromtimestamp(t), location=location
68 | )
69 | )
70 | print("----------------------------------")
71 |
72 |
73 | # The Graph holds the nodes
74 | graph = Graph(name="World Clock")
75 | current_time = CurrentTime(graph=graph)
76 | van = ConvertTime(name="Vancouver", timezone=-8, graph=graph)
77 | ldn = ConvertTime(name="London", timezone=0, graph=graph)
78 | muc = ConvertTime(name="Munich", timezone=1, graph=graph)
79 | world_clock = ShowTimes(graph=graph)
80 |
81 | # Connecting nodes can be done via connect() or via the bit shift operator
82 | current_time.outputs["time"].connect(van.inputs["time"])
83 | current_time.outputs["time"].connect(ldn.inputs["time"])
84 | current_time.outputs["time"].connect(muc.inputs["time"])
85 | van.outputs["converted_time"] >> world_clock.inputs["times"]["Vancouver"]
86 | ldn.outputs["converted_time"] >> world_clock.inputs["times"]["London"]
87 | muc.outputs["converted_time"] >> world_clock.inputs["times"]["Munich"]
88 |
89 | # Display the graph
90 | print(graph)
91 |
92 | graph.evaluate()
93 |
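# Running this script prints the ASCII graph from the module docstring above,
# followed by output along these lines (actual times depend on when and where
# it is run):
#
#   -- World Clock -------------------
#   It is now: 14:05 in Vancouver
#   It is now: 22:05 in London
#   It is now: 23:05 in Munich
#   ----------------------------------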
--------------------------------------------------------------------------------
/flowpipe-for-vfx-pipelines.md:
--------------------------------------------------------------------------------
1 | ## Flowpipe for VFX Pipelines
2 |
3 | Flowpipe was inspired by commonly experienced challenges in vfx/animation pipelines.
4 |
5 | **Re-usability**
6 |
7 | Flowpipe encourages the re-usability of code through encapsulation into nodes that do just one thing. [Code example](examples/house_and_birthday.py)!
8 |
9 | **Code Design in a 1000+ ways**
10 |
11 | Usually a pipeline codebase tends to be organized in as many ways as the number of developers the company has ever employed. Every situation has a multitude of possible solutions, and if there is no common framework and not enough structure, every developer will pick whatever feels right to them.
12 | Flowpipe helps by providing this very simple framework. Developers will be able to understand each other's code better and faster and can collaborate more easily.
13 |
14 | **Planning and Coordination**
15 |
16 | Thinking about the solution to a problem in a graph-like fashion is a helpful approach in a lot of situations.
17 | Since flowpipe naturally supports this approach, the planning phase can oftentimes be mapped more or less directly to a flowpipe graph. This helps to reason about the implementation, not only with other developers but also with non-technical people!
18 |
19 | **Render Farm**
20 |
21 | Usually any code that has to run on a render farm is wrapped individually with some help from the farm API itself and some in-house functionality. This means that the render farm leaves an imprint everywhere in the code base. It also means that getting things to run on the farm is usually a tedious process requiring custom code every time.
22 | This is where flowpipe can really make a difference: by abstracting the logic into a graph, it can be translated into a farm job network in a unified way, avoiding all these issues.
23 | Please see the detailed explanation below and the code examples on [vfx_render_farm_conversion.py](examples/vfx_render_farm_conversion.py)!
24 |
25 | ### Workflow Design Pattern
26 |
27 | As the name suggests, this pattern wants to represent workflows. It is basically an extension of the 'Command Pattern' meant for more complex, long-running commands consisting of multiple sub-commands. Workflows also provide multiple ways of evaluation, usually local and remote (farm).
28 |
29 | A workflow would be a common, pre-defined set of tasks frequently used in a pipeline, for example:
30 |
31 | * prepare a delivery to the client
32 | * publish geometry with a subsequent turntable rendering
33 | * ingest data from vendors, including data cleanup and transformation
34 |
35 | The Workflow builds a Graph and initializes it with user-provided settings as well as data taken from other sources (database, filesystem).
36 |
37 | Refer to [workflow_design_pattern.py](examples/workflow_design_pattern.py) for an implementation example.
38 |
39 | This can be a powerful approach, especially when used with the Farm Conversion. A short usage sketch follows the diagram below.
40 |
41 | ```c
42 | +--------------------------+ +--------------------------------+ +----------------------+
43 | | Publish | | CreateTurntable | | UpdateDatabase |
44 | |--------------------------| |--------------------------------| |----------------------|
45 | o source_file<"model.ma"> | +--->o alembic_cache<> | o asset<"model"> |
46 | | published_file o-----+ o render_template<"template.> | +--->o images<> |
47 | +--------------------------+ | | turntable o-----+ o status<"published> |
48 | | +--------------------------------+ | asset o
49 | | +----------------------------------+ +----------------------+
50 | | | SendMessage |
51 | | |----------------------------------|
52 | | o recipients<> |
53 | | o template<"Hello,\n\> |
54 | | % values |
55 | +--->o values.path<> |
56 | o values.recipients<["john@mai> |
57 | o values.sender<"sender"> |
58 | | return_status o
59 | +----------------------------------+
60 | ```
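
Using a workflow then comes down to instantiating it and picking an evaluation strategy. A minimal sketch, using the names from [workflow_design_pattern.py](examples/workflow_design_pattern.py):

```python
workflow = PublishWorkflow("model.ma")
workflow.evaluate_locally()      # evaluate the graph in the current process
# workflow.evaluate_remotely()   # or hand the graph over to a render farm
```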
61 |
62 | ### Farm Conversion
63 |
64 | Since workflows rely on Flowpipe graphs, they can be converted into a farm job of equal shape through a single entry point.
65 |
66 | Every node is converted into a farm task. The flowpipe connections are used to determine the farm task dependencies.
67 | Each node gets serialized to json and stored in a "database" before submission. On the farm, the node gets deserialized from there, with any upstream data also taken from the json "database". After evaluation, the node gets serialized back into the database, making the outputs available for the subsequent nodes.
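
A minimal sketch of that round trip. The `Double` node and the `my_nodes` module are assumptions for illustration; the node has to live in an importable module, since nodes defined in `__main__` cannot be serialized:

```python
import json

from flowpipe import Graph, INode

from my_nodes import Double  # hypothetical @Node with input "x" and output "out"

graph = Graph(name="demo")
node = Double(graph=graph, x=21)

# Submission side: write the node into the json "database"
with open("/tmp/demo.json", "w") as f:
    json.dump(node.serialize(), f)

# Farm side: rebuild the node, evaluate it and store the result again,
# making the output available to downstream tasks
with open("/tmp/demo.json", "r") as f:
    restored = INode.deserialize(json.load(f))
restored.evaluate()
with open("/tmp/demo.json", "w") as f:
    json.dump(restored.serialize(), f)
```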
68 |
69 | There are three basic utilities required for this approach:
70 |
71 | 1. Convert a Graph to an equivalent farm job
72 | 2. Evaluate a Node on the farm
73 | 3. Handle the data transfer between nodes on the farm
74 |
75 | Any farm-specific settings are stored in the metadata of the nodes and/or provided directly on job creation.
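
For instance, the implicit batching example sets the interpreter in the `@Node` decorator and the batch size on the node instance (code taken from the example):

```python
@Node(outputs=["renderings"], metadata={"interpreter": "maya"})
def MayaRender(frames, scene_file):
    return {"renderings": "/renderings/file.%04d.exr"}

render = MayaRender(
    graph=graph,
    frames=list(range(30)),
    scene_file="/scene/for/rendering.ma",
    metadata={"batch_size": 10},
)
```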
76 |
77 | Refer to [vfx_render_farm_conversion.py](examples/vfx_render_farm_conversion.py) for a pseudo-implementation of all the required parts and to [vfx_rendering.py](examples/vfx_rendering.py) for an example of a complex graph.
78 | The examples also touch on more complex concepts like implicit and explicit batching.
79 |
--------------------------------------------------------------------------------
/flowpipe/__init__.py:
--------------------------------------------------------------------------------
1 | """Flow-based programming with python."""
2 | from .graph import Graph  # noqa F401
3 | from .node import INode, Node # noqa F401
4 | from .plug import ( # noqa F401
5 | InputPlug,
6 | InputPlugGroup,
7 | OutputPlug,
8 | SubInputPlug,
9 | SubOutputPlug,
10 | )
11 |
--------------------------------------------------------------------------------
/flowpipe/errors.py:
--------------------------------------------------------------------------------
1 | """Exceptions raised by flowpipe."""
2 |
3 |
4 | class CycleError(Exception):
5 | """Raised when an action would result in a cycle in a graph."""
6 |
7 |
8 | class FlowpipeMultiprocessingError(Exception):
9 | """Raised when a Node can not be pickled, most likely due to inputs not being picklable."""
10 |
--------------------------------------------------------------------------------
/flowpipe/evaluator.py:
--------------------------------------------------------------------------------
1 | """Classes to evaluate flowpipe Graphs in various ways."""
2 |
3 | import logging
4 | import time
5 | from concurrent import futures
6 | from multiprocessing import Manager, Process
7 | from pickle import PicklingError
8 |
9 | from .errors import FlowpipeMultiprocessingError
10 |
11 | log = logging.getLogger(__name__)
12 |
13 |
14 | class Evaluator:
15 | """An engine to evaluate a Graph."""
16 |
17 | @staticmethod
18 | def _evaluation_sequence(graph):
19 | """Sort Nodes into a sequential, flat execution order.
20 |
21 | Replicated here for flexibility; defaults to Graph's implementation.
22 |
23 | Args:
24 | graph (flowpipe.Graph): The graph to evaluate.
25 | Returns:
26 | (list of INode): The nodes in the order in which to compute them.
27 | """
28 | return graph.evaluation_sequence
29 |
30 | def _nodes_to_evaluate(self, graph, skip_clean):
31 | """Get the nodes to evaluate, in order."""
32 | nodes = self._evaluation_sequence(graph)
33 | if skip_clean:
34 | nodes = [n for n in nodes if n.is_dirty]
35 | return nodes
36 |
37 | def _evaluate_nodes(self, nodes):
38 | """Perform the actual node evaluation."""
39 | raise NotImplementedError # pragma: no cover
40 |
41 | def evaluate(self, graph, skip_clean=False):
42 | """Evaluate the graph.
43 |
44 | Args:
45 | graph (flowpipe.Graph): The graph to evaluate.
46 | skip_clean (bool): Whether to skip nodes that are clean.
50 | """
51 | nodes = self._nodes_to_evaluate(graph, skip_clean)
52 | self._evaluate_nodes(nodes)
53 |
54 |
55 | class LinearEvaluator(Evaluator):
56 | """Evaluate the graph linearly in a single thread."""
57 |
58 | def _evaluate_nodes(self, nodes):
59 | """Evaluate the graph linearly in a single thread.
60 |
61 | Args:
62 | nodes (list of INode): The nodes to evaluate
63 |
64 | """
65 | for node in nodes:
66 | node.evaluate()
67 |
68 |
69 | class ThreadedEvaluator(Evaluator):
70 | """Evaluate each node in a separate thread."""
71 |
72 | def __init__(self, max_workers=None):
73 |         """Initialize with how many threads to use.
74 |
75 | Args:
77 | max_workers (int): The number of threads to use in parallel,
78 | defaults to the futures.ThreadPoolExecutor default.
79 |
80 | """
81 | self.max_workers = max_workers
82 |
83 | def _evaluate_nodes(self, nodes):
84 | """Evaluate each node in a separate thread.
85 |
86 | Args:
87 | nodes (list of INode): The nodes to evaluate
88 |
89 | """
90 | # create copy to prevent side effects
91 | nodes_to_evaluate = list(nodes)
92 |
93 | def node_runner(node):
94 | node.evaluate()
95 | return node
96 |
97 | running_futures = {}
98 | with futures.ThreadPoolExecutor(max_workers=self.max_workers) as tpe:
99 | while nodes_to_evaluate or running_futures:
100 | log.debug(
101 | "Iterating thread submission with %s nodes to "
102 | "evaluate and %s running futures",
103 | len(nodes_to_evaluate),
104 | len(running_futures),
105 | )
106 | # Submit new nodes that are ready to be evaluated
107 | not_submitted = []
108 | for node in nodes_to_evaluate:
109 | if not any(n.is_dirty for n in node.upstream_nodes):
110 | fut = tpe.submit(node_runner, node)
111 | running_futures[node.name] = fut
112 | else:
113 | not_submitted.append(node)
114 | nodes_to_evaluate = not_submitted
115 |
116 | # A deadlock situation:
117 |                 # No nodes running means no node can turn clean, but the
118 |                 # unsubmitted nodes in nodes_to_evaluate still have dirty
119 |                 # upstream nodes, so the while loop would never terminate
120 | if nodes_to_evaluate and not running_futures:
121 | for node in nodes_to_evaluate: # pragma: no cover
122 | dirty_upstream = [
123 | nn.name
124 | for nn in node.upstream_nodes
125 | if nn.is_dirty
126 | ]
127 | log.debug(
128 | "Node to evaluate: %s\n"
129 | "- Dirty upstream nodes:\n%s",
130 | node.name,
131 | "\n".join(dirty_upstream),
132 | )
133 | raise RuntimeError(
134 | f"Execution hit deadlock: {len(nodes_to_evaluate)} "
135 | "nodes left to evaluate, but no nodes running."
136 | ) # pragma: no cover
137 |
138 | # Wait until a future finishes, then remove all finished nodes
139 | # from the relevant lists
140 | status = futures.wait(
141 | list(running_futures.values()),
142 | return_when=futures.FIRST_COMPLETED,
143 | )
144 | for future in status.done:
145 | del running_futures[future.result().name]
146 |
147 |
148 | class LegacyMultiprocessingEvaluator(Evaluator):
149 | """Evaluate nodes in separate processes."""
150 |
151 | def __init__(self, submission_delay=0.1):
152 |         """Initialize with the delay between launching nodes.
153 |
154 | Args:
155 | submission_delay (float): The delay in seconds between loops
156 | issuing new threads/processes if nodes are ready to process.
157 |
158 | """
159 | self.submission_delay = submission_delay
160 |
161 | def _evaluate_nodes(self, nodes):
162 | # create copy to prevent side effects
163 | nodes_to_evaluate = list(nodes)
164 | manager = Manager()
165 | nodes_data = manager.dict()
166 | processes = {}
167 |
168 | def upstream_ready(node):
169 | """Check whether all upstream nodes have been evaluated."""
170 | for upstream in node.upstream_nodes:
171 | if upstream in nodes_to_evaluate:
172 | return False
173 | return True
174 |
175 | while nodes_to_evaluate:
176 |             for node in list(nodes_to_evaluate):  # copy, items get removed
177 | process = processes.get(node.name)
178 | if process and not process.is_alive():
179 | # If the node is done computing, drop it from the list
180 | nodes_to_evaluate.remove(node)
181 | _update_node(node, nodes_data[node.identifier])
182 | continue
183 | if node.name not in processes and upstream_ready(node):
184 |                     # If all deps are ready and no process is active, create one
185 | try:
186 | nodes_data[node.identifier] = node.to_json()
187 | except PicklingError as exc:
188 | raise FlowpipeMultiprocessingError(
189 | "Error pickling/unpickling node.\n"
190 | "This is most likely due to input values that can not "
191 | "be pickled/unpickled properly.\n"
192 | "If any of your input plugs contain flowpipe graphs, "
193 | "that in turn are made up of Nodes created with the "
194 | "@Node decorator, please consider reworking your Nodes. "
195 | "You can either switch to class based nodes by "
196 | "subclassing from Node or invoke the FunctionNode "
197 | "explicitly instead. Refer to: https://github.com/PaulSchweizer/flowpipe/issues/168#issuecomment-1767779623 " # pylint: disable=line-too-long
198 | "for details."
199 | ) from exc
200 | processes[node.name] = Process(
201 | target=_evaluate_node_in_process,
202 | name=f"flowpipe.{node.graph.name}.{node.name}",
203 | args=(node.identifier, nodes_data),
204 | )
205 | processes[node.name].daemon = True
206 | processes[node.name].start()
207 |
208 | time.sleep(self.submission_delay)
209 |
210 |
211 | def _evaluate_node_in_process(identifier, nodes_data):
212 | """Evaluate a node when multiprocessing.
213 |
214 | 1. Deserializing the node from the given nodes_data dict
215 | 2. Retrieving upstream data from the nodes_data dict
216 | 3. Evaluating the node
217 | 4. Serializing the results back into the nodes_data
218 |
219 | Args:
220 | identifier (str): The identifier of the node to evaluate
221 | nodes_data (dict): Used like a "database" to store the nodes
222 | """
223 | # pylint: disable=import-outside-toplevel, cyclic-import
224 | from flowpipe.node import INode
225 |
226 | data = nodes_data[identifier]
227 | node = INode.from_json(data)
228 |
229 | for name, input_plug in data["inputs"].items():
230 | for input_identifier, output_plug in input_plug["connections"].items():
231 | upstream_node = INode.from_json(nodes_data[input_identifier])
232 | node.inputs[name].value = upstream_node.outputs[output_plug].value
233 | for sub_name, sub_plug in input_plug["sub_plugs"].items():
234 | for sub_id, sub_output in sub_plug["connections"].items():
235 | upstream_node = INode.from_json(nodes_data[sub_id])
236 | node.inputs[name][
237 | sub_name
238 | ].value = upstream_node.all_outputs()[sub_output].value
239 |
240 | node.evaluate()
241 |
242 | for name, plug in node.outputs.items():
243 | data["outputs"][name]["value"] = plug.value
244 | for sub_name, sub_plug in plug.sub_plugs.items():
245 | if sub_name not in data["outputs"][name]["sub_plugs"]:
246 | data["outputs"][name]["sub_plugs"][
247 | sub_name
248 | ] = sub_plug.serialize()
249 | data["outputs"][name]["sub_plugs"][sub_name][
250 | "value"
251 | ] = sub_plug.value
252 |
253 | nodes_data[identifier] = data
254 |
255 |
256 | def _update_node(node, data):
257 | """Apply the plug values of the data dict to the node object."""
258 | for name, input_plug in data["inputs"].items():
259 | node.inputs[name].value = input_plug["value"]
260 | for sub_name, sub_plug in input_plug["sub_plugs"].items():
261 | node.inputs[name][sub_name].value = sub_plug["value"]
262 | node.inputs[name][sub_name].is_dirty = False
263 | node.inputs[name].is_dirty = False
264 | for name, output_plug in data["outputs"].items():
265 | node.outputs[name].value = output_plug["value"]
266 | for sub_name, sub_plug in output_plug["sub_plugs"].items():
267 | node.outputs[name][sub_name].value = sub_plug["value"]
268 | node.outputs[name][sub_name].is_dirty = False
269 | node.outputs[name].is_dirty = False
270 |
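# Usage sketch: Graph.evaluate() picks one of the evaluators above via its
# `mode` keyword, e.g.:
#
#   graph.evaluate(mode="threading", max_workers=4)
#
# A custom Evaluator subclass (overriding _evaluate_nodes) can be passed via
# the `evaluator` keyword instead, with mode=None.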
--------------------------------------------------------------------------------
/flowpipe/event.py:
--------------------------------------------------------------------------------
1 | """Events are emitted during node evaluation.
2 |
3 | They can be used to observe the evaluation process.
4 | """
5 | import logging
6 |
7 | log = logging.getLogger(__name__)
8 |
9 |
10 | class Event:
11 |     """Very simple implementation of an event system.
12 |
13 | The event simply calls the registered functions with the given arguments.
14 | Please note that the integrity of the listeners is not enforced or checked.
15 | """
16 |
17 | def __init__(self, name):
18 |         """Initialize the list of listeners.
19 |
20 | Args:
21 | name (str): The (unique) name of the signal
22 | """
23 | self.name = name
24 | self._listeners = []
25 |
26 | def emit(self, *args, **kwargs):
27 | """Call all the listeners with the given args and kwargs."""
28 | for listener in self._listeners:
29 | listener(*args, **kwargs)
30 |
31 | def register(self, listener):
32 | """Register the given function object if it is not yet registered."""
33 | if not self.is_registered(listener):
34 | self._listeners.append(listener)
35 |
36 | def deregister(self, listener):
37 | """Deregister the given function object if it is registered."""
38 | if self.is_registered(listener):
39 |             self._listeners.remove(listener)
40 | log.debug("%s deregistered", listener)
41 | else:
42 |             log.warning("%s was never registered", listener)
43 |
44 | def is_registered(self, listener):
45 | """Whether the given function object is already registered."""
46 | return listener in self._listeners
47 |
48 | def clear(self):
49 | """Remove all listeners from this event."""
50 |         # Iterate over a copy, as deregister() mutates the listener list
51 |         for listener in list(self._listeners):
52 |             self.deregister(listener)
52 |
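# Usage sketch, assuming `node` is any flowpipe node; every node exposes its
# events under node.events, keyed by INode.EVENT_TYPES:
#
#   def on_finished(node):
#       print(node.name, "took", node.stats["eval_time"])
#
#   node.events["evaluation-finished"].register(on_finished)
#   node.evaluate()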
--------------------------------------------------------------------------------
/flowpipe/graph.py:
--------------------------------------------------------------------------------
1 | """A Graph of Nodes."""
2 | from __future__ import absolute_import, print_function
3 |
4 | import logging
5 | import pickle
6 | import warnings
7 |
8 | from ascii_canvas import canvas, item
9 |
10 | from .errors import CycleError
11 | from .evaluator import (
12 | LegacyMultiprocessingEvaluator,
13 | LinearEvaluator,
14 | ThreadedEvaluator,
15 | )
16 | from .plug import InputPlug, InputPlugGroup, OutputPlug
17 | from .utilities import deserialize_graph
18 |
19 | log = logging.getLogger(__name__)
20 |
21 |
22 | class Graph:
23 | """A graph of Nodes."""
24 |
25 | def __init__(self, name=None, nodes=None):
26 |         """Initialize the list of Nodes, inputs and outputs."""
27 | self.name = name or self.__class__.__name__
28 | self.nodes = nodes or []
29 | self.inputs = {}
30 | self.outputs = {}
31 |
32 | def __unicode__(self):
33 | """Display the Graph."""
34 | return self.node_repr()
35 |
36 | def __str__(self):
37 | """Show all input and output Plugs."""
38 | return self.__unicode__().encode("utf-8").decode()
39 |
40 | def __getitem__(self, key):
41 | """Grant access to Nodes via their name."""
42 | for node in self.nodes:
43 | if node.name == key:
44 | return node
45 | # Search through subgraphs if no node found on graph itself
46 | if "." in key:
47 | subgraph_name = key.split(".")[0]
48 | node_name = key.split(".")[-1]
49 | for node in self.all_nodes:
50 | if node.name == node_name and node.graph.name == subgraph_name:
51 | return node
52 |
53 | raise KeyError(
54 | f"Graph does not contain a Node named '{key}'. "
55 | "If the node is part of a subgraph of this graph, use this "
56 | "form to access the node: '{subgraph.name}.{node.name}', "
57 | "e.g. 'sub.node'"
58 | )
59 |
60 | @property
61 | def all_nodes(self):
62 | """Expand the graph with all its subgraphs into a flat list of nodes.
63 |
64 | Please note that in this expanded list, the node names are no longer
65 | guaranteed to be unique!
66 |
67 | Returns:
68 | (list of INode): All nodes, including the nodes from subgraphs
69 | """
70 | nodes = list(self.nodes)
71 | for subgraph in self.subgraphs.values():
72 | nodes += subgraph.nodes
73 | return list(set(nodes))
74 |
75 | @property
76 | def subgraphs(self):
77 | """All other graphs that the nodes of this graph are connected to.
78 |
79 | Returns:
80 | A dict in the form of ``{graph.name: graph}``
81 | """
82 | subgraphs = {}
83 | for node in self.nodes:
84 | for downstream in node.downstream_nodes:
85 | if downstream.graph is not self:
86 | subgraphs[downstream.graph.name] = downstream.graph
87 | for upstream in node.upstream_nodes:
88 | if upstream.graph is not self:
89 | subgraphs[upstream.graph.name] = upstream.graph
90 | return subgraphs
91 |
92 | @property
93 | def evaluation_matrix(self):
94 | """Sort nodes into a 2D matrix based on their dependency.
95 |
96 | Rows affect each other and have to be evaluated in sequence.
97 | The Nodes on each row however can be evaluated in parallel as
98 | they are independent of each other.
99 | The amount of Nodes in each row can vary.
100 |
101 | Returns:
102 | (list of list of INode): Each sub list represents a row.
103 | """
104 | # Inspired by Kahn's algorithm
105 | nodes_to_sort = set(self.all_nodes)
106 | matrix = []
107 |
108 | # cache since this is called often
109 | parents = {node: node.parents for node in nodes_to_sort}
110 |
111 | sorted_nodes = set()
112 | next_level = {node for node in nodes_to_sort if not parents[node]}
113 |
114 | while next_level:
115 | matrix.append(next_level)
116 | sorted_nodes |= next_level
117 |
118 | next_level = set()
119 | # The next level are all unsorted children of the latest sorted
120 | # nodes that don't have any unsorted parents
121 | for node in matrix[-1]:
122 | for candidate in node.children - sorted_nodes:
123 | if all(
124 | parent in sorted_nodes for parent in parents[candidate]
125 | ):
126 | next_level.add(candidate)
127 |
128 | return [sorted(level, key=lambda node: node.name) for level in matrix]
129 |
130 | @property
131 | def evaluation_sequence(self):
132 | """Sort Nodes into a sequential, flat execution order.
133 |
134 | Returns:
135 | (list of INode): A one dimensional representation of the
136 | evaluation matrix.
137 | """
138 | return [node for row in self.evaluation_matrix for node in row]
139 |
140 | @property
141 | def input_groups(self):
142 | """Return all inputs that are actually input groups."""
143 | return {
144 | k: v
145 | for k, v in self.inputs.items()
146 | if isinstance(v, InputPlugGroup)
147 | }
148 |
149 | def add_node(self, node):
150 | """Add given Node to the Graph.
151 |
152 | Nodes on a Graph have to have unique names.
153 | """
154 | if node not in self.nodes:
155 | for existing_node in self.nodes:
156 | if existing_node.name == node.name:
157 | raise ValueError(
158 | f"Can not add Node of name '{node.name}', a Node with this "
159 | "name already exists on this Graph. Node names on "
160 | "a Graph have to be unique."
161 | )
162 | self.nodes.append(node)
163 | node.graph = self
164 | else:
165 | log.warning("Node '%s' is already part of this Graph", node.name)
166 |
167 | def delete_node(self, node):
168 | """Disconnect all plugs and then delete the node object."""
169 | if node in self.nodes:
170 | for plug in node.all_inputs().values():
171 | for connection in plug.connections:
172 | plug.disconnect(connection)
173 | for plug in node.all_outputs().values():
174 | for connection in plug.connections:
175 | plug.disconnect(connection)
176 | del self.nodes[self.nodes.index(node)]
177 |
178 | def add_plug(self, plug, name=None):
179 |         """Promote the given plug to this graph.
180 |
181 | Args:
182 | plug (flowpipe.plug.IPlug): The plug to promote to this graph
183 | name (str): Optionally use the given name instead of the name of
184 | the given plug
185 | """
186 | if isinstance(plug, InputPlug):
187 | if plug not in self.inputs.values():
188 | self.inputs[name or plug.name] = plug
189 | else:
190 | key = list(self.inputs.keys())[
191 | list(self.inputs.values()).index(plug)
192 | ]
193 | raise ValueError(
194 | f"The given plug '{plug.name}' has already been promoted to this "
195 |                     f"Graph under the key '{key}'"
196 | )
197 | elif isinstance(plug, OutputPlug):
198 | if plug not in self.outputs.values():
199 | self.outputs[name or plug.name] = plug
200 | else:
201 | key = list(self.outputs.keys())[
202 | list(self.outputs.values()).index(plug)
203 | ]
204 | raise ValueError(
205 |                     f"The given plug '{plug.name}' has already been promoted to this "
206 |                     f"Graph under the key '{key}'"
207 | )
208 | else:
209 | raise TypeError(
210 | f"Plugs of type '{type(plug)}' can not be promoted directly to a Graph. "
211 | f"Only plugs of type '{InputPlug}' or '{OutputPlug}' can be promoted."
212 | ) # pragma: no cover
213 |
214 | def accepts_connection(self, output_plug, input_plug):
215 | """Raise exception if new connection would violate integrity of graph.
216 |
217 | Args:
218 | output_plug (flowpipe.plug.OutputPlug): The output plug
219 | input_plug (flowpipe.plug.InputPlug): The input plug
220 | Raises:
221 | CycleError and ValueError
222 | Returns:
223 | True if the connection is accepted
224 | """
225 | out_node = output_plug.node
226 | in_node = input_plug.node
227 |
228 | # Plugs can't be connected to other plugs on their own node
229 | if in_node is out_node:
230 | raise CycleError(
231 | "Can't connect plugs that are part of the same node."
232 | )
233 |
234 |         # If out_node is downstream of in_node, the connection creates a cycle
235 | if out_node in in_node.downstream_nodes:
236 | raise CycleError(
237 | "Can't connect OutputPlugs to plugs of an upstream node."
238 | )
239 |
240 | # Names of subgraphs have to be unique
241 | if (
242 | in_node.graph.name in self.subgraphs
243 | and in_node.graph not in self.subgraphs.values()
244 | ):
245 | raise ValueError(
246 | f"This node is part of graph '{in_node.graph.name}', but a different "
247 | "graph with the same name is already part of this "
248 | "graph. Subgraph names on a Graph have to "
249 | "be unique"
250 | )
251 |
252 | return True
253 |
254 | def evaluate(
255 | self,
256 | mode="linear",
257 | skip_clean=False,
258 | submission_delay=0.1,
259 | max_workers=None,
260 | data_persistence=True,
261 | evaluator=None,
262 | ):
263 | """Evaluate all Nodes in the graph.
264 |
265 | Sorts the nodes in the graph into a resolution order and evaluates the
266 | nodes. Evaluation can be parallelized by utilizing the dependencies
267 | between the nodes - see the "mode" keyword for the options.
268 |
269 | Note that no checks are in place whether the node execution is actually
270 |         thread-safe or fit for multiprocessing; this is assumed to be the
271 |         case when the respective mode is selected.
272 |
273 | Some keyword arguments do not affect all evaluation modes.
274 |
275 | Args:
276 | mode (str): The evaluation mode. Possible modes are
277 | * linear : Iterates over all nodes in a single thread
278 | * threading : Evaluates each node in a new thread
279 | * multiprocessing : Evaluates each node in a new process
280 | skip_clean (bool): Whether to skip nodes that are 'clean' (as
281 | tracked by the 'is_dirty' attribute on the node), i.e. whose
282 | inputs have not changed since their output was computed
283 | submission_delay (float): The delay in seconds between loops
284 | issuing new threads/processes if nodes are ready to process.
285 | max_workers (int): The maximum number of parallel threads to spawn.
286 |                 None defaults to Python's ThreadPoolExecutor default.
287 | data_persistence (bool): If false, the data on plugs that have
288 | connections gets cleared (set to None). This reduces the
289 | reference count of objects.
290 |             evaluator (flowpipe.evaluators.Evaluator): The evaluator to use.
291 |                 For the basic evaluation modes it is picked via 'mode'.
292 | """
293 | log.info('Evaluating Graph "%s"', self.name)
294 |
295 | # map mode keywords to evaluation functions and their arguments
296 | eval_modes = {
297 | "linear": (LinearEvaluator, {}),
298 | "threading": (ThreadedEvaluator, {"max_workers": max_workers}),
299 | "multiprocessing": (
300 | LegacyMultiprocessingEvaluator,
301 | {"submission_delay": submission_delay},
302 | ),
303 | }
304 |
305 | if mode and evaluator:
306 | raise ValueError("Both 'mode' and 'evaluator' arguments passed.")
307 | if mode:
308 | try:
309 | eval_cls, eval_args = eval_modes[mode]
310 | except KeyError as exc:
311 |                 raise ValueError(f"Unknown mode: {mode}") from exc
312 | evaluator = eval_cls(**eval_args)
313 |
314 | evaluator.evaluate(graph=self, skip_clean=skip_clean)
315 |
316 | if not data_persistence:
317 | for node in self.nodes:
318 | for input_plug in node.all_inputs().values():
319 | if input_plug.connections:
320 | input_plug.value = None
321 | for output_plug in node.all_outputs().values():
322 | if output_plug.connections:
323 | output_plug.value = None
324 |
325 | def to_pickle(self):
326 | """Serialize the graph into a pickle."""
327 | return pickle.dumps(self)
328 |
329 | def to_json(self):
330 | """Serialize the graph into a json."""
331 | return self._serialize()
332 |
333 | def serialize(self, with_subgraphs=True): # pragma: no cover
334 | """Serialize the graph in its grid form.
335 |
336 | Deprecated.
337 | """
338 | warnings.warn(
339 | "Graph.serialize is deprecated. Instead, use one of "
340 | "Graph.to_json or Graph.to_pickle",
341 | DeprecationWarning,
342 | )
343 |
344 | return self._serialize(with_subgraphs)
345 |
346 | def _serialize(self, with_subgraphs=True):
347 | """Serialize the graph in its grid form.
348 |
349 | Args:
350 | with_subgraphs (bool): Set to false to avoid infinite recursion
351 | """
352 | data = {
353 | "module": self.__module__,
354 | "cls": self.__class__.__name__,
355 | "name": self.name,
356 | }
357 | data["nodes"] = [node.to_json() for node in self.nodes]
358 | if with_subgraphs:
359 | data["subgraphs"] = [
360 | graph.serialize(with_subgraphs=False)
361 | for graph in sorted(
362 | self.subgraphs.values(), key=lambda g: g.name
363 | )
364 | ]
365 | return data
366 |
367 | @staticmethod
368 | def from_pickle(data):
369 | """De-serialize from the given pickle data."""
370 | return pickle.loads(data)
371 |
372 | @staticmethod
373 | def from_json(data):
374 | """De-serialize from the given json data."""
375 | return deserialize_graph(data)
376 |
377 | @staticmethod
378 | def deserialize(data): # pragma: no cover
379 | """De-serialize from the given json data."""
380 | warnings.warn(
381 | "Graph.deserialize is deprecated. Instead, use one of "
382 | "Graph.from_json or Graph.from_pickle",
383 | DeprecationWarning,
384 | )
385 | return deserialize_graph(data)
386 |
387 | def node_repr(self):
388 | """Format to visualize the Graph."""
389 | canvas_ = canvas.Canvas()
390 | x_pos = 0
391 |
392 | evaluation_matrix = self.evaluation_matrix
393 |
394 | for row in evaluation_matrix:
395 | y_pos = 0
396 | x_diff = 0
397 | for node in row:
398 | item_ = item.Item(str(node), [x_pos, y_pos])
399 | node.item = item_
400 | x_diff = (
401 | item_.bbox[2] - item_.bbox[0] + 4
402 | if item_.bbox[2] - item_.bbox[0] + 4 > x_diff
403 | else x_diff
404 | )
405 | y_pos += item_.bbox[3] - item_.bbox[1]
406 | canvas_.add_item(item_)
407 | x_pos += x_diff
408 |
409 | # Include the input groups if any have been set
410 | y_off = 2
411 | locked_items = []
412 | if self.input_groups:
413 | for input_group in self.input_groups.values():
414 | y_off += 1
415 | i = item.Item(f"o {input_group.name}", [0, y_off])
416 | canvas_.add_item(i)
417 | locked_items.append(i)
418 | for plug in input_group.plugs:
419 | y_off += 1
420 | i = item.Item(
421 | f"`-{plug.node.name}.{plug.name}",
422 | [2, y_off],
423 | )
424 | canvas_.add_item(i)
425 | locked_items.append(i)
426 |
427 |         # Move all non-locked items right and down to make space
428 | for i in canvas_.items:
429 | if i not in locked_items:
430 | i.position[0] += 2
431 | i.position[1] += y_off + 1 + int(bool(self.input_groups))
432 |
433 | canvas_.add_item(item.Rectangle(x_pos, canvas_.bbox[3] + 1, [0, 0]), 0)
434 |
435 |         # Add the graph name, centered across the full width
436 | name = self.name
437 | canvas_.add_item(item.Item(f"{name:^{x_pos}}", [0, 1]), 0)
438 | canvas_.add_item(item.Rectangle(x_pos, 3, [0, 0]), 0)
439 |
440 | if self.input_groups:
441 | canvas_.add_item(item.Rectangle(x_pos, y_off + 2, [0, 0]), 0)
442 |
443 | for node in self.all_nodes:
444 | for i, plug in enumerate(node.sort_plugs(node.all_outputs())):
445 | for connection in node.sort_plugs(node.all_outputs())[
446 | plug
447 | ].connections:
448 | dnode = connection.node
449 | start = [
450 | node.item.position[0] + node.item.bbox[2],
451 | node.item.position[1] + 3 + len(node.all_inputs()) + i,
452 | ]
453 | end = [
454 | dnode.item.position[0],
455 | dnode.item.position[1]
456 | + 3
457 | + list(
458 | dnode.sort_plugs(dnode.all_inputs()).values()
459 | ).index(connection),
460 | ]
461 | canvas_.add_item(item.Line(start, end), 0)
462 |
463 | return canvas_.render()
464 |
465 | def list_repr(self):
466 | """List representation of the graph showing Nodes and connections."""
467 | pretty = []
468 | pretty.append(self.name)
469 | if self.input_groups:
470 | pretty.append("[Input Groups]")
471 | for name in sorted(self.input_groups.keys()):
472 | input_group = self.input_groups[name]
473 | pretty.append(f" [g] {name}:")
474 | for plug in input_group.plugs:
475 | pretty.append(f" {plug.node.name}.{plug.name}")
476 | for node in self.evaluation_sequence:
477 | pretty.append(node.list_repr())
478 | return "\n ".join(pretty)
479 |
480 |
481 | default_graph = Graph(name="default")
482 |
483 |
484 | def get_default_graph():
485 | """Retrieve the default graph."""
486 | return default_graph
487 |
488 |
489 | def set_default_graph(graph):
490 | """Set a graph as the default graph."""
491 | if not isinstance(graph, Graph):
492 | raise TypeError("Can only set 'Graph' instances as default graph!")
493 |
494 | global default_graph # pylint: disable=global-statement, invalid-name
495 | default_graph = graph
496 |
497 |
498 | def reset_default_graph():
499 | """Reset the default graph to an empty graph."""
500 | set_default_graph(Graph(name="default"))
501 |
--------------------------------------------------------------------------------
/flowpipe/node.py:
--------------------------------------------------------------------------------
1 | """Nodes manipulate incoming data and provide the outgoing data."""
2 | from __future__ import absolute_import, print_function
3 |
4 | import copy
5 | import inspect
6 | import json
7 | import logging
8 | import pickle
9 | import time
10 | import uuid
11 | import warnings
12 | from abc import ABCMeta, abstractmethod
13 |
14 | from .event import Event
15 | from .graph import get_default_graph
16 | from .plug import InputPlug, InputPlugGroup, OutputPlug, SubOutputPlug, SubPlug
17 | from .utilities import (
18 | NodeEncoder,
19 | deserialize_node,
20 | import_class,
21 | sanitize_string_input,
22 | )
23 |
24 | log = logging.getLogger(__name__)
25 |
26 |
27 | class INode:
28 | """Holds input and output Plugs and a method for computing."""
29 |
30 | __metaclass__ = ABCMeta
31 |
32 | EVENT_TYPES = [
33 | "evaluation-omitted",
34 | "evaluation-started",
35 | "evaluation-finished",
36 | "evaluation-exception",
37 | ]
38 |
39 | def __init__(
40 | self, name=None, identifier=None, metadata=None, graph="default"
41 | ):
42 | """Initialize the input and output dictionaries and the name.
43 |
44 | Args:
45 | name (str): If not provided, the class name is used.
46 | graph (Graph): The graph to add the node to.
47 | If set to 'default', the Node is added to the default graph.
48 |             If set to None, the Node is not added to any graph.
49 | """
50 | self.events = {ev_type: Event(ev_type) for ev_type in self.EVENT_TYPES}
51 |
52 | self.name = name if name is not None else self.__class__.__name__
53 | self.identifier = (
54 | identifier
55 | if identifier is not None
56 | else f"{self.name}-{uuid.uuid4()}"
57 | )
58 | self.inputs = {}
59 | self.outputs = {}
60 | self.metadata = metadata or {}
61 | self.omit = False
62 | try:
63 | self.file_location = inspect.getfile(self.__class__)
64 | except TypeError as error: # pragma: no cover
65 |             # Excluded from tests, as this is a hard-to-test fringe case
66 | if all(s in str(error) for s in ("__main__", "built-in class")):
67 | warnings.warn("Cannot serialize nodes defined in '__main__'")
68 | self.file_location = None
69 | else:
70 | raise
71 | self.class_name = self.__class__.__name__
72 | self.graph = graph
73 | if graph is not None:
74 | if graph == "default":
75 | graph = get_default_graph()
76 | graph.add_node(self)
77 | self.stats = {}
78 |
79 | def __unicode__(self):
80 | """Show all input and output Plugs."""
81 | return self.node_repr()
82 |
83 | def __str__(self):
84 | """Show all input and output Plugs."""
85 | return self.__unicode__().encode("utf-8").decode()
86 |
87 | @property
88 | def is_dirty(self):
89 | """Whether any of the input Plug data has changed and is dirty."""
90 | for input_ in self.inputs.values():
91 | if input_.is_dirty:
92 | return True
93 | return False
94 |
95 | @property
96 | def parents(self):
97 | """Nodes connected directly to inputs of this Node."""
98 | parents = set()
99 | for input_ in self.inputs.values():
100 | for conn in input_.connections:
101 | parents.add(conn.node)
102 | for sub_plug in input_.sub_plugs.values():
103 | for conn in sub_plug.connections:
104 | parents.add(conn.node)
105 | return parents
106 |
107 | @property
108 | def upstream_nodes(self):
109 | """Nodes connected directly or indirectly to inputs of this Node."""
110 | upstream_nodes = {}
111 | for input_ in self.inputs.values():
112 | upstreams = [c.node for c in input_.connections]
113 | for sub_plug in input_.sub_plugs.values():
114 | upstreams += [c.node for c in sub_plug.connections]
115 | for upstream in upstreams:
116 | if upstream.identifier not in upstream_nodes:
117 | upstream_nodes[upstream.identifier] = upstream
118 | for upstream2 in upstream.upstream_nodes:
119 | if upstream2.identifier not in upstream_nodes:
120 | upstream_nodes[upstream2.identifier] = upstream2
121 | return list(upstream_nodes.values())
122 |
123 | @property
124 | def children(self):
125 | """Nodes connected directly to outputs of this Node."""
126 | children = set()
127 | for output in self.outputs.values():
128 | for conn in output.connections:
129 | children.add(conn.node)
130 | for sub_plug in output.sub_plugs.values():
131 | for conn in sub_plug.connections:
132 | children.add(conn.node)
133 | return children
134 |
135 | @property
136 | def downstream_nodes(self):
137 | """Nodes connected directly or indirectly to outputs of this Node."""
138 | downstream_nodes = {}
139 | for output in self.outputs.values():
140 | downstreams = [c.node for c in output.connections]
141 | for sub_plug in output.sub_plugs.values():
142 | downstreams += [c.node for c in sub_plug.connections]
143 | for downstream in downstreams:
144 | if downstream.identifier not in downstream_nodes:
145 | downstream_nodes[downstream.identifier] = downstream
146 | for downstream2 in downstream.downstream_nodes:
147 | if downstream2.identifier not in downstream_nodes:
148 | downstream_nodes[
149 | downstream2.identifier
150 | ] = downstream2
151 | return list(downstream_nodes.values())
152 |
153 | def evaluate(self):
154 | """Compute this Node, log it and clean the input Plugs.
155 |
156 | Also push a stat report in the following form containing the Node,
157 | evaluation time and timestamp the computation started.
158 | """
159 | if self.omit:
160 | self.events["evaluation-omitted"].emit(self)
161 | return {}
162 |
163 | self.events["evaluation-started"].emit(self)
164 |
165 | inputs = {}
166 | for name, plug in self.inputs.items():
167 | inputs[name] = plug.value
168 |
169 | # Compute and redirect the output to the output plugs
170 | start_time = time.time()
171 | try:
172 | outputs = self.compute(**inputs) or {}
173 | except Exception:
174 | self.events["evaluation-exception"].emit(self)
175 | raise
176 | eval_time = time.time() - start_time
177 |
178 | self.stats = {"eval_time": eval_time, "start_time": start_time}
179 |
180 |         # Assign the results; "parent.sub" keys address sub plugs
181 | for name, value in outputs.items():
182 | if "." in name:
183 | parent_plug, sub_plug = name.split(".")
184 | self.outputs[parent_plug][sub_plug].value = value
185 | else:
186 | self.outputs[name].value = value
187 |
188 | # Set the inputs clean
189 | for input_ in self.all_inputs().values():
190 | input_.is_dirty = False
191 |
192 | self.events["evaluation-finished"].emit(self)
193 |
194 | return outputs
195 |
196 | @abstractmethod
197 | def compute(self, *args, **kwargs): # pragma: no cover
198 | """Implement the data manipulation in the subclass.
199 |
200 | Return a dictionary with the outputs from this function.
201 | """
202 |         raise NotImplementedError("Compute must be overridden")
203 |
204 | def __rshift__(self, other):
205 | """Syntactic sugar for connecting this node by output names."""
206 | self.connect(other)
207 |
208 | def connect(self, other):
209 | """Connect this node's outputs to another plug's input by name.
210 |
211 | If other is an InputPlug, connect the output with matching name.
212 | If other is an INode, connect all outputs with matching names.
213 |
214 | Note: This will also connect up sub-plugs if, and only if, they already
215 | exist. As they are dynamically created, they will come into existence
216 |         only after being referenced explicitly at least once. Before that,
217 |         the connect() method will not pick them up.
218 | """
219 | connections = [] # keep track of the connections established
220 | if isinstance(other, INode):
221 | for key, plug in self.outputs.items():
222 | if key in other.inputs:
223 | plug.connect(other.inputs[key])
224 | connections.append(f"Node: {other.name}, Plug: {key}")
225 | for sub in plug.sub_plugs:
226 | plug[sub].connect(other.inputs[key][sub])
227 | connections.append(
228 | f"Node: {other.name}, Plug: {key}, SubPlug: {sub}"
229 | )
230 | if not connections:
231 | raise ValueError(f"{other.name} has no matching inputs")
232 | elif isinstance(other, (InputPlug, InputPlugGroup)):
233 | try:
234 | if isinstance(other, SubPlug):
235 | out_name, sub_name = other.name.split(".")
236 | out = self.outputs[out_name][sub_name]
237 | else:
238 | out = self.outputs[other.name]
239 | except KeyError as exc:
240 | raise KeyError(f"No output named {other.name}") from exc
241 | else:
242 | out.connect(other)
243 | connections.append(f"Plug: {other.name}")
244 |
245 | for sub in out.sub_plugs:
246 | out.sub_plugs[sub].connect(other[sub])
247 |                     connections.append(f"Plug: {other.name}, SubPlug: {sub}")
248 | else:
249 | raise TypeError(f"Cannot connect outputs to {type(other)}")
250 | log.debug(
251 | "Connected node %s with %s", self.name, "\n".join(connections)
252 | )
253 |
254 | def on_input_plug_set_dirty(self):
255 | """Propagate the dirty state to the connected downstream nodes."""
256 | for output_plug in self.outputs.values():
257 | for connected_plug in output_plug.connections:
258 | connected_plug.is_dirty = True
259 |
260 | def to_pickle(self):
261 | """Serialize the node into a pickle."""
262 | return pickle.dumps(self)
263 |
264 | def to_json(self):
265 | """Serialize the node to json."""
266 | return self._serialize()
267 |
268 | def serialize(self): # pragma: no cover
269 | """Serialize the node to json.
270 |
271 | Deprecated and kept for backwards compatibility.
272 | """
273 | warnings.warn(
274 | "Node.serialize is deprecated. Instead, use one of "
275 | "Node.to_json or Node.to_pickle",
276 | DeprecationWarning,
277 | )
278 | return self._serialize()
279 |
280 | def _serialize(self):
281 | """Perform the serialization to json."""
282 | if self.file_location is None: # pragma: no cover
283 | raise RuntimeError(
284 | "Cannot serialize a node that was not defined in a file"
285 | )
286 | inputs = {}
287 | for plug in self.inputs.values():
288 | inputs[plug.name] = plug.serialize()
289 | outputs = {}
290 | for plug in self.outputs.values():
291 | outputs[plug.name] = plug.serialize()
292 | return {
293 | "file_location": self.file_location,
294 | "module": self.__module__,
295 | "cls": self.__class__.__name__,
296 | "name": self.name,
297 | "identifier": self.identifier,
298 | "inputs": inputs,
299 | "outputs": outputs,
300 | "metadata": self.metadata,
301 | }
302 |
303 | @staticmethod
304 | def from_pickle(data):
305 | """De-serialize from the given pickle data."""
306 | return pickle.loads(data)
307 |
308 | @staticmethod
309 | def from_json(data):
310 | """De-serialize from the given json data."""
311 | return deserialize_node(data)
312 |
313 | @staticmethod
314 | def deserialize(data): # pragma: no cover
315 | """De-serialize from the given json data."""
316 | warnings.warn(
317 | "Node.deserialize is deprecated. Instead, use one of "
318 | "Node.from_json or Node.from_pickle",
319 | DeprecationWarning,
320 | )
321 | return deserialize_node(data)
322 |
323 | def post_deserialize(self, data):
324 | """Perform more data operations after initial serialization."""
325 | self.name = data["name"]
326 | self.identifier = data["identifier"]
327 | self.metadata = data["metadata"]
328 | self.file_location = data["file_location"]
329 | for name, input_ in data["inputs"].items():
330 | self.inputs[name].value = input_["value"]
331 | for sub_name, sub_plug in input_["sub_plugs"].items():
332 | self.inputs[name][sub_name].value = sub_plug["value"]
333 | for name, output in data["outputs"].items():
334 | self.outputs[name].value = output["value"]
335 | for sub_name, sub_plug in output["sub_plugs"].items():
336 | self.outputs[name][sub_name].value = sub_plug["value"]
337 |
338 | def node_repr(self):
339 |         """The node formatted into a string looking like a node.
340 |
341 | ::
342 |
343 | +--Node.graph.name--+
344 | | Node.Name |
345 | |-------------------|
346 | % compound_in |
347 | o compound_in-1 |
348 | o compound_in-2 |
349 | o in |
350 | | out o
351 | | compound_out %
352 | | compound_out-1 o
353 | | compound_out-2 o
354 | +-------------------+
355 | """
356 | max_value_length = 10
357 |
358 | all_inputs = self.all_inputs()
359 | all_outputs = self.all_outputs()
360 |
361 | offset = ""
362 | if [i for i in all_inputs.values() if i.connections]:
363 | offset = " " * 3
364 |
365 | width = (
366 | len(
367 | max(
368 | list(all_inputs)
369 | + list(all_outputs)
370 | + [self.name]
371 | + list(
372 | plug.name
373 | + "".join(
374 | [
375 | s
376 | for i, s in enumerate(str(plug.value))
377 | if i < max_value_length
378 | ]
379 | )
380 | for plug in all_inputs.values()
381 | if plug.value is not None
382 | )
383 | + list(
384 | plug.name
385 | + "".join(
386 | [
387 | s
388 | for i, s in enumerate(str(plug.value))
389 | if i < max_value_length
390 | ]
391 | )
392 | for plug in all_outputs.values()
393 | if plug.value is not None
394 | ),
395 | key=len,
396 | )
397 | )
398 | + 7
399 | )
400 |
401 | if self.graph.subgraphs:
402 | width = max([width, len(self.graph.name) + 7])
403 | pretty = f"{offset}+{self.graph.name:-^{width}}+"
404 | else:
405 | pretty = offset + "+" + "-" * width + "+"
406 |
407 | pretty += f"\n{offset}|{self.name:^{width}}|"
408 | pretty += "\n" + offset + "|" + "-" * width + "|"
409 |
410 | def _short_value(plug):
411 | if plug.value is not None and not plug.sub_plugs:
412 | value = str(plug.value)
413 | if len(value) > max_value_length:
414 | return f"<{value[: max_value_length - 3]}...>"
415 | return f"<{value}>"
416 | return "<>"
417 |
418 | # Inputs
419 | for input_ in sorted(all_inputs.keys()):
420 | pretty += "\n"
421 | in_plug = all_inputs[input_]
422 | if in_plug.connections:
423 | pretty += "-->"
424 | else:
425 | pretty += offset
426 | symbol = "%" if in_plug.sub_plugs else "o"
427 | dist = " " if isinstance(in_plug, SubPlug) else ""
428 | value_in_plug = _short_value(in_plug)
429 | value_in_plug = sanitize_string_input(value_in_plug)
430 | plug = f"{symbol} {dist}{input_}{value_in_plug}".format()
431 | pretty += f"{plug:{width + 1}}|"
432 |
433 | # Outputs
434 | for output in sorted(all_outputs.keys()):
435 | out_plug = all_outputs[output]
436 | dist = 2 if isinstance(out_plug, SubPlug) else 1
437 | value_out_plug = _short_value(out_plug)
438 | value_out_plug = sanitize_string_input(value_out_plug)
439 | symbol = "%" if out_plug.sub_plugs else "o"
440 | pretty += (
441 | f"\n{offset}|{output:>{width - dist - len(value_out_plug)}}"
442 | f"{value_out_plug}{dist * ' '}{symbol}"
443 | )
444 | if all_outputs[output].connections:
445 | pretty += "---"
446 |
447 | pretty += "\n" + offset + "+" + "-" * width + "+"
448 | return pretty
449 |
450 | def list_repr(self):
451 | """List representation of the node showing inputs and their values.
452 |
453 | ::
454 |
455 | Node
456 | [i] in: "A"
457 | [i] in_compound
458 | [i] in_compound.0: "B"
459 | [i] in_compound.1 << Node1.out
460 | [o] compound_out
461 |             [o] compound_out.0: null
462 | [o] compound_out.1 >> Node2.in, Node3.in
463 | [o] out >> Node4.in
464 | """
465 | pretty = []
466 | pretty.append(self.name)
467 | for name, plug in sorted(self.all_inputs().items()):
468 | if plug.sub_plugs:
469 | pretty.append(f" [i] {name}")
470 | continue
471 | if plug.connections:
472 | indent = " " if isinstance(plug, SubPlug) else " "
473 | node_name = plug.connections[0].node.name
474 | plug_name = plug.connections[0].name
475 | pretty.append(f"{indent}[i] {name} << {node_name}.{plug_name}")
476 | else:
477 | indent = " " if isinstance(plug, SubPlug) else " "
478 | pretty.append(
479 | f"{indent}[i] {name}: {json.dumps(plug.value, cls=NodeEncoder)}"
480 | )
481 | for name, plug in sorted(self.all_outputs().items()):
482 | if plug.sub_plugs:
483 | pretty.append(f" [o] {name}")
484 | continue
485 | if plug.connections:
486 | connections = ", ".join(
487 | [f"{c.node.name}.{c.name}" for c in plug.connections]
488 | )
489 | indent = " " if isinstance(plug, SubPlug) else " "
490 | pretty.append(f"{indent}[o] {name} >> {connections}")
491 | else:
492 | indent = " " if isinstance(plug, SubPlug) else " "
493 | pretty.append(
494 | f"{indent}[o] {name}: {json.dumps(plug.value, cls=NodeEncoder)}"
495 | )
496 |
497 | return "\n".join(pretty)
498 |
499 | def all_inputs(self):
500 | """Collate all input plugs and their sub_plugs into one dictionary."""
501 | all_inputs = {}
502 | for plug in self.inputs.values():
503 | all_inputs[plug.name] = plug
504 | for sub in plug.sub_plugs.values():
505 | all_inputs[sub.name] = sub
506 | return all_inputs
507 |
508 | def all_outputs(self):
509 | """Collate all output plugs and their sub_plugs into one dictionary."""
510 | all_outputs = {}
511 | for plug in self.outputs.values():
512 | all_outputs[plug.name] = plug
513 | for sub in plug.sub_plugs.values():
514 | all_outputs[sub.name] = sub
515 | return all_outputs
516 |
517 | @staticmethod
518 | def sort_plugs(plugs):
519 | """Sort the given plugs alphabetically into a dict."""
520 | sorted_plugs = {}
521 | for i in sorted(plugs, key=lambda x: x.lower()):
522 | sorted_plugs[i] = plugs[i]
523 | return sorted_plugs
524 |
525 |
526 | class FunctionNode(INode):
527 | """Wrap a function into a Node."""
528 |
529 | # Some names have to stay reserved as they are used to construct the Node
530 | RESERVED_INPUT_NAMES = (
531 | "func",
532 | "name",
533 | "identifier",
534 | "inputs",
535 | "outputs",
536 | "metadata",
537 | "omit",
538 | "graph",
539 | )
540 |
541 | def __init__(
542 | self,
543 | func=None,
544 | outputs=None,
545 | name=None,
546 | identifier=None,
547 | metadata=None,
548 | graph=None,
549 | **kwargs,
550 | ):
551 | """The data on the function is used to drive the Node.
552 | The function itself becomes the compute method.
553 | The function input args become the InputPlugs.
554 | Other function attributes, name, __doc__ also transfer to the Node.
555 | """
556 | super().__init__(
557 | name or getattr(func, "__name__", None),
558 | identifier,
559 | metadata,
560 | graph,
561 | )
562 | self._initialize(func, outputs or [], metadata)
563 | for plug, value in kwargs.items():
564 | self.inputs[plug].value = value
565 |
566 | def __call__(self, **kwargs):
567 | """Create and return an instance of the Node."""
568 | metadata = copy.deepcopy(self.metadata)
569 | metadata.update(kwargs.pop("metadata", {}))
570 | graph = kwargs.pop("graph", "default")
571 | outputs = []
572 | for output in self.outputs.values():
573 | outputs.append(output.name)
574 | for key in output.sub_plugs.keys():
575 | outputs.append(f"{output.name}.{key}")
576 | return self.__class__(
577 | func=self.func,
578 | outputs=outputs,
579 | metadata=metadata,
580 | graph=graph,
581 | **kwargs,
582 | )
583 |
584 | def compute(self, *args, **kwargs):
585 | """Call and return the wrapped function."""
586 | if self._use_self:
587 | return self.func(self, *args, **kwargs)
588 | return self.func(*args, **kwargs)
589 |
590 | def _serialize(self):
591 | """Also serialize the location of the wrapped function."""
592 | data = super()._serialize()
593 | data["func"] = {
594 | "module": self.func.__module__,
595 | "name": self.func.__name__,
596 | }
597 | return data
598 |
599 | def post_deserialize(self, data):
600 | """Apply the function back to the node."""
601 | self.name = data["name"]
602 | self.identifier = data["identifier"]
603 | self.metadata = data["metadata"]
604 | self.file_location = data["file_location"]
605 |
606 | # The function could either be a function or, if the function is
607 | # wrapped with the @Node decorator, it would already be a Node class.
608 | node_or_function = import_class(
609 | data["func"]["module"], data["func"]["name"], data["file_location"]
610 | )
611 | node = None
612 | if isinstance(node_or_function, FunctionNode):
613 | node = node_or_function
614 | else:
615 | node = FunctionNode(
616 | name=self.name,
617 | identifier=self.identifier,
618 | metadata=self.metadata,
619 | func=node_or_function,
620 | outputs=list(data["outputs"].keys()),
621 | )
622 | node.file_location = self.file_location
623 |
624 | node = node(graph=None)
625 |
626 | self._initialize(node.func, data["outputs"].keys(), data["metadata"])
627 | for name, input_ in data["inputs"].items():
628 | self.inputs[name].value = input_["value"]
629 | for sub_name, sub_plug in input_["sub_plugs"].items():
630 | self.inputs[name][sub_name].value = sub_plug["value"]
631 | for name, output in data["outputs"].items():
632 | self.outputs[name].value = output["value"]
633 | for sub_name, sub_plug in output["sub_plugs"].items():
634 | self.outputs[name][sub_name].value = sub_plug["value"]
635 |
636 | def _initialize(self, func, outputs, metadata):
637 | """Use the function and the list of outputs to setup the Node."""
638 | self.func = func
639 | self.__doc__ = func.__doc__
640 | self._use_self = False
641 | self.metadata = metadata or {}
642 | if func is not None:
643 | self.file_location = inspect.getfile(func)
644 | self.class_name = self.func.__name__
645 | arg_spec = inspect.getfullargspec(
646 | func
647 | ) # pylint: disable=deprecated-method
648 | defaults = {}
649 | if arg_spec.defaults is not None:
650 | defaults = dict(
651 | zip(
652 | arg_spec.args[-len(arg_spec.defaults) :],
653 | arg_spec.defaults,
654 | )
655 | )
656 | forbidden_inputs = []
657 | for input_ in arg_spec.args:
658 | if input_ in self.RESERVED_INPUT_NAMES:
659 | forbidden_inputs.append(input_)
660 | continue
661 | if input_ != "self":
662 | plug = InputPlug(input_, self)
663 | plug.value = defaults.get(input_, None)
664 | else:
665 | self._use_self = True
666 | if forbidden_inputs:
667 | raise ValueError(
668 | f"{', '.join(forbidden_inputs)} are reserved names and "
669 | "can not be used as inputs!\n"
670 | f"Reserved names are: {self.RESERVED_INPUT_NAMES}"
671 | )
672 |
673 | if outputs is not None:
674 | for output in outputs:
675 | if "." in output:
676 | parent, subplug = output.split(".")
677 | parent_plug = self.outputs.get(parent)
678 | if parent_plug is None:
679 | parent_plug = OutputPlug(parent, self)
680 | SubOutputPlug(subplug, self, parent_plug)
681 | else:
682 | if self.outputs.get(output) is None:
683 | OutputPlug(output, self)
684 |
685 | def to_pickle(self): # pragma: no cover
686 | """Pickle the node. -- DOES NOT WORK FOR FunctionNode."""
687 | raise NotImplementedError(
688 | "Pickling is not implemented for FunctionNode. "
689 | "Consider subclassing flowpipe.node.INode to pickle nodes."
690 | )
691 |
692 |
693 | def Node(*args, **kwargs): # pylint: disable=invalid-name
694 | """Wrap the given function into a Node."""
695 | cls = kwargs.pop("cls", FunctionNode)
696 |
697 | def node(func):
698 | return cls(func, *args, **kwargs)
699 |
700 | return node
701 |
--------------------------------------------------------------------------------
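
Example: wrapping functions into nodes and evaluating a graph. A minimal sketch (not a repository file; the Hello/Shout nodes are hypothetical):

    from flowpipe.graph import Graph
    from flowpipe.node import Node

    graph = Graph(name="demo")

    @Node(outputs=["greeting"])
    def Hello(person):
        """Build a greeting for the given person."""
        return {"greeting": f"Hello, {person}!"}

    @Node(outputs=["shouted"])
    def Shout(greeting):
        """Upper-case the incoming greeting."""
        return {"shouted": greeting.upper()}

    hello = Hello(graph=graph, person="flowpipe")
    shout = Shout(graph=graph)

    # Connect plug to plug; INode.connect() / ">>" match plugs by name instead
    hello.outputs["greeting"].connect(shout.inputs["greeting"])

    graph.evaluate()
    print(shout.outputs["shouted"].value)  # "HELLO, FLOWPIPE!"
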
/flowpipe/plug.py:
--------------------------------------------------------------------------------
1 | """Plugs are ins and outs for Nodes through which they exchange data."""
2 | from __future__ import print_function
3 |
4 | import sys
5 | import warnings
6 | from abc import abstractmethod
7 |
8 | from .utilities import get_hash
9 |
10 | if sys.version_info.major > 2: # pragma: no cover
11 | basestring = str # pylint: disable=invalid-name
12 |
13 |
14 | class IPlug:
15 | """The interface for the plugs.
16 |
17 | Plugs are associated with a Node and can be connected, disconnected
18 |     and hold a value that can be accessed by the associated Node.
19 | """
20 |
21 | def __init__(self, name, node):
22 | """Initialize the Interface.
23 |
24 | Args:
25 | name (str): The name of the Plug.
26 | node (INode): The Node holding the Plug.
27 | """
28 | if "." in name and not isinstance(self, SubPlug):
29 | raise ValueError(
30 | 'Names for plugs can not contain dots "." as these are '
31 | "reserved to identify sub plugs."
32 | )
33 | self.name = name
34 | self.node = node
35 | self.connections = []
36 | self.sub_plugs = {}
37 | self._value = None
38 | self._is_dirty = True
39 |
40 | def __rshift__(self, other):
41 | """Create a connection to the given IPlug.
42 |
43 | Args:
44 | other (IPlug): The IPlug to connect to.
45 | """
46 | warnings.warn(
47 | "Use the connect method instead", DeprecationWarning, stacklevel=2
48 | )
49 | self.connect(other)
50 |
51 | def __lshift__(self, other):
52 | """Break a connection to the given IPlug.
53 |
54 | Args:
55 | other (IPlug): The IPlug to disconnect.
56 | """
57 | warnings.warn(
58 | "Use the disconnect method instead",
59 | DeprecationWarning,
60 | stacklevel=2,
61 | )
62 | self.disconnect(other)
63 |
64 | @property
65 | def _sub_plugs(self):
66 | """Deprecated but included for backwards compatibility."""
67 | warnings.warn(
68 | "`_sub_plugs` is deprecated, please use `sub_plugs` instead.",
69 | DeprecationWarning,
70 | stacklevel=2,
71 | )
72 | return self.sub_plugs
73 |
74 | # Extra function to make re-use in subclasses easier
75 | def _update_value(self, value):
76 | """Update the internal value."""
77 | old_hash = get_hash(self._value)
78 | new_hash = get_hash(value)
79 | self._value = value
80 | if old_hash is None or new_hash is None or (old_hash != new_hash):
81 | self.is_dirty = True
82 |
83 | @property
84 | def value(self):
85 | """Access to the value on this Plug."""
86 | if self.sub_plugs:
87 | return {name: plug.value for name, plug in self.sub_plugs.items()}
88 | return self._value
89 |
90 | @value.setter
91 | def value(self, value):
92 | """Set the Plug dirty when the value is being changed."""
93 | self._update_value(value)
94 |
95 | @property
96 | def is_dirty(self):
97 | """Access to the dirty status on this Plug."""
98 | if self.sub_plugs:
99 | for sub_plug in self.sub_plugs.values():
100 | if sub_plug.is_dirty:
101 | return True
102 | return False
103 | return self._is_dirty
104 |
105 | @is_dirty.setter
106 | def is_dirty(self, status):
107 | """Set the Plug dirty informs the node this Plug belongs to."""
108 | self._is_dirty = status
109 | if status:
110 | self.node.on_input_plug_set_dirty()
111 |
112 | @abstractmethod
113 | def connect(self, plug): # pragma: no cover
114 | """Has to be implemented in the subclass."""
115 | raise NotImplementedError("The subclass has to define connect()")
116 |
117 | def disconnect(self, plug):
118 | """Break the connection to the given Plug."""
119 | if isinstance(plug, InputPlugGroup):
120 | for plug_ in plug:
121 | self.disconnect(plug_)
122 | return
123 | if plug in self.connections:
124 | self.connections.pop(self.connections.index(plug))
125 | self.is_dirty = True
126 | if self in plug.connections:
127 | plug.connections.pop(plug.connections.index(self))
128 | plug.is_dirty = True
129 |
130 | def promote_to_graph(self, name=None):
131 | """Add this plug to the graph of this plug's node.
132 |
133 | Args:
134 | name (str): Optionally provide a different name for the Plug
135 | """
136 | self.node.graph.add_plug(self, name=name)
137 |
138 |
139 | class OutputPlug(IPlug):
140 | """Provides data to an InputPlug."""
141 |
142 | def __init__(self, name, node):
143 | """Initialize the OutputPlug.
144 |
145 | Can be connected to an InputPlug.
146 | Args:
147 | name (str): The name of the Plug.
148 | node (INode): The Node holding the Plug.
149 | """
150 | self.accepted_plugs = (InputPlug, InputPlugGroup)
151 | super().__init__(name, node)
152 | if not isinstance(self, SubPlug):
153 | self.node.outputs[self.name] = self
154 |
155 | def __rshift__(self, other):
156 | """Syntactic sugar for the connect() method.
157 |
158 |         If `other` is an INode with an input matching this plug's name, connect.
159 | """
160 |         # Softly check if "other" is a Node with a matching input;
161 |         # default to "other" itself so target is always bound.
162 |         target = other
163 |         if hasattr(other, "inputs"):
164 |             for iname, iplug in other.inputs.items():
165 |                 if iname == self.name:
166 |                     target = iplug
167 |         self.connect(target)
168 |
169 | def connect(self, plug):
170 | """Connect this Plug to the given InputPlug.
171 |
172 | Set both participating Plugs dirty.
173 | """
174 | if not isinstance(plug, self.accepted_plugs):
175 | raise TypeError(f"Cannot connect {type(self)} to {type(plug)}")
176 | if isinstance(plug, InputPlugGroup):
177 | for plug_ in plug:
178 | self.connect(plug_)
179 | return
180 |
181 | if self.node.graph.accepts_connection(self, plug):
182 | for connection in plug.connections:
183 | plug.disconnect(connection)
184 | if plug not in self.connections:
185 | self.connections.append(plug)
186 | plug.value = self.value
187 | self.is_dirty = True
188 | plug.is_dirty = True
189 | if self not in plug.connections:
190 | plug.connections = [self]
191 | plug.is_dirty = True
192 |
193 | def __getitem__(self, key):
194 | """Retrieve a sub plug by key.
195 |
196 | If it does not exist yet, it is created automatically!
197 | Args:
198 | key (str): The name of the sub plug
199 | """
200 | if not isinstance(key, basestring):
201 | raise TypeError(
202 | "Only strings are allowed as sub-plug keys! "
203 | "This is due to the fact that JSON serialization only allows "
204 | "strings as keys."
205 | )
206 | if not self.sub_plugs.get(key):
207 | self.sub_plugs[key] = SubOutputPlug(
208 | key=key, node=self.node, parent_plug=self
209 | )
210 | return self.sub_plugs[key]
211 |
212 | def _update_value(self, value):
213 | """Propagate the dirty state to all connected Plugs as well."""
214 | super()._update_value(value)
215 | for plug in self.connections:
216 | plug.value = value
217 |
218 | def serialize(self):
219 |         """Serialize the Plug containing all its connections."""
220 | connections = {}
221 | for connection in self.connections:
222 | connections.setdefault(connection.node.identifier, [])
223 | connections[connection.node.identifier].append(connection.name)
224 | return {
225 | "name": self.name,
226 | "value": self.value if not self.sub_plugs else None,
227 | "connections": connections,
228 | "sub_plugs": {
229 | name: sub_plug.serialize()
230 | for name, sub_plug in self.sub_plugs.items()
231 | },
232 | }
233 |
234 |
235 | class InputPlug(IPlug):
236 | """Receives data from an OutputPlug."""
237 |
238 | def __init__(self, name, node, value=None):
239 | """Initialize the InputPlug.
240 |
241 | Can be connected to an OutputPlug.
242 | Args:
243 | name (str): The name of the Plug.
244 | node (INode): The Node holding the Plug.
245 | """
246 | self.accepted_plugs = (OutputPlug,)
247 |
248 | super().__init__(name, node)
249 | self.value = value
250 | self.is_dirty = True
251 | if not isinstance(self, SubPlug):
252 | self.node.inputs[self.name] = self
253 |
254 | def connect(self, plug):
255 | """Connect this Plug to the given OutputPlug.
256 |
257 | Set both participating Plugs dirty.
258 | """
259 | if not isinstance(plug, self.accepted_plugs):
260 | raise TypeError(f"Cannot connect {type(self)} to {type(plug)}")
261 | plug.connect(self)
262 |
263 | def __getitem__(self, key):
264 | """Retrieve a sub plug by key.
265 |
266 | If it does not exist yet, it is created automatically!
267 | Args:
268 | key (str): The name of the sub plug
269 | """
270 | if not isinstance(key, basestring):
271 | raise TypeError(
272 | "Only strings are allowed as sub-plug keys! "
273 | "This is due to the fact that JSON serialization only allows "
274 | "strings as keys."
275 | )
276 | if not self.sub_plugs.get(key):
277 | self.sub_plugs[key] = SubInputPlug(
278 | key=key, node=self.node, parent_plug=self
279 | )
280 | return self.sub_plugs[key]
281 |
282 | def _update_value(self, value):
283 | if self.sub_plugs:
284 | return
285 | super()._update_value(value)
286 |
287 | def serialize(self):
288 |         """Serialize the Plug containing all its connections."""
289 | connections = {}
290 | if self.connections:
291 | connections[
292 | self.connections[0].node.identifier
293 | ] = self.connections[0].name
294 | return {
295 | "name": self.name,
296 | "value": self.value if not self.sub_plugs else None,
297 | "connections": connections,
298 | "sub_plugs": {
299 | name: sub_plug.serialize()
300 | for name, sub_plug in self.sub_plugs.items()
301 | },
302 | }
303 |
304 |
305 | class SubPlug:
306 | """Mixin that unifies common properties of subplugs."""
307 |
308 | @property
309 | def is_dirty(self):
310 | """Access to the dirty status on this Plug."""
311 | return self._is_dirty
312 |
313 | @is_dirty.setter
314 | def is_dirty(self, status):
315 | """Setting the Plug dirty informs its parent plug."""
316 | self._is_dirty = status
317 | if status:
318 | self.parent_plug.is_dirty = status # pylint: disable=no-member
319 |
320 | def promote_to_graph(self, name=None):
321 | """Add this plug to the graph of this plug's node.
322 |
323 | NOTE: Subplugs can only be added to a graph via their parent plug.
324 |
325 | Args:
326 | name (str): Optionally provide a different name for the Plug
327 | """
328 |         # prevent adding SubPlugs to the graph without their parent plugs
329 | raise TypeError(
330 | "Cannot add SubPlug to graph! Add the parent plug instead."
331 | )
332 |
333 |
334 | class SubInputPlug(SubPlug, InputPlug):
335 | """Held by a parent input plug to form a compound plug."""
336 |
337 | def __init__(self, key, node, parent_plug, value=None):
338 | """Initialize the plug.
339 |
340 | Can be connected to an OutputPlug.
341 | Args:
342 | key (str): The key will be used to form the name of the Plug:
343 | {parent_plug.name}.{key}.
344 | node (INode): The Node holding the Plug.
345 | parent_plug (InputPlug): The parent plug holding this Plug.
346 | """
347 | # super().__init__() refers to self.parent_plug, so need to set it here
348 | self.key = key
349 | self.parent_plug = parent_plug
350 | self.parent_plug.sub_plugs[key] = self
351 |
352 | super().__init__(f"{parent_plug.name}.{key}", node)
353 | self.value = value
354 | self.is_dirty = True
355 |
356 | def serialize(self):
357 |         """Serialize the Plug containing all its connections."""
358 | connections = {}
359 | if self.connections:
360 | connections[
361 | self.connections[0].node.identifier
362 | ] = self.connections[0].name
363 | return {
364 | "name": self.name,
365 | "value": self.value,
366 | "connections": connections,
367 | }
368 |
369 |
370 | class SubOutputPlug(SubPlug, OutputPlug):
371 | """Held by a parent output plug to form a compound plug."""
372 |
373 | def __init__(self, key, node, parent_plug, value=None):
374 | """Initialize the plug.
375 |
376 | Can be connected to an InputPlug.
377 | Args:
378 | key (str): The key will be used to form the name of the Plug:
379 | {parent_plug.name}.{key}.
380 | node (INode): The Node holding the Plug.
381 |             parent_plug (OutputPlug): The parent plug holding this Plug.
382 | """
383 | # super().__init__() refers to self.parent_plug, so need to set it here
384 | self.key = key
385 | self.parent_plug = parent_plug
386 | self.parent_plug.sub_plugs[key] = self
387 |
388 | super().__init__(f"{parent_plug.name}.{key}", node)
389 | self.value = value
390 | self.is_dirty = True
391 |
392 | def _update_value(self, value):
393 | """Propagate the dirty state to all connected Plugs as well."""
394 | super()._update_value(value)
395 | for plug in self.connections:
396 | plug.value = value
397 | parent_value = self.parent_plug.value or {}
398 | parent_value[self.key] = value
399 | self.parent_plug.value = parent_value
400 |
401 | def serialize(self):
402 |         """Serialize the Plug containing all its connections."""
403 | connections = {}
404 | for connection in self.connections:
405 | connections.setdefault(connection.node.identifier, [])
406 | connections[connection.node.identifier].append(connection.name)
407 | return {
408 | "name": self.name,
409 | "value": self.value,
410 | "connections": connections,
411 | }
412 |
413 |
414 | class InputPlugGroup:
415 | """Group plugs inside a group into one entry point on the graph."""
416 |
417 | def __init__(self, name, graph, plugs=None):
418 |         """Initialize the group and assign it to the graph's `inputs`.
419 |
420 | Can be connected to an OutputPlug.
421 | Args:
422 | name (str): The name of the InputPlugGroup.
423 | graph (Graph): The Graph holding the PlugGroup.
424 | plugs (list of InputPlug): The plugs in this group.
425 | """
426 | self.name = name
427 | self.graph = graph
428 | self.plugs = plugs or []
429 | self.graph.inputs[self.name] = self
430 |
431 | def connect(self, plug):
432 | """Connect all plugs in this group to the given plug."""
433 | for input_plug in self.plugs:
434 | plug.connect(input_plug)
435 |
436 | def disconnect(self, plug):
437 | """Disconnect all plugs in this group from the given plug."""
438 | for input_plug in self.plugs:
439 | plug.disconnect(input_plug)
440 |
441 | def __iter__(self):
442 | """Convenience to iterate over the plugs in this group."""
443 | for plug in self.plugs:
444 | yield plug
445 |
446 | def __rshift__(self, other):
447 | """Syntactic sugar for the connect() method."""
448 | self.connect(other)
449 |
450 | def __lshift__(self, other):
451 | """Syntactic sugar for the disconnect() method."""
452 | self.disconnect(other)
453 |
454 | @property
455 | def value(self):
456 | """Getting the value of an InputPlugGroup is not supported.
457 |
458 | The value property is implemented nonetheless, in order to allow for
459 | convenient setting of the value of all plugs in the InputPlugGroup.
460 | """
461 | raise AttributeError(
462 | "Getting the value of an InputPlugGroup is not supported"
463 | )
464 |
465 | @value.setter
466 | def value(self, new_value):
467 | """Set the value for all grouped plugs."""
468 | for plug in self.plugs:
469 | plug.value = new_value
470 |
--------------------------------------------------------------------------------
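
Example: compound (sub) plugs. A minimal sketch (not a repository file; SplitName/Collect are hypothetical). Indexing a plug with a string key creates the SubInputPlug or SubOutputPlug on first access, and compute() can address sub plugs with dotted keys:

    from flowpipe.graph import Graph
    from flowpipe.node import Node

    graph = Graph(name="subplug-demo")

    @Node(outputs=["values"])
    def SplitName(full_name):
        first, last = full_name.split()
        return {"values.first": first, "values.last": last}

    @Node(outputs=["out"])
    def Collect(in_):
        return {"out": in_}

    splitter = SplitName(graph=graph, full_name="Jane Doe")
    collector = Collect(graph=graph)

    # First access creates the sub plug; it connects like any other plug
    splitter.outputs["values"]["first"].connect(collector.inputs["in_"])

    graph.evaluate()
    print(collector.outputs["out"].value)  # "Jane"
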
/flowpipe/utilities.py:
--------------------------------------------------------------------------------
1 | """Utilities for serializing and importing Nodes."""
2 | try:
3 | import importlib
4 | except ImportError:
5 | pass
6 | import json
7 | import sys
8 | from hashlib import sha256
9 |
10 |
11 | def import_class(module, cls_name, file_location=None):
12 | """Import and return the given class from the given module.
13 |
14 | File location can be given to import the class from a location that
15 | is not accessible through the PYTHONPATH.
16 |     Legacy import fallbacks are kept for backwards compatibility.
17 | """
18 | try:
19 | module = importlib.import_module(module)
20 | except NameError: # pragma: no cover
21 | module = __import__(module, globals(), locals(), ["object"], -1)
22 | except ModuleNotFoundError: # pragma: no cover
23 |         pass  # ignored so the class gets loaded from its source file below
24 | try:
25 | cls = getattr(module, cls_name)
26 | except AttributeError: # pragma: no cover
27 | loader = importlib.machinery.SourceFileLoader("module", file_location)
28 | spec = importlib.machinery.ModuleSpec(
29 | "module", loader, origin=file_location
30 | )
31 | module = importlib.util.module_from_spec(spec)
32 | loader.exec_module(module)
33 | cls = getattr(module, cls_name)
34 | return cls
35 |
36 |
37 | def deserialize_node(data):
38 | """De-serialize a node from the given json data."""
39 | node = import_class(data["module"], data["cls"], data["file_location"])(
40 | graph=None
41 | )
42 | node.post_deserialize(data)
43 | return node
44 |
45 |
46 | def deserialize_graph(data):
47 | """De-serialize from the given json data."""
48 | graph = import_class(data["module"], data["cls"])()
49 | graph.name = data["name"]
50 | graph.nodes = []
51 | for node in data["nodes"]:
52 | deserialized_node = deserialize_node(node)
53 | graph.nodes.append(deserialized_node)
54 | deserialized_node.graph = graph
55 |
56 | nodes = {n.identifier: n for n in graph.nodes}
57 |
58 | all_nodes = list(data["nodes"])
59 |
60 | subgraphs = []
61 | for sub_data in data.get("subgraphs", []):
62 | subgraph = import_class(sub_data["module"], sub_data["cls"])()
63 | subgraph.name = sub_data["name"]
64 | subgraph.nodes = []
65 | for node in sub_data["nodes"]:
66 | deserialized_node = deserialize_node(node)
67 | subgraph.nodes.append(deserialized_node)
68 | deserialized_node.graph = subgraph
69 | all_nodes += sub_data["nodes"]
70 | subgraphs.append(subgraph)
71 | nodes.update({n.identifier: n for n in subgraph.nodes})
72 |
73 |     for node in all_nodes:
74 | this = nodes[node["identifier"]]
75 | for name, input_ in node["inputs"].items():
76 | for identifier, plug in input_["connections"].items():
77 | upstream = nodes[identifier]
78 | upstream.outputs[plug].connect(this.inputs[name])
79 | for sub_plug_name, sub_plug in input_["sub_plugs"].items():
80 | sub_plug_name = sub_plug_name.split(".")[-1]
81 | for identifier, plug in sub_plug["connections"].items():
82 | upstream = nodes[identifier]
83 | upstream.outputs[plug].connect(
84 | this.inputs[name][sub_plug_name]
85 | )
86 | return graph
87 |
88 |
89 | class NodeEncoder(json.JSONEncoder):
90 | """Custom JSONEncoder to handle non-json serializable node values.
91 |
92 |     If the value is not json serializable, a sha256 hash of its bytes is
93 |     encoded instead, falling back to its string representation.
94 | """
95 |
96 | def default(self, o):
97 | """Encode the object, handling type errors by encoding into sha256."""
98 | try:
99 | return super().default(o)
100 | except TypeError:
101 | try:
102 | return sha256(o).hexdigest()
103 | except TypeError:
104 | return str(o)
105 | except ValueError:
106 | return sha256(bytes(o)).hexdigest()
107 |
108 |
109 | def get_hash(obj, hash_func=lambda x: sha256(x).hexdigest()):
110 | """Safely get the hash of an object.
111 |
112 | This function tries to compute the hash as safely as possible, dealing with
113 | json data and strings in a well-defined manner.
114 |
115 | Args:
116 | obj: The object to hash
117 | hash_func (func(obj) -> str): The hashing function to use
118 |
119 | Returns:
120 | (str): A hash of the obj
121 |
122 | """
123 | try:
124 | return hash_func(obj)
125 | except (TypeError, ValueError):
126 | try:
127 | json_string = json.dumps(obj, sort_keys=True)
128 | except TypeError: # pragma: no cover
129 | pass
130 | else:
131 | obj = json_string
132 | if isinstance(obj, str):
133 | return hash_func(obj.encode("utf-8"))
134 | if sys.version_info.major > 2: # pragma: no cover
135 | try:
136 | return hash_func(bytes(obj))
137 | except TypeError:
138 | return None
139 | else:
140 | return None # pragma: no cover
141 |
142 |
143 | def sanitize_string_input(input_str):
144 | """
145 |     Escape "{" and "}" so the string survives a later str.format() call.
146 | Args:
147 | input_str (str): string to be sanitized
148 | Returns:
149 | (str): Sanitized string
150 | """
151 | return input_str.replace("{", "{{").replace("}", "}}")
152 |
--------------------------------------------------------------------------------
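
Example: the hashing helpers. A minimal sketch (not a repository file): get_hash() normalizes json-serializable data before hashing, so logically equal objects hash identically, while NodeEncoder degrades gracefully on values json cannot handle:

    import json

    from flowpipe.utilities import NodeEncoder, get_hash

    # Dicts are dumped with sorted keys before hashing, so key order is irrelevant
    assert get_hash({"a": 1, "b": 2}) == get_hash({"b": 2, "a": 1})

    # Strings are encoded to utf-8 bytes before hashing
    assert get_hash("flowpipe") is not None

    # Unserializable values fall back to a sha256 digest, then to str()
    print(json.dumps({"callback": print}, cls=NodeEncoder))
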
/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PaulSchweizer/flowpipe/79f0e793809df7eb88515f90c4a6cc49f8283b18/logo.png
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 79
3 |
4 | [tool.isort]
5 | profile = "black"
6 | skip = ["flowpipe/__init__.py"]
7 |
8 | [tool.pylint."MESSAGES CONTROL"]
9 | disable = [
10 | "duplicate-code",
11 | "no-else-raise",
12 | "too-many-function-args",
13 | "too-many-locals",
14 | "too-many-arguments",
15 | "too-few-public-methods",
16 | "too-many-instance-attributes",
17 | "too-many-public-methods",
18 | ]
19 |
20 | [tool.poetry]
21 | name = "Flowpipe"
22 | version = "1.0.4"
23 | description = "A lightweight framework for flow-based programming in python."
24 | authors = ["Paul Schweizer <paulschweizer@gmx.net>"]
25 | license = "MIT"
26 | readme = "README.md"
27 | repository = "https://github.com/PaulSchweizer/flowpipe"
28 | documentation = "https://flowpipe.readthedocs.io/en/latest/"
29 | classifiers = [
30 | "Programming Language :: Python",
31 | "Programming Language :: Python :: 3.7",
32 | "Programming Language :: Python :: 3.8",
33 | "Programming Language :: Python :: 3.9",
34 | "Programming Language :: Python :: 3.10",
35 | ]
36 |
37 | [tool.poetry.dependencies]
38 | python = ">=3.9"
39 | ascii-canvas = ">=2.0.0"
40 |
41 | [tool.poetry.group.dev.dependencies]
42 | black = "^23.11.0"
43 | mock = "^5.1.0"
44 | numpy = "^1.26.2"
45 | pre-commit = "^3.5.0"
46 | pylint = "^3.0.1"
47 | pytest-cov = "^4.1.0"
48 | pytest = "^7.4.2"
49 | isort = { version = "^5.12.0", extras = ["pyproject"] }
50 | mypy = "^1.6.1"
51 |
52 | [build-system]
53 | requires = ["poetry-core>=1.0.0"]
54 | build-backend = "poetry.core.masonry.api"
55 |
--------------------------------------------------------------------------------
/readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-20.04
5 | tools:
6 | python: "3.10"
7 | jobs:
8 | pre_create_environment:
9 | - asdf plugin add poetry
10 | - asdf install poetry latest
11 | - asdf global poetry latest
12 | - poetry config virtualenvs.create false
13 | post_install:
14 | - poetry install
15 |
16 | sphinx:
17 | configuration: docs/conf.py
18 | builder: html
19 | fail_on_warning: False
20 |
21 | python:
22 | install:
23 | - requirements: docs/requirements.txt
24 |
25 |
26 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """The setup file only exists to be able to build the docs on readthedocs!"""
2 | from setuptools import find_packages, setup
3 |
4 | with open("README.md") as stream:
5 | long_description = stream.read()
6 |
7 | REQUIREMENTS = [
8 | "ascii-canvas>=2.0.0",
9 | ]
10 |
11 | setup(
12 | name="flowpipe",
13 | version="1.0.4",
14 | author="Paul Schweizer",
15 | author_email="paulschweizer@gmx.net",
16 | description="Lightweight flow-based programming framework.",
17 | long_description=long_description,
18 | long_description_content_type="text/markdown",
19 | url="https://github.com/PaulSchweizer/flowpipe",
20 | packages=find_packages(),
21 | install_requires=REQUIREMENTS,
22 | classifiers=[
23 | "Programming Language :: Python",
24 | ],
25 | )
26 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from flowpipe.graph import Graph
4 | from flowpipe.node import INode
5 | from flowpipe.plug import InputPlug, OutputPlug
6 |
7 |
8 | class NodeForTesting(INode):
9 | """
10 | +---------------------+
11 | | NodeForTesting |
12 | |---------------------|
13 | o in1<> |
14 | o in2<> |
15 | | out o
16 | | out2 o
17 | +---------------------+
18 | """
19 |
20 | def __init__(self, name=None, in1=None, in2=None, **kwargs):
21 | super(NodeForTesting, self).__init__(name, **kwargs)
22 | OutputPlug("out", self)
23 | OutputPlug("out2", self)
24 | InputPlug("in1", self, in1)
25 | InputPlug("in2", self, in2)
26 |
27 | def compute(self, in1, in2):
28 | """Multiply the two inputs."""
29 | return {"out": in1 * in2, "out2": None}
30 |
31 |
32 | @pytest.fixture
33 | def branching_graph():
34 | """
35 | +------------+ +------------+ +--------------------+
36 | | Start | | Node2 | | End |
37 | |------------| |------------| |--------------------|
38 | o in1<0> | +--->o in1<> | % in1 |
39 | o in2<0> | | o in2<0> | +--->o in1.1<> |
40 | | out o-----+ | out o-----|--->o in1.2<> |
41 | | out2 o | | out2 o | o in2<0> |
42 | +------------+ | +------------+ | | out o
43 | | +------------+ | | out2 o
44 | | | Node1 | | +--------------------+
45 | | |------------| |
46 | +--->o in1<> | |
47 | o in2<0> | |
48 | | out o-----+
49 | | out2 o
50 | +------------+
51 | """
52 | graph = Graph(name="TestGraph")
53 | start = NodeForTesting(name="Start", graph=graph)
54 | n1 = NodeForTesting(name="Node1", graph=graph)
55 | n2 = NodeForTesting(name="Node2", graph=graph)
56 | end = NodeForTesting(name="End", graph=graph)
57 | start.outputs["out"] >> n1.inputs["in1"]
58 | start.outputs["out"] >> n2.inputs["in1"]
59 | n1.outputs["out"] >> end.inputs["in1"]["1"]
60 | n2.outputs["out"] >> end.inputs["in1"]["2"]
61 |
62 | yield graph
63 |
--------------------------------------------------------------------------------
/tests/test_convert_function_to_node.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import json
4 |
5 | import pytest
6 |
7 | from flowpipe.graph import reset_default_graph
8 | from flowpipe.node import FunctionNode, INode, Node
9 |
10 |
11 | @pytest.fixture
12 | def clear_default_graph():
13 | reset_default_graph()
14 |
15 |
16 | @Node(outputs=["out"])
17 | def function_for_testing(input1, input2):
18 | """Test documentation."""
19 | return {"out": "TestHasPassed"}
20 |
21 |
22 | def test_input_plugs_are_taken_from_func_inputs(clear_default_graph):
23 |     """Input args to the function are used as input plugs for the node."""
24 |
25 | @Node()
26 |     def function(arg, kwarg="initial_value"):
27 | pass
28 |
29 | node = function()
30 | assert 2 == len(node.inputs.keys())
31 | assert "arg" in node.inputs.keys()
32 | assert "kwarg" in node.inputs.keys()
33 |
34 |
35 | def test_name_is_taken_from_func_name_if_not_provided(clear_default_graph):
36 | """Function name is converted to node name if not provided."""
37 |
38 | @Node()
39 | def function():
40 | pass
41 |
42 | node = function()
43 | assert "function" == node.name
44 |
45 |
46 | def test_name_can_be_provided_as_kwarg(clear_default_graph):
47 | """Name and identifier can be provided."""
48 |
49 | @Node()
50 | def function():
51 | pass
52 |
53 | node = function(name="ProvidedNodeName", identifier="TestIdentifier")
54 | assert "ProvidedNodeName" == node.name
55 | assert "TestIdentifier" == node.identifier
56 |
57 |
58 | def test_doc_is_taken_from_func(clear_default_graph):
59 | """Docstring is taken from the function."""
60 |
61 | @Node()
62 | def function():
63 | """Function Documentation"""
64 |
65 | node = function()
66 | assert function.__doc__ == node.__doc__
67 |
68 |
69 | def test_define_outputs(clear_default_graph):
70 | """Outputs have to be defined as a list of strings."""
71 |
72 | @Node(outputs=["out1", "out2"])
73 | def function():
74 | pass
75 |
76 | node = function()
77 | assert 2 == len(node.outputs.keys())
78 | assert "out1" in node.outputs.keys()
79 | assert "out2" in node.outputs.keys()
80 |
81 |
82 | def test_decorator_returns_node_instances(clear_default_graph):
83 | """A call to the decorated function returns a Node instance."""
84 |
85 | @Node()
86 | def function():
87 | pass
88 |
89 | node1 = function(graph=None)
90 | node2 = function(graph=None)
91 | assert node1 != node2
92 |
93 |
94 | def test_serialize_function_node(clear_default_graph):
95 | """Serialization also stored the location of the function."""
96 | node = function_for_testing(graph=None)
97 | data = json.dumps(node.to_json())
98 | deserialized_node = INode.from_json(json.loads(data))
99 | assert node.__doc__ == deserialized_node.__doc__
100 | assert node.name == deserialized_node.name
101 | assert node.inputs.keys() == deserialized_node.inputs.keys()
102 | assert node.outputs.keys() == deserialized_node.outputs.keys()
103 | assert node.evaluate() == deserialized_node.evaluate()
104 |
105 |
106 | def test_use_self_as_first_arg_if_present(clear_default_graph):
107 |     """If the wrapped function has self as its first arg, it receives the node instance, like in a method."""
108 |
109 | @Node(outputs=["test"])
110 | def function(self, arg1, arg2):
111 | return {"test": self.test}
112 |
113 | node = function(graph=None)
114 | node.test = "test"
115 | assert "test" == node.evaluate()["test"]
116 |
117 | @Node(outputs=["test"])
118 | def function(arg1, arg2):
119 | return {"test": "Test without self"}
120 |
121 | node = function(graph=None)
122 | assert "Test without self" == node.evaluate()["test"]
123 |
124 |
125 | def test_assign_input_args_to_function_input_plugs(clear_default_graph):
126 | """Assign inputs to function to the input plugs."""
127 |
128 | @Node(outputs=["test"])
129 | def function(arg):
130 | return {"test": arg}
131 |
132 | node = function(arg="test")
133 | assert "test" == node.evaluate()["test"]
134 |
135 |
136 | def test_provide_custom_node_class(clear_default_graph):
137 | """The 'node' key is used to pass a custom class to be used as the Node."""
138 |
139 | class CustomFunctionNode(FunctionNode):
140 | pass
141 |
142 | @Node(cls=CustomFunctionNode, outputs=["test"])
143 | def function(arg):
144 | return {"test": arg}
145 |
146 | node = function(arg="test")
147 | assert isinstance(node, CustomFunctionNode)
148 |
149 |
150 | def test_passing_metadata_updates_existing_metadata(clear_default_graph):
151 | @Node(metadata={"arg_1": "value", "arg_2": "value"})
152 | def function(arg):
153 | return {}
154 |
155 | node = function(graph=None)
156 | assert node.metadata == {"arg_1": "value", "arg_2": "value"}
157 |
158 | node = function(
159 | metadata={"arg_1": "new_value", "arg3": "new_value"}, graph=None
160 | )
161 | assert node.metadata == {
162 | "arg_1": "new_value",
163 | "arg_2": "value",
164 | "arg3": "new_value",
165 | }
166 |
167 |
168 | def test_default_args_are_assigned_to_input_plugs(clear_default_graph):
169 | @Node()
170 | def function(arg_1, arg_2="test_1", arg_3="test_2"):
171 | return {}
172 |
173 | node = function()
174 |
175 | assert node.inputs["arg_1"].value is None
176 | assert node.inputs["arg_2"].value == "test_1"
177 | assert node.inputs["arg_3"].value == "test_2"
178 |
179 |
180 | def test_metadata_is_unique_for_each_node_created(clear_default_graph):
181 | @Node(metadata={"key": [1, 2, 3]})
182 | def function():
183 | pass
184 |
185 | node1 = function(graph=None)
186 | node2 = function(graph=None)
187 |
188 | assert node1.metadata is not node2.metadata
189 |
190 |
191 | def test_class_name_restored_after_deserialization(clear_default_graph):
192 | """Serialization also stored the location of the function."""
193 | node = function_for_testing(graph=None)
194 | data = json.dumps(node.to_json())
195 | deserialized_node = INode.from_json(json.loads(data))
196 |
197 | assert node.class_name == "function_for_testing"
198 | assert deserialized_node.class_name == "function_for_testing"
199 |
200 |
201 | def test_node_reserved_names():
202 | with pytest.raises(ValueError):
203 |
204 | @Node()
205 | def function(
206 | func, name, identifier, inputs, outputs, metadata, omit, graph
207 | ):
208 | pass
209 |
210 |
211 | def test_create_node_with_sub_output_plugs():
212 | @Node(outputs=["out.a", "out", "out.b"])
213 | def function1(in_):
214 | pass
215 |
216 | node = function1(name="contains_all_plugs")
217 | assert len(node.outputs["out"].sub_plugs) == 2
218 |
219 | @Node(outputs=["out.a", "out.b"])
220 | def function2(in_):
221 | pass
222 |
223 | node = function2(name="contains_only_subplugs")
224 | assert len(node.outputs["out"].sub_plugs) == 2
225 |
--------------------------------------------------------------------------------
/tests/test_event.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | from flowpipe.event import Event
4 |
5 |
6 | def test_listeners_only_registered_once():
7 | def listener(x, y):
8 | pass
9 |
10 | event = Event("test")
11 | event.register(listener)
12 | event.register(listener)
13 | assert event.is_registered(listener)
14 | assert 1 == len(event._listeners)
15 |
16 |
17 | def test_deregister_if_registered():
18 | def listener(x, y):
19 | pass
20 |
21 | event = Event("test")
22 | event.register(listener)
23 | event.deregister(listener)
24 | assert not event.is_registered(listener)
25 |
26 | event.deregister(listener)
27 |
28 |
29 | def test_event_emit():
30 | def listener(arg, kwarg):
31 | assert arg == 123
32 | assert kwarg == "test"
33 |
34 | event = Event("test")
35 | event.register(listener)
36 | event.emit(123, kwarg="test")
37 |
38 |
39 | def test_event_clear():
40 | def listener(arg, kwarg):
41 | pass
42 |
43 | event = Event("test")
44 | event.register(listener)
45 | event.clear()
46 |
47 | assert not event.is_registered(listener)
48 | assert len(event._listeners) == 0
49 |
--------------------------------------------------------------------------------
/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import importlib.machinery
3 | import importlib.util
4 | import os
5 | import sys
6 |
7 |
8 | def load_source(modname, filename):
9 | loader = importlib.machinery.SourceFileLoader(modname, filename)
10 | spec = importlib.util.spec_from_file_location(
11 | modname, filename, loader=loader
12 | )
13 | module = importlib.util.module_from_spec(spec)
14 | sys.modules[module.__name__] = module
15 | loader.exec_module(module)
16 | return module
17 |
18 |
19 | def test_examples():
20 | """Run the example files to ensure their integrity."""
21 | examples = os.path.join(
22 | os.path.dirname(os.path.dirname(__file__)), "examples", "*.py"
23 | )
24 |
25 | for example in glob.glob(examples):
26 | load_source(os.path.basename(example).replace(".", "_"), example)
27 |
--------------------------------------------------------------------------------
/tests/test_inputpluggroup.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from flowpipe import Graph, InputPlugGroup, Node
4 |
5 |
6 | @Node(outputs=["out"])
7 | def DemoNode(in_):
8 | """
9 | +-----------+
10 | | DemoNode |
11 | |-----------|
12 | o in_<> |
13 | | out<> o
14 | +-----------+
15 | """
16 | return {"out": in_}
17 |
18 |
19 | @pytest.fixture
20 | def demo_graph_fixture():
21 | """
22 | +---main----+ +---sub----+
23 | | A | | C1 |
24 | |-----------| |----------|
25 | o in_<> | +--->o in_<> |
26 | | out<> o-----+ | out<> o
27 | +-----------+ | +----------+
28 | | +---sub----+
29 | | | C2 |
30 | | |----------|
31 | +--->o in_<> |
32 | | out<> o
33 | +----------+
34 | """
35 | # Sub graph
36 | sub = Graph("sub")
37 | c1 = DemoNode(graph=sub, name="C1")
38 | c2 = DemoNode(graph=sub, name="C2")
39 |
40 | # Main graph
41 | main = Graph("main")
42 | DemoNode(graph=main, name="A")
43 |
44 | # Group inputs in the sub graph
45 | InputPlugGroup(
46 | "graph_in",
47 | sub,
48 | [
49 | c1.inputs["in_"],
50 | c2.inputs["in_"],
51 | ],
52 | )
53 | return sub, main
54 |
55 |
56 | def test_connect_groupinput_to_output(demo_graph_fixture):
57 | sub, main = demo_graph_fixture
58 | sub.inputs["graph_in"].connect(main["A"].outputs["out"])
59 |
60 | assert main["A"].outputs["out"] in sub["C1"].inputs["in_"].connections
61 | assert main["A"].outputs["out"] in sub["C2"].inputs["in_"].connections
62 |
63 | sub.inputs["graph_in"].disconnect(main["A"].outputs["out"])
64 |
65 | assert main["A"].outputs["out"] not in sub["C1"].inputs["in_"].connections
66 | assert main["A"].outputs["out"] not in sub["C2"].inputs["in_"].connections
67 |
68 |
69 | def test_connect_output_to_groupinput(demo_graph_fixture):
70 | sub, main = demo_graph_fixture
71 | main["A"].outputs["out"].connect(sub.inputs["graph_in"])
72 |
73 | assert main["A"].outputs["out"] in sub["C1"].inputs["in_"].connections
74 | assert main["A"].outputs["out"] in sub["C2"].inputs["in_"].connections
75 |
76 | main["A"].outputs["out"].disconnect(sub.inputs["graph_in"])
77 |
78 | assert main["A"].outputs["out"] not in sub["C1"].inputs["in_"].connections
79 | assert main["A"].outputs["out"] not in sub["C2"].inputs["in_"].connections
80 |
81 |
82 | def test_rshift_connect_groupinput_to_output(demo_graph_fixture):
83 | sub, main = demo_graph_fixture
84 | sub.inputs["graph_in"] >> main["A"].outputs["out"]
85 |
86 | assert main["A"].outputs["out"] in sub["C1"].inputs["in_"].connections
87 | assert main["A"].outputs["out"] in sub["C2"].inputs["in_"].connections
88 |
89 | sub.inputs["graph_in"] << main["A"].outputs["out"]
90 |
91 | assert main["A"].outputs["out"] not in sub["C1"].inputs["in_"].connections
92 | assert main["A"].outputs["out"] not in sub["C2"].inputs["in_"].connections
93 |
94 |
95 | def test_rshift_connect_output_to_groupinput(demo_graph_fixture):
96 | sub, main = demo_graph_fixture
97 | main["A"].outputs["out"] >> sub.inputs["graph_in"]
98 |
99 | assert main["A"].outputs["out"] in sub["C1"].inputs["in_"].connections
100 | assert main["A"].outputs["out"] in sub["C2"].inputs["in_"].connections
101 |
102 | main["A"].outputs["out"] << sub.inputs["graph_in"]
103 |
104 | assert main["A"].outputs["out"] not in sub["C1"].inputs["in_"].connections
105 | assert main["A"].outputs["out"] not in sub["C2"].inputs["in_"].connections
106 |
107 |
108 | def test_connect_groupinput_to_suboutput(demo_graph_fixture):
109 | sub, main = demo_graph_fixture
110 | sub.inputs["graph_in"].connect(main["A"].outputs["out"]["1"])
111 |
112 | assert main["A"].outputs["out"]["1"] in sub["C1"].inputs["in_"].connections
113 | assert main["A"].outputs["out"]["1"] in sub["C2"].inputs["in_"].connections
114 |
115 | sub.inputs["graph_in"].disconnect(main["A"].outputs["out"]["1"])
116 |
117 | assert (
118 | main["A"].outputs["out"]["1"]
119 | not in sub["C1"].inputs["in_"].connections
120 | )
121 | assert (
122 | main["A"].outputs["out"]["1"]
123 | not in sub["C2"].inputs["in_"].connections
124 | )
125 |
126 |
127 | def test_connect_suboutput_to_groupinput(demo_graph_fixture):
128 | sub, main = demo_graph_fixture
129 | main["A"].outputs["out"]["1"].connect(sub.inputs["graph_in"])
130 |
131 | assert main["A"].outputs["out"]["1"] in sub["C1"].inputs["in_"].connections
132 | assert main["A"].outputs["out"]["1"] in sub["C2"].inputs["in_"].connections
133 |
134 | main["A"].outputs["out"]["1"].disconnect(sub.inputs["graph_in"])
135 |
136 | assert (
137 | main["A"].outputs["out"]["1"]
138 | not in sub["C1"].inputs["in_"].connections
139 | )
140 | assert (
141 | main["A"].outputs["out"]["1"]
142 | not in sub["C2"].inputs["in_"].connections
143 | )
144 |
145 |
146 | def test_setting_value_of_groupinput(demo_graph_fixture):
147 | random_string = "foo"
148 | sub, _ = demo_graph_fixture
149 | sub.inputs["graph_in"].value = random_string
150 |
151 | assert sub["C1"].inputs["in_"].value == random_string
152 | assert sub["C2"].inputs["in_"].value == random_string
153 |
154 |
155 | def test_getting_value_of_groupinput_is_not_possible(demo_graph_fixture):
156 | sub, _ = demo_graph_fixture
157 | with pytest.raises(AttributeError):
158 | sub.inputs["graph_in"].value
159 |
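160 |
161 | # A minimal usage sketch (assuming Graph.evaluate in its default mode, as
162 | # used elsewhere in this suite): setting the group value fans out to every
163 | # grouped input, so both branches compute the same result.
164 | def test_groupinput_fans_out_value_on_evaluation(demo_graph_fixture):
165 |     sub, _ = demo_graph_fixture
166 |     sub.inputs["graph_in"].value = "payload"
167 |     sub.evaluate()
168 |
169 |     assert sub["C1"].outputs["out"].value == "payload"
170 |     assert sub["C2"].outputs["out"].value == "payload"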
--------------------------------------------------------------------------------
/tests/test_multiprocessing.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from flowpipe.graph import Graph
4 | from flowpipe.node import Node
5 |
6 | # A SLEEP_TIME below one second makes no measurable difference, since
7 | # starting the worker processes itself eats up time.
8 | SLEEP_TIME = 3
9 |
10 |
11 | @Node(outputs=["out"])
12 | def Sleeper(in1):
13 | time.sleep(SLEEP_TIME)
14 |
15 |
16 | def test_multiprocessed_evaluation_is_faster():
17 |     """Test by having sleeper nodes sleep in parallel and checking the total graph timing.
18 | +---------------+ +---------------+
19 | | Sleeper1 | | Sleeper2 |
20 | |---------------| |---------------|
21 | o in1<> | +--->o in1<> |
22 | | out o-----+ | out o
23 | +---------------+ | +---------------+
24 | | +---------------+
25 | | | Sleeper3 |
26 | | |---------------|
27 | +--->o in1<> |
28 | | | out o
29 | | +---------------+
30 | | +---------------+
31 | | | Sleeper4 |
32 | | |---------------|
33 | +--->o in1<> |
34 | | out o
35 | +---------------+
36 | """
37 | delay = 0.05
38 | graph = Graph(name="threaded")
39 |
40 | s1 = Sleeper(name="Sleeper1", graph=graph)
41 | s2 = Sleeper(name="Sleeper2", graph=graph)
42 | s3 = Sleeper(name="Sleeper3", graph=graph)
43 | s4 = Sleeper(name="Sleeper4", graph=graph)
44 |
45 | s1.outputs["out"] >> s2.inputs["in1"]
46 | s1.outputs["out"] >> s3.inputs["in1"]
47 | s1.outputs["out"] >> s4.inputs["in1"]
48 |
49 | start = time.time()
50 | graph.evaluate(mode="multiprocessing")
51 | end = time.time()
52 |
53 | runtime = end - start
54 |
55 | assert runtime < len(graph.nodes) * SLEEP_TIME + len(graph.nodes) * delay
56 |
57 |
58 | @Node(outputs=["result", "results"])
59 | def AddNode(number1, number2, numbers):
60 |     """'numbers' and 'results' are used as compound plugs."""
61 | result = {"result": number1 + number2}
62 | if numbers is not None:
63 | for i, _ in enumerate(numbers.keys()):
64 | result["results.{0}".format(i)] = i
65 | return result
66 |
67 |
68 | def test_multiprocessing_evaluation_updates_the_original_graph():
69 | """Multi processing updates the original graph object.
70 |
71 | +---------------+ +---------------+ +------------------------+
72 | | AddNode1 | | AddNode2 | | AddNode5 |
73 | |---------------| |---------------| |------------------------|
74 | o number1<1> | +--->o number1<2> | o number1<1> |
75 | o number2<1> | | o number2<1> | o number2<1> |
76 | o numbers<> | | o numbers<> | +--->% numbers |
77 | | result o-----+ | result o |--->o numbers.0<> |
78 | | results o | | results o |--->o numbers.1<> |
79 | +---------------+ | +---------------+ | | result o
80 | | +---------------+ | | results o
81 | | | AddNode3 | | +------------------------+
82 | | |---------------| |
83 | +--->o number1<2> | |
84 | | o number2<1> | |
85 | | o numbers<> | |
86 | | | result o |
87 | | | results o |
88 | | +---------------+ |
89 | | +------------------------+ |
90 | | | AddNode4 | |
91 | | |------------------------| |
92 | +--->o number1<2> | |
93 | | o number2<1> | |
94 | | % numbers | |
95 | +--->o numbers.0<2> | |
96 | +--->o numbers.1<2> | |
97 | | result o |
98 | | results %-----+
99 | | results.0 o-----+
100 | | results.1 o-----+
101 | +------------------------+
102 | """
103 | graph = Graph(name="multiprocessing")
104 |
105 | n1 = AddNode(name="AddNode1", graph=graph, number1=1, number2=1)
106 | n2 = AddNode(name="AddNode2", graph=graph, number2=1)
107 | n3 = AddNode(name="AddNode3", graph=graph, number2=1)
108 | n4 = AddNode(name="AddNode4", graph=graph, number2=1)
109 | n5 = AddNode(name="AddNode5", graph=graph, number1=1, number2=1)
110 |
111 | n1.outputs["result"] >> n2.inputs["number1"]
112 | n1.outputs["result"] >> n3.inputs["number1"]
113 |
114 | n1.outputs["result"] >> n4.inputs["number1"]
115 | n1.outputs["result"] >> n4.inputs["numbers"]["0"]
116 | n1.outputs["result"] >> n4.inputs["numbers"]["1"]
117 |
118 | n4.outputs["results"]["0"] >> n5.inputs["numbers"]["0"]
119 | n4.outputs["results"]["1"] >> n5.inputs["numbers"]["1"]
120 |
121 | n4.outputs["results"] >> n5.inputs["numbers"]
122 |
123 | graph.evaluate(mode="multiprocessing", submission_delay=0.05)
124 |
125 | assert n2.outputs["result"].value == 3
126 | assert n3.outputs["result"].value == 3
127 |
128 | assert n4.outputs["results"].value == {"0": 0, "1": 1}
129 | assert n5.outputs["results"].value == {"0": 0, "1": 1}
130 |
131 | assert not n1.is_dirty
132 | assert not n2.is_dirty
133 | assert not n3.is_dirty
134 | assert not n4.is_dirty
135 | assert not n5.is_dirty
136 |
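137 |
138 | # A minimal sketch of the dependency reasoning above, assuming "threading"
139 | # is accepted by Graph.evaluate alongside "multiprocessing": two chained
140 | # sleepers cannot overlap, so two sleep intervals bound the runtime from
141 | # below.
142 | def test_dependent_nodes_do_not_run_in_parallel():
143 |     graph = Graph(name="chained")
144 |     s1 = Sleeper(name="Sleeper1", graph=graph)
145 |     s2 = Sleeper(name="Sleeper2", graph=graph)
146 |     s1.outputs["out"] >> s2.inputs["in1"]
147 |
148 |     start = time.time()
149 |     graph.evaluate(mode="threading")
150 |     runtime = time.time() - start
151 |
152 |     assert runtime >= 2 * SLEEP_TIME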
--------------------------------------------------------------------------------
/tests/test_plugs.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import pytest
4 |
5 | from flowpipe.graph import Graph, reset_default_graph
6 | from flowpipe.node import INode, Node
7 | from flowpipe.plug import InputPlug, OutputPlug
8 |
9 |
10 | @pytest.fixture
11 | def clear_default_graph():
12 | reset_default_graph()
13 |
14 |
15 | class NodeForTesting(INode):
16 | def __init__(self, **kwargs):
17 | super(NodeForTesting, self).__init__(**kwargs)
18 |
19 | def compute(self):
20 | pass
21 |
22 |
23 | def test_connecting_different_input_disconnects_existing_ones(
24 | clear_default_graph,
25 | ):
26 | @Node(outputs=["a_out"])
27 | def A(a):
28 | pass
29 |
30 | @Node(outputs=["b_out"])
31 | def B(b, b_compound):
32 | pass
33 |
34 | @Node(outputs=["c_out"])
35 | def C(c):
36 | pass
37 |
38 | @Node(outputs=["d_out_compound"])
39 | def D(d, d_compound):
40 | pass
41 |
42 | @Node(outputs=["e_out_compound"])
43 | def E(e, e_compound):
44 | pass
45 |
46 | a = A()
47 | b = B()
48 | c = C()
49 | d = D()
50 | e = E()
51 |
52 | a.outputs["a_out"].connect(b.inputs["b"])
53 | c.outputs["c_out"].connect(b.inputs["b"])
54 |
55 | assert not a.outputs["a_out"].connections
56 |
57 | b.inputs["b"].connect(a.outputs["a_out"])
58 |
59 | assert a.outputs["a_out"].connections
60 |
61 | b.inputs["b"].connect(c.outputs["c_out"])
62 |
63 | assert not a.outputs["a_out"].connections
64 |
65 | b.inputs["b_compound"]["0"].connect(a.outputs["a_out"])
66 | assert b.inputs["b_compound"]["0"].connections[0] == a.outputs["a_out"]
67 |
68 | b.inputs["b_compound"]["0"].connect(c.outputs["c_out"])
69 | assert b.inputs["b_compound"]["0"].connections[0] == c.outputs["c_out"]
70 |
71 | d.outputs["d_out_compound"]["0"].connect(e.inputs["e_compound"]["0"])
72 | e.inputs["e_compound"]["1"].connect(d.outputs["d_out_compound"]["0"])
73 | assert (
74 | e.inputs["e_compound"]["0"].connections[0]
75 | == d.outputs["d_out_compound"]["0"]
76 | )
77 | assert (
78 | e.inputs["e_compound"]["1"].connections[0]
79 | == d.outputs["d_out_compound"]["0"]
80 | )
81 | assert len(d.outputs["d_out_compound"]["0"].connections) == 2
82 |
83 |
84 | def test_connect_and_disconnect_nodes(clear_default_graph):
85 | """Connect and disconnect nodes."""
86 | n1 = NodeForTesting(name="n1")
87 | n2 = NodeForTesting(name="n2")
88 | out_plug_a = OutputPlug("out", n1)
89 | in_plug_a = InputPlug("in_a", n2)
90 | in_plug_b = InputPlug("in_b", n2)
91 | in_plug_c = InputPlug("in_c", n2)
92 | in_plug_d = InputPlug("in_d", n2)
93 | in_plug_compound = InputPlug("in_compound", n2)
94 | out_plug_compound = OutputPlug("out_compound", n1)
95 |
96 | # Connect the out to the in
97 | out_plug_a >> in_plug_a
98 | assert 1 == len(out_plug_a.connections)
99 | assert 1 == len(in_plug_a.connections)
100 | out_plug_compound["0"] >> in_plug_c
101 | assert 1 == len(out_plug_compound["0"].connections)
102 | assert 1 == len(in_plug_c.connections)
103 | out_plug_compound["1"] >> in_plug_compound["1"]
104 | assert 1 == len(out_plug_compound["1"].connections)
105 | assert 1 == len(in_plug_compound["1"].connections)
106 |
107 |     # Connecting the same plugs multiple times must not create duplicates
108 | out_plug_a >> in_plug_a
109 | assert 1 == len(out_plug_a.connections)
110 | assert 1 == len(in_plug_a.connections)
111 | out_plug_compound["0"] >> in_plug_c
112 | assert 1 == len(out_plug_compound["0"].connections)
113 | assert 1 == len(in_plug_c.connections)
114 | out_plug_compound["1"] >> in_plug_compound["1"]
115 | assert 1 == len(out_plug_compound["1"].connections)
116 | assert 1 == len(in_plug_compound["1"].connections)
117 |
118 | # Connect the in to the out
119 | in_plug_b >> out_plug_a
120 | assert 2 == len(out_plug_a.connections)
121 | assert 1 == len(in_plug_b.connections)
122 | in_plug_d >> out_plug_compound["0"]
123 | assert 2 == len(out_plug_compound["0"].connections)
124 | assert 1 == len(in_plug_d.connections)
125 | in_plug_compound["2"] >> out_plug_compound["1"]
126 | assert 2 == len(out_plug_compound["1"].connections)
127 |     assert 1 == len(in_plug_compound["2"].connections)
128 |
129 |     # Connect the in to the out multiple times
130 | in_plug_b >> out_plug_a
131 | assert 2 == len(out_plug_a.connections)
132 | assert 1 == len(in_plug_b.connections)
133 | in_plug_d >> out_plug_compound["0"]
134 | assert 2 == len(out_plug_compound["0"].connections)
135 | assert 1 == len(in_plug_d.connections)
136 | in_plug_compound["2"] >> out_plug_compound["1"]
137 | assert 2 == len(out_plug_compound["1"].connections)
138 |     assert 1 == len(in_plug_compound["2"].connections)
139 |
140 |
141 | def test_change_connections_sets_plug_dirty(clear_default_graph):
142 | """Connecting and disconnecting sets the plug dirty."""
143 | n1 = NodeForTesting(name="n1")
144 | n2 = NodeForTesting(name="n2")
145 | out_plug = OutputPlug("out", n1)
146 | in_plug = InputPlug("in", n2)
147 | out_compound_plug = OutputPlug("out_compound", n1)
148 | in_compound_plug = InputPlug("in_compound", n2)
149 |
150 | in_plug.is_dirty = False
151 | out_plug >> in_plug
152 | assert in_plug.is_dirty
153 |
154 | in_plug.is_dirty = False
155 | out_plug << in_plug
156 | assert in_plug.is_dirty
157 |
158 | in_compound_plug["0"].is_dirty = False
159 | out_compound_plug["0"] >> in_compound_plug["0"]
160 | assert in_compound_plug["0"].is_dirty
161 |
162 | in_compound_plug["0"].is_dirty = False
163 | out_compound_plug["0"] << in_compound_plug["0"]
164 | assert in_compound_plug["0"].is_dirty
165 |
166 |
167 | def test_set_value_sets_plug_dirty(clear_default_graph):
168 |     """Setting a value sets the plug dirty."""
169 | n = NodeForTesting()
170 | in_plug = InputPlug("in", n)
171 | in_compound_plug = InputPlug("in_compound", n)
172 |
173 | in_plug.is_dirty = False
174 | assert not in_plug.is_dirty
175 | in_plug.value = "NewValue"
176 | assert in_plug.is_dirty
177 |
178 | in_compound_plug.is_dirty = False
179 | assert not in_compound_plug.is_dirty
180 | in_compound_plug.value = "NewValue"
181 | assert in_compound_plug.is_dirty
182 |
183 |
184 | def test_set_output_pushes_value_to_connected_input(clear_default_graph):
185 | """OutPlugs push their values to their connected input plugs."""
186 | n1 = NodeForTesting(name="n1")
187 | n2 = NodeForTesting(name="n2")
188 | out_plug = OutputPlug("out", n1)
189 | in_plug = InputPlug("in", n2)
190 |
191 | out_compound_plug = OutputPlug("out_compound", n1)
192 | in_compound_plug = InputPlug("in_compound", n2)
193 |
194 | out_plug.value = "OldValue"
195 | assert in_plug.value != out_plug.value
196 |
197 | out_plug >> in_plug
198 | in_plug.is_dirty = False
199 | assert in_plug.value == out_plug.value
200 | assert not in_plug.is_dirty
201 |
202 | out_plug.value = "NewValue"
203 | assert in_plug.is_dirty
204 | assert in_plug.value == out_plug.value
205 |
206 | out_compound_plug.value = "OldValue"
207 | assert in_compound_plug.value != out_compound_plug.value
208 |
209 | out_compound_plug >> in_compound_plug
210 | in_compound_plug.is_dirty = False
211 | assert in_compound_plug.value == out_compound_plug.value
212 | assert not in_compound_plug.is_dirty
213 |
214 | out_compound_plug.value = "NewValue"
215 | assert in_compound_plug.is_dirty
216 | assert in_compound_plug.value == out_compound_plug.value
217 |
218 |
219 | def test_assign_initial_value_to_input_plug(clear_default_graph):
220 | """Assign an initial value to an InputPlug."""
221 | n = NodeForTesting()
222 | in_plug = InputPlug("in", n)
223 | assert in_plug.value is None
224 |
225 | in_plug = InputPlug("in", n, 123)
226 | assert 123 == in_plug.value
227 |
228 |
229 | def test_serialize(clear_default_graph):
230 | """Serialize the Plug to json."""
231 | n1 = NodeForTesting(name="n1")
232 | n2 = NodeForTesting(name="n2")
233 | out_plug = OutputPlug("out", n1)
234 | out_plug.value = "out_value"
235 | in1_plug = InputPlug("in1", n2)
236 | in2_plug = InputPlug("in2", n2)
237 | in_plug_with_value = InputPlug("in_value", n2, "value")
238 | compound_out_plug = OutputPlug("compound_out", n1)
239 | compound_in_plug = InputPlug("compound_in", n2)
240 | out_plug >> in1_plug
241 | out_plug >> compound_in_plug["incoming"]
242 | compound_out_plug["0"] >> in2_plug
243 |
244 | compound_in_plug["0"].value = 0
245 | compound_in_plug["key"].value = "value"
246 |
247 | in_serialized = in1_plug.serialize()
248 | assert in_serialized == {
249 | "name": "in1",
250 | "value": "out_value",
251 | "connections": {out_plug.node.identifier: "out"},
252 | "sub_plugs": {},
253 | }
254 |
255 | in_plug_with_value_serialized = in_plug_with_value.serialize()
256 | assert in_plug_with_value_serialized == {
257 | "name": "in_value",
258 | "value": "value",
259 | "connections": {},
260 | "sub_plugs": {},
261 | }
262 |
263 | compound_in_serialized = compound_in_plug.serialize()
264 | assert compound_in_serialized == {
265 | "name": "compound_in",
266 | "value": None,
267 | "connections": {},
268 | "sub_plugs": {
269 | "0": {"connections": {}, "name": "compound_in.0", "value": 0},
270 | "incoming": {
271 | "connections": {out_plug.node.identifier: "out"},
272 | "name": "compound_in.incoming",
273 | "value": "out_value",
274 | },
275 | "key": {
276 | "connections": {},
277 | "name": "compound_in.key",
278 | "value": "value",
279 | },
280 | },
281 | }
282 |
283 | out_serialized = out_plug.serialize()
284 | assert out_serialized == {
285 | "name": "out",
286 | "value": "out_value",
287 | "connections": {
288 | in1_plug.node.identifier: ["in1", "compound_in.incoming"]
289 | },
290 | "sub_plugs": {},
291 | }
292 |
293 | compound_out_serialized = compound_out_plug.serialize()
294 | assert compound_out_serialized == {
295 | "connections": {},
296 | "name": "compound_out",
297 | "value": None,
298 | "sub_plugs": {
299 | "0": {
300 | "connections": {in2_plug.node.identifier: ["in2"]},
301 | "name": "compound_out.0",
302 | "value": None,
303 | }
304 | },
305 | }
306 |
307 | in2_plug_serialized = in2_plug.serialize()
308 | assert in2_plug_serialized == {
309 | "connections": {compound_out_plug.node.identifier: "compound_out.0"},
310 | "name": "in2",
311 | "value": None,
312 | "sub_plugs": {},
313 | }
314 |
315 |
316 | def test_compound_plugs_can_only_be_strings_or_unicodes(clear_default_graph):
317 | @Node(outputs=["compound_out"])
318 | def A(compound_in):
319 | pass
320 |
321 | node = A(graph=None)
322 |
323 | with pytest.raises(TypeError):
324 | node.inputs["compound_in"][0].value = 0
325 |
326 | with pytest.raises(TypeError):
327 | node.outputs["compound_out"][0].value = 0
328 |
329 | node.inputs["compound_in"][u"unicode"].value = "unicode"
330 | node.outputs["compound_out"][u"unicode"].value = "unicode"
331 |
332 | assert node.inputs["compound_in"][u"unicode"].value == "unicode"
333 | assert node.outputs["compound_out"][u"unicode"].value == "unicode"
334 |
335 |
336 | def test_compound_input_plugs_are_accessible_by_index(clear_default_graph):
337 | @Node(outputs=["value"])
338 | def A(value):
339 | return {"value": value}
340 |
341 | @Node(outputs=["sum"])
342 | def B(compound_in):
343 | return {"sum": sum(compound_in.values())}
344 |
345 | a1 = A(name="a1", value=1)
346 | a2 = A(name="a2", value=2)
347 | a3 = A(name="a3", value=3)
348 | b = B()
349 |
350 | a1.outputs["value"].connect(b.inputs["compound_in"]["0"])
351 | a2.outputs["value"].connect(b.inputs["compound_in"]["1"])
352 | a3.outputs["value"].connect(b.inputs["compound_in"]["2"])
353 |
354 | a1.evaluate()
355 | a2.evaluate()
356 | a3.evaluate()
357 |
358 | b.evaluate()
359 |
360 | assert b.outputs["sum"].value == 6
361 |
362 |
363 | def test_compound_output_plugs_are_accessible_by_index(clear_default_graph):
364 | @Node(outputs=["compound_out"])
365 | def A(values):
366 | return {
367 | "compound_out.0": values[0],
368 | "compound_out.1": values[1],
369 | "compound_out.2": values[2],
370 | }
371 |
372 | @Node(outputs=["sum"])
373 | def B(compound_in):
374 | return {"sum": sum(compound_in.values())}
375 |
376 | a = A(values=[1, 2, 3])
377 | b = B()
378 |
379 | a.outputs["compound_out"]["0"].connect(b.inputs["compound_in"]["0"])
380 | a.outputs["compound_out"]["1"].connect(b.inputs["compound_in"]["1"])
381 | a.outputs["compound_out"]["2"].connect(b.inputs["compound_in"]["2"])
382 |
383 | a.evaluate()
384 | b.evaluate()
385 |
386 | assert b.outputs["sum"].value == 6
387 |
388 |
389 | def test_compound_plugs_can_be_connected_individually(clear_default_graph):
390 | @Node(outputs=["value", "compound_out"])
391 | def A(compound_in, in1):
392 | pass
393 |
394 | a1 = A(name="a1")
395 | a2 = A(name="a2")
396 |
397 | a2.inputs["compound_in"]["0"].connect(a1.outputs["value"])
398 | a1.outputs["compound_out"]["0"].connect(a2.inputs["in1"])
399 |
400 |
401 | def test_compound_plugs_are_not_dirty_if_parent_plug_is_dirty(
402 | clear_default_graph,
403 | ):
404 | @Node(outputs=["compound_out"])
405 | def A(compound_in):
406 | pass
407 |
408 | node = A(graph=None)
409 | node.inputs["compound_in"]["0"].value = 0
410 | node.inputs["compound_in"]["1"].value = 1
411 |
412 | node.inputs["compound_in"].is_dirty = False
413 | node.inputs["compound_in"]["0"].is_dirty = False
414 | node.inputs["compound_in"]["1"].is_dirty = False
415 |
416 | node.inputs["compound_in"].is_dirty = True
417 |
418 | assert not node.inputs["compound_in"]["0"].is_dirty
419 | assert not node.inputs["compound_in"]["1"].is_dirty
420 |
421 | node.outputs["compound_out"]["0"].value = 0
422 |     node.outputs["compound_out"]["1"].value = 1
423 |
424 | node.outputs["compound_out"].is_dirty = False
425 | node.outputs["compound_out"]["0"].is_dirty = False
426 | node.outputs["compound_out"]["1"].is_dirty = False
427 |
428 | node.outputs["compound_out"].is_dirty = True
429 |
430 | assert not node.outputs["compound_out"]["0"].is_dirty
431 | assert not node.outputs["compound_out"]["1"].is_dirty
432 |
433 |
434 | def test_compound_plugs_propagate_dirty_state_to_their_parent(
435 | clear_default_graph,
436 | ):
437 | @Node(outputs=["compound_out"])
438 | def A(compound_in):
439 | pass
440 |
441 | node = A()
442 | node.inputs["compound_in"]["0"].value = 0
443 | node.inputs["compound_in"]["1"].value = 1
444 |
445 | node.inputs["compound_in"].is_dirty = False
446 | node.inputs["compound_in"]["0"].is_dirty = False
447 | node.inputs["compound_in"]["1"].is_dirty = False
448 |
449 | node.inputs["compound_in"]["0"].is_dirty = True
450 |
451 | assert node.inputs["compound_in"].is_dirty
452 |
453 | node.outputs["compound_out"]["0"].value = 0
454 | node.outputs["compound_out"]["1"].value = 1
455 |
456 | node.outputs["compound_out"]["0"].is_dirty = False
457 | node.outputs["compound_out"]["1"].is_dirty = False
458 | assert not node.outputs["compound_out"].is_dirty
459 |
460 | node.outputs["compound_out"]["0"].is_dirty = True
461 | assert node.outputs["compound_out"].is_dirty
462 |
463 |
464 | def test_compound_plug_ignores_direct_value_assignment(clear_default_graph):
465 | @Node(outputs=["compound_out"])
466 | def A(compound_in):
467 | pass
468 |
469 | node = A()
470 | node.inputs["compound_in"]["0"].value = 0
471 | node.inputs["compound_in"]["1"].value = 1
472 |
473 | node.outputs["compound_out"]["0"].value = 0
474 | node.outputs["compound_out"]["1"].value = 1
475 |
476 | node.inputs["compound_in"].value = 2
477 | assert node.inputs["compound_in"].value == {"0": 0, "1": 1}
478 |
479 | node.outputs["compound_out"].value = 2
480 | assert node.outputs["compound_out"].value == {"0": 0, "1": 1}
481 |
482 |
483 | def test_plugs_can_not_contain_dots(clear_default_graph):
484 | @Node()
485 | def A():
486 | pass
487 |
488 | with pytest.raises(ValueError):
489 | OutputPlug(name="name.with.dots", node=A(graph=None))
490 |
491 | with pytest.raises(ValueError):
492 | InputPlug(name="name.with.dots", node=A(graph=None))
493 |
494 |
495 | def test_compound_output_plugs_inform_parent_on_value_set(clear_default_graph):
496 | """
497 | +--------------------+ +----------------------+
498 | | Generate | | MyPrint |
499 | |--------------------| |----------------------|
500 | | out %--------->o value<{"1": 1, "> |
501 | | out.0 o +----------------------+
502 | | out.1 o
503 | | out.2 o
504 | +--------------------+
505 | """
506 |
507 | @Node(outputs=["out"])
508 | def Generate():
509 | return {"out.{0}".format(i): i for i in range(3)}
510 |
511 | @Node(outputs=["out"])
512 |     def MyPrint(value):
513 | return {"out": value}
514 |
515 | graph = Graph()
516 | generate = Generate(graph=graph)
517 |     test = MyPrint(graph=graph)
518 | generate.outputs["out"] >> test.inputs["value"]
519 | graph.evaluate()
520 |
521 | assert test.outputs["out"].value == {"0": 0, "1": 1, "2": 2}
522 |
523 |
524 | def test_plug_gets_dirty_only_on_change(clear_default_graph):
525 |     """Test that plugs only change dirtiness if a real change happens."""
526 | in_test, out_test = "foo", "bar"
527 | n1 = NodeForTesting(name="n1")
528 | n2 = NodeForTesting(name="n2")
529 | out_plug = OutputPlug("out", n1)
530 | in_plug = InputPlug("in", n2)
531 |
532 | out_plug >> in_plug
533 |
534 | in_plug.value = in_test
535 | out_plug.value = out_test
536 | assert in_plug.is_dirty
537 | assert out_plug.is_dirty
538 |
539 | in_plug.is_dirty = False
540 | out_plug.is_dirty = False
541 | assert not in_plug.is_dirty
542 | assert not out_plug.is_dirty
543 |
544 | same_val = in_plug.value
545 | in_plug.value = same_val
546 | assert not in_plug.is_dirty
547 | assert not out_plug.is_dirty
548 |
549 | out_plug.value = out_test
550 | assert not in_plug.is_dirty
551 | assert not out_plug.is_dirty
552 |
553 | out_plug.value = "baz"
554 | assert in_plug.is_dirty
555 | assert out_plug.is_dirty
556 |
557 |
558 | def test_forbidden_connect(clear_default_graph):
559 | """Test connections between plugs that are forbidden."""
560 | n1 = NodeForTesting(name="n1")
561 | in_plug1 = InputPlug("in", n1)
562 | out_plug1 = OutputPlug("out", n1)
563 |
564 | n2 = NodeForTesting(name="n2")
565 | InputPlug("in", n2)
566 | out_plug2 = OutputPlug("out", n2)
567 |
568 | with pytest.raises(TypeError):
569 | out_plug1.connect(out_plug2)
570 |
571 | with pytest.raises(TypeError):
572 | in_plug1.connect(in_plug1)
573 |
574 | with pytest.raises(TypeError):
575 | out_plug1.connect("a string")
576 |
577 |
578 | def test_rshift_into_node(clear_default_graph):
579 | """Test the syntactic sugar for rshift operator between plug and node."""
580 | n1 = NodeForTesting(name="n1")
581 | n2 = NodeForTesting(name="n2")
582 | out_plug = OutputPlug("foo", n1)
583 | in_plug = InputPlug("foo", n2)
584 |
585 | out_plug >> n2
586 |
587 | assert in_plug in out_plug.connections
588 |
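589 |
590 | # A minimal sketch tying the behaviours above together: a value pushed into
591 | # a connected OutputPlug arrives at the downstream InputPlug immediately,
592 | # without evaluating any node. Node and plug names are illustrative only.
593 | def test_value_propagation_sketch(clear_default_graph):
594 |     n1 = NodeForTesting(name="upstream")
595 |     n2 = NodeForTesting(name="downstream")
596 |     out_plug = OutputPlug("out", n1)
597 |     in_plug = InputPlug("in", n2)
598 |
599 |     out_plug >> in_plug
600 |     out_plug.value = 42
601 |
602 |     assert in_plug.value == 42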
--------------------------------------------------------------------------------
/tests/test_subgraphs.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from flowpipe import Graph, Node
4 |
5 |
6 | @Node(outputs=["out"])
7 | def DemoNode(in_):
8 | return {"out": in_}
9 |
10 |
11 | def _nested_graph():
12 | """Create this nested subgraph:
13 | +---------------+ +---------------+ +---------------+ +---------------+
14 | | DemoNode | | DemoNode | | DemoNode | | DemoNode |
15 | |---------------| |---------------| |---------------| |---------------|
16 | o in_<> | +--->o in_<> | +--->o in_<> | +--->o in_<> |
17 | | out o-----+ | out o-----+ | out o-----+ | out o
18 | +---------------+ +---------------+ +---------------+ +---------------+
19 | +-------------+
20 | | sub0-2 |
21 | |-------------|
22 | o in_<> |
23 | | out o
24 | +-------------+
25 | +-------------+
26 | | sub1-2 |
27 | |-------------|
28 | o in_<> |
29 | | out o
30 | +-------------+
31 | +-------------+
32 | | sub2-2 |
33 | |-------------|
34 | o in_<> |
35 | | out o
36 | +-------------+
37 | """
38 | main = Graph("main")
39 | DemoNode(graph=main)
40 |
41 | parent = main
42 | for i in range(3):
43 | sub = Graph("sub" + str(i))
44 | DemoNode(graph=sub)
45 | DemoNode(graph=sub, name="sub" + str(i) + "-2")
46 | parent["DemoNode"].outputs["out"] >> sub["DemoNode"].inputs["in_"]
47 | parent = sub
48 | return main
49 |
50 |
51 | def test_nodes_only_contains_levels_of_graph():
52 | graph = _nested_graph()
53 | assert len(graph.nodes) == 1
54 |
55 |
56 | def test_subgraph_names_need_to_be_unique():
57 | """
58 | +--------------------+ +--------------------+
59 | | node1 | | node1 |
60 | |--------------------| |--------------------|
61 | o in_<> | +--->o in_<{"a": null> |
62 | | out %-----+ | out o
63 | | out.a o | +--------------------+
64 | +--------------------+ | +--------------------+
65 | +------------+ | | node2 |
66 | | node2 | | |--------------------|
67 | |------------| +--->o in_<{"a": null> |
68 | o in_<> | | out o
69 | | out o +--------------------+
70 | +------------+
71 | """
72 | main = Graph("main")
73 | DemoNode(name="node1", graph=main)
74 | DemoNode(name="node2", graph=main)
75 |
76 | sub1 = Graph("sub")
77 | DemoNode(name="node1", graph=sub1)
78 | DemoNode(name="node2", graph=sub1)
79 |
80 | sub2 = Graph("sub")
81 | DemoNode(name="node1", graph=sub2)
82 | DemoNode(name="node2", graph=sub2)
83 |
84 | main["node1"].outputs["out"] >> sub1["node1"].inputs["in_"]
85 | with pytest.raises(ValueError):
86 | main["node1"].outputs["out"] >> sub2["node1"].inputs["in_"]
87 |
88 | with pytest.raises(ValueError):
89 | main["node1"].outputs["out"]["a"] >> sub2["node1"].inputs["in_"]
90 |
91 | with pytest.raises(ValueError):
92 | main["node1"].outputs["out"]["a"] >> sub2["node1"].inputs["in_"]["a"]
93 |
94 | with pytest.raises(ValueError):
95 | main["node1"].outputs["out"] >> sub2["node1"].inputs["in_"]["a"]
96 |
97 |     # Connecting again into the already attached subgraph is allowed
98 |     # and does not raise an error.
99 | main["node1"].outputs["out"] >> sub1["node2"].inputs["in_"]
100 |
101 |
102 | def test_subgraphs_can_be_accessed_by_name():
103 | graph = _nested_graph()
104 |
105 | assert len(graph.subgraphs) == 3
106 | assert graph.subgraphs["sub0"].name == "sub0"
107 | assert graph.subgraphs["sub1"].name == "sub1"
108 | assert graph.subgraphs["sub2"].name == "sub2"
109 |
110 |
111 | def test_plugs_can_be_promoted_to_graph_level_under_new_name():
112 | main = Graph("main")
113 | DemoNode(name="node1", graph=main)
114 |
115 | main["node1"].inputs["in_"].promote_to_graph()
116 | main["node1"].outputs["out"].promote_to_graph(name="graph_out")
117 |
118 | assert main.inputs["in_"] is main["node1"].inputs["in_"]
119 | assert main.outputs["graph_out"] is main["node1"].outputs["out"]
120 |
121 |
122 | def test_plugs_can_only_be_promoted_once_to_graph_level():
123 | main = Graph("main")
124 | DemoNode(name="node1", graph=main)
125 |
126 | main["node1"].inputs["in_"].promote_to_graph()
127 | main["node1"].outputs["out"].promote_to_graph()
128 |
129 | with pytest.raises(ValueError):
130 | main["node1"].inputs["in_"].promote_to_graph(name="different_name")
131 | with pytest.raises(ValueError):
132 | main["node1"].outputs["out"].promote_to_graph(name="different_name")
133 |
134 |
135 | def test_subplugs_can_not_be_promoted_individually():
136 | main = Graph("main")
137 | DemoNode(name="node1", graph=main)
138 |
139 | with pytest.raises(TypeError):
140 | main["node1"].inputs["in_"]["sub"].promote_to_graph()
141 | with pytest.raises(TypeError):
142 | main["node1"].outputs["out"]["sub"].promote_to_graph()
143 |
144 |     # Promoting the parent plug also gives access to its subplugs
145 | main["node1"].inputs["in_"].promote_to_graph()
146 | assert main.inputs["in_"]["sub"] == main["node1"].inputs["in_"]["sub"]
147 |
148 |
149 | def test_serialize_nested_graph_to_json():
150 | graph = _nested_graph()
151 |
152 | serialized = graph.to_json()
153 | deserialized = Graph.from_json(serialized).to_json()
154 |
155 | assert serialized == deserialized
156 |
157 |
158 | def test_access_node_of_subgraph_by_key():
159 | main = Graph("main")
160 | main_node = DemoNode(name="node", graph=main)
161 |
162 | sub = Graph("sub")
163 | sub_node = DemoNode(name="node", graph=sub)
164 |
165 | main["node"].outputs["out"] >> sub["node"].inputs["in_"]
166 |
167 | assert main["node"] == main_node
168 | assert main["sub.node"] == sub_node
169 |
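170 |
171 | # A minimal sketch extending test_access_node_of_subgraph_by_key to the
172 | # nested helper above, assuming the "<subgraph>.<node>" key format works
173 | # for any subgraph listed in graph.subgraphs.
174 | def test_access_nested_node_by_dotted_key_sketch():
175 |     graph = _nested_graph()
176 |     assert graph["sub0.sub0-2"].name == "sub0-2"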
--------------------------------------------------------------------------------
/tests/test_utilities.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import json
4 | import re
5 | import sys
6 | from hashlib import sha256
7 |
8 | import mock
9 | import numpy as np
10 |
11 | import flowpipe.utilities as util
12 | from flowpipe import INode
13 |
14 |
15 | class WeirdObject(object):
16 |     """An object that is not JSON serializable and has no bytes() interface."""
17 |
18 | foo = "bar"
19 |
20 |
21 | def test_node_encoder():
22 | """Test the custom JSONEncoder."""
23 | valid_object = {"key": "value", "other_key": [1, 2, 3]}
24 | json_string = json.dumps(valid_object)
25 | recovered_json = json.loads(json_string)
26 | for k, v in valid_object.items():
27 | assert v == recovered_json[k]
28 |
29 | bytes_object = {"key": "value", "other_key": bytes(42)}
30 | json_string = json.dumps(bytes_object, cls=util.NodeEncoder)
31 | recovered_json = json.loads(json_string)
32 | for k, v in bytes_object.items():
33 | assert (
34 | v == recovered_json[k]
35 | or sha256(v).hexdigest() == recovered_json[k]
36 | )
37 |
38 | weird_object = {"key": "value", "other_key": WeirdObject()}
39 | json_string = json.dumps(weird_object, cls=util.NodeEncoder)
40 | recovered_json = json.loads(json_string)
41 | for k, v in weird_object.items():
42 | assert (
43 | v == recovered_json[k]
44 | or re.search("WeirdObject object at", str(recovered_json[k]))
45 | or sha256(v).hexdigest() == recovered_json[k]
46 | )
47 |
48 | weird_np_array = {"key": "value", "other_key": np.arange(10)[::2]}
49 | json_string = json.dumps(weird_np_array, cls=util.NodeEncoder)
50 | recovered_json = json.loads(json_string)
51 | for k, v in weird_np_array.items():
52 | assert (
53 | # v could be any type, so for simplicity we cast to str
54 | str(v) == str(recovered_json[k])
55 | or sha256(bytes(v)).hexdigest() == recovered_json[k]
56 | )
57 |
58 |
59 | def test_get_hash():
60 | """Test the hashing function."""
61 | number = 42
62 | assert (
63 | util.get_hash(number)
64 | == "73475cb40a568e8da8a045ced110137e159f890ac4da883b6b17dc651b3a8049"
65 | )
66 |
67 | js = {"foo": "bar", "baz": {"zoom": "zulu"}}
68 | assert (
69 | util.get_hash(js)
70 | == "8336ea0f6e482df0c7a738c83a2b8d3357cf0234c29cfd232fa6627bdc54289e"
71 | )
72 |
73 | invalid_js = "kazoo{" # A generic string that's not json
74 | if sys.version_info.major > 2:
75 | assert (
76 | util.get_hash(invalid_js)
77 | == "c21e3435e752b72514e34139f116afee1f72cf496c1cc94c9087088c139dfb7d"
78 | )
79 | else:
80 | assert (
81 | util.get_hash(invalid_js)
82 | == "5324bcf2641f119108d1f99b92687b0af513e572c68dfed217344ffeff1f35a9"
83 | )
84 |
85 | x = WeirdObject()
86 | assert util.get_hash(x) is None
87 |
88 |
89 | def test_sanitize_string():
90 | """
91 | Given a string with curly braces inside
92 | Return a string with the curly braces escaped
93 | """
94 | test_string = "This is a {test} string"
95 | expected_string = "This is a {{test}} string"
96 | sanitized_string = util.sanitize_string_input(test_string)
97 | assert sanitized_string == expected_string
98 |
99 |
100 | class NodeInTest(INode):
101 |     """A minimal INode subclass defined in this module for the import test below."""
102 |
103 |
104 | @mock.patch("importlib.import_module", side_effect=ModuleNotFoundError)
105 | def test_import_class_from_current_file(_):
106 | cls = util.import_class(
107 | "test_utilities", "NodeInTest", file_location=__file__
108 | )
109 | assert cls.__name__ == NodeInTest.__name__
110 |
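111 |
112 | # A minimal sketch of the NodeEncoder fallback behaviour exercised above:
113 | # serialization should never raise, because values json cannot handle are
114 | # replaced by surrogates (a sha256 digest for bytes, a repr otherwise).
115 | def test_node_encoder_does_not_raise_sketch():
116 |     payload = {"plain": 1, "binary": b"\x00\x01", "weird": WeirdObject()}
117 |     json.dumps(payload, cls=util.NodeEncoder)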
--------------------------------------------------------------------------------