├── .github └── workflows │ └── docker-publish.yaml ├── .gitignore ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── api ├── CLI │ ├── __init__.py │ ├── cli.py │ ├── matterflowcli.py │ ├── setup.py │ └── test.py ├── Dockerfile ├── matterflow │ ├── matterflow │ │ ├── .coveragerc │ │ ├── __init__.py │ │ ├── connection.py │ │ ├── node.py │ │ ├── node_factory.py │ │ ├── nodes │ │ │ ├── __init__.py │ │ │ ├── cloud │ │ │ │ ├── __init__.py │ │ │ │ ├── batch_put_to_sitewise.py │ │ │ │ ├── write_json_to_azure.py │ │ │ │ ├── write_json_to_gcp.py │ │ │ │ └── write_json_to_s3.py │ │ │ ├── connection │ │ │ │ ├── __init__.py │ │ │ │ ├── mqtt_connection_in.py │ │ │ │ ├── mqtt_connection_out.py │ │ │ │ └── ws_connection.py │ │ │ ├── flow_control │ │ │ │ ├── __init__.py │ │ │ │ ├── dynamic_input.py │ │ │ │ ├── integer_input.py │ │ │ │ └── string_input.py │ │ │ ├── io │ │ │ │ ├── __init__.py │ │ │ │ ├── read_csv.py │ │ │ │ ├── read_json.py │ │ │ │ ├── table_creator.py │ │ │ │ ├── write_csv.py │ │ │ │ ├── write_json.py │ │ │ │ └── write_json_to_csv.py │ │ │ ├── manipulation │ │ │ │ ├── __init__.py │ │ │ │ ├── combine.py │ │ │ │ ├── filter.py │ │ │ │ ├── join.py.tbd │ │ │ │ ├── json_to_csv.py │ │ │ │ ├── mapping.py │ │ │ │ ├── mergejson.py │ │ │ │ ├── pivot.py.tbd │ │ │ │ ├── sizebuffer.py │ │ │ │ ├── timebuffer.py │ │ │ │ ├── translateattributes.py │ │ │ │ └── unflattenattributes.py │ │ │ └── visualization │ │ │ │ ├── __init__.py │ │ │ │ └── graph.py │ │ ├── parameters.py │ │ ├── tests │ │ │ ├── sample_matter.json │ │ │ ├── sample_test_data.py │ │ │ ├── test_node.py │ │ │ ├── test_parameters.py │ │ │ ├── test_pyworkflow.py │ │ │ └── test_workflow.py │ │ └── workflow.py │ └── setup.py ├── mf │ ├── connection │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── middleware.py │ │ ├── migrations │ │ │ ├── 0001_initial.py │ │ │ ├── 0002_connectionmodel_delete_mymodel.py │ │ │ ├── 0003_alter_connectionmodel_id.py │ │ │ ├── 0004_alter_connectionmodel_id.py │ │ │ ├── 0005_alter_connectionmodel_id.py │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py │ ├── flow │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── middleware.py │ │ ├── migrations │ │ │ ├── 0001_initial.py │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py │ ├── instance │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── middleware.py │ │ ├── migrations │ │ │ ├── 0001_initial.py │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py │ ├── manage.py │ ├── mf │ │ ├── __init__.py │ │ ├── asgi.py │ │ ├── settings.py │ │ ├── urls.py │ │ ├── views.py │ │ └── wsgi.py │ ├── model │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── middleware.py │ │ ├── migrations │ │ │ ├── 0001_initial.py │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py │ ├── models.py │ ├── node │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── migrations │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py │ ├── process │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── middleware.py │ │ ├── migrations │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py │ └── workflow │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── middleware.py │ │ ├── migrations │ │ └── __init__.py │ │ ├── models.py │ │ ├── tests.py │ │ ├── urls.py │ │ └── views.py ├── requirements.txt ├── supervisor_confs │ └── 
foo.conf ├── supervisord.conf └── test_mqtt.py ├── data ├── attribute_updated.json ├── fabric_start.json ├── humidity_sensor_update.json ├── initial_nodes.json ├── initial_nodes_summary.json ├── mqtt_in.json ├── node_event_added.json ├── node_event_added_summary.json ├── node_event_leave.json ├── node_event_sw_fault.json ├── node_removed.json ├── node_updated.json ├── size_buffer_output.json └── temperator_sensor_update.json ├── docs ├── README.md └── logo.png ├── run.sh └── web ├── .eslintrc.cjs ├── .storybook └── main.js ├── README.md ├── index.html ├── package-lock.json ├── package.json ├── postcss.config.js ├── public ├── favicon.ico ├── json-icon.png ├── logo.svg ├── matterflowdemo.png ├── matterflowexample.png ├── rule.gif ├── state1.gif ├── state2.gif ├── state3.gif └── vite.svg ├── src ├── API.jsx ├── App.jsx ├── Components │ ├── About.js │ ├── App.js │ ├── BannerBox.jsx │ ├── CustomNode │ │ ├── BooleanInput.jsx │ │ ├── CustomNodeFactory.jsx │ │ ├── CustomNodeModel.jsx │ │ ├── CustomNodeWidget.jsx │ │ ├── FileUploadInput.jsx │ │ ├── FlowVariableOverride.jsx │ │ ├── GraphView.jsx │ │ ├── InstanceSelectInput.jsx │ │ ├── NodeConfig.jsx │ │ ├── OptionInput.jsx │ │ ├── SelectInput.jsx │ │ └── SimpleInput.jsx │ ├── CustomNodeUpload.jsx │ ├── EnvUploadModal.jsx │ ├── FlowList.jsx │ ├── FlowMenu.jsx │ ├── GlobalFlowMenu.jsx │ ├── InstanceEditor.jsx │ ├── InstanceList.jsx │ ├── InstanceModal.jsx │ ├── JMESPathTester.jsx │ ├── JsonDataInput.jsx │ ├── JsonTree.jsx │ ├── MFLink │ │ ├── MFLinkFactory.jsx │ │ ├── MFLinkModel.jsx │ │ └── MFLinkWidget.jsx │ ├── MFPort │ │ ├── MFPortFactory.jsx │ │ └── MFPortModel.jsx │ ├── MobileWarning.jsx │ ├── ModelEditor.jsx │ ├── ModelList.jsx │ ├── ModelMenu.jsx │ ├── ModelModal.jsx │ ├── ModelSelect.jsx │ ├── ModelsInstancesList.jsx │ ├── NodeDataExplorer.jsx │ ├── NodeMenu.jsx │ ├── ProcessTable.jsx │ ├── ResizableDrawer.jsx │ ├── StatusLight.jsx │ ├── WatermarkText.jsx │ └── Workspace.jsx ├── assets │ └── react.svg ├── index.css ├── lib │ └── formatters.tsx ├── main.jsx ├── pages │ └── HomePage.jsx ├── router.jsx ├── stories │ └── HomePage.stories.js └── styles │ ├── CustomNode.css │ ├── GraphView.css │ ├── NodeConfig.css │ ├── ResizableDrawer.css │ ├── StatusLight.css │ ├── Supervisor.css │ ├── Workspace.css │ └── index.css ├── tailwind.config.js └── vite.config.js /.github/workflows/docker-publish.yaml: -------------------------------------------------------------------------------- 1 | name: Build and Push Multi-Platform Docker Image 2 | 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | 11 | strategy: 12 | matrix: 13 | platform: 14 | - linux/amd64 15 | # - linux/arm/v7 16 | - linux/arm64 17 | # - linux/arm/v6 18 | include: 19 | - platform: linux/amd64 20 | build_from: ghcr.io/home-assistant/amd64-base-python 21 | arch_name: amd64 22 | # - platform: linux/arm/v7 23 | # build_from: ghcr.io/home-assistant/armv7-base-python 24 | # arch_name: armv7 25 | - platform: linux/arm64 26 | build_from: ghcr.io/home-assistant/aarch64-base-python 27 | arch_name: aarch64 28 | # - platform: linux/arm/v6 29 | # build_from: ghcr.io/home-assistant/armhf-base-python 30 | # arch_name: armhf 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v3 34 | 35 | - name: Set up QEMU 36 | uses: docker/setup-qemu-action@v2 37 | with: 38 | platforms: all 39 | 40 | - name: Set up Docker Buildx 41 | uses: docker/setup-buildx-action@v2 42 | 43 | - name: Log in to Docker Hub 44 | uses: 
docker/login-action@v2 45 | with: 46 | username: ${{ secrets.DOCKERHUB_USERNAME }} 47 | password: ${{ secrets.DOCKERHUB_TOKEN }} 48 | 49 | - name: Build and push multi-platform image 50 | uses: docker/build-push-action@v4 51 | with: 52 | push: true 53 | platforms: ${{ matrix.platform }} 54 | build-args: | 55 | BUILD_FROM=${{ matrix.build_from }} 56 | TARGETPLATFORM=${{ matrix.platform }} 57 | tags: | 58 | oideibrett/image-${{ matrix.arch_name }}-matterflow:latest 59 | oideibrett/image-${{ matrix.arch_name }}-matterflow:${{ github.sha }} 60 | oideibrett/image-${{ matrix.arch_name }}-matterflow:${{ github.event.release.tag_name }} 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .environment 2 | .vscode 3 | .idea/ 4 | .DS_Store 5 | 6 | # Logs 7 | logs 8 | *.log 9 | npm-debug.log* 10 | yarn-debug.log* 11 | yarn-error.log* 12 | pnpm-debug.log* 13 | lerna-debug.log* 14 | 15 | node_modules 16 | dist 17 | dist-ssr 18 | *.local 19 | 20 | # Editor directories and files 21 | .vscode/* 22 | !.vscode/extensions.json 23 | .idea 24 | .DS_Store 25 | *.suo 26 | *.ntvs* 27 | *.njsproj 28 | *.sln 29 | *.sw? 30 | 31 | # Byte-compiled / optimized / DLL files 32 | __pycache__/ 33 | *.py[cod] 34 | *$py.class 35 | 36 | # migrations files 37 | #migrations/ 38 | 39 | # C extensions 40 | *.so 41 | 42 | # Distribution / packaging 43 | .Python 44 | build/ 45 | develop-eggs/ 46 | dist/ 47 | downloads/ 48 | eggs/ 49 | .eggs/ 50 | #lib/ 51 | lib64/ 52 | parts/ 53 | sdist/ 54 | var/ 55 | wheels/ 56 | share/python-wheels/ 57 | *.egg-info/ 58 | .installed.cfg 59 | *.egg 60 | MANIFEST 61 | 62 | # PyInstaller 63 | # Usually these files are written by a python script from a template 64 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 65 | *.manifest 66 | *.spec 67 | 68 | # Installer logs 69 | pip-log.txt 70 | pip-delete-this-directory.txt 71 | 72 | # Unit test / coverage reports 73 | htmlcov/ 74 | .tox/ 75 | .nox/ 76 | .coverage 77 | .coverage.* 78 | .cache 79 | nosetests.xml 80 | coverage.xml 81 | *.cover 82 | *.py,cover 83 | .hypothesis/ 84 | .pytest_cache/ 85 | cover/ 86 | 87 | # Translations 88 | *.mo 89 | *.pot 90 | 91 | # Django stuff: 92 | *.log 93 | local_settings.py 94 | db.sqlite3 95 | db.sqlite3-journal 96 | 97 | # Flask stuff: 98 | .webassets-cache 99 | 100 | # Scrapy stuff: 101 | .scrapy 102 | 103 | # Sphinx documentation 104 | docs/_build/ 105 | 106 | # PyBuilder 107 | .pybuilder/ 108 | target/ 109 | 110 | # Jupyter Notebook 111 | .ipynb_checkpoints 112 | 113 | # IPython 114 | profile_default/ 115 | ipython_config.py 116 | 117 | # pyenv 118 | # For a library or package, you might want to ignore these files since the code is 119 | # intended to run in multiple environments; otherwise, check them in: 120 | # .python-version 121 | 122 | # pipenv 123 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 124 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 125 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 126 | # install all needed dependencies. 127 | #Pipfile.lock 128 | 129 | # poetry 130 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
131 | # This is especially recommended for binary packages to ensure reproducibility, and is more 132 | # commonly ignored for libraries. 133 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 134 | #poetry.lock 135 | 136 | # pdm 137 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 138 | #pdm.lock 139 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 140 | # in version control. 141 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 142 | .pdm.toml 143 | .pdm-python 144 | .pdm-build/ 145 | 146 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 147 | __pypackages__/ 148 | 149 | # Celery stuff 150 | celerybeat-schedule 151 | celerybeat.pid 152 | 153 | # SageMath parsed files 154 | *.sage.py 155 | 156 | # Environments 157 | .env 158 | .venv 159 | env/ 160 | venv/ 161 | ENV/ 162 | env.bak/ 163 | venv.bak/ 164 | 165 | # Spyder project settings 166 | .spyderproject 167 | .spyproject 168 | 169 | # Rope project settings 170 | .ropeproject 171 | 172 | # mkdocs documentation 173 | /site 174 | 175 | # mypy 176 | .mypy_cache/ 177 | .dmypy.json 178 | dmypy.json 179 | 180 | # Pyre type checker 181 | .pyre/ 182 | 183 | # pytype static type analyzer 184 | .pytype/ 185 | 186 | # Cython debug symbols 187 | cython_debug/ 188 | 189 | # PyCharm 190 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 191 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 192 | # and can be added to the global gitignore or merged into this file. For a more nuclear 193 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 194 | #.idea/ 195 | 196 | #Dont commit any files marked as ready for deletion TBD 197 | *_TBD 198 | 199 | #Dont commit any supervisor flow process files 200 | api/supervisor_confs/ 201 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Stage 1: Build Stage 2 | ARG BUILD_FROM="ghcr.io/home-assistant/amd64-base-python" 3 | FROM --platform=$BUILDPLATFORM ${BUILD_FROM} AS build 4 | ARG TARGETPLATFORM 5 | ARG BUILDPLATFORM 6 | ARG BUILD_ARCH 7 | ARG BUILD_DATE 8 | ARG BUILD_DESCRIPTION 9 | ARG BUILD_NAME 10 | ARG BUILD_REF 11 | ARG BUILD_REPOSITORY 12 | ARG BUILD_VERSION 13 | 14 | ENV LANG=C.UTF-8 15 | 16 | RUN echo "Docker buildx running on $BUILDPLATFORM, building for $TARGETPLATFORM" 17 | 18 | # Install build tools 19 | RUN apk add --no-cache git jq cargo npm python3-dev build-base 20 | 21 | WORKDIR /matterflow/ 22 | 23 | # Clone the matterflow repository 24 | RUN git clone https://github.com/MatterCoder/matterflow.git . 
&& \ 25 | mkdir dist && \ 26 | jq -n --arg commit $(git rev-parse --short HEAD) '$commit' > dist/.hash 27 | 28 | WORKDIR /matterflow/api 29 | 30 | # Create Python venv and install Python dependencies 31 | RUN python3 -m venv venv && \ 32 | if [ "$TARGETPLATFORM" = "linux/arm/v7" ] || [ "$TARGETPLATFORM" = "linux/arm64" ] || [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then \ 33 | venv/bin/pip install --index-url=https://www.piwheels.org/simple --no-cache-dir -r requirements.txt; \ 34 | else \ 35 | venv/bin/pip install --no-cache-dir -r requirements.txt; \ 36 | fi && \ 37 | venv/bin/pip install supervisor 38 | 39 | # Clone and install python-matter-server 40 | RUN git clone https://github.com/home-assistant-libs/python-matter-server.git /python-matter-server && \ 41 | mkdir /python-matter-server/dist && \ 42 | jq -n --arg commit $(cd /python-matter-server; git rev-parse --short HEAD) '$commit' > /python-matter-server/dist/.hash && \ 43 | /matterflow/api/venv/bin/pip install /python-matter-server 44 | 45 | # Install web front-end dependencies and build assets 46 | WORKDIR /matterflow/web 47 | RUN npm ci && npm run build 48 | 49 | # Stage 2: Runtime Stage 50 | FROM --platform=$BUILDPLATFORM ${BUILD_FROM} AS runtime 51 | 52 | WORKDIR /matterflow 53 | 54 | # Copy necessary files from build stage 55 | COPY --from=build /matterflow /matterflow 56 | COPY --from=build /python-matter-server /python-matter-server 57 | 58 | # Install runtime dependencies only 59 | RUN apk add --no-cache dumb-init nodejs npm 60 | 61 | # Set environment variables and permissions 62 | WORKDIR /matterflow/api 63 | RUN echo "SECRET_KEY=tmp" > mf/.environment && \ 64 | echo "DIR_PATH='/data'" >> mf/.environment 65 | 66 | # Copy run script and make it executable 67 | COPY run.sh / 68 | RUN chmod +x /run.sh 69 | 70 | CMD ["/run.sh"] 71 | 72 | # Labels 73 | LABEL \ 74 | io.hass.name="${BUILD_NAME}" \ 75 | io.hass.description="${BUILD_DESCRIPTION}" \ 76 | io.hass.arch="${BUILD_ARCH}" \ 77 | io.hass.type="addon" \ 78 | io.hass.version=${BUILD_VERSION} \ 79 | org.opencontainers.image.title="${BUILD_NAME}" \ 80 | org.opencontainers.image.description="${BUILD_DESCRIPTION}" \ 81 | org.opencontainers.image.vendor="Home Assistant Community Add-ons" \ 82 | org.opencontainers.image.licenses="MIT" \ 83 | org.opencontainers.image.source="https://github.com/${BUILD_REPOSITORY}" \ 84 | org.opencontainers.image.documentation="https://github.com/${BUILD_REPOSITORY}/blob/main/README.md" \ 85 | org.opencontainers.image.created=${BUILD_DATE} \ 86 | org.opencontainers.image.revision=${BUILD_REF} \ 87 | org.opencontainers.image.version=${BUILD_VERSION} 88 | -------------------------------------------------------------------------------- /api/CLI/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/CLI/__init__.py -------------------------------------------------------------------------------- /api/CLI/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup(name='MATTERFLOWCLI', 4 | version='0.0.0', 5 | py_modules= ['matterflowcli'], 6 | install_requires=[ 7 | 'Click', 8 | ], 9 | entry_points=''' 10 | [console_scripts] 11 | matterflow=matterflowcli:event 12 | ''', 13 | description='CLI application for matterflow virtual programming tool', 14 | author='Team', 15 | license='MIT', 16 | zip_safe=False) 
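# Illustrative usage sketch (an assumption, not part of the repository file above):
# once this package is installed, e.g. with `pip install -e .`, the console_scripts
# entry point maps a `matterflow` command to the Click-based `event` function in
# matterflowcli.py, so running `matterflow --help` from a shell would list its options.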
-------------------------------------------------------------------------------- /api/CLI/test.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/CLI/test.py -------------------------------------------------------------------------------- /api/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12 2 | 3 | WORKDIR /visual-programming/back-end 4 | 5 | COPY Pipfile Pipfile.lock ./ 6 | COPY CLI/ ./CLI/ 7 | COPY matterflow/ ./matterflow/ 8 | 9 | RUN pip install pipenv 10 | RUN pipenv install --dev --ignore-pipfile 11 | 12 | COPY mf/ ./mf 13 | RUN echo "SECRET_KEY=tmp" > mf/.environment 14 | 15 | EXPOSE 8000 16 | 17 | WORKDIR /visual-programming/back-end/mf 18 | 19 | CMD pipenv run python manage.py runserver 0.0.0.0:8000 20 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit= 3 | */.local/share/virtualenvs/* 4 | ./tests/* 5 | ./nodes/custom_nodes/* -------------------------------------------------------------------------------- /api/matterflow/matterflow/__init__.py: -------------------------------------------------------------------------------- 1 | from .workflow import Workflow, WorkflowException 2 | from .node import * 3 | from .node_factory import node_factory 4 | from .connection import ConnectionFactory 5 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/node_factory.py: -------------------------------------------------------------------------------- 1 | from .nodes import * 2 | import importlib 3 | 4 | 5 | def node_factory(node_info): 6 | # Create a new Node with info 7 | # TODO: should perform error-checking or add default values if missing 8 | node_type = node_info.get('node_type') 9 | node_key = node_info.get('node_key') 10 | 11 | if node_type == 'io': 12 | new_node = io_node(node_key, node_info) 13 | elif node_type == 'cloud': 14 | new_node = cloud_node(node_key, node_info) 15 | elif node_type == 'manipulation': 16 | new_node = manipulation_node(node_key, node_info) 17 | elif node_type == 'flow_control': 18 | new_node = flow_node(node_key, node_info) 19 | elif node_type == 'visualization': 20 | new_node = visualization_node(node_key, node_info) 21 | elif node_type == 'connection': 22 | new_node = connection_node(node_key, node_info) 23 | else: 24 | new_node = custom_node(node_key, node_info) 25 | 26 | return new_node 27 | 28 | def connection_node(node_key, node_info): 29 | if node_key == 'WsConnectionNode': 30 | return WsConnectionNode(node_info) 31 | elif node_key == 'MqttConnectionInNode': 32 | return MqttConnectionInNode(node_info) 33 | elif node_key == 'MqttConnectionOutNode': 34 | return MqttConnectionOutNode(node_info) 35 | else: 36 | return None 37 | 38 | def flow_node(node_key, node_info): 39 | if node_key == 'StringNode': 40 | return StringNode(node_info) 41 | elif node_key == 'IntegerNode': 42 | return IntegerNode(node_info) 43 | elif node_key == 'DynamicNode': 44 | return DynamicNode(node_info) 45 | else: 46 | return None 47 | 48 | 49 | def io_node(node_key, node_info): 50 | if node_key == 'ReadCsvNode': 51 | return ReadCsvNode(node_info) 52 | elif node_key == 'TableCreatorNode': 53 | return TableCreatorNode(node_info) 54 | elif node_key == 
'WriteCsvNode': 55 | return WriteCsvNode(node_info) 56 | elif node_key == 'ReadJsonNode': 57 | return ReadJsonNode(node_info) 58 | elif node_key == 'WriteJsonNode': 59 | return WriteJsonNode(node_info) 60 | elif node_key == 'WriteJsonToCsvNode': 61 | return WriteJsonToCsvNode(node_info) 62 | else: 63 | return None 64 | 65 | 66 | def manipulation_node(node_key, node_info): 67 | if node_key == 'JoinNode': 68 | return JoinNode(node_info) 69 | elif node_key == 'PivotNode': 70 | return PivotNode(node_info) 71 | elif node_key == 'FilterNode': 72 | return FilterNode(node_info) 73 | elif node_key == 'UnflattenAttributesNode': 74 | return UnflattenAttributesNode(node_info) 75 | elif node_key == 'MappingNode': 76 | return MappingNode(node_info) 77 | elif node_key == 'CombineNode': 78 | return CombineNode(node_info) 79 | elif node_key == 'SizeBufferNode': 80 | return SizeBufferNode(node_info) 81 | elif node_key == 'TimeBufferNode': 82 | return TimeBufferNode(node_info) 83 | elif node_key == 'MergeJsonNode': 84 | return MergeJsonNode(node_info) 85 | elif node_key == 'TranslateAttributesNode': 86 | return TranslateAttributesNode(node_info) 87 | elif node_key == 'JsonToCsvNode': 88 | return JsonToCsvNode(node_info) 89 | else: 90 | return None 91 | 92 | 93 | def visualization_node(node_key, node_info): 94 | if node_key == 'GraphNode': 95 | return GraphNode(node_info) 96 | else: 97 | return None 98 | 99 | def cloud_node(node_key, node_info): 100 | if node_key == 'WriteJsonToS3Node': 101 | return WriteJsonToS3Node(node_info) 102 | elif node_key == 'BatchPutToSitewiseNode': 103 | return BatchPutToSitewiseNode(node_info) 104 | elif node_key == 'WriteJsonToGCPNode': 105 | return WriteJsonToGCPNode(node_info) 106 | elif node_key == 'WriteJsonToAzureNode': 107 | return WriteJsonToAzureNode(node_info) 108 | else: 109 | return None 110 | 111 | 112 | def custom_node(node_key, node_info): 113 | try: 114 | filename = node_info.get('filename') 115 | module = importlib.import_module(f'matterflow.nodes.custom_nodes.{filename}') 116 | my_class = getattr(module, node_key) 117 | instance = my_class(node_info) 118 | 119 | return instance 120 | except Exception as e: 121 | # print(str(e)) 122 | return None 123 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/__init__.py: -------------------------------------------------------------------------------- 1 | from .flow_control import * 2 | from .io import * 3 | from .manipulation import * 4 | from .visualization import * 5 | from .connection import * 6 | from .cloud import * 7 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/cloud/__init__.py: -------------------------------------------------------------------------------- 1 | from .write_json_to_s3 import WriteJsonToS3Node 2 | from .batch_put_to_sitewise import BatchPutToSitewiseNode 3 | from .write_json_to_gcp import WriteJsonToGCPNode 4 | from .write_json_to_azure import WriteJsonToAzureNode -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/cloud/batch_put_to_sitewise.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import IONode, NodeException 2 | from matterflow.parameters import * 3 | import json 4 | import jmespath 5 | from io import StringIO # for handling in-memory text streams 6 | import boto3 7 | import pandas as pd 8 | 9 | class BatchPutToSitewiseNode(IONode): 
10 | """BatchPutToSitewiseNode 11 | 12 | Sends a list of asset property values to IoT SiteWise. Each value is a timestamp-quality-value (TQV) data point. 13 | 14 | Raises: 15 | NodeException: any error writing to Sitewise. 16 | """ 17 | name = "Batch Put To Sitewise" 18 | num_in = 1 19 | num_out = 0 20 | download_result = False 21 | 22 | OPTIONS = { 23 | "aws_access_key_id": StringParameter( 24 | "AWS_SERVER_PUBLIC_KEY", 25 | docstring="AWS_SERVER_PUBLIC_KEY for s3" 26 | ), 27 | "aws_secret_access_key": StringParameter( 28 | "AWS_SERVER_SECRET_KEY", 29 | docstring="AWS_SERVER_SECRET_KEY for s3" 30 | ), 31 | "aws_region_name": StringParameter( 32 | "AWS_REGION_NAME", 33 | docstring="AWS_REGION_NAME for sitewise" 34 | ), 35 | "exclude": StringParameter( 36 | "Exclude", 37 | default="", 38 | docstring="Exclude json matching this jmespath query" 39 | ), 40 | "array_of_entries": BooleanParameter( 41 | "Input is array of entries", 42 | default=False, 43 | docstring="Specify if input is array of entries" 44 | ), 45 | } 46 | 47 | def execute(self, predecessor_data, flow_vars): 48 | 49 | #this is an example of what comes in currently 50 | #we only current handle one entry at a time 51 | #TBD = we need to handle multiple entries in an array 52 | #and in that case we dont need to do entries=[entries] 53 | #in boto3 call 54 | example_entry_string = ''' 55 | { 56 | "entryId": "2", 57 | "propertyAlias": "/sensors/Sensor1/Temperature", 58 | "propertyValues": [ 59 | { 60 | "timestamp": { 61 | "timeInSeconds": 1729627045, 62 | "offsetInNanos": 0 63 | }, 64 | "value": { 65 | "integerValue": 15 66 | } 67 | }, 68 | { 69 | "timestamp": { 70 | "timeInSeconds": 1729626800, 71 | "offsetInNanos": 0 72 | }, 73 | "value": { 74 | "integerValue": 16 75 | } 76 | } 77 | ] 78 | } 79 | ''' 80 | 81 | try: 82 | 83 | if flow_vars["exclude"].get_value() != '': 84 | print("trying to exclude now...................") 85 | filter_search_string = flow_vars["exclude"].get_value() 86 | 87 | search_results = jmespath.search(filter_search_string, predecessor_data[0]) 88 | if search_results is not None: #if we found what we are looking for then exclude and dont write to disk 89 | return '{"excluded":"true"}' 90 | 91 | # Convert JSON data to string 92 | json_string = json.dumps(predecessor_data[0]) 93 | 94 | print(json_string) 95 | 96 | # Set up Boto3 resource and specify the bucket and object name 97 | client = boto3.client('iotsitewise', 98 | aws_access_key_id = flow_vars["aws_access_key_id"].get_value(), 99 | aws_secret_access_key = flow_vars["aws_secret_access_key"].get_value(), 100 | region_name = flow_vars["aws_region_name"].get_value() 101 | ) 102 | 103 | entries = predecessor_data[0] 104 | #entries = json.loads(example_entry_string) 105 | 106 | # If we are passing in a single entry we need to make it a list 107 | if not flow_vars["array_of_entries"].get_value(): 108 | entries_array = [entries] 109 | else: 110 | entries_array = entries 111 | 112 | try: 113 | response = client.batch_put_asset_property_value( 114 | entries=entries_array 115 | ) 116 | 117 | except Exception as e: 118 | json_string = '{"error":"aws sitewise error - check your credentials or format"}' 119 | print(e) 120 | 121 | return json_string 122 | 123 | except Exception as e: 124 | raise NodeException('batch put to sitewise', str(e)) 125 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/connection/__init__.py: -------------------------------------------------------------------------------- 1 | from 
.ws_connection import WsConnectionNode 2 | from .mqtt_connection_in import MqttConnectionInNode 3 | from .mqtt_connection_out import MqttConnectionOutNode -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/connection/mqtt_connection_in.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ConnectionNode, NodeException 2 | from matterflow.parameters import * 3 | import json 4 | import os 5 | import pandas as pd 6 | from matterflow.connection import * 7 | 8 | class MqttConnectionInNode(ConnectionNode): 9 | """MqttConnectionInNode 10 | 11 | Reads from the Mqtt Broker into a workflow. 12 | 13 | Raises: 14 | NodeException: any error reading mqtt, converting 15 | to workflow. 16 | """ 17 | name = "MQTT Connection (In)" 18 | num_in = 0 19 | num_out = 1 20 | 21 | test_file_path = "" 22 | 23 | OPTIONS = { 24 | "file": FileParameter( 25 | "Test Json", 26 | default=test_file_path, 27 | docstring="Json File" 28 | ), 29 | "connection": TextParameter( 30 | "Connection Settings", 31 | default='{"host": "localhost","port": 1883,"keepalive": 60,"username": "mqtt_user","password": "mqtt_password"}', 32 | docstring="Connection Settings Input" 33 | ), 34 | "input": TextParameter( 35 | "Input Settings", 36 | default='{"topics": ["sensor/temperature","home/lights/kids_bedroom"]}', 37 | docstring="Input Settings Input" 38 | ), 39 | } 40 | 41 | def execute(self, predecessor_data, flow_vars): 42 | 43 | ''' 44 | #executingInBrowser will be true if running in the visual editor and false if running on command line 45 | executingInBrowser = click.get_text_stream('stdin').isatty() 46 | 47 | if executingInBrowser: 48 | return '{"message":"executing in browser"}' 49 | else: 50 | return '{"message":"executing in cli"}' 51 | ''' 52 | 53 | try: 54 | DIR_PATH = os.getenv('DIR_PATH') or '/tmp' 55 | if flow_vars["file"].get_value() == DIR_PATH + "/": 56 | return '{"message":"try uploading a test json file"}' 57 | else: 58 | df = pd.read_json( 59 | flow_vars["file"].get_value() 60 | , typ='series' 61 | ) 62 | 63 | 64 | return df.to_json() 65 | 66 | except Exception as e: 67 | print(str(e)) 68 | raise NodeException('MQTT Connection In', str(e)) 69 | 70 | def validate(self): 71 | """Validate Node configuration 72 | 73 | Checks all Node options and validates all Parameter classes using 74 | their validation method. 75 | 76 | Raises: 77 | ValidationError: invalid Parameter value 78 | """ 79 | super().validate() 80 | 81 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/connection/mqtt_connection_out.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ConnectionNode, NodeException 2 | from matterflow.parameters import * 3 | import json 4 | import pandas as pd 5 | from matterflow.connection import * 6 | import jmespath 7 | import asyncio 8 | import aiomqtt 9 | 10 | class MqttConnectionOutNode(ConnectionNode): 11 | """MqttConnectionOutNode 12 | 13 | Send messages to the Mqtt Broker from a workflow. 14 | 15 | Raises: 16 | NodeException: any error sending mqtt, converting 17 | to workflow. 
18 | """ 19 | name = "MQTT Connection (Out)" 20 | num_in = 1 21 | num_out = 0 22 | 23 | test_file_path = "" 24 | 25 | OPTIONS = { 26 | "connection": TextParameter( 27 | "Connection Settings", 28 | default='{"host": "localhost","port": 1883,"username": "mqtt_user","password": "mqtt_password"}', 29 | docstring="Connection Settings" 30 | ), 31 | "output": TextParameter( 32 | "Output", 33 | default='{"Topic": "sensors/response","QoS": 1,"Named Root": "sensor_data","Retain": false,"Breakup Arrays": false,"Template": "{temperature}","AWS IoT Core": false}', 34 | docstring="Output Settings" 35 | ), 36 | "exclude": StringParameter( 37 | "Exclude", 38 | default="", 39 | docstring="Exclude json matching this jmespath query" 40 | ), 41 | } 42 | 43 | def execute(self, predecessor_data, flow_vars): 44 | 45 | try: 46 | if flow_vars["exclude"].get_value() != '': 47 | print("trying to exclude now...................") 48 | filter_search_string = flow_vars["exclude"].get_value() 49 | 50 | search_results = jmespath.search(filter_search_string, predecessor_data[0]) 51 | if search_results is not None: #if we found what we are looking for then exclude and dont write to disk 52 | return '{"excluded":"true"}' 53 | 54 | # Convert JSON data to string 55 | json_string = json.dumps(predecessor_data[0]) 56 | 57 | # Create the MQTT client 58 | connection_settings = json.loads(flow_vars["connection"].get_value()) 59 | output_settings = json.loads(flow_vars["output"].get_value()) 60 | 61 | # Define the async task to run the MQTT client publish 62 | async def mqtt_publish(payload, connection_settings, output_settings): 63 | try: 64 | async with aiomqtt.Client(hostname=connection_settings["host"], port=connection_settings["port"], username=connection_settings["username"], password=connection_settings["password"]) as client: 65 | await client.publish(output_settings["Topic"], payload=payload) 66 | except Exception as e: 67 | print(f"Error in MQTT publish: {str(e)}") 68 | raise 69 | 70 | payload = json_string 71 | # Check if an event loop is already running 72 | try: 73 | loop = asyncio.get_running_loop() 74 | # Run the task in the current running loop 75 | asyncio.create_task(mqtt_publish(payload, connection_settings, output_settings)) 76 | except RuntimeError: # No event loop is running 77 | asyncio.run(mqtt_publish(payload, connection_settings, output_settings)) 78 | 79 | return json_string 80 | 81 | except Exception as e: 82 | print(str(e)) 83 | raise NodeException('MQTT Connection Out', str(e)) 84 | 85 | def validate(self): 86 | """Validate Node configuration 87 | 88 | Checks all Node options and validates all Parameter classes using 89 | their validation method. 
90 | 91 | Raises: 92 | ValidationError: invalid Parameter value 93 | """ 94 | super().validate() 95 | 96 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/connection/ws_connection.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ConnectionNode, NodeException 2 | from matterflow.parameters import * 3 | import json 4 | import pandas as pd 5 | from matterflow.connection import * 6 | import click 7 | import os 8 | import socket 9 | import jmespath 10 | 11 | def isWebsocketOpen(ip,port): 12 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 13 | try: 14 | s.connect((ip, int(port))) 15 | s.shutdown(2) 16 | return True 17 | except: 18 | return False 19 | 20 | class WsConnectionNode(ConnectionNode): 21 | """WsConnectionNode 22 | 23 | Reads from the Matter server Websocket into a workflow. 24 | 25 | Raises: 26 | NodeException: any error reading web socket, converting 27 | to workflow. 28 | """ 29 | name = "Matter WS Connection (In)" 30 | num_in = 0 31 | num_out = 1 32 | 33 | #test_file_path = os.path.dirname(os.path.realpath(__file__)) + "/../../tests/sample_matter.json" 34 | test_file_path = "" 35 | 36 | OPTIONS = { 37 | "file": FileParameter( 38 | "Test Json", 39 | default=test_file_path, 40 | docstring="Json File" 41 | ), 42 | "accept_events": SelectParameter( 43 | "Accepted Events", 44 | options=["*","fabric_id", "result", "event == 'attribute_updated'", "event == 'node_added'", "event == 'node_updated'", "event == 'node_event'", "event == 'node_removed'"], 45 | default="*", 46 | docstring="Which events are accepted from Matter Websocket" 47 | ), 48 | } 49 | 50 | def execute(self, predecessor_data, flow_vars): 51 | 52 | ''' 53 | #executingInBrowser will be true if running in the visual editor and false if running on command line 54 | executingInBrowser = click.get_text_stream('stdin').isatty() 55 | 56 | if executingInBrowser: 57 | return '{"message":"executing in browser"}' 58 | else: 59 | return '{"message":"executing in cli"}' 60 | ''' 61 | 62 | try: 63 | DIR_PATH = os.getenv('DIR_PATH') or '/tmp' 64 | 65 | if flow_vars["file"].get_value() == DIR_PATH +"/": 66 | return '{"message":"try uploading a test json file"}' 67 | else: 68 | df = pd.read_json( 69 | flow_vars["file"].get_value() 70 | , typ='series' 71 | ) 72 | 73 | # Now try to match the accepted events 74 | expression = flow_vars["accept_events"].get_value() 75 | data = df.to_json() 76 | result = jmespath.search(expression, json.loads(data)) 77 | if result is None or result == False: 78 | raise ResourceWarning('Info: No match found in event from Matter WS. Expected ' + expression) 79 | 80 | return df.to_json() 81 | 82 | except Exception as e: 83 | print(str(e)) 84 | raise NodeException('WS Connection', str(e)) 85 | 86 | def validate(self): 87 | """Validate Node configuration 88 | 89 | Checks all Node options and validates all Parameter classes using 90 | their validation method. 
91 | 92 | Raises: 93 | ValidationError: invalid Parameter value 94 | """ 95 | super().validate() 96 | 97 | ''' 98 | value = self.options["connection"].get_value() 99 | if not isinstance(value, str): 100 | raise Exception("Sorry, input must be a string") 101 | 102 | try: 103 | json_settings = json.loads(value) 104 | except Exception as e: 105 | raise Exception("input must be a valid json object") 106 | 107 | if not isWebsocketOpen(json_settings['host'],json_settings['port']): 108 | raise Exception(f"Websocket must be available on {json_settings['host']} and port {json_settings['port']}") 109 | ''' 110 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/flow_control/__init__.py: -------------------------------------------------------------------------------- 1 | from .string_input import StringNode 2 | from .integer_input import IntegerNode 3 | from .dynamic_input import DynamicNode 4 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/flow_control/dynamic_input.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import FlowNode, NodeException 2 | from matterflow.parameters import * 3 | import jmespath 4 | import json 5 | 6 | class DynamicNode(FlowNode): 7 | """DynamicNode object 8 | 9 | Allows for Strings to be dynamically read in to replace 'string' fields in Nodes 10 | """ 11 | name = "Dynamic Input" 12 | num_in = 1 13 | num_out = 0 14 | color = 'purple' 15 | 16 | OPTIONS = { 17 | "default_value": StringParameter( 18 | "Default Value", 19 | docstring="Value this node will pass as a flow variable if no dynamic input is found" 20 | ), 21 | "var_name": StringParameter( 22 | "Variable Name", 23 | default="my_var", 24 | docstring="Name of the variable to use in another Node" 25 | ), 26 | "expression": StringParameter( 27 | "Expression", 28 | default='*', 29 | docstring="Extract text using this JMESPath expression" 30 | ) 31 | } 32 | 33 | def execute(self, predecessor_data, flow_vars): 34 | 35 | filter_settings = flow_vars["expression"].get_value() 36 | default_value = flow_vars["default_value"].get_value() 37 | 38 | #filter the input if required 39 | filter = '*' #match everything but overwrite below if we have a filter 40 | if len(filter_settings)>0: 41 | filter = filter_settings 42 | 43 | data = jmespath.search(filter, predecessor_data[0]) 44 | 45 | # Check if data is a primitive type (str, int, float, bool) and not None 46 | if data is not None and isinstance(data, (str, int, float, bool)): 47 | return json.dumps({ 48 | "value": data 49 | }) 50 | else: 51 | print("dynamic input not found, returning default value") 52 | returnObj = { 53 | "value": default_value 54 | } 55 | return json.dumps(returnObj) 56 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/flow_control/integer_input.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import FlowNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | 5 | class IntegerNode(FlowNode): 6 | """IntegerNode object 7 | 8 | Allows for Integers to replace 'integer' fields in Nodes 9 | """ 10 | name = "Integer Input" 11 | num_in = 0 12 | num_out = 0 13 | color = 'purple' 14 | 15 | OPTIONS = { 16 | "default_value": IntegerParameter( 17 | "Default Value", 18 | docstring="Value this node will pass as a flow variable" 19 | ), 20 | "var_name":
StringParameter( 21 | "Variable Name", 22 | default="my_var", 23 | docstring="Name of the variable to use in another Node" 24 | ) 25 | } 26 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/flow_control/string_input.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import FlowNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | 5 | class StringNode(FlowNode): 6 | """StringNode object 7 | 8 | Allows for Strings to replace 'string' fields in Nodes 9 | """ 10 | name = "String Input" 11 | num_in = 0 12 | num_out = 0 13 | color = 'purple' 14 | 15 | OPTIONS = { 16 | "default_value": StringParameter( 17 | "Default Value", 18 | docstring="Value this node will pass as a flow variable" 19 | ), 20 | "var_name": StringParameter( 21 | "Variable Name", 22 | default="my_var", 23 | docstring="Name of the variable to use in another Node" 24 | ) 25 | } 26 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/io/__init__.py: -------------------------------------------------------------------------------- 1 | from .read_csv import ReadCsvNode 2 | from .read_json import ReadJsonNode 3 | from .write_csv import WriteCsvNode 4 | from .write_json import WriteJsonNode 5 | from .table_creator import TableCreatorNode 6 | from .write_json_to_csv import WriteJsonToCsvNode 7 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/io/read_csv.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import IONode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | 6 | 7 | class ReadCsvNode(IONode): 8 | """ReadCsvNode 9 | 10 | Reads a CSV file into a pandas DataFrame. 11 | 12 | Raises: 13 | NodeException: any error reading CSV file, converting 14 | to DataFrame. 15 | """ 16 | name = "Read CSV" 17 | num_in = 0 18 | num_out = 1 19 | 20 | OPTIONS = { 21 | "file": FileParameter( 22 | "File", 23 | docstring="CSV File" 24 | ), 25 | "sep": StringParameter( 26 | "Delimiter", 27 | default=",", 28 | docstring="Column delimiter" 29 | ), 30 | # user-specified headers are probably integers, but haven't figured out 31 | # arguments with multiple possible types 32 | "header": StringParameter( 33 | "Header Row", 34 | default="infer", 35 | docstring="Row number containing column names (0-indexed)" 36 | ), 37 | } 38 | 39 | def execute(self, predecessor_data, flow_vars): 40 | try: 41 | df = pd.read_csv( 42 | flow_vars["file"].get_value(), 43 | sep=flow_vars["sep"].get_value(), 44 | header=flow_vars["header"].get_value() 45 | ) 46 | return df.to_json() 47 | except Exception as e: 48 | raise NodeException('read csv', str(e)) 49 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/io/read_json.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import IONode, NodeException 2 | from matterflow.parameters import * 3 | import pandas as pd 4 | import json 5 | 6 | class ReadJsonNode(IONode): 7 | """ReadJsonNode 8 | 9 | Reads a Json file into a workflow. 10 | 11 | Raises: 12 | NodeException: any error reading json file, converting 13 | to workflow. 
14 | """ 15 | name = "Read Json" 16 | num_in = 0 17 | num_out = 1 18 | 19 | OPTIONS = { 20 | "file": FileParameter( 21 | "File", 22 | docstring="Json File" 23 | ), 24 | "multiline": BooleanParameter( 25 | "Multi-line JSON file", 26 | default=False, 27 | docstring="Set multiline to True if the file contains multiple JSON objects, each on a separate line; otherwise, leave it as False for a single JSON object." 28 | ), 29 | "pollingTime": IntegerParameter( 30 | "Poll file every X seconds", 31 | default=0, 32 | docstring="If polling time set then the file will be polled every X seconds. If < 0, the flow will only run when file changes." 33 | ), 34 | } 35 | 36 | def execute(self, predecessor_data, flow_vars): 37 | print("*"*80) 38 | print("file") 39 | print(flow_vars["file"].get_value()) 40 | try: 41 | # Read from file 42 | with open(flow_vars["file"].get_value(), 'r') as f: 43 | if flow_vars["multiline"].get_value(): 44 | # Process each line as a separate JSON object 45 | json_objects = [] 46 | for line in f: 47 | stripped_line = line.strip() 48 | if stripped_line: # Avoid empty lines 49 | json_objects.append(json.loads(stripped_line)) 50 | 51 | # Create a new JSON object that is an array of the JSON objects 52 | json_string = json.dumps(json_objects) 53 | else: 54 | # Read entire file as one JSON object 55 | json_string = f.read() 56 | 57 | # Check that it's valid JSON by converting it to an object and then back to string 58 | json_string = json.dumps(json.loads(json_string)) 59 | 60 | return json_string 61 | 62 | except Exception as e: 63 | print("got error in read") 64 | raise NodeException('read json', str(e)) 65 | 66 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/io/table_creator.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import IONode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import io 6 | 7 | 8 | class TableCreatorNode(IONode): 9 | """Accepts raw-text CSV input to create data tables. 10 | 11 | Raises: 12 | NodeException: any error reading CSV file, converting 13 | to DataFrame. 
14 | """ 15 | name = "Table Creator" 16 | num_in = 0 17 | num_out = 1 18 | 19 | OPTIONS = { 20 | "input": TextParameter( 21 | "Input", 22 | default="", 23 | docstring="Text input" 24 | ), 25 | "sep": StringParameter( 26 | "Delimiter", 27 | default=",", 28 | docstring="Column delimiter" 29 | ), 30 | # user-specified headers are probably integers, but haven't figured out 31 | # arguments with multiple possible types 32 | "header": StringParameter( 33 | "Header Row", 34 | default="infer", 35 | docstring="Row number containing column names (0-indexed)" 36 | ), 37 | } 38 | 39 | def execute(self, predecessor_data, flow_vars): 40 | try: 41 | df = pd.read_csv( 42 | io.StringIO(flow_vars["input"].get_value()), 43 | sep=flow_vars["sep"].get_value(), 44 | header=flow_vars["header"].get_value() 45 | ) 46 | return df.to_json() 47 | except Exception as e: 48 | raise NodeException('read csv', str(e)) 49 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/io/write_csv.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import IONode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import csv 6 | import json 7 | import os 8 | 9 | class WriteCsvNode(IONode): 10 | """WriteCsvNode 11 | 12 | Writes the current DataFrame to a CSV file. 13 | 14 | Raises: 15 | NodeException: any error writing CSV file, converting 16 | from DataFrame. 17 | """ 18 | name = "Write CSV" 19 | num_in = 1 20 | num_out = 0 21 | download_result = False 22 | 23 | OPTIONS = { 24 | "file": StringParameter( 25 | "Filename", 26 | docstring="CSV file to write" 27 | ), 28 | "sep": StringParameter( 29 | "Delimiter", 30 | default=",", 31 | docstring="Column delimiter" 32 | ), 33 | "index": BooleanParameter( 34 | "Write Index", 35 | default=True, 36 | docstring="Write index as column?" 
37 | ), 38 | } 39 | 40 | def write_json_to_csv(self, json_obj, csv_file, separator=',', add_index=False): 41 | # Check if the CSV file exists 42 | file_exists = os.path.isfile(csv_file) 43 | 44 | # Extract keys from the JSON object 45 | keys = json_obj.keys() 46 | 47 | # Write the JSON data to CSV 48 | with open(csv_file, 'a', newline='') as file: 49 | writer = csv.DictWriter(file, fieldnames=['index'] + list(keys), delimiter=separator) 50 | 51 | if not file_exists: 52 | writer.writeheader() 53 | else: 54 | with open(csv_file, 'r') as check_file: 55 | reader = csv.DictReader(check_file, delimiter=separator) 56 | 57 | existing_keys = set(reader.fieldnames) 58 | existing_keys.remove('index') 59 | if set(keys) != existing_keys: 60 | raise ValueError("New JSON object keys do not match the columns in the existing CSV file.") 61 | 62 | if add_index: 63 | with open(csv_file, 'r') as check_file: 64 | reader = csv.DictReader(check_file, delimiter=separator) 65 | index = sum(1 for _ in reader) + 1 66 | 67 | writer.writerow({'index': index, **json_obj}) 68 | else: 69 | writer.writerow(json_obj) 70 | 71 | def execute(self, predecessor_data, flow_vars): 72 | try: 73 | 74 | # Write to CSV and save 75 | self.write_json_to_csv( 76 | predecessor_data[0], 77 | csv_file=flow_vars["file"].get_value(), 78 | separator=flow_vars["sep"].get_value(), 79 | add_index=flow_vars["index"].get_value() 80 | ) 81 | 82 | return '{"written":"true"}' 83 | except Exception as e: 84 | raise NodeException('write csv', str(e)) 85 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/io/write_json.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import IONode, NodeException 2 | from matterflow.parameters import * 3 | import json 4 | import jmespath 5 | 6 | class WriteJsonNode(IONode): 7 | """WriteJsonNode 8 | 9 | Writes the current json to a Json file. 10 | 11 | Raises: 12 | NodeException: any error writing Json file, converting 13 | from json data. 
14 | """ 15 | name = "Write Json" 16 | num_in = 1 17 | num_out = 0 18 | download_result = False 19 | 20 | OPTIONS = { 21 | "file": StringParameter( 22 | "Filename", 23 | docstring="Json file to write" 24 | ), 25 | "write_mode": SelectParameter( 26 | "Write Mode", 27 | options=["overwrite", "append"], 28 | default="append", 29 | docstring="Overwrite or append to file" 30 | ), 31 | "exclude": StringParameter( 32 | "Exclude", 33 | default="", 34 | docstring="Exclude json matching this jmespath query" 35 | ), 36 | } 37 | 38 | def execute(self, predecessor_data, flow_vars): 39 | 40 | try: 41 | 42 | if flow_vars["exclude"].get_value() != '': 43 | print("trying to exclude now...................") 44 | filter_search_string = flow_vars["exclude"].get_value() 45 | 46 | search_results = jmespath.search(filter_search_string, predecessor_data[0]) 47 | if search_results is not None: #if we found what we are looking for then exclude and dont write to disk 48 | return '{"excluded":"true"}' 49 | 50 | # Convert JSON data to string 51 | json_string = json.dumps(predecessor_data[0]) 52 | 53 | write_mode = 'a' 54 | if flow_vars["write_mode"].get_value() == 'overwrite': 55 | write_mode = 'w' 56 | 57 | # Write to CSV and save 58 | with open(flow_vars["file"].get_value(), write_mode) as f: 59 | f.write(json_string) 60 | f.write('\n') 61 | f.close() 62 | 63 | return json_string 64 | 65 | except Exception as e: 66 | raise NodeException('write json', str(e)) 67 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/__init__.py: -------------------------------------------------------------------------------- 1 | from .filter import FilterNode 2 | from .unflattenattributes import UnflattenAttributesNode 3 | from .mapping import MappingNode 4 | from .combine import CombineNode 5 | from .sizebuffer import SizeBufferNode 6 | from .timebuffer import TimeBufferNode 7 | from .mergejson import MergeJsonNode 8 | from .translateattributes import TranslateAttributesNode 9 | from .json_to_csv import JsonToCsvNode -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/combine.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import json 6 | 7 | class CombineNode(ManipulationNode): 8 | name = "Combiner" 9 | num_in = 2 10 | num_out = 1 11 | 12 | OPTIONS = { 13 | } 14 | 15 | def execute(self, predecessor_data, flow_vars): 16 | try: 17 | first_json = predecessor_data[0] 18 | second_json = predecessor_data[1] 19 | combined_json = [ 20 | first_json, second_json 21 | ] 22 | 23 | return json.dumps(combined_json) 24 | 25 | except Exception as e: 26 | raise NodeException('combine', str(e)) 27 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/filter.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import json 6 | import jmespath 7 | 8 | class FilterNode(ManipulationNode): 9 | name = "Filter" 10 | num_in = 1 11 | num_out = 1 12 | 13 | OPTIONS = { 14 | "filter": StringParameter( 15 | "Filter", 16 | default='*', 17 | docstring="Jmespath query to filter" 18 | ), 19 | "include": 
BooleanParameter( 20 | "Include", 21 | default=True, 22 | docstring="Include entries found by filter" 23 | ), 24 | "data": BooleanParameter( 25 | "Output Filtered Data", 26 | default=False, 27 | docstring="Output filtered data instead of original data entry" 28 | ), 29 | } 30 | 31 | def execute(self, predecessor_data, flow_vars): 32 | # Convert JSON data to string 33 | try: 34 | json_string = json.dumps(predecessor_data[0]) 35 | 36 | message = json.loads(json.dumps(predecessor_data[0])) 37 | filter_settings = flow_vars["filter"].get_value() 38 | include_settings = flow_vars["include"].get_value() 39 | data_settings = flow_vars["data"].get_value() 40 | 41 | #filter the input if required 42 | filter = '*' #match everything but overwrite below if we have a filter 43 | if len(filter_settings)>0: 44 | filter = filter_settings 45 | 46 | input = '{"filtered":"true"}' 47 | transformedandfilterdata = jmespath.search(filter, message) 48 | if transformedandfilterdata is not None and transformedandfilterdata is not False: 49 | #we have found a match 50 | if include_settings: #check if we are to include 51 | if data_settings: #check if data settings is true then we return the filtered 52 | print("Filtering - sending back transformed data") 53 | if type(transformedandfilterdata) is list and len(transformedandfilterdata)>0: 54 | transformedandfilterdata[0]['filtered'] = "true" 55 | input = json.dumps(transformedandfilterdata) 56 | else: 57 | print("Filtering - sending back original") 58 | input = json.dumps(message) 59 | else: 60 | if not include_settings: 61 | input = json.dumps(message) 62 | else: 63 | print("Filtering - ignoring message as the message does not given matching filter") 64 | 65 | return input 66 | 67 | except Exception as e: 68 | raise NodeException('filter', str(e)) 69 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/join.py.tbd: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | 6 | 7 | class JoinNode(ManipulationNode): 8 | name = "Joiner" 9 | num_in = 2 10 | num_out = 1 11 | 12 | OPTIONS = { 13 | "on": StringParameter("Join Column", docstring="Name of column to join on") 14 | } 15 | 16 | def execute(self, predecessor_data, flow_vars): 17 | try: 18 | first_df = pd.DataFrame.from_dict(predecessor_data[0]) 19 | second_df = pd.DataFrame.from_dict(predecessor_data[1]) 20 | combined_df = pd.merge( 21 | first_df, 22 | second_df, 23 | on=flow_vars["on"].get_value() 24 | ) 25 | return combined_df.to_json() 26 | except Exception as e: 27 | raise NodeException('join', str(e)) 28 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/json_to_csv.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | import json 4 | import jmespath 5 | import csv 6 | import os 7 | import io 8 | import pandas as pd 9 | from typing import List, Dict, Union 10 | 11 | 12 | def json_to_csv(json_data: Union[List[Dict], Dict]): 13 | keys: List[str] = None 14 | # Handle case where input is a single JSON object (dict) 15 | if isinstance(json_data, dict): 16 | json_array = [json_data] 17 | elif isinstance(json_data, list): 18 | json_array = json_data 19 | else: 20 | raise 
ValueError("Input must be a list of JSON objects or a single JSON object") 21 | 22 | # Handle empty array case 23 | if not json_array: 24 | raise ValueError("The input JSON array is empty") 25 | 26 | # Determine the CSV headers (from provided keys or common keys in JSON objects) 27 | if keys: 28 | headers = keys 29 | else: 30 | headers = set(json_array[0].keys()) 31 | for obj in json_array[1:]: 32 | headers.intersection_update(obj.keys()) 33 | headers = sorted(headers) # Optional: sort headers alphabetically 34 | 35 | # Check if headers are empty after intersection 36 | if not headers: 37 | raise ValueError("No common keys found across JSON objects") 38 | 39 | 40 | try: 41 | file = io.StringIO() 42 | writer = csv.DictWriter(file, fieldnames=headers) 43 | writer.writeheader() 44 | for obj in json_array: 45 | # Filter out keys that are not in the headers list 46 | row = {key: obj.get(key, "") for key in headers} 47 | writer.writerow(row) 48 | 49 | print(f"Json to CSV converted successfully.") 50 | return file.getvalue() 51 | 52 | except Exception as e: 53 | print(f"An error occurred: {e}") 54 | 55 | 56 | class JsonToCsvNode(ManipulationNode): 57 | """JsonToCsvNode 58 | 59 | Converts the current json to CSV format. Must be a single json object or list of json objects (but must have the same structure) 60 | 61 | Raises: 62 | NodeException: any error converting Json file, converting 63 | from json data to csv data. 64 | """ 65 | name = "Json To Csv" 66 | num_in = 1 67 | num_out = 1 68 | 69 | OPTIONS = { 70 | "exclude": StringParameter( 71 | "Exclude", 72 | default="", 73 | docstring="Exclude json matching this jmespath query" 74 | ), 75 | } 76 | 77 | def execute(self, predecessor_data, flow_vars): 78 | 79 | try: 80 | 81 | if flow_vars["exclude"].get_value() != '': 82 | filter_search_string = flow_vars["exclude"].get_value() 83 | 84 | search_results = jmespath.search(filter_search_string, predecessor_data[0]) 85 | if search_results is not None: #if we found what we are looking for then exclude and dont write to disk 86 | return '{"excluded":"true"}' 87 | 88 | # Get JSON data 89 | json_data = predecessor_data[0] 90 | 91 | #csv_string = json_to_csv(json_data=json_data) 92 | 93 | 94 | file = io.StringIO() 95 | df = pd.read_json(io.StringIO(json.dumps(json_data)), orient='index') 96 | 97 | df.to_csv(file, index=False) 98 | 99 | output_data = file.getvalue() 100 | 101 | csv_data = { 102 | "csv": output_data 103 | } 104 | json_string = json.dumps(csv_data) 105 | 106 | return json_string 107 | 108 | except Exception as e: 109 | raise NodeException('json to csv', str(e)) 110 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/mergejson.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import json 6 | import jmespath 7 | from collections import defaultdict 8 | 9 | 10 | ####################### 11 | def merge_jsons(json1, json2, jmespath_expr_for_matching, jmespath_expr_for_insertion, append=False): 12 | """ 13 | Merges json2 into json1 based on matching certain fields using JMESPath expressions. 14 | 15 | :param json1: The first JSON object (into which data will be merged). 16 | :param json2: The second JSON object (data from here will be merged into json1). 17 | :param jmespath_expr_for_matching: JMESPath expression to identify the data from json2. 
18 | :param jmespath_expr_for_insertion: JMESPath expression to find where the data from json2 should be inserted in json1. 19 | :param append: Boolean flag indicating whether to append the matched data or replace it. 20 | 21 | :return: The merged JSON object. 22 | """ 23 | 24 | # Extract the data from json2 based on the jmespath expression 25 | json2_data = jmespath.search(jmespath_expr_for_matching, json2) 26 | 27 | if json2_data is None: 28 | raise ValueError(f"No matching data found in json2 for expression: {jmespath_expr_for_matching}") 29 | 30 | # Find the place in json1 where the data from json2 should be inserted 31 | json1_insertion_point = jmespath.search(jmespath_expr_for_insertion, json1) 32 | 33 | if json1_insertion_point is None: 34 | raise ValueError(f"No matching insertion point found in json1 for expression: {jmespath_expr_for_insertion}") 35 | 36 | # Handle different cases of merging 37 | if append: 38 | if isinstance(json1_insertion_point, list): 39 | json1_insertion_point.append(json2_data) 40 | else: 41 | raise TypeError(f"Cannot append data to non-list insertion point for expression: {jmespath_expr_for_insertion}") 42 | else: 43 | # Replace the insertion point data with the matched data from json2 44 | json1_insertion_point = json2_data 45 | 46 | return json1 47 | ####################### 48 | 49 | class MergeJsonNode(ManipulationNode): 50 | """MergeJsonNode 51 | 52 | Merges the incoming json into another. 53 | 54 | Raises: 55 | NodeException: any error merging Json files, converting 56 | from json data. 57 | """ 58 | 59 | name = "MergeJson" 60 | num_in = 2 61 | num_out = 1 62 | 63 | OPTIONS = { 64 | "jmespath_expr_for_matching": StringParameter( 65 | "JmespathMatching", 66 | default='*', 67 | docstring="Jmespath expression for matching" 68 | ), 69 | "jmespath_expr_for_insertion": StringParameter( 70 | "JmespathInsertion", 71 | default='*', 72 | docstring="Jmespath expression for insertion" 73 | ), 74 | "append": BooleanParameter( 75 | "Append", 76 | default=True, 77 | docstring="A boolean flag indicating whether to append the data from input2 to the insertion point in input1 or replace the content at the insertion point."
78 | ), 79 | } 80 | 81 | def execute(self, predecessor_data, flow_vars): 82 | 83 | json1 = predecessor_data[1] 84 | json2 = predecessor_data[0] # the json from the websocket is always first 85 | jmespath_expr_for_matching = flow_vars["jmespath_expr_for_matching"].get_value() 86 | jmespath_expr_for_insertion = flow_vars["jmespath_expr_for_insertion"].get_value() 87 | append = flow_vars["append"].get_value() 88 | 89 | # Merge JSON data and convert to string 90 | try: 91 | json_object = merge_jsons(json1, json2, jmespath_expr_for_matching, jmespath_expr_for_insertion, append) 92 | json_string = json.dumps(json_object) 93 | 94 | return json_string 95 | 96 | except Exception as e: 97 | raise NodeException('mergejson', str(e)) 98 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/pivot.py.tbd: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | 6 | 7 | class PivotNode(ManipulationNode): 8 | name = "Pivoting" 9 | num_in = 1 10 | num_out = 3 11 | 12 | OPTIONS = { 13 | 'index': StringParameter( 14 | 'Index', 15 | docstring='Column to aggregate (column, grouper, array or list)' 16 | ), 17 | 'values': StringParameter( 18 | 'Values', 19 | docstring='Column name to use to populate new frame\'s values (column, grouper, array or list)' 20 | ), 21 | 'columns': StringParameter( 22 | 'Columns', 23 | docstring='Column(s) to use for populating new frame values. (column, grouper, array or list)' 24 | ), 25 | 'aggfunc': StringParameter( 26 | 'Aggregation function', 27 | default='mean', 28 | docstring='Function used for aggregation (function, list of functions, dict, default numpy.mean)' 29 | ), 30 | 'fill_value': StringParameter( 31 | 'Fill value', 32 | docstring='Value to replace missing values with (scalar)' 33 | ), 34 | 'margins': BooleanParameter( 35 | 'Margins', 36 | default=False, 37 | docstring='Add all rows/columns' 38 | ), 39 | 'dropna': BooleanParameter( 40 | 'Drop NaN columns', 41 | default=True, 42 | docstring='Ignore columns with all NaN entries' 43 | ), 44 | 'margins_name': StringParameter( 45 | 'Margins name', 46 | default='All', 47 | docstring='Name of the row/column that will contain the totals when margins is True' 48 | ), 49 | 'observed': BooleanParameter( 50 | 'Observed', 51 | default=False, 52 | docstring='Only show observed values for categorical groupers' 53 | ) 54 | } 55 | 56 | def execute(self, predecessor_data, flow_vars): 57 | try: 58 | input_df = pd.DataFrame.from_dict(predecessor_data[0]) 59 | output_df = pd.DataFrame.pivot_table(input_df, **self.options) 60 | return output_df.to_json() 61 | except Exception as e: 62 | raise NodeException('pivot', str(e)) 63 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/sizebuffer.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | import os 4 | import pandas as pd 5 | import json 6 | 7 | class SizeBufferNode(ManipulationNode): 8 | """SizeBuffer 9 | 10 | Buffers the incoming json into a buffer until the size of file to buffer (Bytes) is reached. 11 | 12 | Raises: 13 | NodeException: any error buffering Json files, converting 14 | from json data.
15 | """ 16 | 17 | name = "SizeBuffer" 18 | num_in = 1 19 | num_out = 1 20 | 21 | OPTIONS = { 22 | "bufferSize": IntegerParameter( 23 | "Size To Buffer", 24 | default=0, 25 | docstring="Size of file to buffer (Bytes)" 26 | ), 27 | } 28 | 29 | def execute(self, predecessor_data, flow_vars): 30 | 31 | # Ignore any predecessor data that was rejected by preceding nodes 32 | if 'meta' in predecessor_data[0] and predecessor_data[0]['meta']['status'] == 'rejected': 33 | return predecessor_data[0] 34 | 35 | # use a temporary file to buffer into 36 | DIR_PATH = os.getenv('DIR_PATH') or '/tmp' 37 | tempFileName = DIR_PATH + "/" + self.node_id + "_sizebuffer.json" 38 | 39 | # Read the existing JSONL file 40 | try: 41 | with open(tempFileName, 'r') as f: 42 | data = json.load(f) 43 | except FileNotFoundError: 44 | data = [] 45 | 46 | # Append the new JSON object to the list 47 | data.append(predecessor_data[0]) 48 | 49 | # Write the updated list to the JSONL file 50 | with open(tempFileName, 'w') as f: 51 | json.dump(data, f) 52 | 53 | # Check the file size 54 | fileSize = os.path.getsize(tempFileName) 55 | bufferSize = flow_vars["bufferSize"].get_value() 56 | 57 | # Check if the file size is smaller than the buffer size 58 | if fileSize < bufferSize: 59 | #return '[]' 60 | raise ResourceWarning('Not yet reached buffer size of ' + str(bufferSize) + ' bytes. Currently at ' + str(fileSize) + ' bytes') 61 | else: 62 | # Return the entire contents of the file 63 | with open(tempFileName, 'r') as f: 64 | jsonObj = json.load(f) 65 | 66 | #remove the buffer file 67 | os.remove(tempFileName) 68 | return json.dumps(jsonObj) #return the jsonObj 69 | 70 | def validate(self): 71 | """Validate Node configuration 72 | 73 | Checks all Node options and validates all Parameter classes using 74 | their validation method. 75 | 76 | Raises: 77 | ValidationError: invalid Parameter value 78 | """ 79 | super().validate() 80 | 81 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/timebuffer.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | import os 4 | import pandas as pd 5 | import json 6 | import time 7 | 8 | class TimeBufferNode(ManipulationNode): 9 | """TimeBuffer 10 | 11 | Buffers the incoming json into a buffer until the time duration to buffer (seconds) is reached. 12 | 13 | Raises: 14 | NodeException: any error buffering Json files, converting 15 | from json data. 
16 | """ 17 | 18 | name = "TimeBuffer" 19 | num_in = 1 20 | num_out = 1 21 | 22 | OPTIONS = { 23 | "bufferTime": IntegerParameter( 24 | "Time To Buffer", 25 | default=0, 26 | docstring="Time duration to buffer (seconds)" 27 | ), 28 | } 29 | 30 | def execute(self, predecessor_data, flow_vars): 31 | # Ignore any predecessor data that was rejected by preceding nodes 32 | if 'meta' in predecessor_data[0] and predecessor_data[0]['meta']['status'] == 'rejected': 33 | return predecessor_data[0] 34 | 35 | # Define the temporary file for buffering 36 | DIR_PATH = os.getenv('DIR_PATH') or '/tmp' 37 | tempFileName = DIR_PATH + "/" + self.node_id + "_timebuffer.json" 38 | 39 | # Read the existing JSONL file or initialize an empty buffer 40 | try: 41 | with open(tempFileName, 'r') as f: 42 | data = json.load(f) 43 | except FileNotFoundError: 44 | data = [] 45 | 46 | # Append the new JSON object to the list 47 | data.append(predecessor_data[0]) 48 | 49 | # Determine when the file was created or last modified 50 | if os.path.exists(tempFileName): 51 | file_creation_time = os.path.getmtime(tempFileName) # File's last modified time 52 | else: 53 | file_creation_time = time.time() # Fallback (unlikely to be used) 54 | 55 | # Write the updated list to the JSONL file 56 | with open(tempFileName, 'w') as f: 57 | json.dump(data, f) 58 | 59 | # Get the current time and buffer duration 60 | current_time = time.time() 61 | buffer_time = flow_vars["bufferTime"].get_value() 62 | 63 | # Check if the buffer duration has elapsed 64 | elapsed_time = int(current_time - file_creation_time) # Elapsed time in seconds 65 | if elapsed_time < buffer_time: 66 | raise ResourceWarning( 67 | f"Not yet reached buffer time of {buffer_time} seconds. " 68 | f"Elapsed time: {elapsed_time} seconds" 69 | ) 70 | else: 71 | # Return the entire contents of the buffer 72 | with open(tempFileName, 'r') as f: 73 | jsonObj = json.load(f) 74 | 75 | # Remove the buffer file 76 | os.remove(tempFileName) 77 | return json.dumps(jsonObj) # Return the buffered data 78 | 79 | def validate(self): 80 | """Validate Node configuration 81 | 82 | Checks all Node options and validates all Parameter classes using 83 | their validation method. 
84 | 85 | Raises: 86 | ValidationError: invalid Parameter value 87 | """ 88 | super().validate() -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/manipulation/unflattenattributes.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import ManipulationNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import json 6 | import jmespath 7 | from collections import defaultdict 8 | 9 | 10 | ####################### 11 | 12 | def transform_json_data(input_json): 13 | def nested_dict(): 14 | return defaultdict(nested_dict) 15 | 16 | # Convert the defaultdict back to a regular dictionary 17 | def convert_to_regular_dict(d): 18 | if isinstance(d, defaultdict): 19 | d = {k: convert_to_regular_dict(v) for k, v in d.items()} 20 | return d 21 | 22 | def unflatten_attributes(data): 23 | if isinstance(data, dict): 24 | transformed_data = nested_dict() 25 | keys_to_remove = [] 26 | for key, value in data.items(): 27 | parts = key.split('/') 28 | #if len(parts) == 3 and all(part.isdigit() for part in parts): 29 | if len(parts) == 3: 30 | transformed_data[parts[0]][parts[1]][parts[2]] = value 31 | keys_to_remove.append(key) 32 | else: 33 | transformed_data[key] = value 34 | 35 | # Remove the original flat keys after adding the nested structure 36 | for key in keys_to_remove: 37 | del data[key] 38 | 39 | # Convert defaultdict back to regular dictionary 40 | return convert_to_regular_dict(transformed_data) 41 | elif isinstance(data, list): 42 | if len(data) == 3 and isinstance(data[1], str): 43 | parts = data[1].split('/') 44 | #if len(parts) == 3 and all(part.isdigit() for part in parts): 45 | if len(parts) == 3: 46 | transformed_data = nested_dict() 47 | transformed_data[parts[0]][parts[1]][parts[2]] = data[2] 48 | return [data[0], convert_to_regular_dict(transformed_data)] 49 | return [unflatten_attributes(item) if isinstance(item, (dict, list)) else item for item in data] 50 | else: 51 | return data 52 | 53 | def process_json(data): 54 | if isinstance(data, list): 55 | return [process_json(item) for item in data] 56 | elif isinstance(data, dict): 57 | if 'data' in data: 58 | #check if the attributes are nested 59 | if 'attributes' in data['data']: 60 | data['data']['attributes'] = unflatten_attributes(data['data']['attributes']) 61 | else: 62 | data['data'] = unflatten_attributes(data['data']) 63 | if 'result' in data: 64 | for result in data['result']: 65 | if 'attributes' in result: 66 | result['attributes'] = unflatten_attributes(result['attributes']) 67 | return data 68 | else: 69 | return data 70 | 71 | try: 72 | # Load the input JSON string into a Python object 73 | data = json.loads(input_json) 74 | 75 | # Process the JSON object 76 | processed_data = process_json(data) 77 | 78 | # Convert the Python object back to a JSON string 79 | return json.dumps(processed_data) 80 | 81 | except Exception as e: 82 | # Return the original JSON string in case of an exception 83 | return input_json 84 | ####################### 85 | 86 | 87 | class UnflattenAttributesNode(ManipulationNode): 88 | name = "UnflattenAttributes" 89 | num_in = 1 90 | num_out = 1 91 | 92 | OPTIONS = { 93 | } 94 | 95 | def execute(self, predecessor_data, flow_vars): 96 | # Convert JSON data to string 97 | try: 98 | json_string = json.dumps(predecessor_data[0]) 99 | 100 | json_string = transform_json_data(json_string) 101 | 102 | return json_string 103 | 104 | except Exception as e: 
105 | raise NodeException('unflatten attributes', str(e)) 106 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/visualization/__init__.py: -------------------------------------------------------------------------------- 1 | from .graph import GraphNode 2 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/nodes/visualization/graph.py: -------------------------------------------------------------------------------- 1 | from matterflow.node import VizNode, NodeException 2 | from matterflow.parameters import * 3 | 4 | import pandas as pd 5 | import altair as alt 6 | 7 | 8 | class GraphNode(VizNode): 9 | """Displays a pandas DataFrame in a visual graph. 10 | 11 | Raises: 12 | NodeException: any error generating Altair Chart. 13 | """ 14 | name = "Graph Node" 15 | num_in = 1 16 | num_out = 0 17 | 18 | OPTIONS = { 19 | "graph_type": SelectParameter( 20 | "Graph Type", 21 | options=["area", "bar", "line", "point"], 22 | default="bar", 23 | docstring="Graph viz type" 24 | ), 25 | "mark_options": BooleanParameter( 26 | "Specify mark options", 27 | default=False, 28 | docstring="Specify mark options" 29 | ), 30 | "width": IntegerParameter( 31 | "Mark width", 32 | default=10, 33 | docstring="Width of marks" 34 | ), 35 | "height": IntegerParameter( 36 | "Mark height", 37 | default=10, 38 | docstring="Height of marks" 39 | ), 40 | "encode_options": BooleanParameter( 41 | "Specify encoding options", 42 | default=True, 43 | docstring="Specify encoding options" 44 | ), 45 | "x_axis": StringParameter( 46 | "X-Axis", 47 | default="a", 48 | docstring="X-axis values" 49 | ), 50 | "y_axis": StringParameter( 51 | "Y-Axis", 52 | default="average(b)", 53 | docstring="Y-axis values" 54 | ) 55 | } 56 | 57 | def execute(self, predecessor_data, flow_vars): 58 | try: 59 | df = pd.DataFrame.from_dict(predecessor_data[0]) 60 | 61 | if flow_vars["mark_options"].get_value(): 62 | mark_options = { 63 | "height": flow_vars["height"].get_value(), 64 | "width": flow_vars["width"].get_value(), 65 | } 66 | else: 67 | mark_options = {} 68 | 69 | if flow_vars["encode_options"].get_value(): 70 | encode_options = { 71 | "x": flow_vars["x_axis"].get_value(), 72 | "y": flow_vars["y_axis"].get_value(), 73 | } 74 | else: 75 | encode_options = {} 76 | 77 | graph_type = flow_vars["graph_type"].get_value() 78 | 79 | # Generate requested chart with options 80 | if graph_type == "area": 81 | chart = alt.Chart(df).mark_area(**mark_options).encode(**encode_options) 82 | elif graph_type == "bar": 83 | chart = alt.Chart(df).mark_bar(**mark_options).encode(**encode_options) 84 | elif graph_type == "line": 85 | chart = alt.Chart(df).mark_line(**mark_options).encode(**encode_options) 86 | elif graph_type == "point": 87 | chart = alt.Chart(df).mark_point(**mark_options).encode(**encode_options) 88 | else: 89 | chart = None 90 | 91 | return chart.to_json() 92 | except Exception as e: 93 | print(e) 94 | raise NodeException('graph node', str(e)) 95 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/tests/test_node.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from matterflow import * 3 | from matterflow.nodes import * 4 | from matterflow.tests.sample_test_data import GOOD_NODES, BAD_NODES, DATA_FILES 5 | 6 | 7 | class NodeTestCase(unittest.TestCase): 8 | def test_add_join_csv_node(self): 9 | node_to_add =
node_factory(GOOD_NODES["join_node"]) 10 | self.assertIsInstance(node_to_add, JoinNode) 11 | 12 | def test_add_filter_csv_node(self): 13 | node_to_add = node_factory(GOOD_NODES["filter_node"]) 14 | self.assertIsInstance(node_to_add, FilterNode) 15 | 16 | def test_add_pivot_csv_node(self): 17 | node_to_add = node_factory(GOOD_NODES["pivot_node"]) 18 | self.assertIsInstance(node_to_add, PivotNode) 19 | 20 | def test_add_graph_csv_node(self): 21 | node_to_add = node_factory(GOOD_NODES["graph_node"]) 22 | self.assertIsInstance(node_to_add, GraphNode) 23 | 24 | def test_add_string_node(self): 25 | node_to_add = node_factory(GOOD_NODES["string_input"]) 26 | self.assertIsInstance(node_to_add, StringNode) 27 | 28 | def test_add_integer_node(self): 29 | node_to_add = node_factory(GOOD_NODES["integer_input"]) 30 | self.assertIsInstance(node_to_add, IntegerNode) 31 | 32 | def test_fail_add_node(self): 33 | bad_nodes = [ 34 | node_factory(BAD_NODES["bad_node_type"]), 35 | node_factory(BAD_NODES["bad_flow_node"]), 36 | node_factory(BAD_NODES["bad_io_node"]), 37 | node_factory(BAD_NODES["bad_manipulation_node"]), 38 | node_factory(BAD_NODES["bad_visualization_node"]) 39 | ] 40 | 41 | for bad_node in bad_nodes: 42 | self.assertIsNone(bad_node) 43 | 44 | def test_flow_node_replacement_value(self): 45 | node_to_add = node_factory(GOOD_NODES["string_input"]) 46 | self.assertEqual(node_to_add.get_replacement_value(), "key") 47 | 48 | def test_node_to_string(self): 49 | node_to_add = node_factory(GOOD_NODES["string_input"]) 50 | self.assertEqual(str(node_to_add), "String Input") 51 | 52 | def test_node_to_json(self): 53 | node_to_add = node_factory(GOOD_NODES["string_input"]) 54 | 55 | dict_to_compare = { 56 | "name": "String Input", 57 | "node_id": "7", 58 | "node_type": "flow_control", 59 | "node_key": "StringNode", 60 | "data": None, 61 | "is_global": False, 62 | "option_replace": {}, 63 | "option_values": { 64 | "default_value": "key", 65 | "var_name": "local_flow_var" 66 | } 67 | } 68 | 69 | self.assertDictEqual(node_to_add.to_json(), dict_to_compare) 70 | 71 | def test_node_execute_not_implemented(self): 72 | test_node = Node(dict()) 73 | test_io_node = IONode(dict()) 74 | test_manipulation_node = ManipulationNode(dict()) 75 | test_visualization_node = VizNode(dict()) 76 | 77 | nodes = [test_node, test_io_node, test_manipulation_node, test_visualization_node] 78 | 79 | for node_to_execute in nodes: 80 | with self.assertRaises(NotImplementedError): 81 | node_to_execute.execute(None, None) 82 | 83 | def test_node_execute_exception(self): 84 | read_csv_node = node_factory(GOOD_NODES["read_csv_node"]) 85 | write_csv_node = node_factory(GOOD_NODES["write_csv_node"]) 86 | join_node = node_factory(GOOD_NODES["join_node"]) 87 | 88 | nodes = [read_csv_node, write_csv_node, join_node] 89 | for node_to_execute in nodes: 90 | with self.assertRaises(NodeException): 91 | node_to_execute.execute(dict(), dict()) 92 | 93 | def test_validate_node(self): 94 | node_to_validate = node_factory(GOOD_NODES["string_input"]) 95 | node_to_validate.validate() 96 | 97 | def test_validate_input_data(self): 98 | node_to_validate = node_factory(GOOD_NODES["join_node"]) 99 | node_to_validate.validate_input_data(2) 100 | 101 | def test_validate_input_data_exception(self): 102 | node_to_validate = node_factory(GOOD_NODES["join_node"]) 103 | 104 | try: 105 | node_to_validate.validate_input_data(0) 106 | except NodeException as e: 107 | self.assertEqual(str(e), "execute: JoinNode requires 2 inputs. 
0 were provided") 108 | 109 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/tests/test_parameters.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from matterflow import * 3 | import networkx as nx 4 | from matterflow.tests.sample_test_data import GOOD_PARAMETERS, BAD_PARAMETERS 5 | 6 | 7 | class ParameterTestCase(unittest.TestCase): 8 | def test_string_param(self): 9 | full_json = { 10 | 'type': 'string', 11 | 'label': 'Index', 12 | 'value': 'my value', 13 | 'docstring': 'my docstring' 14 | } 15 | 16 | self.assertDictEqual(GOOD_PARAMETERS["string_param"].to_json(), full_json) 17 | 18 | def test_parameter_validate_not_implemented(self): 19 | test_param = Parameter(dict()) 20 | params = [test_param] 21 | 22 | for param_to_validate in params: 23 | with self.assertRaises(NotImplementedError): 24 | param_to_validate.validate() 25 | 26 | def test_validate_string_param(self): 27 | with self.assertRaises(ParameterValidationError): 28 | BAD_PARAMETERS["bad_string_param"].validate() 29 | 30 | def test_validate_integer_param(self): 31 | with self.assertRaises(ParameterValidationError): 32 | BAD_PARAMETERS["bad_int_param"].validate() 33 | 34 | def test_validate_boolean_param(self): 35 | with self.assertRaises(ParameterValidationError): 36 | BAD_PARAMETERS["bad_bool_param"].validate() 37 | 38 | def test_validate_text_param(self): 39 | with self.assertRaises(ParameterValidationError): 40 | BAD_PARAMETERS["bad_text_param"].validate() 41 | 42 | def test_validate_select_param(self): 43 | with self.assertRaises(ParameterValidationError): 44 | BAD_PARAMETERS["bad_select_param"].validate() 45 | 46 | def test_validate_file_param(self): 47 | with self.assertRaises(ParameterValidationError): 48 | BAD_PARAMETERS["bad_file_param"].validate() 49 | 50 | def test_parameter_validation_error(self): 51 | try: 52 | BAD_PARAMETERS["bad_string_param"].validate() 53 | except ParameterValidationError as e: 54 | self.assertEqual(str(e), "Invalid value '42' (type 'int') for StringParameter") 55 | 56 | -------------------------------------------------------------------------------- /api/matterflow/matterflow/tests/test_pyworkflow.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from matterflow import Workflow, WorkflowException, Node, NodeException, node_factory 3 | from matterflow.nodes import * 4 | import networkx as nx 5 | 6 | from matterflow.tests.sample_test_data import GOOD_NODES, BAD_NODES, DATA_FILES 7 | 8 | 9 | class MatterflowTestCase(unittest.TestCase): 10 | def setUp(self): 11 | 12 | with open('/tmp/sample1.csv', 'w') as f: 13 | f.write(DATA_FILES["sample1"]) 14 | 15 | with open('/tmp/sample2.csv', 'w') as f: 16 | f.write(DATA_FILES["sample2"]) 17 | 18 | self.matterflow = Workflow("My Workflow", root_dir="/tmp") 19 | 20 | self.read_csv_node_1 = Node(GOOD_NODES["read_csv_node"]) 21 | 22 | self.read_csv_node_2 = Node({ 23 | "name": "Read CSV", 24 | "node_id": "2", 25 | "node_type": "io", 26 | "node_key": "ReadCsvNode", 27 | "is_global": False, 28 | "options": { 29 | "file": "/tmp/sample2.csv", 30 | "sep": ";", 31 | }, 32 | "option_replace": { 33 | "sep": { 34 | "node_id": "1", 35 | "is_global": True, 36 | } 37 | } 38 | }) 39 | 40 | self.join_node = Node(GOOD_NODES["join_node"]) 41 | 42 | self.write_csv_node = Node({ 43 | "name": "Write CSV", 44 | "node_id": "4", 45 | "node_type": "io", 46 | "node_key": "WriteCsvNode", 47 | "is_global": 
False, 48 | "options": { 49 | "file": "/tmp/sample_out.csv" 50 | } 51 | }) 52 | 53 | self.string_flow_node = Node(GOOD_NODES["string_input"]) 54 | self.string_global_flow_node = Node(GOOD_NODES["global_flow_var"]) 55 | 56 | self.nodes = [ 57 | self.read_csv_node_1, 58 | self.read_csv_node_2, 59 | self.join_node, 60 | self.write_csv_node, 61 | self.string_flow_node, 62 | self.string_global_flow_node, 63 | ] 64 | self.edges = [("1", "3"), ("2", "3"), ("3", "4"), ("7", "3")] 65 | 66 | def create_workflow(self): 67 | # When created in setUp(), duplicate Node/Edge errors would arise 68 | for node in self.nodes: 69 | self.matterflow.update_or_add_node(node) 70 | 71 | for edge in self.edges: 72 | source_node = self.matterflow.get_node(edge[0]) 73 | target_node = self.matterflow.get_node(edge[1]) 74 | self.matterflow.add_edge(source_node, target_node) 75 | 76 | def test_get_local_flow_nodes(self): 77 | node_with_flow = self.matterflow.get_node("3") 78 | flow_nodes = self.matterflow.load_flow_nodes(node_with_flow.option_replace) 79 | self.assertEqual(len(flow_nodes), 1) 80 | 81 | def test_get_global_flow_nodes(self): 82 | node_with_flow = self.matterflow.get_node("2") 83 | flow_nodes = self.matterflow.load_flow_nodes(node_with_flow.option_replace) 84 | self.assertEqual(len(flow_nodes), 1) 85 | 86 | def test_get_global_flow_node_exception(self): 87 | node_with_flow = self.matterflow.get_node("1") 88 | flow_nodes = self.matterflow.load_flow_nodes(node_with_flow.option_replace) 89 | self.assertEqual(len(flow_nodes), 0) 90 | 91 | def test_get_execution_order(self): 92 | self.create_workflow() 93 | order = self.matterflow.execution_order() 94 | self.assertEqual(order, ["7", "2", "1", "3", "4"]) 95 | 96 | def test_xexecute_workflow(self): 97 | order = self.matterflow.execution_order() 98 | 99 | for node in order: 100 | executed_node = self.matterflow.execute(node) 101 | self.matterflow.update_or_add_node(executed_node) 102 | 103 | # def test_execute_workflow_load_data(self): 104 | # print(self.matterflow.graph.nodes) 105 | # data = self.matterflow.load_input_data("3") 106 | 107 | def test_fail_execute_node(self): 108 | with self.assertRaises(WorkflowException): 109 | self.matterflow.execute("100") 110 | 111 | def test_upload_file(self): 112 | with open('/tmp/sample1.csv', 'rb') as f: 113 | to_open = '/tmp/sample_upload.csv' 114 | saved_filed = self.matterflow.upload_file(f, to_open) 115 | 116 | self.assertEqual(to_open, saved_filed) 117 | -------------------------------------------------------------------------------- /api/matterflow/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup(name='matterflow', 4 | version='0.0.0', 5 | description='Python representation of visual programming workflows', 6 | author='', 7 | license='MIT', 8 | packages=['matterflow'], 9 | zip_safe=False) -------------------------------------------------------------------------------- /api/mf/connection/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/connection/__init__.py -------------------------------------------------------------------------------- /api/mf/connection/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 
4 | from .models import ConnectionModel 5 | 6 | #uncomment the next line to manage using the admin panel 7 | admin.site.register(ConnectionModel) -------------------------------------------------------------------------------- /api/mf/connection/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class ConnectionConfig(AppConfig): 5 | name = 'connection' 6 | -------------------------------------------------------------------------------- /api/mf/connection/middleware.py: -------------------------------------------------------------------------------- 1 | from django.http import JsonResponse 2 | 3 | -------------------------------------------------------------------------------- /api/mf/connection/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-09 11:16 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | initial = True 9 | 10 | dependencies = [ 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name='MyModel', 16 | fields=[ 17 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 18 | ('name', models.CharField(max_length=100)), 19 | ('description', models.TextField()), 20 | ], 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /api/mf/connection/migrations/0002_connectionmodel_delete_mymodel.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-09 11:30 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('connection', '0001_initial'), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name='ConnectionModel', 15 | fields=[ 16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 17 | ('name', models.CharField(max_length=100)), 18 | ('description', models.TextField()), 19 | ('json_data', models.TextField()), 20 | ], 21 | ), 22 | migrations.DeleteModel( 23 | name='MyModel', 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- /api/mf/connection/migrations/0003_alter_connectionmodel_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-09 12:12 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('connection', '0002_connectionmodel_delete_mymodel'), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name='connectionmodel', 15 | name='id', 16 | field=models.AutoField(primary_key=True, serialize=False), 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /api/mf/connection/migrations/0004_alter_connectionmodel_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-09 12:23 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('connection', '0003_alter_connectionmodel_id'), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name='connectionmodel', 15 | name='id', 16 | 
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /api/mf/connection/migrations/0005_alter_connectionmodel_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-09 13:57 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('connection', '0004_alter_connectionmodel_id'), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name='connectionmodel', 15 | name='id', 16 | field=models.AutoField(editable=False, primary_key=True, serialize=False), 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /api/mf/connection/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/connection/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/connection/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | class ConnectionModel(models.Model): 4 | id = models.AutoField(primary_key=True, editable=False) 5 | name = models.CharField(max_length=100) 6 | description = models.TextField() 7 | json_data = models.TextField() 8 | 9 | def __str__(self): 10 | return self.name -------------------------------------------------------------------------------- /api/mf/connection/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /api/mf/connection/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . import views 3 | 4 | urlpatterns = [ 5 | path('', views.get_connections, name='get connections'), 6 | path('new', views.new_connection, name='new connection'), 7 | path('', views.handle_connection, name='handle connection'), 8 | ] 9 | -------------------------------------------------------------------------------- /api/mf/flow/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/flow/__init__.py -------------------------------------------------------------------------------- /api/mf/flow/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 
4 | from .models import FlowModel 5 | 6 | #uncomment the next line to manage using the admin panel 7 | admin.site.register(FlowModel) -------------------------------------------------------------------------------- /api/mf/flow/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class FlowConfig(AppConfig): 5 | name = 'flow' 6 | 7 | -------------------------------------------------------------------------------- /api/mf/flow/middleware.py: -------------------------------------------------------------------------------- 1 | from django.http import JsonResponse 2 | 3 | -------------------------------------------------------------------------------- /api/mf/flow/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-10 08:15 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | initial = True 9 | 10 | dependencies = [ 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name='FlowModel', 16 | fields=[ 17 | ('id', models.AutoField(editable=False, primary_key=True, serialize=False)), 18 | ('name', models.CharField(max_length=100)), 19 | ('description', models.TextField()), 20 | ('json_data', models.TextField()), 21 | ], 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /api/mf/flow/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/flow/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/flow/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | class FlowModel(models.Model): 4 | id = models.AutoField(primary_key=True, editable=False) 5 | name = models.CharField(max_length=100) 6 | description = models.TextField() 7 | json_data = models.TextField() 8 | 9 | def __str__(self): 10 | return self.name -------------------------------------------------------------------------------- /api/mf/flow/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /api/mf/flow/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . import views 3 | 4 | urlpatterns = [ 5 | path('', views.get_flows, name='get flows'), 6 | path('new', views.new_flow, name='new flow'), 7 | path('', views.handle_flow, name='handle flow'), 8 | ] 9 | -------------------------------------------------------------------------------- /api/mf/instance/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/instance/__init__.py -------------------------------------------------------------------------------- /api/mf/instance/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 
4 | from .models import InstanceModel 5 | 6 | #uncomment the next line to manage using the admin panel 7 | admin.site.register(InstanceModel) -------------------------------------------------------------------------------- /api/mf/instance/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class InstanceConfig(AppConfig): 5 | name = 'instance' 6 | -------------------------------------------------------------------------------- /api/mf/instance/middleware.py: -------------------------------------------------------------------------------- 1 | from django.http import JsonResponse 2 | 3 | -------------------------------------------------------------------------------- /api/mf/instance/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.7 on 2024-07-22 10:39 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | initial = True 9 | 10 | dependencies = [ 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name='InstanceModel', 16 | fields=[ 17 | ('id', models.AutoField(editable=False, primary_key=True, serialize=False)), 18 | ('name', models.CharField(max_length=100)), 19 | ('description', models.TextField()), 20 | ('json_data', models.TextField()), 21 | ], 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /api/mf/instance/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/instance/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/instance/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | class InstanceModel(models.Model): 4 | id = models.AutoField(primary_key=True, editable=False) 5 | name = models.CharField(max_length=100) 6 | description = models.TextField() 7 | json_data = models.TextField() 8 | 9 | def __str__(self): 10 | return self.name -------------------------------------------------------------------------------- /api/mf/instance/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /api/mf/instance/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . 
import views 3 | 4 | urlpatterns = [ 5 | path('', views.get_instances, name='get instances'), 6 | path('new', views.new_instance, name='new instance'), 7 | path('', views.handle_instance, name='handle instance'), 8 | ] 9 | -------------------------------------------------------------------------------- /api/mf/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Django's command-line utility for administrative tasks.""" 3 | import os 4 | import sys 5 | 6 | import dotenv 7 | 8 | def main(): 9 | dotenv.load_dotenv(".environment") 10 | 11 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mf.settings') 12 | try: 13 | from django.core.management import execute_from_command_line 14 | except ImportError as exc: 15 | raise ImportError( 16 | "Couldn't import Django. Are you sure it's installed and " 17 | "available on your PYTHONPATH environment variable? Did you " 18 | "forget to activate a virtual environment?" 19 | ) from exc 20 | execute_from_command_line(sys.argv) 21 | 22 | 23 | if __name__ == '__main__': 24 | main() 25 | -------------------------------------------------------------------------------- /api/mf/mf/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/mf/__init__.py -------------------------------------------------------------------------------- /api/mf/mf/asgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | ASGI config for matterflow project. 3 | 4 | It exposes the ASGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.asgi import get_asgi_application 13 | 14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mf.settings') 15 | 16 | application = get_asgi_application() 17 | -------------------------------------------------------------------------------- /api/mf/mf/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for matterflow project. 3 | 4 | Generated by 'django-admin startproject' using Django 3.0.4. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/3.0/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/3.0/ref/settings/ 11 | """ 12 | 13 | import os 14 | 15 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 16 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 17 | 18 | 19 | # Quick-start development settings - unsuitable for production 20 | # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ 21 | 22 | # SECURITY WARNING: keep the secret key used in production secret! 23 | SECRET_KEY = os.environ['SECRET_KEY'] 24 | 25 | # SECURITY WARNING: don't run with debug turned on in production! 
26 | DEBUG = True 27 | 28 | #Allow everything as we are running inside a docker container with 29 | #restricted access from outside 30 | ALLOWED_HOSTS = ['*'] 31 | 32 | # Application definition 33 | 34 | INSTALLED_APPS = [ 35 | 'django.contrib.admin', 36 | 'django.contrib.auth', 37 | 'django.contrib.contenttypes', 38 | 'django.contrib.sessions', 39 | 'django.contrib.messages', 40 | 'django.contrib.staticfiles', 41 | # Project apps 42 | 'workflow.apps.WorkflowConfig', 43 | 'node.apps.NodeConfig', 44 | 'connection.apps.ConnectionConfig', 45 | 'flow.apps.FlowConfig', 46 | 'process.apps.ProcessConfig', 47 | 'model.apps.ModelConfig', 48 | 'instance.apps.InstanceConfig', 49 | 'drf_yasg' 50 | ] 51 | 52 | MIDDLEWARE = [ 53 | 'django.middleware.security.SecurityMiddleware', 54 | 'django.contrib.sessions.middleware.SessionMiddleware', 55 | 'django.middleware.common.CommonMiddleware', 56 | 'django.middleware.csrf.CsrfViewMiddleware', 57 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 58 | 'django.contrib.messages.middleware.MessageMiddleware', 59 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 60 | 61 | # Custom middleware 62 | 'workflow.middleware.WorkflowMiddleware', 63 | ] 64 | 65 | ROOT_URLCONF = 'mf.urls' 66 | 67 | TEMPLATES = [ 68 | { 69 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 70 | 'DIRS': [], 71 | 'APP_DIRS': True, 72 | 'OPTIONS': { 73 | 'context_processors': [ 74 | 'django.template.context_processors.debug', 75 | 'django.template.context_processors.request', 76 | 'django.contrib.auth.context_processors.auth', 77 | 'django.contrib.messages.context_processors.messages', 78 | ], 79 | }, 80 | }, 81 | ] 82 | 83 | SESSION_ENGINE = 'django.contrib.sessions.backends.file' 84 | 85 | WSGI_APPLICATION = 'mf.wsgi.application' 86 | 87 | DIR_PATH = os.getenv('DIR_PATH') or '/tmp' 88 | 89 | # Database 90 | # https://docs.djangoproject.com/en/3.0/ref/settings/#databases 91 | DATABASES = { 92 | 'default': { 93 | 'ENGINE': 'django.db.backends.sqlite3', 94 | 'NAME': DIR_PATH + "/db.sqlite3", 95 | } 96 | } 97 | 98 | # MEDIA_ROOT is either '/data' or '/tmp' 99 | MEDIA_ROOT = DIR_PATH 100 | 101 | # Password validation 102 | # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators 103 | 104 | AUTH_PASSWORD_VALIDATORS = [ 105 | { 106 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 107 | }, 108 | { 109 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 110 | }, 111 | { 112 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 113 | }, 114 | { 115 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 116 | }, 117 | ] 118 | 119 | 120 | # Internationalization 121 | # https://docs.djangoproject.com/en/3.0/topics/i18n/ 122 | 123 | LANGUAGE_CODE = 'en-us' 124 | 125 | TIME_ZONE = 'UTC' 126 | 127 | USE_I18N = True 128 | 129 | USE_L10N = True 130 | 131 | USE_TZ = True 132 | 133 | 134 | # Static files (CSS, JavaScript, Images) 135 | # https://docs.djangoproject.com/en/3.0/howto/static-files/ 136 | 137 | STATIC_URL = '/static/' 138 | -------------------------------------------------------------------------------- /api/mf/mf/urls.py: -------------------------------------------------------------------------------- 1 | """mf URL Configuration 2 | 3 | The `urlpatterns` list routes URLs to views. For more information please see: 4 | https://docs.djangoproject.com/en/3.0/topics/http/urls/ 5 | Examples: 6 | Function views 7 | 1. 
Add an import: from my_app import views 8 | 2. Add a URL to urlpatterns: path('', views.home, name='home') 9 | Class-based views 10 | 1. Add an import: from other_app.views import Home 11 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') 12 | Including another URLconf 13 | 1. Import the include() function: from django.urls import include, path 14 | 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) 15 | """ 16 | from django.contrib import admin 17 | from django.urls import path, include 18 | from rest_framework import permissions 19 | 20 | from . import views 21 | 22 | from drf_yasg.views import get_schema_view 23 | from drf_yasg import openapi 24 | 25 | schema_view = get_schema_view( 26 | openapi.Info( 27 | title="Visual Programming API", 28 | default_version='v1', 29 | description="Back-end documentation for Visual Programming KNIME based application." 30 | ), 31 | public=True, 32 | permission_classes=(permissions.AllowAny,) 33 | ) 34 | 35 | urlpatterns = [ 36 | path('schema/', schema_view.without_ui(cache_timeout=0), name='schema-json'), 37 | path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'), 38 | path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'), 39 | path('admin/', admin.site.urls), 40 | path('info/', views.info), 41 | path('node/', include('node.urls')), 42 | path('workflow/', include('workflow.urls')), 43 | path('connection/', include('connection.urls')), 44 | path('flow/', include('flow.urls')), 45 | path('process/', include('process.urls')), 46 | path('model/', include('model.urls')), 47 | path('instance/', include('instance.urls')) 48 | ] 49 | -------------------------------------------------------------------------------- /api/mf/mf/views.py: -------------------------------------------------------------------------------- 1 | from django.http import JsonResponse 2 | from rest_framework.decorators import api_view 3 | from drf_yasg.utils import swagger_auto_schema 4 | from matterflow import Node 5 | from modulefinder import ModuleFinder 6 | 7 | 8 | 9 | @swagger_auto_schema(method='get', responses={200:'JSON response with data'}) 10 | @api_view(['GET']) 11 | def info(request): 12 | """Retrieve app info. 13 | 14 | Args: 15 | request: Django request Object 16 | 17 | Returns: 18 | 200 - JSON response with data. 19 | """ 20 | data = { 21 | "application": "visual_programming", 22 | "version": "negative something", 23 | "about": "super-duper workflows!" 24 | } 25 | return JsonResponse(data) 26 | -------------------------------------------------------------------------------- /api/mf/mf/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for mf project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mf.settings') 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /api/mf/model/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/model/__init__.py -------------------------------------------------------------------------------- /api/mf/model/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 4 | from .models import ModelModel 5 | 6 | #uncomment the next line to manage using the admin panel 7 | admin.site.register(ModelModel) -------------------------------------------------------------------------------- /api/mf/model/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class ModelConfig(AppConfig): 5 | name = 'model' 6 | -------------------------------------------------------------------------------- /api/mf/model/middleware.py: -------------------------------------------------------------------------------- 1 | from django.http import JsonResponse 2 | 3 | -------------------------------------------------------------------------------- /api/mf/model/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.7 on 2024-07-22 08:50 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | initial = True 9 | 10 | dependencies = [ 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name='ModelModel', 16 | fields=[ 17 | ('id', models.AutoField(editable=False, primary_key=True, serialize=False)), 18 | ('name', models.CharField(max_length=100)), 19 | ('description', models.TextField()), 20 | ('json_data', models.TextField()), 21 | ], 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /api/mf/model/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/model/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/model/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | class ModelModel(models.Model): 4 | id = models.AutoField(primary_key=True, editable=False) 5 | name = models.CharField(max_length=100) 6 | description = models.TextField() 7 | json_data = models.TextField() 8 | 9 | def __str__(self): 10 | return self.name -------------------------------------------------------------------------------- /api/mf/model/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 
4 | -------------------------------------------------------------------------------- /api/mf/model/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . import views 3 | 4 | urlpatterns = [ 5 | path('', views.get_models, name='get models'), 6 | path('new', views.new_model, name='new model'), 7 | path('', views.handle_model, name='handle model'), 8 | ] 9 | -------------------------------------------------------------------------------- /api/mf/models.py: -------------------------------------------------------------------------------- 1 | from .connection.models import ConnectionModel -------------------------------------------------------------------------------- /api/mf/node/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/node/__init__.py -------------------------------------------------------------------------------- /api/mf/node/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 4 | -------------------------------------------------------------------------------- /api/mf/node/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class NodeConfig(AppConfig): 5 | name = 'node' 6 | -------------------------------------------------------------------------------- /api/mf/node/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/node/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/node/models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/node/models.py -------------------------------------------------------------------------------- /api/mf/node/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /api/mf/node/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . 
import views 3 | 4 | urlpatterns = [ 5 | path('', views.node, name='node'), 6 | path('', views.handle_node, name='handle node'), 7 | path('global/', views.handle_node, name='handle node'), 8 | path('/execute', views.execute_node, name='execute node'), 9 | path('/retrieve_data', views.retrieve_data, name='retrieve data'), 10 | path('/retrieve_data_by_file/', views.retrieve_data_by_file, name='retrieve data by file'), 11 | path('edge//', views.handle_edge, name='handle edge') 12 | ] 13 | -------------------------------------------------------------------------------- /api/mf/process/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/process/__init__.py -------------------------------------------------------------------------------- /api/mf/process/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | -------------------------------------------------------------------------------- /api/mf/process/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class ProcessConfig(AppConfig): 5 | name = 'process' 6 | 7 | -------------------------------------------------------------------------------- /api/mf/process/middleware.py: -------------------------------------------------------------------------------- 1 | from django.http import JsonResponse 2 | 3 | -------------------------------------------------------------------------------- /api/mf/process/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/process/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/process/models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/process/models.py -------------------------------------------------------------------------------- /api/mf/process/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /api/mf/process/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . 
import views 3 | 4 | urlpatterns = [ 5 | path('', views.get_processes, name='get processes'), 6 | path('new', views.new_process, name='new process'), 7 | path('start', views.start_process, name='start process'), 8 | path('stop', views.stop_process, name='stop process'), 9 | path('delete', views.delete_process, name='delete process'), 10 | ] 11 | -------------------------------------------------------------------------------- /api/mf/workflow/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/workflow/__init__.py -------------------------------------------------------------------------------- /api/mf/workflow/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 4 | -------------------------------------------------------------------------------- /api/mf/workflow/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class WorkflowConfig(AppConfig): 5 | name = 'workflow' 6 | -------------------------------------------------------------------------------- /api/mf/workflow/middleware.py: -------------------------------------------------------------------------------- 1 | from matterflow import Workflow, WorkflowException 2 | from django.http import JsonResponse 3 | 4 | 5 | class WorkflowMiddleware: 6 | """ Custom middleware 7 | 8 | https://docs.djangoproject.com/en/3.0/topics/http/middleware/ 9 | """ 10 | def __init__(self, get_response): 11 | self.get_response = get_response 12 | 13 | # One-time configuration and initialization. 14 | 15 | def __call__(self, request): 16 | # Code executed each request before view (and later middleware) called 17 | 18 | path = request.path 19 | 20 | if not path.startswith('/workflow/') and not path.startswith('/node/'): 21 | # Workflow needed only for /workflow and /node routes 22 | pass 23 | elif path == '/workflow/open' or path == '/workflow/new' or path == '/workflow/activate': 24 | # 'open' loads from file upload, 'new' inits new Workflow 25 | pass 26 | else: 27 | # All other cases, load workflow from session 28 | try: 29 | request.matterflow = Workflow.from_json(request.session) 30 | 31 | # Check if a graph is present 32 | if request.matterflow.graph is None: 33 | return JsonResponse({ 34 | 'message': 'A workflow has not been created yet.' 
35 | }, status=404) 36 | except WorkflowException as e: 37 | return JsonResponse({e.action: e.reason}, status=500) 38 | 39 | response = self.get_response(request) 40 | 41 | # Code executed for each request/response after the view is called 42 | 43 | # Request should have 'matterflow' attribute, but do not crash if not 44 | if hasattr(request, 'matterflow'): 45 | # Save Workflow back to session 46 | request.session.update(request.matterflow.to_session_dict()) 47 | 48 | return response 49 | -------------------------------------------------------------------------------- /api/mf/workflow/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/workflow/migrations/__init__.py -------------------------------------------------------------------------------- /api/mf/workflow/models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/api/mf/workflow/models.py -------------------------------------------------------------------------------- /api/mf/workflow/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /api/mf/workflow/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | from . import views 3 | 4 | urlpatterns = [ 5 | path('new', views.new_workflow, name='new workflow'), 6 | path('open', views.open_workflow, name='open workflow'), 7 | path('activate', views.activate_workflow, name='activate workflow'), 8 | path('edit', views.edit_workflow, name='edit workflow'), 9 | path('save', views.save_workflow, name='save'), 10 | path('execute', views.execute_workflow, name='execute workflow'), 11 | path('execute//successors', views.get_successors, name='get node successors'), 12 | path('globals', views.global_vars, name="retrieve global variables"), 13 | path('upload', views.upload_file, name='upload file'), 14 | path('download', views.download_file, name='download file'), 15 | path('nodes', views.retrieve_nodes_for_user, name='retrieve node list'), 16 | path('savetoserver', views.save_workflow_to_server, name='save to server'), 17 | ] 18 | -------------------------------------------------------------------------------- /api/requirements.txt: -------------------------------------------------------------------------------- 1 | -i https://pypi.org/simple 2 | coverage==5.5; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4' 3 | coverage-badge==1.1.2 4 | setuptools==75.6.0; python_version >= '3.9' 5 | v==1; python_version >= '3' and python_version < '4' 6 | aiohappyeyeballs==2.4.3; python_version >= '3.8' 7 | aiohttp==3.11.6; python_version >= '3.9' 8 | aiomqtt==2.3.0; python_version >= '3.8' and python_version < '4.0' 9 | aiosignal==1.3.1; python_version >= '3.7' 10 | altair==5.5.0; python_version >= '3.9' 11 | asgiref==3.8.1; python_version >= '3.8' 12 | attrs==24.2.0; python_version >= '3.7' 13 | autopep8==2.3.1; python_version >= '3.8' 14 | azure-core==1.32.0; python_version >= '3.8' 15 | azure-storage-blob==12.24.0; python_version >= '3.8' 16 | boto3==1.35.69; python_version >= '3.8' 17 | botocore==1.35.69; 
python_version >= '3.8' 18 | cachetools==5.5.0; python_version >= '3.7' 19 | certifi==2024.8.30; python_version >= '3.6' 20 | cffi==1.17.1; platform_python_implementation != 'PyPy' 21 | charset-normalizer==3.4.0; python_full_version >= '3.7.0' 22 | click==8.1.7; python_version >= '3.7' 23 | cryptography==43.0.3; python_version >= '3.7' 24 | django==5.1.3; python_version >= '3.10' 25 | djangorestframework==3.15.2; python_version >= '3.8' 26 | drf-yasg==1.21.8; python_version >= '3.6' 27 | frozenlist==1.5.0; python_version >= '3.8' 28 | google-api-core==2.23.0; python_version >= '3.7' 29 | google-auth==2.36.0; python_version >= '3.7' 30 | google-cloud-core==2.4.1; python_version >= '3.7' 31 | google-cloud-storage==2.18.2; python_version >= '3.7' 32 | google-crc32c==1.6.0; python_version >= '3.9' 33 | google-resumable-media==2.7.2; python_version >= '3.7' 34 | googleapis-common-protos==1.66.0; python_version >= '3.7' 35 | idna==3.10; python_version >= '3.6' 36 | inflection==0.5.1; python_version >= '3.5' 37 | isodate==0.7.2; python_version >= '3.7' 38 | jinja2==3.1.4; python_version >= '3.7' 39 | jmespath==1.0.1; python_version >= '3.7' 40 | jsonschema==4.23.0; python_version >= '3.8' 41 | jsonschema-specifications==2024.10.1; python_version >= '3.9' 42 | markupsafe==3.0.2; python_version >= '3.9' 43 | -e ./matterflow 44 | -e ./CLI 45 | multidict==6.1.0; python_version >= '3.8' 46 | narwhals==1.14.2; python_version >= '3.8' 47 | networkx==3.4.2; python_version >= '3.10' 48 | numpy==2.1.3; python_version >= '3.11' 49 | packaging==24.2; python_version >= '3.8' 50 | paho-mqtt==2.1.0; python_version >= '3.7' 51 | pandas==2.2.3; python_version >= '3.9' 52 | propcache==0.2.0; python_version >= '3.8' 53 | proto-plus==1.25.0; python_version >= '3.7' 54 | protobuf==5.28.3; python_version >= '3.8' 55 | pyasn1==0.6.1; python_version >= '3.8' 56 | pyasn1-modules==0.4.1; python_version >= '3.8' 57 | pycodestyle==2.12.1; python_version >= '3.8' 58 | pycparser==2.22; python_version >= '3.8' 59 | python-dateutil==2.9.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' 60 | python-dotenv==1.0.1; python_version >= '3.8' 61 | pytz==2024.2 62 | pyyaml==6.0.2; python_version >= '3.8' 63 | referencing==0.35.1; python_version >= '3.8' 64 | requests==2.32.3; python_version >= '3.8' 65 | rpds-py==0.21.0; python_version >= '3.9' 66 | rsa==4.9; python_version >= '3.6' and python_version < '4' 67 | s3transfer==0.10.4; python_version >= '3.8' 68 | six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' 69 | sqlparse==0.5.2; python_version >= '3.8' 70 | supervisor==4.2.5 71 | typing-extensions==4.12.2; python_version < '3.14' 72 | tzdata==2024.2; python_version >= '2' 73 | uritemplate==4.1.1; python_version >= '3.6' 74 | urllib3==2.2.3; python_version >= '3.10' 75 | websocket-client==1.8.0; python_version >= '3.8' 76 | websockets==14.1; python_version >= '3.9' 77 | yarl==1.18.0; python_version >= '3.9' 78 | -------------------------------------------------------------------------------- /api/supervisor_confs/foo.conf: -------------------------------------------------------------------------------- 1 | [program:foo] 2 | command=/bin/bash -c 'i=1; while true; do echo $i; i=$((i+1)); sleep 5; done' 3 | -------------------------------------------------------------------------------- /api/test_mqtt.py: -------------------------------------------------------------------------------- 1 | import json 2 | connection_settings_string = '{"host": "localhost", "port": 1883, 
"keepalive": 60, "username": "mqtt_user", "password": "mqtt_password"}' 3 | print(connection_settings_string) 4 | connection_settings = json.loads(connection_settings_string) 5 | 6 | print(connection_settings) 7 | exit(0) 8 | 9 | ''' 10 | import paho.mqtt.client as mqtt 11 | 12 | broker_address="localhost" 13 | #broker_address="iot.eclipse.org" #use external broker 14 | client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2, "P1") #create new instance 15 | client.connect(broker_address) #connect to broker 16 | client.publish("home/lights/kids_bedroom","On") #publish 17 | ''' 18 | 19 | import paho.mqtt.client as mqtt #import the client1 20 | import time 21 | ############ 22 | def on_message(client, userdata, message): 23 | print("message received " ,str(message.payload.decode("utf-8"))) 24 | print("message topic=",message.topic) 25 | print("message qos=",message.qos) 26 | print("message retain flag=",message.retain) 27 | ######################################## 28 | broker_address="localhost" 29 | print("creating new instance") 30 | client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2, "P1") #create new instance 31 | client.on_message=on_message #attach function to callback 32 | print("connecting to broker") 33 | client.connect(broker_address) #connect to broker 34 | client.loop_start() #start the loop 35 | print("Subscribing to topic","home/lights/kids_bedroom") 36 | client.subscribe("home/lights/kids_bedroom") 37 | time.sleep(4) # wait 38 | client.loop_stop() #stop the loop -------------------------------------------------------------------------------- /data/attribute_updated.json: -------------------------------------------------------------------------------- 1 | {"event": "attribute_updated", "data": [1, "1/6/0", true]} 2 | -------------------------------------------------------------------------------- /data/fabric_start.json: -------------------------------------------------------------------------------- 1 | {"fabric_id": 1, "compressed_fabric_id": 4735055327387330746, "schema_version": 11, "min_supported_schema_version": 9, "sdk_version": "2024.9.0", "wifi_credentials_set": false, "thread_credentials_set": false, "bluetooth_enabled": false} 2 | -------------------------------------------------------------------------------- /data/humidity_sensor_update.json: -------------------------------------------------------------------------------- 1 | {"event":"attribute_updated","data":[1,"6/1029/0",7700]} 2 | -------------------------------------------------------------------------------- /data/initial_nodes_summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "message_id": "7115326", 3 | "result": [ 4 | { 5 | "node_id": 1, 6 | "date_commissioned": "2024-10-18T12:47:46.282763", 7 | "last_interview": "2024-10-18T12:47:46.282779", 8 | "interview_version": 6, 9 | "available": true, 10 | "is_bridge": false, 11 | "attributes": { 12 | "0/40/0": 17, 13 | "0/40/1": "Meross", 14 | "0/40/2": 4933, 15 | "0/40/3": "Smart Plug Mini", 16 | "0/40/4": 40963, 17 | "0/40/5": "", 18 | "0/40/6": "XX" 19 | }, 20 | "attribute_subscriptions": [] 21 | }, 22 | { 23 | "node_id": 2, 24 | "date_commissioned": "2024-10-18T12:48:48.001339", 25 | "last_interview": "2024-10-18T12:48:48.001358", 26 | "interview_version": 6, 27 | "available": true, 28 | "is_bridge": true, 29 | "attributes": { 30 | "0/40/0": 1, 31 | "0/40/1": "SONOFF\n", 32 | "0/40/2": 4897, 33 | "0/40/3": "Smart Home Hub\n", 34 | "0/40/4": 36, 35 | "0/40/5": "", 36 | "0/40/6": "XX", 37 | "3/57/5": 
"SONOFFMotion Sensor", 38 | "3/57/1": "SONOFF", 39 | "3/57/3": "SNZB-06P", 40 | "4/57/5": "eWeLinkTemperature/Humidity Sen", 41 | "4/57/1": "eWeLink", 42 | "4/57/3": "TH01", 43 | "6/57/5": "Humidity Sensor", 44 | "6/57/1": "eWeLink", 45 | "6/57/3": "TH01", 46 | "6/57/17": true 47 | }, 48 | "attribute_subscriptions": [] 49 | } 50 | ] 51 | } -------------------------------------------------------------------------------- /data/mqtt_in.json: -------------------------------------------------------------------------------- 1 | {"topic": "home/lights/kids_bedroom", "payload": "ON"} 2 | -------------------------------------------------------------------------------- /data/node_event_added_summary.json: -------------------------------------------------------------------------------- 1 | {"event":"node_added","data":{"node_id":3,"date_commissioned":"2024-10-18T14:19:45.567724","last_interview":"2024-10-18T14:19:45.567744","interview_version":6,"available":false,"is_bridge":false,"attributes":{"0/3/0":0,"0/3/1":2,"0/3/65532":0,"0/3/65533":4,"0/3/65528":[],"0/3/65529":[0,64],"0/64/0":[{"0":"room","1":"bedroom 2"},{"0":"orientation","1":"North"},{"0":"floor","1":"2"},{"0":"direction","1":"up"}],"1/3/0":0,"1/3/1":2,"2/3/0":0,"2/3/1":0,"2/3/65532":0,"2/3/65533":4,"2/3/65528":[],"2/3/65529":[0,64],"2/3/65531":[0,1,65528,65529,65531,65532,65533],"2/1030/65531":[0,1,2,65528,65529,65531,65532,65533]},"attribute_subscriptions":[]}} 2 | -------------------------------------------------------------------------------- /data/node_event_leave.json: -------------------------------------------------------------------------------- 1 | {"event": "node_event", "data": {"node_id": 3, "endpoint_id": 0, "cluster_id": 40, "event_id": 2, "event_number": 8, "priority": 1, "timestamp": 1729331843743, "timestamp_type": 1, "data": {"fabricIndex": 1}}} 2 | -------------------------------------------------------------------------------- /data/node_event_sw_fault.json: -------------------------------------------------------------------------------- 1 | {"event": "node_event", "data": {"node_id": 3, "endpoint_id": 0, "cluster_id": 52, "event_id": 0, "event_number": 6, "priority": 1, "timestamp": 1729261446021, "timestamp_type": 1, "data": {"id": 363928, "name": "363928", "faultRecording": "RnJpIE9jdCAxOCAxNToyNDowNiAyMDI0"}}} 2 | -------------------------------------------------------------------------------- /data/node_removed.json: -------------------------------------------------------------------------------- 1 | {"event": "node_removed", "data": 3} 2 | -------------------------------------------------------------------------------- /data/size_buffer_output.json: -------------------------------------------------------------------------------- 1 | [{"timestamp":{"timeInSeconds":1731695245,"offsetInNanos":0},"value":{"integerValue":1888}},{"timestamp":{"timeInSeconds":1731695250,"offsetInNanos":0},"value":{"integerValue":1888}},{"timestamp":{"timeInSeconds":1731695254,"offsetInNanos":0},"value":{"integerValue":1888}},{"timestamp":{"timeInSeconds":1731695259,"offsetInNanos":0},"value":{"integerValue":1888}}] 2 | -------------------------------------------------------------------------------- /data/temperator_sensor_update.json: -------------------------------------------------------------------------------- 1 | {"event":"attribute_updated","data":[1,"5/1026/0",1889]} 2 | -------------------------------------------------------------------------------- /docs/README.md: 
-------------------------------------------------------------------------------- 1 | Using MQTT 2 | 3 | Install Mosquitto Server 4 | 5 | By default, the Mosquitto package is not available in the Ubuntu 22.04 default repo, so you will need to add Mosquitto's official repository to APT. You can add it with the following command: 6 | 7 | add-apt-repository ppa:mosquitto-dev/mosquitto-ppa -y 8 | 9 | Once the repository is added to APT, you can install Mosquitto with the following command: 10 | 11 | apt install mosquitto mosquitto-clients -y 12 | 13 | Once the installation is complete, you can verify the Mosquitto service status with the following command: 14 | 15 | systemctl status mosquitto 16 | 17 | You should see the following output: 18 | 19 | ● mosquitto.service - Mosquitto MQTT Broker 20 | Loaded: loaded (/lib/systemd/system/mosquitto.service; enabled; vendor preset: enabled) 21 | Active: active (running) since Tue 2022-12-06 04:50:33 UTC; 8s ago 22 | 23 | 24 | To publish: 25 | 26 | mosquitto_pub -m '{"event": "attribute_updated", "data": [1, "5/1026/0", 9999]}' -t "home/lights/kids_bedroom" 27 | 28 | 29 | To subscribe: 30 | 31 | mosquitto_sub -t "sensors/response" 32 | 33 | 34 | -------------------------------------------------------------------------------- /docs/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/docs/logo.png -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bashio 2 | echo "==> Making the temp directory called /data to hold persistent data" 3 | mkdir -p /data 4 | 5 | echo "==> Ensuring /config exists and is writable" 6 | mkdir -p /config 7 | chmod 1777 /config 8 | 9 | echo "==> Starting Matterflow API backend" 10 | 11 | # Activate the virtual environment 12 | source /matterflow/api/venv/bin/activate 13 | 14 | #Start supervisord 15 | cd /matterflow/api 16 | supervisord -c ./supervisord.conf 17 | 18 | cd /matterflow/api/mf 19 | 20 | #Migrate the sql database 21 | python3 manage.py migrate 22 | 23 | #Start the server 24 | PYTHONWARNINGS="ignore" python3 manage.py runserver & 25 | echo "==> Matterflow API backend started!" 26 | 27 | #Start the Matter Server Dashboard 28 | echo "==> Starting the Matter Server Dashboard" 29 | cd /python-matter-server/dashboard 30 | # Run setup script and suppress output 31 | ./script/setup > /dev/null 2>&1 32 | # Run the develop script in the background and suppress its output 33 | ./script/develop > /dev/null 2>&1 & 34 | echo "==> Matter Server Dashboard started - The Matter Server Docker container needs to be started before the dashboard can be accessed" 35 | 36 | #Start the web interface 37 | echo "==> Starting Matterflow Web application" 38 | cd /matterflow/web 39 | npm run preview 40 | echo "==> Matterflow Web application started!"
41 | 42 | -------------------------------------------------------------------------------- /web/.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | env: { browser: true, es2020: true }, 4 | extends: [ 5 | 'eslint:recommended', 6 | 'plugin:react/recommended', 7 | 'plugin:react/jsx-runtime', 8 | 'plugin:react-hooks/recommended', 9 | ], 10 | ignorePatterns: ['dist', '.eslintrc.cjs'], 11 | parserOptions: { ecmaVersion: 'latest', sourceType: 'module' }, 12 | settings: { react: { version: '18.2' } }, 13 | plugins: ['react-refresh'], 14 | rules: { 15 | 'react/prop-types': 'off', 16 | 'react/jsx-no-target-blank': 'off', 17 | 'react-refresh/only-export-components': [ 18 | 'warn', 19 | { allowConstantExport: true }, 20 | ], 21 | }, 22 | } 23 | -------------------------------------------------------------------------------- /web/.storybook/main.js: -------------------------------------------------------------------------------- 1 | export default { 2 | "stories": [ 3 | "../src/**/*.stories.mdx", 4 | "../src/**/*.stories.@(js|jsx|ts|tsx)" 5 | ], 6 | framework: '@storybook/react-vite', // 👈 Add this 7 | staticDirs: ['../public'], 8 | webpackFinal: async (config, { configType }) => { 9 | config.plugins.push( 10 | new webpack.NormalModuleReplacementPlugin( 11 | /..\/src\/API\.jsx/, 12 | '../src/API_mocked.jsx' 13 | ) 14 | ); 15 | } 16 | 17 | }; 18 | 19 | -------------------------------------------------------------------------------- /web/README.md: -------------------------------------------------------------------------------- 1 | # Matterflow Web 2 | 3 | This is the web front end of Matterflow. See documentation for details. -------------------------------------------------------------------------------- /web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Matterflow 8 | 9 | 10 |
11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /web/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dashboard", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "vite build", 9 | "lint": "eslint . --ext js,jsx --report-unused-disable-directives --max-warnings 0", 10 | "preview": "vite preview", 11 | "storybook": "storybook dev -p 6006", 12 | "build-storybook": "storybook build" 13 | }, 14 | "dependencies": { 15 | "@ant-design/icons": "^5.5.1", 16 | "@emotion/react": "^11.11.4", 17 | "@emotion/styled": "^11.11.5", 18 | "@heroicons/react": "^1.0.6", 19 | "@mui/icons-material": "^5.15.21", 20 | "@mui/material": "^5.15.21", 21 | "@mui/styled-engine-sc": "^6.0.0-alpha.18", 22 | "@projectstorm/react-diagrams": "^7.0.4", 23 | "@tremor/react": "^3.17.4", 24 | "antd": "^5.21.1", 25 | "bootstrap": "^5.3.3", 26 | "dotenv": "^16.4.5", 27 | "humanize-string": "^3.0.0", 28 | "jmespath": "^0.16.0", 29 | "react": "^18.3.1", 30 | "react-bootstrap": "^2.10.4", 31 | "react-dom": "^18.3.1", 32 | "react-hook-form": "^7.52.1", 33 | "react-router-dom": "^6.24.1", 34 | "react-vega": "^7.6.0", 35 | "react-window": "^1.8.10", 36 | "react18-json-view": "^0.2.8", 37 | "styled-components": "^6.1.11", 38 | "vega": "^5.30.0", 39 | "vega-lite": "^5.21.0", 40 | "vite-plugin-eslint": "^1.8.1" 41 | }, 42 | "devDependencies": { 43 | "@storybook/react-vite": "^8.3.2", 44 | "@types/react": "^18.3.3", 45 | "@types/react-dom": "^18.3.0", 46 | "@vitejs/plugin-react": "^4.3.1", 47 | "autoprefixer": "^10.4.19", 48 | "eslint": "^8.57.0", 49 | "eslint-plugin-react": "^7.34.2", 50 | "eslint-plugin-react-hooks": "^4.6.2", 51 | "eslint-plugin-react-refresh": "^0.4.7", 52 | "postcss": "^8.4.39", 53 | "tailwindcss": "^3.4.4", 54 | "vite": "^5.3.1" 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /web/postcss.config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /web/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/favicon.ico -------------------------------------------------------------------------------- /web/public/json-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/json-icon.png -------------------------------------------------------------------------------- /web/public/matterflowdemo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/matterflowdemo.png -------------------------------------------------------------------------------- /web/public/matterflowexample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/matterflowexample.png 
-------------------------------------------------------------------------------- /web/public/rule.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/rule.gif -------------------------------------------------------------------------------- /web/public/state1.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/state1.gif -------------------------------------------------------------------------------- /web/public/state2.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/state2.gif -------------------------------------------------------------------------------- /web/public/state3.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MatterCoder/matterflow/27d6e6c12a25dce59011d6696ceaeb521eefa02a/web/public/state3.gif -------------------------------------------------------------------------------- /web/public/vite.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /web/src/App.jsx: -------------------------------------------------------------------------------- 1 | import './index.css' 2 | import { createBrowserRouter, RouterProvider } from 'react-router-dom' 3 | import routes from './router.jsx' //the routes are defined in this file 4 | import './index.css' 5 | 6 | //create the react router - routes defined in router.jsx 7 | const router = createBrowserRouter(routes); 8 | 9 | const App = () => ( 10 | 11 | ) 12 | 13 | export default App 14 | 15 | -------------------------------------------------------------------------------- /web/src/Components/About.js: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Modal } from 'react-bootstrap'; 3 | 4 | function About(props) { 5 | const [show, setShow] = useState(props.show); 6 | const [info, setInfo] = useState(); 7 | 8 | async function fetchInfo() { 9 | const resp = await fetch("/info"); 10 | const data = await resp.json(); 11 | setInfo(data); 12 | } 13 | 14 | const handleClose = () => setShow(false); 15 | const handleShow = (e) => { 16 | e.preventDefault(); 17 | fetchInfo() 18 | setShow(true); 19 | } 20 | 21 | return ( 22 | <> 23 |
About
24 | 25 | 26 | 27 | About Visual Programming 28 | 29 | 30 | {JSON.stringify(info)} 31 | 32 | 33 | 34 | ); 35 | } 36 | 37 | export default About; 38 | -------------------------------------------------------------------------------- /web/src/Components/App.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Container, Row, Col } from 'react-bootstrap'; 3 | import 'bootstrap/dist/css/bootstrap.css'; 4 | import About from './About'; 5 | import Workspace from './Workspace'; 6 | 7 | 8 | function App() { 9 | 10 | return ( 11 | 12 |
13 | 14 | 15 | ) 16 | } 17 | 18 | 19 | function Header() { 20 | return ( 21 | <> 22 | 23 | 24 |

Visual Programming Workspace

25 | 26 | 27 |
28 |
29 | 30 | ) 31 | } 32 | 33 | export default App; 34 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/BooleanInput.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from "react"; 2 | import { Form } from 'react-bootstrap'; 3 | 4 | 5 | const BooleanInput = (props) => { 6 | 7 | const [value, setValue] = useState(props.value); 8 | const handleChange = (event) => { 9 | setValue(event.target.checked); 10 | }; 11 | 12 | const {keyName, onChange} = props; 13 | // whenever value changes, fire callback to update config form 14 | useEffect(() => { 15 | onChange(keyName, value); 16 | }, 17 | [value, keyName, onChange]); 18 | 19 | return ( 20 | 24 | ) 25 | } 26 | 27 | export default BooleanInput 28 | 29 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/CustomNodeFactory.jsx: -------------------------------------------------------------------------------- 1 | import { AbstractReactFactory } from '@projectstorm/react-canvas-core'; 2 | import CustomNodeModel from './CustomNodeModel'; 3 | import CustomNodeWidget from './CustomNodeWidget'; 4 | 5 | export default class CustomNodeFactory extends AbstractReactFactory { 6 | constructor() { 7 | super('custom-node'); 8 | } 9 | 10 | generateModel(event) { 11 | return new CustomNodeModel(event.initialConfig.options, event.initialConfig.config); 12 | } 13 | 14 | generateReactWidget(event) { 15 | return ; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/CustomNodeModel.jsx: -------------------------------------------------------------------------------- 1 | import { NodeModel } from '@projectstorm/react-diagrams'; 2 | import MFPortModel from '../MFPort/MFPortModel'; 3 | 4 | export default class CustomNodeModel extends NodeModel { 5 | 6 | constructor(options = {}, config = {}) { 7 | super({ 8 | ...options, 9 | type: 'custom-node' 10 | }); 11 | this.options.node_id = this.options.id; 12 | this.config = config; 13 | this.configParams = options.option_types; 14 | this.options.status = options.status || "unconfigured"; 15 | 16 | // add flow control input port 17 | this.addPort( 18 | new MFPortModel({ 19 | in: true, 20 | type: 'mf-port', 21 | name: 'flow-in' 22 | }) 23 | ); 24 | // if flow node, add flow control output port 25 | if (this.options.node_type === "flow_control") { 26 | this.addPort( 27 | new MFPortModel({ 28 | in: false, 29 | type: 'mf-port', 30 | name: 'flow-out' 31 | }) 32 | ); 33 | } 34 | const nIn = options.num_in === undefined ? 1 : options.num_in; 35 | const nOut = options.num_out === undefined ? 
1 : options.num_out; 36 | // setup in and out ports 37 | for (let i = 0; i < nIn; ++i) { 38 | this.addPort( 39 | new MFPortModel({ 40 | in: true, 41 | type: 'mf-port', 42 | name: `in-${i}` 43 | }) 44 | ); 45 | } 46 | for (let i = 0; i < nOut; ++i) { 47 | this.addPort( 48 | new MFPortModel({ 49 | in: false, 50 | type: 'mf-port', 51 | name: `out-${i}` 52 | }) 53 | ); 54 | } 55 | } 56 | 57 | getNodeId() { 58 | return this.options.node_id; 59 | } 60 | 61 | serialize() { 62 | return { 63 | ...super.serialize(), 64 | options: this.options, 65 | config: this.config 66 | } 67 | } 68 | 69 | deserialize(ob, engine) { 70 | super.deserialize(ob, engine); 71 | } 72 | 73 | setStatus(status) { 74 | this.options.status = status; 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/FileUploadInput.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useRef, useEffect } from "react"; 2 | import { Button } from 'react-bootstrap'; 3 | import * as API from '../../API'; 4 | 5 | /** 6 | * Component representing a file parameter. 7 | * Uploads selected file to server upon selection, and passes 8 | * the filename from the server response to the form callback. 9 | */ 10 | const FileUploadInput = (props) => { 11 | const input = useRef(null); 12 | const [fileName, setFileName] = useState(props.value || ""); 13 | const [status, setStatus] = useState(props.value ? "ready" : "unconfigured"); 14 | 15 | const {keyName, onChange} = props; 16 | // fire callback on mount to update node config state 17 | useEffect(() => { 18 | onChange(keyName, fileName); 19 | }, 20 | [fileName, keyName, onChange]); 21 | 22 | const uploadFile = async file => { 23 | props.disableFunc(true); 24 | setStatus("loading"); 25 | const fd = new FormData(); 26 | fd.append("file", file); 27 | fd.append("nodeId", props.node.options.id); 28 | API.uploadDataFile(fd) 29 | .then(resp => { 30 | setFileName(resp.filename); 31 | setStatus("ready"); 32 | props.disableFunc(false); 33 | setStatus("ready"); 34 | }).catch(() => { 35 | setStatus("failed"); 36 | }); 37 | input.current.value = null; 38 | }; 39 | const onFileSelect = e => { 40 | e.preventDefault(); 41 | if (!input.current.files) return; 42 | uploadFile(input.current.files[0]); 43 | }; 44 | 45 | if (status === "loading") return (
Uploading file...
); 46 | const btnText = status === "ready" ? "Choose Different File" : "Choose File"; 47 | let content; 48 | if (status === "ready") { 49 | //const rxp = new RegExp(props.node.options.id + '-'); 50 | content = ( 51 |
52 | File loaded: 
53 | {fileName} 54 |
55 | ) 56 | } else if (status === "failed") { 57 | content = (
Upload failed. Try a new file.
); 58 | } 59 | return ( 60 | <> 61 | 63 | 64 | 65 | {content} 66 | 67 | ) 68 | } 69 | 70 | export default FileUploadInput 71 | 72 | 73 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/FlowVariableOverride.jsx: -------------------------------------------------------------------------------- 1 | import { Col, Form } from 'react-bootstrap'; 2 | 3 | const FlowVariableOverride = (props) => { 4 | const handleSelect = (event) => { 5 | const uuid = event.target.value; 6 | const flow = props.flowNodes.find(d => d.node_id === uuid); 7 | const obj = { 8 | node_id: uuid, 9 | is_global: flow.is_global 10 | }; 11 | props.onChange(obj); 12 | }; 13 | const handleCheck = (event) => { props.onFlowCheck(event.target.checked) }; 14 | 15 | return ( 16 | 17 | 20 | {props.checked ? 21 | 23 | 28 | )} 29 | 30 | : null 31 | } 32 | 33 | ) 34 | } 35 | 36 | export default FlowVariableOverride 37 | 38 | 39 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/InstanceSelectInput.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from 'react'; 2 | import * as API from '../../API'; 3 | 4 | const InstanceSelectInput = ( props ) => { 5 | const [value, setValue] = useState(props.value); 6 | const [instances, setInstances] = useState([]); 7 | const [selectedInstance, setSelectedInstance] = useState(''); 8 | 9 | const {keyName, onChange} = props; 10 | 11 | // Fetch instances from the API when the component mounts 12 | useEffect(() => { 13 | const fetchInstances = async () => { 14 | try { 15 | const response = await API.getInstances(); 16 | setInstances(response.data); // Assuming response.data is an array of instances 17 | } catch (error) { 18 | console.error('Failed to fetch instances:', error); 19 | } 20 | }; 21 | 22 | fetchInstances(); 23 | 24 | //set the current select 25 | if (props.value) { 26 | setSelectedInstance(props.value.id); 27 | } 28 | }, [props.value]); 29 | 30 | // whenever value changes, fire callback to update config form 31 | useEffect(() => { 32 | onChange(keyName, value); 33 | }, 34 | [value, keyName, onChange]); 35 | 36 | // Handle selection change 37 | const handleChange = (event) => { 38 | const selectedId = event.target.value; 39 | setSelectedInstance(selectedId); 40 | 41 | // Find the selected instance data 42 | const selectedInstanceData = instances.find(instance => instance.id === parseInt(selectedId)); 43 | if (selectedInstanceData) { 44 | setValue(selectedInstanceData); 45 | setSelectedInstance(selectedInstanceData.id); 46 | } 47 | 48 | }; 49 | 50 | 51 | return ( 52 | <> 53 | 61 | 62 | ); 63 | }; 64 | 65 | export default InstanceSelectInput; 66 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/OptionInput.jsx: -------------------------------------------------------------------------------- 1 | import { useState } from "react"; 2 | import SimpleInput from "./SimpleInput"; 3 | import FileUploadInput from "./FileUploadInput"; 4 | import BooleanInput from "./BooleanInput"; 5 | import SelectInput from "./SelectInput"; 6 | import InstanceSelectInput from "./InstanceSelectInput"; 7 | 8 | import FlowVariableOverride from "./FlowVariableOverride"; 9 | 10 | import { Col, Row, Form } from "react-bootstrap"; 11 | import { Space, Typography } from "antd"; 12 | 13 | const OptionInput = (props) => { 14 | const [isFlow, setIsFlow] = useState(props.flowValue 
? true : false); 15 | 16 | const handleFlowCheck = (bool) => { 17 | // if un-checking, fire callback with null so no stale value is in `option_replace` 18 | if (!bool) props.onChange(props.keyName, null, true); 19 | setIsFlow(bool); 20 | }; 21 | 22 | // fire callback to update `option_replace` with flow node info 23 | const handleFlowVariable = (value) => { 24 | props.onChange(props.keyName, value, true); 25 | }; 26 | 27 | let inputComp; 28 | if (props.type === "file") { 29 | inputComp = ; 30 | } else if (props.type === "string") { 31 | inputComp = ; 32 | } else if (props.type === "text") { 33 | inputComp = ; 34 | } else if (props.type === "int") { 35 | inputComp = ; 36 | } else if (props.type === "boolean") { 37 | inputComp = ; 38 | } else if (props.type === "select") { 39 | inputComp = ; 40 | } else if (props.type === "instanceselect") { 41 | inputComp = ; 42 | } else { 43 | return <>; 44 | } 45 | 46 | const hideFlow = 47 | props.node.options.is_global || 48 | props.type === "file" || 49 | props.flowNodes.length === 0; 50 | 51 | return ( 52 | 53 |
54 | {props.label} 55 |
56 | {props.docstring} 57 | 58 | {inputComp} 59 | {hideFlow ? null : ( 60 | 68 | )} 69 | 70 | 71 | 72 | 73 |
74 | ); 75 | }; 76 | 77 | export default OptionInput; 78 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/SelectInput.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from "react"; 2 | import { Form } from 'react-bootstrap'; 3 | 4 | const SelectInput = (props) => { 5 | const [value, setValue] = useState(props.value); 6 | const handleChange = (event) => { 7 | setValue(event.target.value); 8 | }; 9 | 10 | const {keyName, onChange} = props; 11 | // whenever value changes, fire callback to update config form 12 | useEffect(() => { 13 | onChange(keyName, value); 14 | }, 15 | [value, keyName, onChange]); 16 | 17 | return ( 18 | 21 | {props.options.map(opt => 22 | 23 | )} 24 | 25 | ) 26 | } 27 | 28 | export default SelectInput 29 | 30 | -------------------------------------------------------------------------------- /web/src/Components/CustomNode/SimpleInput.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from "react"; 2 | import { Form } from 'react-bootstrap'; 3 | 4 | const SimpleInput = (props) => { 5 | const [value, setValue] = useState(props.value); 6 | const handleChange = (event) => { 7 | setValue(event.target.value); 8 | }; 9 | 10 | const {keyName, onChange, type} = props; 11 | // whenever value changes, fire callback to update config form 12 | useEffect(() => { 13 | const formValue = type === "number" ? Number(value) : value; 14 | onChange(keyName, formValue); 15 | }, 16 | [value, keyName, onChange, type]); 17 | 18 | const extraProps = props.type === "textarea" 19 | ? {as: "textarea", rows: props.rows || 7} 20 | : {type: props.type}; 21 | return ( 22 | 26 | ) 27 | } 28 | 29 | export default SimpleInput 30 | 31 | 32 | -------------------------------------------------------------------------------- /web/src/Components/CustomNodeUpload.jsx: -------------------------------------------------------------------------------- 1 | import {useRef, useState} from "react"; 2 | import * as API from "../API"; 3 | import {Button} from "react-bootstrap"; 4 | 5 | 6 | export default function CustomNodeUpload({ onUpload }) { 7 | 8 | const input = useRef(null); 9 | const [status, setStatus] = useState("ready"); 10 | 11 | const uploadFile = async file => { 12 | setStatus("loading"); 13 | const fd = new FormData(); 14 | fd.append("file", file); 15 | API.uploadDataFile(fd) 16 | .then(resp => { 17 | console.log(resp); 18 | onUpload(); 19 | setStatus("ready"); 20 | }).catch(() => { 21 | setStatus("failed"); 22 | }); 23 | input.current.value = null; 24 | }; 25 | const onFileSelect = e => { 26 | e.preventDefault(); 27 | if (!input.current.files) return; 28 | uploadFile(input.current.files[0]); 29 | }; 30 | 31 | let content; 32 | if (status === "loading") { 33 | content =
Uploading file...
; 34 | } else if (status === "failed") { 35 | content = (
Upload failed. Try a new file.
); 36 | } 37 | return ( 38 | <> 39 | 41 | 46 | {content} 47 | 48 | ) 49 | } 50 | -------------------------------------------------------------------------------- /web/src/Components/EnvUploadModal.jsx: -------------------------------------------------------------------------------- 1 | import { useState } from 'react'; 2 | import { Modal, Upload, Button, message } from 'antd'; 3 | import { UploadOutlined } from '@ant-design/icons'; 4 | import * as dotenv from 'dotenv'; // Import the dotenv parser 5 | import * as API from "../API"; 6 | import CustomNodeModel from "./CustomNode/CustomNodeModel"; 7 | 8 | const EnvUploadModal = ({ visible, onClose, possibleNodes, onUpdate }) => { 9 | const [fileList, setFileList] = useState([]); 10 | const [uploading, setUploading] = useState(false); 11 | const [pendingAddNodeCalls, setPendingAddNodeCalls] = useState(0) 12 | 13 | // Look up option types from appropriate menu item. 14 | // The option types aren't included in the global flow 15 | // serialization from the server. 16 | function lookupOptionTypes(nodeKey) { 17 | const keyMatches = possibleNodes.filter((d) => d.node_key === nodeKey); 18 | if (!keyMatches.length) return {}; 19 | return keyMatches[0].option_types || {}; 20 | } 21 | 22 | function nodeFromData(data) { 23 | const info = { ...data, is_global: true }; 24 | const config = info.options; 25 | delete info.options; 26 | if (!info.option_types) { 27 | info.option_types = lookupOptionTypes(info.node_key); 28 | } 29 | const node = new CustomNodeModel(info, config); 30 | return node; 31 | } 32 | 33 | const handleUpload = () => { 34 | if (fileList.length === 0) { 35 | message.error('Please select a file to upload.'); 36 | return; 37 | } 38 | 39 | const file = fileList[0]; 40 | const reader = new FileReader(); 41 | 42 | reader.onload = async (e) => { 43 | const fileContent = e.target.result; 44 | // Parse the .env content to extract variables 45 | const parsedEnv = dotenv.parse(fileContent); 46 | 47 | // Send each env variable to the backend 48 | setUploading(true); 49 | try { 50 | for (const [key, value] of Object.entries(parsedEnv)) { 51 | setPendingAddNodeCalls(pendingAddNodeCalls + 1); 52 | // Create a new node 53 | const config = { 54 | default_value: value, 55 | description: "From env file", 56 | var_name: key 57 | } 58 | 59 | const varFileName = (!isNaN(value) && value !== '' && typeof value === 'string') || typeof value === 'number' 60 | ? 
'integer_input' 61 | : 'string_input'; 62 | 63 | const varTypeNode = possibleNodes.find((node) => (node.filename == varFileName)); 64 | const node = nodeFromData(varTypeNode); 65 | node.config = config; 66 | API.addNode(node) 67 | .then((res) => { 68 | console.log(res); 69 | setPendingAddNodeCalls(pendingAddNodeCalls -1 ); 70 | if (pendingAddNodeCalls === 0) { 71 | setTimeout(() => { 72 | // Update the UI 73 | onUpdate(); 74 | }, 100); // adjust the delay as needed 75 | } 76 | }) 77 | .catch((err) => 78 | console.log(err) 79 | ); 80 | } 81 | message.success('Environment variables uploaded successfully.'); 82 | onClose(); // Close the modal on success 83 | } catch (error) { 84 | console.error("Error uploading variables:", error); 85 | message.error('Failed to upload environment variables.'); 86 | } finally { 87 | setUploading(false); 88 | } 89 | }; 90 | 91 | reader.onerror = () => { 92 | message.error('Failed to read file.'); 93 | }; 94 | 95 | reader.readAsText(file); 96 | }; 97 | 98 | return ( 99 | 105 | Cancel 106 | , 107 | , 116 | ]} 117 | > 118 | { 120 | setFileList([file]); 121 | return false; // Prevent automatic upload 122 | }} 123 | onRemove={() => setFileList([])} 124 | fileList={fileList} 125 | > 126 | 127 | 128 | 129 | ); 130 | }; 131 | 132 | export default EnvUploadModal; 133 | -------------------------------------------------------------------------------- /web/src/Components/FlowMenu.jsx: -------------------------------------------------------------------------------- 1 | import FlowList from "./FlowList"; 2 | import { Card, Typography } from "antd"; 3 | 4 | const FlowMenu = (props) => { 5 | return ( 6 | 7 |
8 | Flow Menu 9 | Control your flows. 10 |
11 | 12 |
13 |
14 |
15 | ); 16 | }; 17 | 18 | export default FlowMenu; 19 | -------------------------------------------------------------------------------- /web/src/Components/InstanceModal.jsx: -------------------------------------------------------------------------------- 1 | import { Modal as AntdModal, Divider } from "antd"; 2 | import InstanceEditor from "./InstanceEditor"; 3 | 4 | const InstanceModal = ({ show, handleClose, instanceName, instanceId }) => { 5 | return ( 6 | 15 |
16 | 17 | 18 |
19 |
20 | ); 21 | }; 22 | 23 | export default InstanceModal; 24 | -------------------------------------------------------------------------------- /web/src/Components/JMESPathTester.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect, useCallback } from "react"; 2 | import * as jmespath from "jmespath"; 3 | import { DragOutlined } from "@ant-design/icons"; 4 | import JsonDataInput from "./JsonDataInput"; 5 | 6 | const JMESPathTester = () => { 7 | const [expression, setExpression] = useState(''); 8 | const [jsonData, setJsonData] = useState(''); 9 | const [result, setResult] = useState(""); 10 | 11 | const evaluateJMESPath = useCallback(() => { 12 | try { 13 | const data = JSON.parse(jsonData); 14 | const searchResult = jmespath.search(data, expression); 15 | setResult(JSON.stringify(searchResult, null, 2)); 16 | } catch (error) { 17 | if (jsonData == '' || expression == '') { 18 | setResult(`Try testing a JMESPath expression against your Sample JSON data and see the result here`); 19 | } 20 | else { 21 | setResult(`Error: ${error.message}`); 22 | } 23 | } 24 | }, [jsonData, expression, setResult]); 25 | 26 | useEffect(() => { 27 | evaluateJMESPath(); 28 | }, [expression, jsonData, evaluateJMESPath]); 29 | 30 | return ( 31 |
32 |
33 |
34 |

35 | Test JMESPath Expression 36 |

37 |

Test a JMESPath expression on your flow data. To learn more about JMESPath, check out the JMESPath Tutorial and  38 | JMESPath Examples. 39 |

40 |
41 |
{ 44 | e.dataTransfer.setData("text", expression); 45 | }} 46 | > 47 | 50 |
51 |